lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
C++
treap_vis_multiset.cc
varqox/benchmarks
7713013f1a6af368da4cf634a72ae69ca6d17334
#include <bits/stdc++.h> using namespace std; #define FOR(i,a,n) for (decltype(a) i = (a), i##__ = (n); i <= i##__; ++i) #define REP(i,n) FOR(i,0,(n)-1) #define FORD(i,a,n) for (decltype(n) i = (a), i##__ = (n); i >= i##__; --i) #define ALL(x) x.begin(), x.end() #define EB emplace_back #define ST first #define ND second #define OO(A) template<class... T> ostream& operator<<(ostream& os, const A<T...>& x) { return __o(os, ALL(x)); } #define SZ(x) ((int)x.size()) typedef long long LL; typedef pair<int, int> PII; typedef vector<int> VI; typedef vector<VI> VVI; typedef vector<PII> VPII; template<class A, class B> ostream& operator<<(ostream&, const pair<A, B>&); template<class I> ostream& __o(ostream&, I, I); template<class T, size_t N> ostream& operator<<(ostream& os, const array<T, N>& x) { return __o(os, ALL(x)); } OO(vector) OO(deque) OO(set) OO(multiset) OO(map) OO(multimap) template<class A, class B> ostream& operator<<(ostream& os, const pair<A, B>& p) { return os << "(" << p.ST << ", " << p.ND << ")"; } template<class I> ostream& __o(ostream& os, I a, I b) { os << "{"; for (; a != b;) os << *a++, os << (a == b ? "" : " "); return os << "}"; } template<class I> ostream& __d(ostream& os, I a, I b) { os << "{\n"; for (I c = a; a != b; ++a) os << " " << distance(c, a) << ": " << *a << endl; return os << "}"; } template<class... T> void __e(T&&... a) { int t[] = {(cerr << forward<T>(a), 0)...}; (void)t; cerr << endl; } template<class A, class B> inline void mini(A& a, B&& b) { if (b < a) a = b; } template<class A, class B> inline void maxi(A& a, B&& b) { if (b > a) a = b; } inline int ceil2(int x) { return (x < 2 ? 1 : 1 << (sizeof(x) * 8 - __builtin_clz(x - 1))); } #ifdef DEBUG # define D(...) __VA_ARGS__ #else # define D(...) #endif #define LOG(x) D(cerr << #x ": " << x) #define LOGN(x) D(LOG(x) << endl) #define DUMP(x) D(cerr << #x ": ", __d(cerr, ALL(x)) << endl) #define E(...) 
D(__e(__VA_ARGS__)) constexpr char nl = '\n'; struct treap { treap* left = nullptr; treap* right = nullptr; int val; int sz = 1; int rank = rand(); treap(int x = 0) : val(x) {} }; inline int sz(treap* x) { return (x ? x->sz : 0); } inline void update(treap* a) { a->sz = 1 + sz(a->left) + sz(a->right); } inline treap* merge(treap* a, treap* b) { if (!a) return b; if (!b) return a; if (a->rank > b->rank) { a->right = merge(a->right, b); update(a); return a; } else { b->left = merge(a, b->left); update(b); return b; } } inline pair<treap*, treap*> split(treap* a, int k) { if (!a) return {nullptr, nullptr}; int sl = sz(a->left); if (sl >= k) { auto p = split(a->left, k); a->left = p.ND; update(a); return {p.ST, a}; } else { auto p = split(a->right, k - sl - 1); a->right = p.ST; update(a); return {a, p.ND}; } } inline treap* insert(treap* a, int pos, int val) { auto p = split(a, pos); return merge(p.ST, merge(new treap(val), p.ND)); } inline treap* erase(treap* a, int pos) { auto p = split(a, pos); auto p1 = split(p.ND, 1); delete p1.ST; return merge(p.ST, p1.ND); } inline void _print(treap* a) { if (a) { _print(a->left); cerr << a->val << ' '; _print(a->right); } } inline void print(treap* a) { cerr << "{", _print(a), cerr << "}\n"; } inline int find(treap* a, int val) { if (!a) return -0x3f3f3f3f; if (a->val == val) return sz(a->left); else if (a->val > val) return find(a->left, val); else return sz(a->left) + 1 + find(a->right, val); } inline int upper_bound(treap* a, int val) { if (!a) return 0; if (a->val <= val) return sz(a->left) + 1 + upper_bound(a->right, val); else return upper_bound(a->left, val); } inline treap* insertVal(treap* a, int val) { return insert(a, upper_bound(a, val), val); } inline treap* eraseVal(treap* a, int val) { int pos = find(a, val); return (pos >= 0 ? 
erase(a, pos) : a); } #include <sys/time.h> class Timer { struct timeval begin; public: Timer() { start(); } void start() { gettimeofday(&begin, NULL); } long long microtime() { struct timeval end; gettimeofday(&end, NULL); return (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec - begin.tv_usec; } double time() { return microtime() * 0.000001; } }; int main() { ios::sync_with_stdio(false); cin.tie(nullptr); srand(10101029); constexpr int N = 1e6; VI v(N); for (int& x : v) x = rand(); VI v1 = v; random_shuffle(ALL(v1)); Timer timer; treap *t = nullptr; timer.start(); for (int x : v) t = insertVal(t, x); cout << "Treap - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) t = eraseVal(t, x); cout << "Treap - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; multiset<int> S; timer.start(); for (int x : v) S.insert(x); cout << "Multiset - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) S.erase(x); cout << "Multiset - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; return 0; }
#include <bits/stdc++.h> using namespace std; #define FOR(i,a,n) for (decltype(a) i = (a), i##__ = (n); i <= i##__; ++i) #define REP(i,n) FOR(i,0,(n)-1) #define FORD(i,a,n) for (decltype(n) i = (a), i##__ = (n); i >= i##__; --i) #define ALL(x) x.begin(), x.end() #define EB emplace_back #define ST first #define ND second #define OO(A) template<class... T> ostream& operator<<(ostream& os, const A<T...>& x) { return __o(os, ALL(x)); } #define SZ(x) ((int)x.size()) typedef long long LL; typedef pair<int, int> PII; typedef vector<int> VI; typedef vector<VI> VVI; typedef vector<PII> VPII; template<class A, class B> ostream& operator<<(ostream&, const pair<A, B>&); template<class I> ostream& __o(ostream&, I, I); template<class T, size_t N> ostream& operator<<(ostream& os, const array<T, N>& x) { return __o(os, ALL(x)); } OO(vector) OO(deque) OO(set) OO(multiset) OO(map) OO(multimap) template<class A, class B> ostream& operator<<(ostream& os, const pair<A, B>& p) { return os << "(" << p.ST << ", " << p.ND << ")"; } template<class I> ostream& __o(ostream& os, I a, I b) { os << "{"; for (; a != b;) os << *a++, os << (a == b ? "" : " "); return os << "}"; } template<class I>
template<class... T> void __e(T&&... a) { int t[] = {(cerr << forward<T>(a), 0)...}; (void)t; cerr << endl; } template<class A, class B> inline void mini(A& a, B&& b) { if (b < a) a = b; } template<class A, class B> inline void maxi(A& a, B&& b) { if (b > a) a = b; } inline int ceil2(int x) { return (x < 2 ? 1 : 1 << (sizeof(x) * 8 - __builtin_clz(x - 1))); } #ifdef DEBUG # define D(...) __VA_ARGS__ #else # define D(...) #endif #define LOG(x) D(cerr << #x ": " << x) #define LOGN(x) D(LOG(x) << endl) #define DUMP(x) D(cerr << #x ": ", __d(cerr, ALL(x)) << endl) #define E(...) D(__e(__VA_ARGS__)) constexpr char nl = '\n'; struct treap { treap* left = nullptr; treap* right = nullptr; int val; int sz = 1; int rank = rand(); treap(int x = 0) : val(x) {} }; inline int sz(treap* x) { return (x ? x->sz : 0); } inline void update(treap* a) { a->sz = 1 + sz(a->left) + sz(a->right); } inline treap* merge(treap* a, treap* b) { if (!a) return b; if (!b) return a; if (a->rank > b->rank) { a->right = merge(a->right, b); update(a); return a; } else { b->left = merge(a, b->left); update(b); return b; } } inline pair<treap*, treap*> split(treap* a, int k) { if (!a) return {nullptr, nullptr}; int sl = sz(a->left); if (sl >= k) { auto p = split(a->left, k); a->left = p.ND; update(a); return {p.ST, a}; } else { auto p = split(a->right, k - sl - 1); a->right = p.ST; update(a); return {a, p.ND}; } } inline treap* insert(treap* a, int pos, int val) { auto p = split(a, pos); return merge(p.ST, merge(new treap(val), p.ND)); } inline treap* erase(treap* a, int pos) { auto p = split(a, pos); auto p1 = split(p.ND, 1); delete p1.ST; return merge(p.ST, p1.ND); } inline void _print(treap* a) { if (a) { _print(a->left); cerr << a->val << ' '; _print(a->right); } } inline void print(treap* a) { cerr << "{", _print(a), cerr << "}\n"; } inline int find(treap* a, int val) { if (!a) return -0x3f3f3f3f; if (a->val == val) return sz(a->left); else if (a->val > val) return find(a->left, val); else return 
sz(a->left) + 1 + find(a->right, val); } inline int upper_bound(treap* a, int val) { if (!a) return 0; if (a->val <= val) return sz(a->left) + 1 + upper_bound(a->right, val); else return upper_bound(a->left, val); } inline treap* insertVal(treap* a, int val) { return insert(a, upper_bound(a, val), val); } inline treap* eraseVal(treap* a, int val) { int pos = find(a, val); return (pos >= 0 ? erase(a, pos) : a); } #include <sys/time.h> class Timer { struct timeval begin; public: Timer() { start(); } void start() { gettimeofday(&begin, NULL); } long long microtime() { struct timeval end; gettimeofday(&end, NULL); return (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec - begin.tv_usec; } double time() { return microtime() * 0.000001; } }; int main() { ios::sync_with_stdio(false); cin.tie(nullptr); srand(10101029); constexpr int N = 1e6; VI v(N); for (int& x : v) x = rand(); VI v1 = v; random_shuffle(ALL(v1)); Timer timer; treap *t = nullptr; timer.start(); for (int x : v) t = insertVal(t, x); cout << "Treap - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) t = eraseVal(t, x); cout << "Treap - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; multiset<int> S; timer.start(); for (int x : v) S.insert(x); cout << "Multiset - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) S.erase(x); cout << "Multiset - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; return 0; }
ostream& __d(ostream& os, I a, I b) { os << "{\n"; for (I c = a; a != b; ++a) os << " " << distance(c, a) << ": " << *a << endl; return os << "}"; }
function_block-function_prefix_line
[ { "content": "class Cstring {\n\nprivate:\n\n\tsize_t len_, real_len_;\n\npublic:\n\n\tchar *p;\n\n\n\n\texplicit Cstring(size_t len = 0, char c = '\\0'): len_(len),\n\n\t\t\treal_len_(len + 1), p((char*)malloc(real_len_)) {\n\n\t\tif (p == NULL)\n\n\t\t\tthrow std::bad_alloc();\n\n\n\n\t\tmemset(p, c, len);\n\...
C++
src/lib/drishti/eye/EyeModelEyelids.cpp
ZJCRT/drishti
7c0da7e71cd4cff838b0b8ef195855cb68951839
#include "drishti/eye/EyeModelEstimatorImpl.h" #include "drishti/core/drishti_stdlib_string.h" #include "drishti/eye/EyeIO.h" #define DRISHTI_EYE_DEBUG_INITS 0 #if DRISHTI_EYE_DEBUG_INITS # include <opencv2/highgui.hpp> #endif DRISHTI_EYE_NAMESPACE_BEGIN using PointVec = std::vector<cv::Point2f>; static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n); static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n); static PointVec getMedianOfPoses(const std::vector<PointVec>& poses); #if DRISHTI_EYE_DEBUG_INITS static std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points); static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name = "eyes"); static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S); #endif void EyeModelEstimator::Impl::segmentEyelids(const cv::Mat& I, EyeModel& eye) const { PointVec mu = m_eyeEstimator->getMeanShape(); cv::Rect roi({ 0, 0 }, I.size()); std::vector<cv::Rect> rois = { roi }; if (m_eyelidInits > 1) { jitter(roi, m_jitterEyelidParams, rois, m_eyelidInits - 1); } std::vector<PointVec> poses(rois.size(), mu); std::vector<bool> mask; for (int i = 0; i < rois.size(); i++) { (*m_eyeEstimator)(I(rois[i]), poses[i], mask); cv::Point2f shift = rois[i].tl(); for (auto& p : poses[i]) { p += shift; } } PointVec pose = (poses.size() > 1) ? 
getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } void EyeModelEstimator::Impl::segmentEyelids_(const cv::Mat& I, EyeModel& eye) const { std::vector<PointVec> poses{ m_eyeEstimator->getMeanShape() }; if (m_eyelidInits > 1) { auto toShape = [&](const EyeModel& e) { return eyeToShape(e, m_eyeSpec); }; std::vector<EyeModel> jittered; jitter(shapeToEye(m_eyeEstimator->getMeanShape(), m_eyeSpec), m_jitterEyelidParams, jittered, m_eyelidInits - 1); std::transform(jittered.begin(), jittered.end(), std::back_inserter(poses), toShape); } #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::diag({ I.cols, I.cols, 1.f })), "poses-in"); #endif std::vector<bool> mask; for (auto & pose : poses) { (*m_eyeEstimator)(I, pose, mask); } PointVec pose = (poses.size() > 1) ? getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } #if DRISHTI_EYE_DEBUG_INITS static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S) { auto toEye = [&](const PointVec& shape) { return S * shapeToEye(shape, spec); }; std::vector<EyeModel> eyes; std::transform(shapes.begin(), shapes.end(), std::back_inserter(eyes), toEye); return eyes; }; std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points) { std::vector<cv::Point2f> points_ = points; for (auto& p : points_) { cv::Point3f q = H * cv::Point3f(p.x, p.y, 1.f); p = { q.x / q.z, q.y / q.z }; } return points_; } static float getMaxSeparation(const PointVec& points) { float maxSeparation = 0.f; for (int i = 0; i < points.size(); i++) { for (int j = i + 1; j < points.size(); j++) { float separation = cv::norm(points[i] - points[j]); if (separation 
> maxSeparation) { maxSeparation = separation; } } } return maxSeparation; } static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name) { cv::Mat canvas; cv::cvtColor(I, canvas, cv::COLOR_GRAY2BGR); for (const auto& v : eyes) { cv::Matx41d color = cv::Scalar::randu(100, 255); v.draw(canvas, 0, 0, { color(0), color(1), color(2) }, 1); } cv::imshow(name, canvas); cv::waitKey(0); } #endif static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n) { cv::Point2f center = drishti::geometry::centroid<int, float>(roi); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { bool hasRoi = false; cv::Rect roi2; for (int j = 0; j < 100; j++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center, false); roi2 = H * roi; cv::Rect valid = roi2 & roi; if (roi.contains(valid.tl()) && roi.contains(valid.br())) { hasRoi = true; break; } } if (hasRoi) { poses.push_back(roi2); } } } static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n) { cv::Point2f center = drishti::core::centroid(eye.eyelids); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center); poses.push_back(H * eye); } } static PointVec getMedianOfPoses(const std::vector<PointVec>& poses) { std::vector<std::vector<float>> params[2]; params[0].resize(poses[0].size()); params[1].resize(poses[0].size()); for (const auto & pose : poses) { for (int j = 0; j < pose.size(); j++) { params[0][j].push_back(pose[j].x); params[1][j].push_back(pose[j].y); } } std::vector<cv::Point2f> pose(poses[0].size()); for (int i = 0; i < params[0].size(); i++) { pose[i] = { median(params[0][i]), median(params[1][i]) }; } return pose; } DRISHTI_EYE_NAMESPACE_END
#include "drishti/eye/EyeModelEstimatorImpl.h" #include "drishti/core/drishti_stdlib_string.h" #include "drishti/eye/EyeIO.h" #define DRISHTI_EYE_DEBUG_INITS 0 #if DRISHTI_EYE_DEBUG_INITS # include <opencv2/highgui.hpp> #endif DRISHTI_EYE_NAMESPACE_BEGIN using PointVec = std::vector<cv::Point2f>; static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n); static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n); static PointVec getMedianOfPoses(const std::vector<PointVec>& poses); #if DRISHTI_EYE_DEBUG_INITS static std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points); static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name = "eyes"); static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S); #endif void EyeModelEstimator::Impl::segmentEyelids(const cv::Mat& I, EyeModel& eye) const { PointVec mu = m_eyeEstimator->getMeanShape(); cv::Rect roi({ 0, 0 }, I.size()); std::vector<cv::Rect> rois = { roi }; if (m_eyelidInits > 1) { jitter(roi, m_jitterEyelidParams, rois, m_eyelidInits - 1); } std::vector<PointVec> poses(rois.size(), mu); std::vector<bool> mask; for (int i = 0; i < rois.size(); i++) { (*m_eyeEstimator)(I(rois[i]), poses[i], mask); cv::Point2f shift = rois[i].tl(); for (auto& p : poses[i]) { p += shift; } } PointVec pose = (poses.size() > 1) ? 
getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } void EyeModelEstimator::Impl::segmentEyelids_(const cv::Mat& I, EyeModel& eye) const { std::vector<PointVec> poses{ m_eyeEstimator->getMeanShape() }; if (m_eyelidInits > 1) { auto toShape = [&](const EyeModel& e) { return eyeToShape(e, m_eyeSpec); }; std::vector<EyeModel> jittered; jitter(shapeToEye(m_eyeEstimator->getMeanShape(), m_eyeSpec), m_jitterEyelidParams, jittered, m_eyelidInits - 1); std::transform(jittered.begin(), jittered.end(), std::back_inserter(poses), toShape); } #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::diag({ I.cols, I.cols, 1.f })), "poses-in"); #endif std::vector<bool> mask; for (auto & pose : poses) { (*m_eyeEstimator)(I, pose, mask); } PointVec pose = (poses.size() > 1) ? getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } #if DRISHTI_EYE_DEBUG_INITS static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S) { auto toEye = [&](const PointVec& shape) { return S * shapeToEye(shape, spec); }; std::vector<EyeModel> eyes; std::transform(shapes.begin(), shapes.end(), std::back_inserter(eyes), toEye); return eyes; }; std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points) { std::vector<cv::Point2f> points_ = points; for (auto& p : points_) { cv::Point3f q = H * cv::Point3f(p.x, p.y, 1.f); p = { q.x / q.z, q.y / q.z }; } return points_; } static float getMaxSeparation(const PointVec& points) { float maxSeparation = 0.f; for (int i = 0; i < points.size(); i++) { for (int j = i + 1; j < points.size(); j++) { float separation = cv::norm(points[i] - points[j]); if (separation 
> maxSeparation) { maxSeparation = separation; } } } return maxSeparation; } static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name) { cv::Mat canvas; cv::cvtColor(I, canvas, cv::COLOR_GRAY2BGR); for (const auto& v : eyes) { cv::Matx41d color = cv::Scalar::randu(100, 255); v.draw(canvas, 0, 0, { color(0), color(1), color(2) }, 1); } cv::imshow(name, canvas); cv::waitKey(0); } #endif static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n) { cv::Point2f center = drishti::geometry::centroid<int, float>(roi); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { bool hasRoi = false; cv::Rect roi2; for (int j = 0; j < 100; j++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center, false); roi2 = H * roi; cv::Rect valid = roi2 & roi; if (roi.contains(valid.tl()) && roi.contains(valid.br())) { hasRoi = true; break; } } if (hasRoi) { poses.push_back(roi2); } } }
static PointVec getMedianOfPoses(const std::vector<PointVec>& poses) { std::vector<std::vector<float>> params[2]; params[0].resize(poses[0].size()); params[1].resize(poses[0].size()); for (const auto & pose : poses) { for (int j = 0; j < pose.size(); j++) { params[0][j].push_back(pose[j].x); params[1][j].push_back(pose[j].y); } } std::vector<cv::Point2f> pose(poses[0].size()); for (int i = 0; i < params[0].size(); i++) { pose[i] = { median(params[0][i]), median(params[1][i]) }; } return pose; } DRISHTI_EYE_NAMESPACE_END
static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n) { cv::Point2f center = drishti::core::centroid(eye.eyelids); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center); poses.push_back(H * eye); } }
function_block-full_function
[]
C++
Development/OrignalDev/Util/EventTimeLine.cc
isuhao/ravl2
317e0ae1cb51e320b877c3bad6a362447b5e52ec
#include "Jack/EventTimeLine.hh" #include "Ravl/GUI/Manager.hh" #include "Ravl/Image/Font.hh" #include <gdk/gdk.h> #define DODEBUG 0 #if DODEBUG #define ONDEBUG(x) x #else #define ONDEBUG(x) #endif namespace RavlGUIN { using namespace RavlAudioN; EventTimeLineBodyC::EventTimeLineBodyC() : RawCanvasBodyC(15,15), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(srow,scol), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) { } EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(15,15), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng) : RawCanvasBodyC(srow,scol), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng) : RawCanvasBodyC(15,15), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), label("label") {} static bool DestroyGc(GdkGC *gc) { g_object_unref(gc); return true; } bool EventTimeLineBodyC::SetMarkers(RealT time,RealT left, RealT right){ markerRight = right; markerLeft = left; SetMarker(time); return true; } EventTimeLineBodyC::~EventTimeLineBodyC() { if(markerGc != 0) { Manager.Queue(Trigger(DestroyGc,markerGc)); markerGc = 0; } } bool EventTimeLineBodyC::Create() { 
ConnectRef(Signal("expose_event"),*this,&EventTimeLineBodyC::EventExpose); ConnectRef(Signal("configure_event"),*this,&EventTimeLineBodyC::EventConfigure); ConnectRef(Signal("button_press_event"),*this,&EventTimeLineBodyC::EventMousePress); if(!RawCanvasBodyC::Create()) return false; return true; } bool EventTimeLineBodyC::SetDisplayRange(RealRangeC &rng) { Manager.QueueOnGUI(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplayRange,rng)); return true; } bool EventTimeLineBodyC::GUISetDisplayRange(RealRangeC &rng) { displayRange = rng; ONDEBUG(cerr << "EventTimeLineBodyC::GUISetDisplayRange(). Range=" << displayRange << " \n"); GUIDraw(); return true; } bool EventTimeLineBodyC::SetMarker(RealT time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetMarker,time)); return true; } bool EventTimeLineBodyC::GUISetMarker(RealT time) { atMarker = time; GUIDraw(); return true; } bool EventTimeLineBodyC::Goto(RealT &time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUIGoto,time)); return true; } bool EventTimeLineBodyC::GUIGoto(RealT &time) { ONDEBUG(cerr << "EventTimeLineBodyC::GUIGotot(). 
Time=" << time << " \n"); RealT size = displayRange.Size()/2; displayRange = RealRangeC(time - size ,time + size); GUIDraw(); return true; } bool EventTimeLineBodyC::SetDisplaySpan(RealT &size) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplaySpan,size)); return true; } bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size) { RealT time = displayRange.Center(); RealT val = size / 2; displayRange = RealRangeC(time - val,time + val); GUIDraw(); return true; } bool EventTimeLineBodyC::SetEvents(TranscriptionBaseListC &_events) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetEvents,_events)); return true; } bool EventTimeLineBodyC::GUISetEvents(TranscriptionBaseListC &_events) { events = _events; GUIDraw(); return true; } bool EventTimeLineBodyC::EventConfigure(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventConfigure(). \n"); IndexRange2dC newRec; TranslateConfigureEvent(event,newRec); if(newRec == displayArea) return true; displayArea = newRec; if(markerGc == 0) { markerGc = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGc,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGc,&colour); } if(markerGcL == 0) { markerGcL = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcL,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGcL,&colour); } if(markerGcR == 0) { markerGcR = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcR,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 0; colour.blue = 255 * 255; gdk_gc_set_rgb_fg_color (markerGcR,&colour); } if(text == 0) { text = gdk_gc_new(DrawArea()); gdk_gc_copy(text,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 200 * 255; colour.blue = 100 * 255; gdk_gc_set_rgb_fg_color (text,&colour); } return true; } bool 
EventTimeLineBodyC::EventExpose(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventExpose(). \n"); IntT toFollow; IndexRange2dC newRec; TranslateExposeEvent(event,newRec,toFollow); GUIDraw(); return true; } bool EventTimeLineBodyC::EventMousePress(MouseEventC &mousePress) { ONDEBUG(cerr << "EventTimeLineBodyC::EventMousePress(). \n"); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); if(mousePress.HasChanged(1) && mousePress.IsCntrl()){ DeleteEvent(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(1)&&mousePress.IsShift()) SetEventVal(mousePress.At()[1] / scale + displayRange.Min()); else if(mousePress.HasChanged(1)){ atMarker = (static_cast<RealT>(mousePress.At()[1]) / scale) + displayRange.Min(); #if 0 for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) { if(it->Data2().Contains(time)) cerr << "Time " << time << " in " << it->Data2() << "\n"; } #endif timeSelected(atMarker); } else if(mousePress.HasChanged(0)&&mousePress.IsShift()) AddEvent(); else if(mousePress.HasChanged(0)){ markerLeft = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(2)){ markerRight = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } GUIDraw(); return true; } bool EventTimeLineBodyC::GUIDraw() { ONDEBUG(cerr << "EventTimeLineBodyC::GUIDraw(). 
Range=" << displayRange << " Events=" << events.Size() << "\n"); if(displayArea.Cols() < 1 || displayArea.Rows() < 1) return true; GUIDrawRectangle(GUIDrawGCWhite(),displayArea,true); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); IndexRangeC vertRange = displayArea.Range1().Shrink(4); #if 1 if(markerGc != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((atMarker - displayRange.Min()) * scale, ((atMarker+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGc,markRange,true); } #endif #if 1 if(markerGcL != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerLeft - displayRange.Min()) * scale, ((markerLeft+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcL,markRange,true); } #endif #if 1 if(markerGcR != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerRight - displayRange.Min()) * scale, ((markerRight+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcR,markRange,true); } #endif DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events); ONDEBUG(cerr << "VertRange=" << vertRange << " Scale=" << scale << "\n"); IndexC midV = vertRange.Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(midV,displayArea.Range2().Min()),Index2dC(midV,displayArea.Range2().Max())); for(;it;it++) { if(displayRange.IsOverlapping(it->Data2())){ IndexRangeC rng2((it->Data2().Min() - displayRange.Min()) * scale, (it->Data2().Max() - displayRange.Min()) * scale); IndexRange2dC box(vertRange,rng2); if(box.Range2().Size() == 0) box.Range2().Max()++; box.ClipBy(displayArea); GdkGC* drawContext = gdk_gc_new(DrawArea()); gdk_gc_copy(drawContext,GUIDrawGC()); GdkColor colour; if(it->Data1() == 0){ colour.pixel 
= 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; } else if(it->Data1() == 4){ colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; } else{ colour.pixel = 0; colour.red = -255 * 255*it->Data1()/3; colour.green = -255 * 255*it->Data1()/3; colour.blue = -255 * 255*it->Data1()/3; } gdk_gc_set_rgb_fg_color (drawContext,&colour); GUIDrawRectangle(drawContext,box,true); } } GUIDrawText(text,GUIDrawFont(),Point2dC(10,10),label); #if 0 IndexC midH = displayArea.Range2().Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(displayArea.Range1().Min(),midH),Index2dC(displayArea.Range1().Max(),midH)); #endif return true; } RealT EventTimeLineBodyC::GetMarkerRight(){return markerRight;} RealT EventTimeLineBodyC::GetMarkerLeft(){return markerLeft;} RealT EventTimeLineBodyC::GetMarkerTime(){return atMarker;} bool EventTimeLineBodyC::AddEvent(){ if(markerLeft < markerRight) AddEvent(RealRangeC(markerLeft,markerRight)); else AddEvent(RealRangeC(markerRight,markerLeft)); return true; } bool EventTimeLineBodyC::AddEvent(RealRangeC data){ events+=Tuple3C<IntT,RealRangeC,StringC>(0,data,""); return true; } bool EventTimeLineBodyC::DeleteEvent(){ if(markerLeft < markerRight) for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerLeft && it.Data().Data2().Min() < markerRight) it.Del(); else for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerRight && it.Data().Data2().Min() < markerLeft) it.Del(); return true; } TranscriptionBaseListC &EventTimeLineBodyC::GetEvents(){return events;} bool EventTimeLineBodyC::SetEventVal(RealT time){ for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Contains(time)){ it.Data().Data1() = (it.Data().Data1()+1) % 5; GUIDraw(); } return true; } }
#include "Jack/EventTimeLine.hh" #include "Ravl/GUI/Manager.hh" #include "Ravl/Image/Font.hh" #include <gdk/gdk.h> #define DODEBUG 0 #if DODEBUG #define ONDEBUG(x) x #else #define ONDEBUG(x) #endif namespace RavlGUIN { using namespace RavlAudioN; EventTimeLineBodyC::EventTimeLineBodyC() : RawCanvasBodyC(15,15), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(srow,scol), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) { } EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(15,15), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng) : RawCanvasBodyC(srow,scol), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng) : RawCanvasBodyC(15,15), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), label("label") {} static bool DestroyGc(GdkGC *gc) { g_object_unref(gc); return true; } bool EventTimeLineBodyC::SetMarkers(RealT time,RealT left, RealT right){ markerRight = right; markerLeft = left; SetMarker(time); return true; } EventTimeLineBodyC::~EventTimeLineBodyC() { if(markerGc != 0) { Manager.Queue(Trigger(DestroyGc,markerGc)); markerGc = 0; } } bool EventTimeLineBodyC::Create() { 
ConnectRef(Signal("expose_event"),*this,&EventTimeLineBodyC::EventExpose); ConnectRef(Signal("configure_event"),*this,&EventTimeLineBodyC::EventConfigure); ConnectRef(Signal("button_press_event"),*this,&EventTimeLineBodyC::EventMousePress); if(!RawCanvasBodyC::Create()) return false; return true; } bool EventTimeLineBodyC::SetDisplayRange(RealRangeC &rng) { Manager.QueueOnGUI(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplayRange,rng)); return true; } bool EventTimeLineBodyC::GUISetDisplayRange(RealRangeC &rng) { displayRange = rng; ONDEBUG(cerr << "EventTimeLineBodyC::GUISetDisplayRange(). Range=" << displayRange << " \n"); GUIDraw(); return true; } bool EventTimeLineBodyC::SetMarker(RealT time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetMarker,time)); return true; } bool EventTimeLineBodyC::GUISetMarker(RealT time) { atMarker = time; GUIDraw(); return true; } bool EventTimeLineBodyC::Goto(RealT &time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUIGoto,time)); return true; } bool EventTimeLineBodyC::GUIGoto(RealT &time) { ONDEBUG(cerr << "EventTimeLineBodyC::GUIGotot(). 
Time=" << time << " \n"); RealT size = displayRange.Size()/2; displayRange = RealRangeC(time - size ,time + size); GUIDraw(); return true; } bool EventTimeLineBodyC::SetDisplaySpan(RealT &size) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplaySpan,size)); return true; } bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size) { RealT time = displayRange.Center(); RealT val = size / 2; displayRange = RealRangeC(time - val,time + val); GUIDraw(); return true; } bool EventTimeLineBodyC::SetEvents(TranscriptionBaseListC &_events) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetEvents,_events)); return true; } bool EventTimeLineBodyC::GUISetEvents(TranscriptionBaseListC &_events) { events = _events; GUIDraw(); return true; } bool EventTimeLineBodyC::EventConfigure(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventConfigure(). \n"); IndexRange2dC newRec; TranslateConfigureEvent(event,newRec); if(newRec == displayArea) return true; displayArea = newRec; if(markerGc == 0) { markerGc = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGc,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGc,&colour); } if(markerGcL == 0) { markerGcL = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcL,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGcL,&colour); } if(markerGcR == 0) { markerGcR = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcR,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 0; colour.blue = 255 * 255; gdk_gc_set_rgb_fg_color (markerGcR,&colour); } if(text == 0) { text = gdk_gc_new(DrawArea()); gdk_gc_copy(text,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 200 * 255; colour.blue = 100 * 255; gdk_gc_set_rgb_fg_color (text,&colour); } return true; } bool 
EventTimeLineBodyC::EventExpose(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventExpose(). \n"); IntT toFollow; IndexRange2dC newRec; TranslateExposeEvent(event,newRec,toFollow); GUIDraw(); return true; } bool EventTimeLineBodyC::EventMousePress(MouseEventC &mousePress) { ONDEBUG(cerr << "EventTimeLineBodyC::EventMousePress(). \n"); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); if(mousePress.HasChanged(1) && mousePress.IsCntrl()){ DeleteEvent(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(1)&&mousePress.IsShift()) SetEventVal(mousePress.At()[1] / scale + displayRange.Min()); else if(mousePress.HasChanged(1)){ atMarker = (static_cast<RealT>(mousePress.At()[1]) / scale) + displayRange.Min(); #if 0 for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) { if(it->Data2().Contains(time)) cerr << "Time " << time << " in " << it->Data2() << "\n"; } #endif timeSelected(atMarker); } else if(mousePress.HasChanged(0)&&mousePress.IsShift()) AddEvent(); else if(mousePress.HasChanged(0)){ markerLeft = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(2)){ markerRight = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } GUIDraw(); return true; } bool EventTimeLineBodyC::GUIDraw() { ONDEBUG(cerr << "EventTimeLineBodyC::GUIDraw(). 
Range=" << displayRange << " Events=" << events.Size() << "\n"); if(displayArea.Cols() < 1 || displayArea.Rows() < 1) return true; GUIDrawRectangle(GUIDrawGCWhite(),displayArea,true); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); IndexRangeC vertRange = displayArea.Range1().Shrink(4); #if 1 if(markerGc != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((atMarker - displayRange.Min()) * scale, ((atMarker+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGc,markRange,true); } #endif #if 1
#endif #if 1 if(markerGcR != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerRight - displayRange.Min()) * scale, ((markerRight+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcR,markRange,true); } #endif DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events); ONDEBUG(cerr << "VertRange=" << vertRange << " Scale=" << scale << "\n"); IndexC midV = vertRange.Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(midV,displayArea.Range2().Min()),Index2dC(midV,displayArea.Range2().Max())); for(;it;it++) { if(displayRange.IsOverlapping(it->Data2())){ IndexRangeC rng2((it->Data2().Min() - displayRange.Min()) * scale, (it->Data2().Max() - displayRange.Min()) * scale); IndexRange2dC box(vertRange,rng2); if(box.Range2().Size() == 0) box.Range2().Max()++; box.ClipBy(displayArea); GdkGC* drawContext = gdk_gc_new(DrawArea()); gdk_gc_copy(drawContext,GUIDrawGC()); GdkColor colour; if(it->Data1() == 0){ colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; } else if(it->Data1() == 4){ colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; } else{ colour.pixel = 0; colour.red = -255 * 255*it->Data1()/3; colour.green = -255 * 255*it->Data1()/3; colour.blue = -255 * 255*it->Data1()/3; } gdk_gc_set_rgb_fg_color (drawContext,&colour); GUIDrawRectangle(drawContext,box,true); } } GUIDrawText(text,GUIDrawFont(),Point2dC(10,10),label); #if 0 IndexC midH = displayArea.Range2().Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(displayArea.Range1().Min(),midH),Index2dC(displayArea.Range1().Max(),midH)); #endif return true; } RealT EventTimeLineBodyC::GetMarkerRight(){return markerRight;} RealT EventTimeLineBodyC::GetMarkerLeft(){return markerLeft;} RealT EventTimeLineBodyC::GetMarkerTime(){return atMarker;} bool EventTimeLineBodyC::AddEvent(){ if(markerLeft < markerRight) AddEvent(RealRangeC(markerLeft,markerRight)); else 
AddEvent(RealRangeC(markerRight,markerLeft)); return true; } bool EventTimeLineBodyC::AddEvent(RealRangeC data){ events+=Tuple3C<IntT,RealRangeC,StringC>(0,data,""); return true; } bool EventTimeLineBodyC::DeleteEvent(){ if(markerLeft < markerRight) for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerLeft && it.Data().Data2().Min() < markerRight) it.Del(); else for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerRight && it.Data().Data2().Min() < markerLeft) it.Del(); return true; } TranscriptionBaseListC &EventTimeLineBodyC::GetEvents(){return events;} bool EventTimeLineBodyC::SetEventVal(RealT time){ for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Contains(time)){ it.Data().Data1() = (it.Data().Data1()+1) % 5; GUIDraw(); } return true; } }
if(markerGcL != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerLeft - displayRange.Min()) * scale, ((markerLeft+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcL,markRange,true); }
if_condition
[ { "content": "\n\n#include \"../.././GUI/Util/EventTimeLine.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/EventTimeLine.hh", "rank": 0, "score": 176136.96804029416 }, { "content": " class EventTimeLineC;\n\n \n\n //! userlevel=Develop\n\n //: Event time line.\n\n \n", "file...
C++
printscan/print/drivers/usermode/tools/uni/minidev.new/tips.cpp
npocmaka/Windows-Server-2003
5c6fe3db626b63a384230a1aa6b92ac416b0765f
#include "StdAfx.H" #include "Resource.H" #include <WinReg.H> #include <Sys\Stat.H> #include <Sys\Types.H> #include "tips.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif #define MAX_BUFLEN 1000 static const TCHAR szSection[] = _T("Tip"); static const TCHAR szIntFilePos[] = _T("FilePos"); static const TCHAR szTimeStamp[] = _T("TimeStamp"); static const TCHAR szIntStartup[] = _T("StartUp"); CTipOfTheDay::CTipOfTheDay(CWnd* pParent ) : CDialog(IDD_TIP, pParent) { m_bStartup = TRUE; CWinApp* pApp = AfxGetApp(); m_bStartup = !pApp->GetProfileInt(szSection, szIntStartup, 0); UINT iFilePos = pApp->GetProfileInt(szSection, szIntFilePos, 0); CString csTipFile = pApp->m_pszHelpFilePath; csTipFile = csTipFile.Left(csTipFile.ReverseFind(_T('\\'))); csTipFile = csTipFile + _T("\\tips.txt"); m_pStream = fopen(csTipFile, "r"); if (m_pStream == NULL) { m_strTip.LoadString(CG_IDS_FILE_ABSENT); return; } struct _stat buf; _fstat(_fileno(m_pStream), &buf); CString strCurrentTime = ctime(&buf.st_ctime); strCurrentTime.TrimRight(); CString strStoredTime = pApp->GetProfileString(szSection, szTimeStamp, NULL); if (strCurrentTime != strStoredTime) { iFilePos = 0; pApp->WriteProfileString(szSection, szTimeStamp, strCurrentTime); } if (fseek(m_pStream, iFilePos, SEEK_SET) != 0) { AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { GetNextTipString(m_strTip); } } CTipOfTheDay::~CTipOfTheDay() { if (m_pStream != NULL) { CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntFilePos, ftell(m_pStream)); fclose(m_pStream); } } void CTipOfTheDay::DoDataExchange(CDataExchange* pDX) { CDialog::DoDataExchange(pDX); DDX_Check(pDX, IDC_STARTUP, m_bStartup); DDX_Text(pDX, IDC_TIPSTRING, m_strTip); } BEGIN_MESSAGE_MAP(CTipOfTheDay, CDialog) ON_BN_CLICKED(IDC_NEXTTIP, OnNextTip) ON_WM_CTLCOLOR() ON_WM_PAINT() END_MESSAGE_MAP() void CTipOfTheDay::OnNextTip() { GetNextTipString(m_strTip); UpdateData(FALSE); } void 
CTipOfTheDay::GetNextTipString(CString& strNext) { LPTSTR lpsz = strNext.GetBuffer(MAX_BUFLEN); BOOL bStop = FALSE; while (!bStop) { if (_fgetts(lpsz, MAX_BUFLEN, m_pStream) == NULL) { if (fseek(m_pStream, 0, SEEK_SET) != 0) AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { if (*lpsz != ' ' && *lpsz != '\t' && *lpsz != '\n' && *lpsz != ';' && *lpsz != '*') { bStop = TRUE; } } } strNext.ReleaseBuffer(); } HBRUSH CTipOfTheDay::OnCtlColor(CDC* pDC, CWnd* pWnd, UINT nCtlColor) { if (pWnd->GetDlgCtrlID() == IDC_TIPSTRING) return (HBRUSH)GetStockObject(WHITE_BRUSH); return CDialog::OnCtlColor(pDC, pWnd, nCtlColor); } void CTipOfTheDay::OnOK() { CDialog::OnOK(); CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntStartup, !m_bStartup); } BOOL CTipOfTheDay::OnInitDialog() { CDialog::OnInitDialog(); if (m_pStream == NULL) GetDlgItem(IDC_NEXTTIP)->EnableWindow(FALSE); return TRUE; } void CTipOfTheDay::OnPaint() { CPaintDC dc(this); CWnd* pStatic = GetDlgItem(IDC_BULB); CRect rect; pStatic->GetWindowRect(&rect); ScreenToClient(&rect); CBrush brush; brush.CreateStockObject(WHITE_BRUSH); dc.FillRect(rect, &brush); CBitmap bmp; bmp.LoadBitmap(IDB_LIGHTBULB); BITMAP bmpInfo; bmp.GetBitmap(&bmpInfo); CDC dcTmp; dcTmp.CreateCompatibleDC(&dc); dcTmp.SelectObject(&bmp); rect.bottom = bmpInfo.bmHeight + rect.top; dc.BitBlt(rect.left, rect.top, rect.Width(), rect.Height(), &dcTmp, 0, 0, SRCCOPY); CString strMessage; strMessage.LoadString(CG_IDS_DIDYOUKNOW); rect.left += bmpInfo.bmWidth; dc.DrawText(strMessage, rect, DT_VCENTER | DT_SINGLELINE); }
#include "StdAfx.H" #include "Resource.H" #include <WinReg.H> #include <Sys\Stat.H> #include <Sys\Types.H> #include "tips.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif #define MAX_BUFLEN 1000 static const TCHAR szSection[] = _T("Tip"); static const TCHAR szIntFilePos[] = _T("FilePos"); static const TCHAR szTimeStamp[] = _T("TimeStamp"); static const TCHAR szIntStartup[] = _T("StartUp"); CTipOfTheDay::CTipOfTheDay(CWnd* pParent ) : CDialog(IDD_TIP, pParent) { m_bStartup = TRUE; CWinApp* pApp = AfxGetApp(); m_bStartup = !pApp->GetProfileInt(szSection, szIntStartup, 0); UINT iFilePos = pApp->GetProfileInt(szSection, szIntFilePos, 0); CString csTipFile = pApp->m_pszHelpFilePath; csTipFile = csTipFile.Left(csTipFile.ReverseFind(_T('\\'))); csTipFile = csTipFile + _T("\\tips.txt"); m_pStream = fopen(csTipFile, "r"); if (m_pStream == NULL) { m_strTip.LoadString(CG_IDS_FILE_ABSENT); return; } struct _stat buf; _fstat(_fileno(m_pStream), &buf); CString strCurrentTime = ctime(&buf.st_ctime); strCurrentTime.TrimRight(); CString strStoredTime = pApp->GetProfileString(szSection, szTimeStamp, NULL); if (strCurrentTime != strStoredTime) { iFilePos = 0; pApp->WriteProfileString(szSection, szTimeStamp, strCurrentTime); } if (fseek(m_pStream, iFilePos, SEEK_SET) != 0) { AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { GetNextTipString(m_strTip); } } CTipOfTheDay::~CTipOfTheDay() {
} } void CTipOfTheDay::DoDataExchange(CDataExchange* pDX) { CDialog::DoDataExchange(pDX); DDX_Check(pDX, IDC_STARTUP, m_bStartup); DDX_Text(pDX, IDC_TIPSTRING, m_strTip); } BEGIN_MESSAGE_MAP(CTipOfTheDay, CDialog) ON_BN_CLICKED(IDC_NEXTTIP, OnNextTip) ON_WM_CTLCOLOR() ON_WM_PAINT() END_MESSAGE_MAP() void CTipOfTheDay::OnNextTip() { GetNextTipString(m_strTip); UpdateData(FALSE); } void CTipOfTheDay::GetNextTipString(CString& strNext) { LPTSTR lpsz = strNext.GetBuffer(MAX_BUFLEN); BOOL bStop = FALSE; while (!bStop) { if (_fgetts(lpsz, MAX_BUFLEN, m_pStream) == NULL) { if (fseek(m_pStream, 0, SEEK_SET) != 0) AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { if (*lpsz != ' ' && *lpsz != '\t' && *lpsz != '\n' && *lpsz != ';' && *lpsz != '*') { bStop = TRUE; } } } strNext.ReleaseBuffer(); } HBRUSH CTipOfTheDay::OnCtlColor(CDC* pDC, CWnd* pWnd, UINT nCtlColor) { if (pWnd->GetDlgCtrlID() == IDC_TIPSTRING) return (HBRUSH)GetStockObject(WHITE_BRUSH); return CDialog::OnCtlColor(pDC, pWnd, nCtlColor); } void CTipOfTheDay::OnOK() { CDialog::OnOK(); CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntStartup, !m_bStartup); } BOOL CTipOfTheDay::OnInitDialog() { CDialog::OnInitDialog(); if (m_pStream == NULL) GetDlgItem(IDC_NEXTTIP)->EnableWindow(FALSE); return TRUE; } void CTipOfTheDay::OnPaint() { CPaintDC dc(this); CWnd* pStatic = GetDlgItem(IDC_BULB); CRect rect; pStatic->GetWindowRect(&rect); ScreenToClient(&rect); CBrush brush; brush.CreateStockObject(WHITE_BRUSH); dc.FillRect(rect, &brush); CBitmap bmp; bmp.LoadBitmap(IDB_LIGHTBULB); BITMAP bmpInfo; bmp.GetBitmap(&bmpInfo); CDC dcTmp; dcTmp.CreateCompatibleDC(&dc); dcTmp.SelectObject(&bmp); rect.bottom = bmpInfo.bmHeight + rect.top; dc.BitBlt(rect.left, rect.top, rect.Width(), rect.Height(), &dcTmp, 0, 0, SRCCOPY); CString strMessage; strMessage.LoadString(CG_IDS_DIDYOUKNOW); rect.left += bmpInfo.bmWidth; dc.DrawText(strMessage, rect, DT_VCENTER | DT_SINGLELINE); }
if (m_pStream != NULL) { CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntFilePos, ftell(m_pStream)); fclose(m_pStream);
function_block-random_span
[]
C++
src/kdtree.cpp
pillowsofwind/mini_renderer
a02b16c4a96d21fc5920479ea496b31f462a6407
#include "kdtree.hpp" #include <climits> #include <iostream> #include <algorithm> using namespace std; void KDTree::load(int _size, HitPoint *_data) { m_nNode = 0; m_size = _size; m_data = _data; m_index = new int[m_size]; m_memory = new Node[m_size]; for (int i = 0; i < m_size; ++i) m_index[i] = i; } void KDTree::medianPartition(int l, int r, int dim, int k) { int mid = (l + r) >> 1, temp = m_index[mid]; int i = l, j = r; while (i < j) { while (m_data[m_index[i]].position[dim] < m_data[temp].position[dim]) ++i; while (m_data[m_index[j]].position[dim] > m_data[temp].position[dim]) --j; if (i <= j) { int t = m_index[i]; m_index[i] = m_index[j]; m_index[j] = t; ++i; --j; } } if (l < j && l <= k && k <= j) medianPartition(l, j, dim, k); if (i < r && i <= k && k <= r) medianPartition(i, r, dim, k); } KDTree::Node *KDTree::build(int l, int r, double *min, double *max) { if (r <= l) return nullptr; float temp = -1; int split; for (int i = 0; i < K; ++i) if (max[i] - min[i] > temp) { temp = max[i] - min[i]; split = i; } int mid = (l + r) >> 1; medianPartition(l, r - 1, split, mid); m_memory[m_nNode].value = m_index[mid]; m_memory[m_nNode].split = split; Node *node = &m_memory[m_nNode++]; temp = max[split]; max[split] = m_data[m_index[mid]].position[split]; node->left = build(l, mid, min, max); max[split] = temp; temp = min[split]; min[split] = m_data[m_index[mid]].position[split]; node->right = build(mid + 1, r, min, max); min[split] = temp; m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; return node; } void KDTree::build() { m_nNode = 0; double *min = new double[K], *max = new double[K]; for (int i = 0; i < K; ++i) min[i] = LONG_MAX, max[i] 
= LONG_MIN; for (int i = 0; i < m_size; ++i) { Vector3f temp = m_data[i].position;; for (int j = 0; j < K; ++j) { if (m_data[i].position[j] < min[j]) min[j] = m_data[i].position[j]; if (m_data[i].position[j] > max[j]) max[j] = m_data[i].position[j]; } } m_root = build(0, m_size, min, max); } void KDTree::insertPhoton(Node *node, const Photon &photon) { if (node == NULL) return; int pos = node->value; if (Vector3f::dot(m_data[pos].position - photon.P, m_data[pos].position - photon.P) < m_data[pos].radius2) if (photon.object == m_data[pos].object) { m_data[pos].nNew += 1; m_data[pos].phi += photon.color; } int split = node->split; Node *another; if (photon.P[split] < m_data[pos].position[split]) { another = node->right; insertPhoton(node->left, photon); } else { another = node->left; insertPhoton(node->right, photon); } if ((another) && (m_data[pos].position[split] - photon.P[split]) * (m_data[pos].position[split] - photon.P[split]) < m_data[another->value].maxRadius2 + 1e6) insertPhoton(another, photon); } void KDTree::insertPhoton(const Photon &photon) { insertPhoton(m_root, photon); } void KDTree::update(Node *node) { if (node->left) update(node->left); if (node->right) update(node->right); m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; } void KDTree::update() { update(m_root); } KDTree::~KDTree() { }
#include "kdtree.hpp" #include <climits> #include <iostream> #include <algorithm> using namespace std; void KDTree::load(int _size, HitPoint *_data) { m_nNode = 0; m_size = _size; m_data = _data; m_index = new int[m_size]; m_memory = new Node[m_size]; for (int i = 0; i < m_size; ++i) m_index[i] = i; } void KDTree::medianPartition(int l, int r, int dim, int k) { int mid = (l + r) >> 1, temp = m_index[mid]; int i = l, j = r; while (i < j) { while (m_data[m_index[i]].position[dim] < m_data[temp].position[dim]) ++i; while (m_data[m_index[j]].position[dim] > m_data[temp].position[dim]) --j; if (i <= j) { int t = m_index[i]; m_index[i] = m_index[j]; m_index[j] = t; ++i; --j; } } if (l < j && l <= k && k <= j) medianPartition(l, j, dim, k); if (i < r && i <= k && k <= r) medianPartition(i, r, dim, k); } KDTree::Node *KDTree::build(int l, int r, double *min, double *max) { if (r <= l) return nullptr; float temp = -1; int split; for (int i = 0; i < K; ++i) if (max[i] - min[i] > temp) { temp = max[i] - min[i]; split = i; } int mid = (l + r) >> 1; medianPartition(l, r - 1, split, mid); m_memory[m_nNode].value = m_index[mid]; m_memory[m_nNode].split = split; Node *node = &m_memory[m_nNode++]; temp = max[split]; max[split] = m_data[m_index[mid]].position[split]; node->left = build(l, mid, min, max); max[split] = temp; temp = min[split]; min[split] = m_data[m_index[mid]].position[split]; node->right = build(mid + 1, r, min, max); min[split] = temp; m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; return node; } void KDTree::build() { m_nNode = 0; double *min = new double[K], *max = new double[K]; for (int i = 0; i < K; ++i) min[i] = LONG_MAX, m
e->left) update(node->left); if (node->right) update(node->right); m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; } void KDTree::update() { update(m_root); } KDTree::~KDTree() { }
ax[i] = LONG_MIN; for (int i = 0; i < m_size; ++i) { Vector3f temp = m_data[i].position;; for (int j = 0; j < K; ++j) { if (m_data[i].position[j] < min[j]) min[j] = m_data[i].position[j]; if (m_data[i].position[j] > max[j]) max[j] = m_data[i].position[j]; } } m_root = build(0, m_size, min, max); } void KDTree::insertPhoton(Node *node, const Photon &photon) { if (node == NULL) return; int pos = node->value; if (Vector3f::dot(m_data[pos].position - photon.P, m_data[pos].position - photon.P) < m_data[pos].radius2) if (photon.object == m_data[pos].object) { m_data[pos].nNew += 1; m_data[pos].phi += photon.color; } int split = node->split; Node *another; if (photon.P[split] < m_data[pos].position[split]) { another = node->right; insertPhoton(node->left, photon); } else { another = node->left; insertPhoton(node->right, photon); } if ((another) && (m_data[pos].position[split] - photon.P[split]) * (m_data[pos].position[split] - photon.P[split]) < m_data[another->value].maxRadius2 + 1e6) insertPhoton(another, photon); } void KDTree::insertPhoton(const Photon &photon) { insertPhoton(m_root, photon); } void KDTree::update(Node *node) { if (nod
random
[ { "content": "struct OctNode {\n\n OctNode *child[8];\n\n\n\n OctNode() {\n\n child[0] = 0;\n\n }\n\n\n\n bool isTerm() { return child[0] == 0; }\n\n\n\n std::vector<int> obj;\n\n};\n\n\n", "file_path": "include/octree.hpp", "rank": 0, "score": 62447.16502680025 }, { "c...
C++
src/WebInterface/CController.hpp
Fabio3rs/cppapiframework
14f1b1b42b77edbbf72d9d7f949ea6c9fcfa06a5
#pragma once #ifndef CController_hpp #define CController_hpp #include "../stdafx.hpp" #include "pistache.hpp" struct httpStreamPack { const Pistache::Rest::Request &request; Pistache::Http::ResponseWriter &response; httpStreamPack(const Pistache::Rest::Request &req, Pistache::Http::ResponseWriter &resp) : request(req), response(resp) {} }; class CController { size_t min_json_body_size{0}, max_json_body_size{4 * 1024}; protected: std::string defaulthashfield = "_hash"; bool habilita_hash_input_json = false; public: using msg_pair_t = std::pair<bool, std::string>; const static Pistache::Http::Mime::MediaType JSON_RETURN; static void returnPocoJson(Pistache::Http::Code code, const Poco::JSON::Object::Ptr &json, Pistache::Http::ResponseWriter &response); static auto get_ip_host_from_header(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; static auto get_ip_host_from_request(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; auto input_json(httpStreamPack httpdata) -> Poco::JSON::Object::Ptr; static auto get_auth(httpStreamPack httpdata) -> std::optional<std::string>; static auto default_json_return(bool success, const std::string &msg) -> Poco::JSON::Object::Ptr; static auto default_json_return(bool success, const std::string &msg, const Poco::UUID &uuid) -> Poco::JSON::Object::Ptr; static auto default_json_return_as_str(bool success, const std::string &msg) -> std::string; static void throw_json_http_exception [[noreturn]] (Pistache::Http::Code code, bool success, const std::string &msg, Pistache::Http::ResponseWriter &response); static void throw_http_exception [[noreturn]] (Pistache::Http::Code code, const std::string &fullreturndata); auto valida_hash_request(const Poco::JSON::Object::Ptr &param, Pistache::Http::ResponseWriter &response) -> bool; static auto hash_json(const Poco::JSON::Object::Ptr &param, const std::string &ignorefield) -> Poco::DigestEngine::Digest; static auto hash_json_cmp(const 
Poco::JSON::Object::Ptr &param, const std::string &hashfield) -> bool; static auto response_file(const std::string &fullpath, Pistache::Http::ResponseWriter &response) -> bool; static auto should_response_html(const Pistache::Rest::Request &request) -> bool; template <class stream_t> auto response_stream(const stream_t &inputstream, Pistache::Http::ResponseWriter &response) -> bool { inputstream.seekg(0, std::ios::end); auto filesize = inputstream.tellg(); inputstream.seekg(0, std::ios::beg); response.setMime(Pistache::Http::Mime::MediaType( Pistache::Http::Mime::Type::Application, Pistache::Http::Mime::Subtype::Ext, Pistache::Http::Mime::Suffix::Zip)); auto stream = response.stream(Pistache::Http::Code::Ok, static_cast<size_t>(filesize)); for (auto it = std::istreambuf_iterator<char>(inputstream), end = std::istreambuf_iterator<char>(); it != end; it++) { char ch = *it; stream.write(&ch, 1); } stream << Pistache::Http::ends; return true; } virtual void register_routes(const std::string & , Pistache::Rest::Router & ); auto operator=(const CController &) -> CController & = delete; auto operator=(CController &&) -> CController & = delete; void enableInputHashCheck(bool e) { habilita_hash_input_json = e; } template <class T> void route_get(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Get(router, routepath, routefun); } template <class T> void route_post(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Post(router, routepath, routefun); } CController() = default; CController(const CController &) = default; CController(CController &&) = default; virtual ~CController() = default; }; #endif
#pragma once #ifndef CController_hpp #define CController_hpp #include "../stdafx.hpp" #include "pistache.hpp" struct httpStreamPack { const Pistache::Rest::Request &request; Pistache::Http::ResponseWriter &response; httpStreamPack(const Pistache::Rest::Request &req, Pistache::Http::ResponseWriter &resp) : request(req), response(resp) {} }; class CController { size_t min_json_body_size{0}, max_json_body_size{4 * 1024}; protected: std::string defaulthashfield = "_hash"; bool habilita_hash_input_json = false; public: using msg_pair_t = std::pair<bool, std::string>; const static Pistache::Http::Mime::MediaType JSON_RETURN; static void returnPocoJson(Pistache::Http::Code code, const Poco::JSON::Object::Ptr &json, Pistache::Http::ResponseWriter &response); static auto get_ip_host_from_header(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; static auto get_ip_host_from_request(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; auto input_json(httpStreamPack httpdata) -> Poco::JSON::Object::Ptr; static auto get_auth(httpStreamPack httpdata) -> std::optional<std::string>; static auto default_js
); inputstream.seekg(0, std::ios::beg); response.setMime(Pistache::Http::Mime::MediaType( Pistache::Http::Mime::Type::Application, Pistache::Http::Mime::Subtype::Ext, Pistache::Http::Mime::Suffix::Zip)); auto stream = response.stream(Pistache::Http::Code::Ok, static_cast<size_t>(filesize)); for (auto it = std::istreambuf_iterator<char>(inputstream), end = std::istreambuf_iterator<char>(); it != end; it++) { char ch = *it; stream.write(&ch, 1); } stream << Pistache::Http::ends; return true; } virtual void register_routes(const std::string & , Pistache::Rest::Router & ); auto operator=(const CController &) -> CController & = delete; auto operator=(CController &&) -> CController & = delete; void enableInputHashCheck(bool e) { habilita_hash_input_json = e; } template <class T> void route_get(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Get(router, routepath, routefun); } template <class T> void route_post(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Post(router, routepath, routefun); } CController() = default; CController(const CController &) = default; CController(CController &&) = default; virtual ~CController() = default; }; #endif
on_return(bool success, const std::string &msg) -> Poco::JSON::Object::Ptr; static auto default_json_return(bool success, const std::string &msg, const Poco::UUID &uuid) -> Poco::JSON::Object::Ptr; static auto default_json_return_as_str(bool success, const std::string &msg) -> std::string; static void throw_json_http_exception [[noreturn]] (Pistache::Http::Code code, bool success, const std::string &msg, Pistache::Http::ResponseWriter &response); static void throw_http_exception [[noreturn]] (Pistache::Http::Code code, const std::string &fullreturndata); auto valida_hash_request(const Poco::JSON::Object::Ptr &param, Pistache::Http::ResponseWriter &response) -> bool; static auto hash_json(const Poco::JSON::Object::Ptr &param, const std::string &ignorefield) -> Poco::DigestEngine::Digest; static auto hash_json_cmp(const Poco::JSON::Object::Ptr &param, const std::string &hashfield) -> bool; static auto response_file(const std::string &fullpath, Pistache::Http::ResponseWriter &response) -> bool; static auto should_response_html(const Pistache::Rest::Request &request) -> bool; template <class stream_t> auto response_stream(const stream_t &inputstream, Pistache::Http::ResponseWriter &response) -> bool { inputstream.seekg(0, std::ios::end); auto filesize = inputstream.tellg(
random
[ { "content": "class JsonResponse : public ResponseViaReturn {\n\n\n\n public:\n\n JsonResponse(const JsonResponse &) = default;\n\n JsonResponse(JsonResponse &&) = default;\n\n\n\n auto operator=(const JsonResponse &) -> JsonResponse & = default;\n\n auto operator=(JsonResponse &&) -> JsonResponse ...
C++
src/main.cpp
Laakeri/pace2020-treedepth-exact
1049abbe6fb4012f027c2fcfce15fff5fa8c169f
#include <iostream> #include <vector> #include <memory> #include <fstream> #include <iomanip> #include <set> #include <cassert> #include <random> #include <sys/resource.h> #include "graph.hpp" #include "io.hpp" #include "utils.hpp" #include "mcs.hpp" #include "staticset.hpp" #include "bitset.hpp" #include "chordalsolve.hpp" #include "best.hpp" #include "preprocessor.hpp" #include "ms_solve.hpp" using namespace sms; #define F first #define S second using std::vector; std::mt19937 gen(1337); void SetStackSize(int64_t sz) { struct rlimit rl; assert(getrlimit(RLIMIT_STACK, &rl) == 0); Log::Write(3, "Cur stack size ", rl.rlim_cur); if (rl.rlim_cur < sz) { rl.rlim_cur = sz; Log::Write(3, "Setting stack size ", sz); assert(setrlimit(RLIMIT_STACK, &rl) == 0); } } template<size_t chunks> int HeurComp(const FGraph<chunks>& graph, int best, double time, const Preprocessor& pp) { Timer timer; timer.start(); int it=0; std::set<uint64_t> gs; int vari = 0; int upd_cnt = 0; int last_add = 0; while (timer.get() < time) { double dupls = 0; if (it > 0) { dupls = (double)(it - (int)gs.size()) / (double)it; } if (dupls > 0.5 && upd_cnt == graph.n() && it - last_add > 10) { vari++; last_add = it; } it++; Timer triang_tmr; triang_tmr.start(); FGraph<chunks> lol_g = graph; mcs::LbTriang(lol_g, gen, vari, upd_cnt); triang_tmr.stop(); double est = (double)gs.size() * (double)it / ((double)it - (double)gs.size()); Log::Write(10, "min tri ", triang_tmr.get(), " ", best, " ", est, " ", lol_g.m(), " ", dupls, " ", vari, " ", upd_cnt); upd_cnt = upd_cnt * 2 + 1; upd_cnt = std::min(upd_cnt, graph.n()); if (gs.count(lol_g.Hash())) { Log::Write(10, "Same triang ", gs.size(), " ", it, " ", est); continue; } gs.insert(lol_g.Hash()); { Timer td_tmr; td_tmr.start(); ChordalSolve<chunks> cs(lol_g); int td = cs.Solve(best-1, vari, std::min(time - timer.get(), triang_tmr.get() + 0.01)); if (td < best) { best = td; Log::Write(3, "Treedepth: ", best); auto resu = cs.Get(best); resu = pp.Reconstruct(resu); 
resu = ColToPar(pp.org_graph, resu); int got = best::SetBest(resu, true); assert(got <= td); best = got; Log::Write(3, "Got ", got); } } } return best; } template<size_t chunks> int DoSolve2(const SparseGraph& graph, int best, const Preprocessor& pp) { assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS); FGraph<chunks> ppg(graph); Log::Write(3, "Solve2 n:", ppg.n(), " m:", ppg.m()); { MSSolve<chunks> mss(ppg); mss.incorrect_msenum_ = true; int ans = mss.Solve(best-1, true); if (ans < best) { best = ans; Log::Write(3, "Heur ans ", ans); auto sol = mss.Get(ans); sol = pp.Reconstruct(sol); sol = ColToPar(pp.org_graph, sol); int got = best::SetBest(sol, true); assert(got <= ans); Log::Write(3, "Ans valid ", got, " ", ans); best = got; Log::Write(3, "Re preprocess"); return best; } } MSSolve<chunks> mss2(ppg); int ans2 = mss2.Solve(best-1, false); if (ans2 < best) { best = ans2; Log::Write(3, "Exact ans ", ans2); auto sol = mss2.Get(ans2); sol = pp.Reconstruct(sol); sol = ColToPar(pp.org_graph, sol); int got = best::SetBest(sol, true); assert(got == ans2); Log::Write(3, "Ans valid ", ans2); } return -1; } template<size_t chunks> void DoSolve1(const SparseGraph& graph, int best, const Preprocessor& pp) { assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS); const FGraph<chunks> ppg(graph); Log::Write(3, "Dosolve1 n:", ppg.n(), " m:", ppg.m()); double pp_time = 40; if (ppg.n() <= 50) { pp_time = 1; } else if (ppg.n() <= 75) { pp_time = 5; } else if (ppg.n() <= 100) { pp_time = 20; } else if (ppg.n() <= 150) { pp_time = 30; } else if (ppg.n() <= 200) { pp_time = 40; } else if (ppg.n() <= 250) { pp_time = 50; } else { pp_time = 60; } best = HeurComp<chunks>(ppg, best, pp_time, pp); while (true) { Preprocessor pp2 = pp; SparseGraph pp_graph = pp2.TamakiRules(SparseGraph(ppg), best-1); int nbest = best; if (pp_graph.n() <= BITS) { nbest = DoSolve2<1>(pp_graph, best, pp2); } else if (pp_graph.n() <= 2*BITS) { nbest = DoSolve2<2>(pp_graph, best, 
pp2); } else if (pp_graph.n() <= 3*BITS) { nbest = DoSolve2<3>(pp_graph, best, pp2); } else if (pp_graph.n() <= 4*BITS) { nbest = DoSolve2<4>(pp_graph, best, pp2); } else if (pp_graph.n() <= 5*BITS) { nbest = DoSolve2<5>(pp_graph, best, pp2); } else if (pp_graph.n() <= 6*BITS) { nbest = DoSolve2<6>(pp_graph, best, pp2); } else if (pp_graph.n() <= 7*BITS) { nbest = DoSolve2<7>(pp_graph, best, pp2); } else if (pp_graph.n() <= 8*BITS) { nbest = DoSolve2<8>(pp_graph, best, pp2); } else if (pp_graph.n() <= 9*BITS) { nbest = DoSolve2<9>(pp_graph, best, pp2); } else if (pp_graph.n() <= 10*BITS) { nbest = DoSolve2<10>(pp_graph, best, pp2); } else { assert(0); } if (nbest == -1) return; assert(nbest >= 0 && nbest < best); best = nbest; Log::Write(3, "Re solve ", best); } } int main() { SetStackSize(8ll * 1024 * 1024); Log::SetLogLevel(3); Io io; SparseGraph graph = io.ReadGraph(std::cin); Log::Write(3, "Input n:", graph.n(), " m:", graph.m()); best::InitBest(graph); assert(graph.IsConnected()); int best = graph.n(); Preprocessor pp; SparseGraph pp_graph = pp.Preprocess(graph); if (pp_graph.n() <= BITS) { DoSolve1<1>(pp_graph, best, pp); } else if (pp_graph.n() <= 2*BITS) { DoSolve1<2>(pp_graph, best, pp); } else if (pp_graph.n() <= 3*BITS) { DoSolve1<3>(pp_graph, best, pp); } else if (pp_graph.n() <= 4*BITS) { DoSolve1<4>(pp_graph, best, pp); } else if (pp_graph.n() <= 5*BITS) { DoSolve1<5>(pp_graph, best, pp); } else if (pp_graph.n() <= 6*BITS) { DoSolve1<6>(pp_graph, best, pp); } else if (pp_graph.n() <= 7*BITS) { DoSolve1<7>(pp_graph, best, pp); } else if (pp_graph.n() <= 8*BITS) { DoSolve1<8>(pp_graph, best, pp); } else if (pp_graph.n() <= 9*BITS) { DoSolve1<9>(pp_graph, best, pp); } else if (pp_graph.n() <= 10*BITS) { DoSolve1<10>(pp_graph, best, pp); } else { assert(0); } best::PrintBest(); }
#include <iostream> #include <vector> #include <memory> #include <fstream> #include <iomanip> #include <set> #include <cassert> #include <random> #include <sys/resource.h> #include "graph.hpp" #include "io.hpp" #include "utils.hpp" #include "mcs.hpp" #include "staticset.hpp" #include "bitset.hpp" #include "chordalsolve.hpp" #include "best.hpp" #include "preprocessor.hpp" #include "ms_solve.hpp" using namespace sms; #define F first #define S second using std::vector; std::mt19937 gen(1337);
template<size_t chunks> int HeurComp(const FGraph<chunks>& graph, int best, double time, const Preprocessor& pp) { Timer timer; timer.start(); int it=0; std::set<uint64_t> gs; int vari = 0; int upd_cnt = 0; int last_add = 0; while (timer.get() < time) { double dupls = 0; if (it > 0) { dupls = (double)(it - (int)gs.size()) / (double)it; } if (dupls > 0.5 && upd_cnt == graph.n() && it - last_add > 10) { vari++; last_add = it; } it++; Timer triang_tmr; triang_tmr.start(); FGraph<chunks> lol_g = graph; mcs::LbTriang(lol_g, gen, vari, upd_cnt); triang_tmr.stop(); double est = (double)gs.size() * (double)it / ((double)it - (double)gs.size()); Log::Write(10, "min tri ", triang_tmr.get(), " ", best, " ", est, " ", lol_g.m(), " ", dupls, " ", vari, " ", upd_cnt); upd_cnt = upd_cnt * 2 + 1; upd_cnt = std::min(upd_cnt, graph.n()); if (gs.count(lol_g.Hash())) { Log::Write(10, "Same triang ", gs.size(), " ", it, " ", est); continue; } gs.insert(lol_g.Hash()); { Timer td_tmr; td_tmr.start(); ChordalSolve<chunks> cs(lol_g); int td = cs.Solve(best-1, vari, std::min(time - timer.get(), triang_tmr.get() + 0.01)); if (td < best) { best = td; Log::Write(3, "Treedepth: ", best); auto resu = cs.Get(best); resu = pp.Reconstruct(resu); resu = ColToPar(pp.org_graph, resu); int got = best::SetBest(resu, true); assert(got <= td); best = got; Log::Write(3, "Got ", got); } } } return best; } template<size_t chunks> int DoSolve2(const SparseGraph& graph, int best, const Preprocessor& pp) { assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS); FGraph<chunks> ppg(graph); Log::Write(3, "Solve2 n:", ppg.n(), " m:", ppg.m()); { MSSolve<chunks> mss(ppg); mss.incorrect_msenum_ = true; int ans = mss.Solve(best-1, true); if (ans < best) { best = ans; Log::Write(3, "Heur ans ", ans); auto sol = mss.Get(ans); sol = pp.Reconstruct(sol); sol = ColToPar(pp.org_graph, sol); int got = best::SetBest(sol, true); assert(got <= ans); Log::Write(3, "Ans valid ", got, " ", ans); best = got; 
Log::Write(3, "Re preprocess"); return best; } } MSSolve<chunks> mss2(ppg); int ans2 = mss2.Solve(best-1, false); if (ans2 < best) { best = ans2; Log::Write(3, "Exact ans ", ans2); auto sol = mss2.Get(ans2); sol = pp.Reconstruct(sol); sol = ColToPar(pp.org_graph, sol); int got = best::SetBest(sol, true); assert(got == ans2); Log::Write(3, "Ans valid ", ans2); } return -1; } template<size_t chunks> void DoSolve1(const SparseGraph& graph, int best, const Preprocessor& pp) { assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS); const FGraph<chunks> ppg(graph); Log::Write(3, "Dosolve1 n:", ppg.n(), " m:", ppg.m()); double pp_time = 40; if (ppg.n() <= 50) { pp_time = 1; } else if (ppg.n() <= 75) { pp_time = 5; } else if (ppg.n() <= 100) { pp_time = 20; } else if (ppg.n() <= 150) { pp_time = 30; } else if (ppg.n() <= 200) { pp_time = 40; } else if (ppg.n() <= 250) { pp_time = 50; } else { pp_time = 60; } best = HeurComp<chunks>(ppg, best, pp_time, pp); while (true) { Preprocessor pp2 = pp; SparseGraph pp_graph = pp2.TamakiRules(SparseGraph(ppg), best-1); int nbest = best; if (pp_graph.n() <= BITS) { nbest = DoSolve2<1>(pp_graph, best, pp2); } else if (pp_graph.n() <= 2*BITS) { nbest = DoSolve2<2>(pp_graph, best, pp2); } else if (pp_graph.n() <= 3*BITS) { nbest = DoSolve2<3>(pp_graph, best, pp2); } else if (pp_graph.n() <= 4*BITS) { nbest = DoSolve2<4>(pp_graph, best, pp2); } else if (pp_graph.n() <= 5*BITS) { nbest = DoSolve2<5>(pp_graph, best, pp2); } else if (pp_graph.n() <= 6*BITS) { nbest = DoSolve2<6>(pp_graph, best, pp2); } else if (pp_graph.n() <= 7*BITS) { nbest = DoSolve2<7>(pp_graph, best, pp2); } else if (pp_graph.n() <= 8*BITS) { nbest = DoSolve2<8>(pp_graph, best, pp2); } else if (pp_graph.n() <= 9*BITS) { nbest = DoSolve2<9>(pp_graph, best, pp2); } else if (pp_graph.n() <= 10*BITS) { nbest = DoSolve2<10>(pp_graph, best, pp2); } else { assert(0); } if (nbest == -1) return; assert(nbest >= 0 && nbest < best); best = nbest; Log::Write(3, "Re 
solve ", best); } } int main() { SetStackSize(8ll * 1024 * 1024); Log::SetLogLevel(3); Io io; SparseGraph graph = io.ReadGraph(std::cin); Log::Write(3, "Input n:", graph.n(), " m:", graph.m()); best::InitBest(graph); assert(graph.IsConnected()); int best = graph.n(); Preprocessor pp; SparseGraph pp_graph = pp.Preprocess(graph); if (pp_graph.n() <= BITS) { DoSolve1<1>(pp_graph, best, pp); } else if (pp_graph.n() <= 2*BITS) { DoSolve1<2>(pp_graph, best, pp); } else if (pp_graph.n() <= 3*BITS) { DoSolve1<3>(pp_graph, best, pp); } else if (pp_graph.n() <= 4*BITS) { DoSolve1<4>(pp_graph, best, pp); } else if (pp_graph.n() <= 5*BITS) { DoSolve1<5>(pp_graph, best, pp); } else if (pp_graph.n() <= 6*BITS) { DoSolve1<6>(pp_graph, best, pp); } else if (pp_graph.n() <= 7*BITS) { DoSolve1<7>(pp_graph, best, pp); } else if (pp_graph.n() <= 8*BITS) { DoSolve1<8>(pp_graph, best, pp); } else if (pp_graph.n() <= 9*BITS) { DoSolve1<9>(pp_graph, best, pp); } else if (pp_graph.n() <= 10*BITS) { DoSolve1<10>(pp_graph, best, pp); } else { assert(0); } best::PrintBest(); }
void SetStackSize(int64_t sz) { struct rlimit rl; assert(getrlimit(RLIMIT_STACK, &rl) == 0); Log::Write(3, "Cur stack size ", rl.rlim_cur); if (rl.rlim_cur < sz) { rl.rlim_cur = sz; Log::Write(3, "Setting stack size ", sz); assert(setrlimit(RLIMIT_STACK, &rl) == 0); } }
function_block-full_function
[ { "content": "class FBitsetSet {\n\n public:\n\n FBitsetSet() {}\n\n FBitsetSet(size_t capacity, double load_factor) {\n\n load_factor_ = load_factor;\n\n assert(chunks > 0);\n\n assert(load_factor_ >= 1.1);\n\n capacity_ = NextPrime((capacity + 1) * load_factor_);\n\n assert((size_t)(capacity_...
C++
video/src/window/message_box_common.cpp
vinders/pandora_toolbox
f32e301ebaa2b281a1ffc3d6d0c556091420520a
#if !defined(_WINDOWS) # include <mutex> # include <thread/spin_lock.h> # include "video/_private/_message_box_common.h" using namespace pandora::video; static pandora::thread::SpinLock __lastErrorLock; static pandora::memory::LightString __lastError; void pandora::video::__MessageBox::setLastError(const char* error) noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); __lastError = error; } pandora::memory::LightString pandora::video::__MessageBox::getLastError() noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); return __lastError; } # ifdef __APPLE__ # define __index(i,maxI) (maxI - i) # else # define __index(i,...) i # endif static const char* __getLabelOK() noexcept { return "OK"; } static const char* __getLabelCancel() noexcept { return "Cancel"; } static const char* __getLabelYes() noexcept { return "Yes"; } static const char* __getLabelNo() noexcept { return "No"; } static const char* __getLabelAbort() noexcept { return "Abort"; } static const char* __getLabelRetry() noexcept { return "Retry"; } static const char* __getLabelIgnore() noexcept { return "Ignore"; } static inline const char* __setActionLabel(const char* action, const char* placeholder) noexcept { return (action != nullptr) ? 
action : placeholder; } uint32_t pandora::video::__MessageBox::toActionLabels(MessageBox::ActionType actions, const char** outLabels) noexcept { switch (actions) { case MessageBox::ActionType::ok: outLabels[__index(0,0)] = __getLabelOK(); return 1; case MessageBox::ActionType::okCancel: outLabels[__index(0,1)] = __getLabelOK(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::retryCancel: outLabels[__index(0,1)] = __getLabelRetry(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::yesNo: outLabels[__index(0,1)] = __getLabelYes(); outLabels[__index(1,1)] = __getLabelNo(); return 2; case MessageBox::ActionType::yesNoCancel: outLabels[__index(0,2)] = __getLabelYes(); outLabels[__index(1,2)] = __getLabelNo(); outLabels[__index(2,2)] = __getLabelCancel(); return 3; case MessageBox::ActionType::abortRetryIgnore: outLabels[__index(0,2)] = __getLabelAbort(); outLabels[__index(1,2)] = __getLabelRetry(); outLabels[__index(2,2)] = __getLabelIgnore(); return 3; default: outLabels[__index(0,0)] = __getLabelOK(); return 1; } } uint32_t pandora::video::__MessageBox::toActionLabels(const char* action1, const char* action2, const char* action3, const char** outLabels) noexcept { if (action3) { outLabels[__index(0,2)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,2)] = __setActionLabel(action2, __getLabelNo()); outLabels[__index(2,2)] = action3; return 3; } else if (action2) { outLabels[__index(0,1)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,1)] = action2; return 2; } else { outLabels[__index(0,0)] = __setActionLabel(action1, __getLabelOK()); return 1; } } MessageBox::Result pandora::video::__MessageBox::toDialogResult(uint32_t index, uint32_t maxActions) noexcept { # ifdef __APPLE__ if (index == 0 || index > maxActions) return MessageBox::Result::failure; index = maxActions + 1 - index; # else if (index > maxActions) return MessageBox::Result::failure; # endif switch 
(index) { case 1: return MessageBox::Result::action1; case 2: return MessageBox::Result::action2; case 3: return MessageBox::Result::action3; default: return MessageBox::Result::failure; } } #endif
#if !defined(_WINDOWS) # include <mutex> # include <thread/spin_lock.h> # include "video/_private/_message_box_common.h" using namespace pandora::video; static pandora::thread::SpinLock __lastErrorLock; static pandora::memory::LightString __lastError; void pandora::video::__MessageBox::setLastError(const char* error) noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); __lastError = error; } pandora::memory::LightString pandora::video::__MessageBox::getLastError() noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); return __lastError; } # ifdef __APPLE__ # define __index(i,maxI) (maxI - i) # else # define __index(i,...) i # endif static const char* __getLabelOK() noexcept { return "OK"; } static const char* __getLabelCancel() noexcept { return "Cancel"; } static const char* __getLabelYes() noexcept { return "Yes"; } static const char* __getLabelNo() noexcept { return "No"; } static const char* __getLabelAbort() noexcept { return "Abort"; } static const char* __getLabelRetry() noexcept { return "Retry"; } static const char* __getLabelIgnore() noexcept { return "Ignore"; } static inline const char* __setActionLabel(const char* action, const char* placeholder) noexcept { return (action != nullptr) ? action : placeholder; } uint32_t pandora::video::__MessageBox::toActionLabels(MessageBox::ActionType actions, const char** outLabels) noexcept { switch (actions) { case MessageBox::ActionType::ok: outLabels[__index(0,0)] = __getLabelOK(); return 1; case MessageBox::ActionType::okCancel: outLabels[__index(0,1)] = __getLabelOK(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::retryCancel: outLabels[__index(0,1)] = __getLabelRetry(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::yesNo: outLabels[__index(0,1)] = __getLabelYes(); outLabels[__index(1,1)] = __getLabelNo();
uint32_t pandora::video::__MessageBox::toActionLabels(const char* action1, const char* action2, const char* action3, const char** outLabels) noexcept { if (action3) { outLabels[__index(0,2)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,2)] = __setActionLabel(action2, __getLabelNo()); outLabels[__index(2,2)] = action3; return 3; } else if (action2) { outLabels[__index(0,1)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,1)] = action2; return 2; } else { outLabels[__index(0,0)] = __setActionLabel(action1, __getLabelOK()); return 1; } } MessageBox::Result pandora::video::__MessageBox::toDialogResult(uint32_t index, uint32_t maxActions) noexcept { # ifdef __APPLE__ if (index == 0 || index > maxActions) return MessageBox::Result::failure; index = maxActions + 1 - index; # else if (index > maxActions) return MessageBox::Result::failure; # endif switch (index) { case 1: return MessageBox::Result::action1; case 2: return MessageBox::Result::action2; case 3: return MessageBox::Result::action3; default: return MessageBox::Result::failure; } } #endif
return 2; case MessageBox::ActionType::yesNoCancel: outLabels[__index(0,2)] = __getLabelYes(); outLabels[__index(1,2)] = __getLabelNo(); outLabels[__index(2,2)] = __getLabelCancel(); return 3; case MessageBox::ActionType::abortRetryIgnore: outLabels[__index(0,2)] = __getLabelAbort(); outLabels[__index(1,2)] = __getLabelRetry(); outLabels[__index(2,2)] = __getLabelIgnore(); return 3; default: outLabels[__index(0,0)] = __getLabelOK(); return 1; } }
function_block-function_prefix_line
[ { "content": " __x11_XSync Sync_ = nullptr;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 0, "score": 120923.27754309989 }, { "content": " constexpr inline bool isHyperThreadingCapable() const noexcept { return (logicalCores > ph...
C++
attitude_control/src/attitude_controller.cpp
archipela-go/attitude_control
7124366f405efd5eabeaf93f82dcd5ccc6ccf2e8
#include <ros/ros.h> #include <Eigen/Geometry> #include <mavros_msgs/AttitudeTarget.h> #include <mavros_msgs/ActuatorControl.h> #include <kingfisher_msgs/Drive.h> #include <sensor_msgs/Imu.h> #include <tf/transform_datatypes.h> #include <math.h> using namespace std; namespace attitude_controller { class Node { public: explicit Node(const ros::NodeHandle& pnh); void setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr& msg); void imu_cb(const sensor_msgs::Imu::ConstPtr& msg); private: ros::NodeHandle pnh_; ros::Subscriber setpoint_sub_; ros::Time last_setpoint_time_; mavros_msgs::AttitudeTarget last_setpoint_; double throttle_; Eigen::Quaterniond setpoint_q_; bool setpoint_set_; ros::Publisher drive_pub_; ros::Publisher actuator_pub_; ros::Subscriber imu_sub_; Eigen::Quaterniond imu_q_; double kp_; double kd_; double ki_; double last_error_; ros::Time last_error_time_; }; Node::Node(const ros::NodeHandle& pnh) : pnh_(pnh) { setpoint_sub_ = pnh_.subscribe("/att_control/attitude_target", 10, &Node::setpoint_cb, this); imu_sub_ = pnh_.subscribe("/mavros/imu/data", 10, &Node::imu_cb, this); drive_pub_ = pnh_.advertise<kingfisher_msgs::Drive>("/cmd_drive", 10); actuator_pub_ = pnh_.advertise<mavros_msgs::ActuatorControl>("/mavros/actuator_control", 10); setpoint_set_ = false; kp_ = pnh_.param("kp", kp_, 1.0); kd_ = pnh_.param("kd", kd_, 1.0); ki_ = pnh_.param("ki", ki_, 1.0); last_error_ = 0.0; last_error_time_ = ros::Time::now(); ROS_INFO("init attitude_controller"); } void Node::imu_cb(const sensor_msgs::Imu::ConstPtr &msg) { ROS_INFO("imu recieved"); imu_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); if (setpoint_set_){ tf::Quaternion imu_tf = tf::Quaternion(imu_q_.x(), imu_q_.y(), imu_q_.z(), imu_q_.w()); tf::Quaternion setpoint_tf = tf::Quaternion(setpoint_q_.x(), setpoint_q_.y(), setpoint_q_.z(), setpoint_q_.w()); double imu_roll, imu_pitch, imu_yaw; double setpoint_roll, setpoint_pitch, setpoint_yaw; 
tf::Matrix3x3(imu_tf).getRPY(imu_roll, imu_pitch, imu_yaw); tf::Matrix3x3(setpoint_tf).getRPY(setpoint_roll, setpoint_pitch, setpoint_yaw); ROS_INFO_STREAM("imu yaw: " << imu_yaw); ROS_INFO_STREAM("setpoint yaw: " << setpoint_yaw); double error = asin(sin(setpoint_yaw - imu_yaw)); double d_error = (error - last_error_)/(ros::Time::now() - last_error_time_).toSec(); double yaw_effort = 1.0 * error + 0.7 * d_error; last_error_ = error; last_error_time_ = ros::Time::now(); yaw_effort = std::min(1.0, yaw_effort); yaw_effort = std::max(-1.0, yaw_effort); double throttle = throttle_; throttle = std::min(1.0, throttle); throttle = std::max(-1.0, throttle); ROS_INFO_STREAM("yaw effort: " << yaw_effort); ROS_INFO_STREAM("throttle: " << throttle); double left = throttle - yaw_effort; double right = throttle + yaw_effort; left = std::min(1.0, left); left = std::max(-1.0, left); right = std::min(1.0, right); right = std::max(-1.0, right); auto drive_msg = boost::make_shared<kingfisher_msgs::Drive>(); drive_msg->left = left; drive_msg->right = right; drive_pub_.publish(drive_msg); float controls[] = {0, 1, 2, 3, 4, 5, 6, 7}; auto control_msg = boost::make_shared<mavros_msgs::ActuatorControl>(); control_msg->group_mix = 0; control_msg->header.stamp = ros::Time::now(); control_msg->controls[1] = -throttle; control_msg->controls[2] = -yaw_effort; if ((ros::Time::now() - last_setpoint_time_).toSec() < 0.5) actuator_pub_.publish(control_msg); } } void Node::setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr &msg) { setpoint_set_ = true; ROS_INFO("setpoint recieved"); last_setpoint_ = *msg; setpoint_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); throttle_ = msg->thrust; last_setpoint_time_ = ros::Time::now(); } } int main(int argc, char** argv) { ros::init(argc, argv, "attitude_controller"); ros::NodeHandle pnh("~"); attitude_controller::Node node(pnh); ros::spin(); return 0; }
#include <ros/ros.h> #include <Eigen/Geometry> #include <mavros_msgs/AttitudeTarget.h> #include <mavros_msgs/ActuatorControl.h> #include <kingfisher_msgs/Drive.h> #include <sensor_msgs/Imu.h> #include <tf/transform_datatypes.h> #include <math.h> using namespace std; namespace attitude_co
ix3x3(setpoint_tf).getRPY(setpoint_roll, setpoint_pitch, setpoint_yaw); ROS_INFO_STREAM("imu yaw: " << imu_yaw); ROS_INFO_STREAM("setpoint yaw: " << setpoint_yaw); double error = asin(sin(setpoint_yaw - imu_yaw)); double d_error = (error - last_error_)/(ros::Time::now() - last_error_time_).toSec(); double yaw_effort = 1.0 * error + 0.7 * d_error; last_error_ = error; last_error_time_ = ros::Time::now(); yaw_effort = std::min(1.0, yaw_effort); yaw_effort = std::max(-1.0, yaw_effort); double throttle = throttle_; throttle = std::min(1.0, throttle); throttle = std::max(-1.0, throttle); ROS_INFO_STREAM("yaw effort: " << yaw_effort); ROS_INFO_STREAM("throttle: " << throttle); double left = throttle - yaw_effort; double right = throttle + yaw_effort; left = std::min(1.0, left); left = std::max(-1.0, left); right = std::min(1.0, right); right = std::max(-1.0, right); auto drive_msg = boost::make_shared<kingfisher_msgs::Drive>(); drive_msg->left = left; drive_msg->right = right; drive_pub_.publish(drive_msg); float controls[] = {0, 1, 2, 3, 4, 5, 6, 7}; auto control_msg = boost::make_shared<mavros_msgs::ActuatorControl>(); control_msg->group_mix = 0; control_msg->header.stamp = ros::Time::now(); control_msg->controls[1] = -throttle; control_msg->controls[2] = -yaw_effort; if ((ros::Time::now() - last_setpoint_time_).toSec() < 0.5) actuator_pub_.publish(control_msg); } } void Node::setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr &msg) { setpoint_set_ = true; ROS_INFO("setpoint recieved"); last_setpoint_ = *msg; setpoint_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); throttle_ = msg->thrust; last_setpoint_time_ = ros::Time::now(); } } int main(int argc, char** argv) { ros::init(argc, argv, "attitude_controller"); ros::NodeHandle pnh("~"); attitude_controller::Node node(pnh); ros::spin(); return 0; }
ntroller { class Node { public: explicit Node(const ros::NodeHandle& pnh); void setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr& msg); void imu_cb(const sensor_msgs::Imu::ConstPtr& msg); private: ros::NodeHandle pnh_; ros::Subscriber setpoint_sub_; ros::Time last_setpoint_time_; mavros_msgs::AttitudeTarget last_setpoint_; double throttle_; Eigen::Quaterniond setpoint_q_; bool setpoint_set_; ros::Publisher drive_pub_; ros::Publisher actuator_pub_; ros::Subscriber imu_sub_; Eigen::Quaterniond imu_q_; double kp_; double kd_; double ki_; double last_error_; ros::Time last_error_time_; }; Node::Node(const ros::NodeHandle& pnh) : pnh_(pnh) { setpoint_sub_ = pnh_.subscribe("/att_control/attitude_target", 10, &Node::setpoint_cb, this); imu_sub_ = pnh_.subscribe("/mavros/imu/data", 10, &Node::imu_cb, this); drive_pub_ = pnh_.advertise<kingfisher_msgs::Drive>("/cmd_drive", 10); actuator_pub_ = pnh_.advertise<mavros_msgs::ActuatorControl>("/mavros/actuator_control", 10); setpoint_set_ = false; kp_ = pnh_.param("kp", kp_, 1.0); kd_ = pnh_.param("kd", kd_, 1.0); ki_ = pnh_.param("ki", ki_, 1.0); last_error_ = 0.0; last_error_time_ = ros::Time::now(); ROS_INFO("init attitude_controller"); } void Node::imu_cb(const sensor_msgs::Imu::ConstPtr &msg) { ROS_INFO("imu recieved"); imu_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); if (setpoint_set_){ tf::Quaternion imu_tf = tf::Quaternion(imu_q_.x(), imu_q_.y(), imu_q_.z(), imu_q_.w()); tf::Quaternion setpoint_tf = tf::Quaternion(setpoint_q_.x(), setpoint_q_.y(), setpoint_q_.z(), setpoint_q_.w()); double imu_roll, imu_pitch, imu_yaw; double setpoint_roll, setpoint_pitch, setpoint_yaw; tf::Matrix3x3(imu_tf).getRPY(imu_roll, imu_pitch, imu_yaw); tf::Matr
random
[ { "content": "# pixhawk_emulator\n\nCollection of packages that emulate various Pixhawk modules.\n\n\n\n## Installation Instructions\n\nRun the following command before trying to build the code.\n\n``` bash\n\nsudo ln -s /usr/include/eigen3/Eigen /usr/local/include/Eigen\n\n```\n", "file_path": "README.md",...
C++
source/D2Common/src/Drlg/DrlgDrlgGrid.cpp
eezstreet/D2MOO
28a30aecc69bf43c80e6757a94d533fb37634b68
#include <Drlg/D2DrlgDrlg.h> #include <Drlg/D2DrlgDrlgGrid.h> #include <Drlg/D2DrlgDrlgRoom.h> void(__fastcall* gpfFlagOperations[])(int*, int) = { DRGLGRID_OrFlag, DRGLGRID_AndFlag, DRGLGRID_XorFlag, DRGLGRID_OverwriteFlag, DRGLGRID_OverwriteFlagIfZero, DRGLGRID_AndNegatedFlag }; void __fastcall DRGLGRID_OverwriteFlag(int* pFlag, int nFlag) { *pFlag = nFlag; } void __fastcall DRGLGRID_OrFlag(int* pFlag, int nFlag) { *pFlag |= nFlag; } void __fastcall DRGLGRID_AndFlag(int* pFlag, int nFlag) { *pFlag &= nFlag; } void __fastcall DRGLGRID_XorFlag(int* pFlag, int nFlag) { *pFlag ^= nFlag; } void __fastcall DRGLGRID_OverwriteFlagIfZero(int* pFlag, int nFlag) { if (*pFlag == 0) { *pFlag = nFlag; } } void __fastcall DRGLGRID_AndNegatedFlag(int* pFlag, int nFlag) { *pFlag &= ~nFlag; } BOOL __fastcall DRLGGRID_IsGridValid(D2DrlgGridStrc* pDrlgGrid) { return pDrlgGrid && pDrlgGrid->pCellsFlags; } BOOL __fastcall DRLGGRID_IsPointInsideGridArea(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight; } void __fastcall DRLGGRID_AlterGridFlag(D2DrlgGridStrc* pDrlgGrid, int nX, int nY, int nFlag, FlagOperation eOperation) { gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]], nFlag); } int* __fastcall DRLGGRID_GetGridFlagsPointer(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return &pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } int __fastcall DRLGGRID_GetGridFlags(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } void __fastcall DRLGGRID_AlterAllGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { for (int nY = 0; nY < pDrlgGrid->nHeight; ++nY) { for (int nX = 0; nX < pDrlgGrid->nWidth; ++nX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); } } } void __fastcall DRLGGRID_AlterEdgeGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { 
int* pFlagsFirstRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[0]]; int* pFlagsLastRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[pDrlgGrid->nHeight - 1]]; for (int i = 0; i < pDrlgGrid->nWidth; ++i) { gpfFlagOperations[eOperation](&pFlagsFirstRow[i], nFlag); gpfFlagOperations[eOperation](&pFlagsLastRow[i], nFlag); } for (int i = 1; i < pDrlgGrid->nHeight; ++i) { const int nCurRowOffset = pDrlgGrid->pCellsRowOffsets[i]; gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + 0 ], nFlag); gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + pDrlgGrid->nWidth - 1], nFlag); } } void __fastcall sub_6FD75DE0(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag, FlagOperation eOperation, BOOL bAlterNextVertex) { D2DrlgVertexStrc* pNext = pDrlgVertex->pNext; if (pDrlgVertex->nPosX == pNext->nPosX && pDrlgVertex->nPosY == pNext->nPosY) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); return; } int nEndX = 0; int nEndY = 0; int nX = 0; int nY = 0; if (pDrlgVertex->nPosX == pNext->nPosX) { nX = pDrlgVertex->nPosX; if (pDrlgVertex->nPosY >= pNext->nPosY) { nY = pNext->nPosY + 1; nEndY = pDrlgVertex->nPosY; } else { nY = pDrlgVertex->nPosY + 1; nEndY = pNext->nPosY; } while (nY != nEndY) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nY; } } else { nY = pDrlgVertex->nPosY; if (pDrlgVertex->nPosX >= pNext->nPosX) { nEndX = pDrlgVertex->nPosX; nX = pNext->nPosX + 1; } else { nEndX = pNext->nPosX; nX = pDrlgVertex->nPosX + 1; } while (nX != nEndX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nX; } } DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); if (bAlterNextVertex) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->pNext->nPosX, pDrlgVertex->pNext->nPosY, nFlag, eOperation); } } void __fastcall DRLGGRID_SetVertexGridFlags(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, 
int nFlag) { int nX = 0; int nY = 0; D2DrlgVertexStrc* pVertex = pDrlgVertex; while (pVertex) { nX = pVertex->nPosX; nY = pVertex->nPosY; pVertex = pVertex->pNext; if (nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, FLAG_OPERATION_OR); } } } void __fastcall sub_6FD75F60(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, D2DrlgCoordStrc* pDrlgCoord, int nFlag, FlagOperation eOperation, int nSize) { int nX = pDrlgVertex->nPosX; int nY = pDrlgVertex->nPosY; int nXDiff = pDrlgVertex->pNext->nPosX - nX; int nYDiff = pDrlgVertex->pNext->nPosY - nY; int nXInc = 0; if (nXDiff >= 0) { nXInc = 1; } else { nXDiff = -nXDiff; nXInc = -1; } int nYInc = 0; if (nYDiff >= 0) { nYInc = 1; } else { nYDiff = -nYDiff; nYInc = -1; } int nIndexX = nX - pDrlgCoord->nPosX; int nIndexY = nY - pDrlgCoord->nPosY; int nCheck = 0; if (nXDiff >= nYDiff) { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } for (int j = 0; j < nXDiff; ++j) { nX += nXInc; nCheck += nYDiff; if (nCheck > nXDiff) { nY += nYInc; nCheck -= nXDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } } } else { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } for (int j = 0; j < nYDiff; ++j) { nY += nYInc; nCheck += nXDiff; if (nCheck > nYDiff) { nX += nXInc; nCheck -= nYDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, 
nFlag, eOperation); } } } } } void __fastcall DRLGGRID_InitializeGridCells(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsRowOffsets = (int*)D2_CALLOC_SERVER(pMemPool, sizeof(int) * nHeight * (nWidth + 1)); pDrlgGrid->pCellsFlags = &pDrlgGrid->pCellsRowOffsets[nHeight]; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; } void __fastcall DRLGGRID_FillGrid(D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight, int* pCellPos, int* pCellRowOffsets) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsFlags = pCellPos; memset(pCellPos, 0x00, sizeof(int) * nHeight * nWidth); pDrlgGrid->pCellsRowOffsets = pCellRowOffsets; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; } void __fastcall DRLGGRID_FillNewCellFlags(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = (int32_t*)D2_ALLOC_SERVER(pMemPool, sizeof(int) * pDrlgCoord->nHeight); int nOffset = 0; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nOffset; nOffset += nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_AssignCellsOffsetsAndFlags(D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth, int* pCellFlags) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = pCellFlags; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = 
i * nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_FreeGrid(void* pMemPool, D2DrlgGridStrc* pDrlgGrid) { if (pDrlgGrid->pCellsRowOffsets) { D2_FREE_SERVER(pMemPool, pDrlgGrid->pCellsRowOffsets); } pDrlgGrid->pCellsFlags = NULL; pDrlgGrid->pCellsRowOffsets = NULL; } void __fastcall DRLGGRID_ResetGrid(D2DrlgGridStrc* pDrlgGrid) { pDrlgGrid->pCellsFlags = 0; pDrlgGrid->pCellsRowOffsets = 0; }
#include <Drlg/D2DrlgDrlg.h> #include <Drlg/D2DrlgDrlgGrid.h> #include <Drlg/D2DrlgDrlgRoom.h> void(__fastcall* gpfFlagOperations[])(int*, int) = { DRGLGRID_OrFlag, DRGLGRID_AndFlag, DRGLGRID_XorFlag, DRGLGRID_OverwriteFlag, DRGLGRID_OverwriteFlagIfZero, DRGLGRID_AndNegatedFlag }; void __fastcall DRGLGRID_OverwriteFlag(int* pFlag, int nFlag) { *pFlag = nFlag; } void __fastcall DRGLGRID_OrFlag(int* pFlag, int nFlag) { *pFlag |= nFlag; } void __fastcall DRGLGRID_AndFlag(int* pFlag, int nFlag) { *pFlag &= nFlag; } void __fastcall DRGLGRID_XorFlag(int* pFlag, int nFlag) { *pFlag ^= nFlag; } void __fastcall DRGLGRID_OverwriteFlagIfZero(int* pFlag, int nFlag) { if (*pFlag == 0) { *pFlag = nFlag; } } void __fastcall DRGLGRID_AndNegatedFlag(int* pFlag, int nFlag) { *pFlag &= ~nFlag; } BOOL __fastcall DRLGGRID_IsGridValid(D2DrlgGridStrc* pDrlgGrid) { return pDrlgGrid && pDrlgGrid->pCellsFlags; } BOOL __fastcall DRLGGRID_IsPointInsideGridArea(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight; } void __fastcall DRLGGRID_AlterGridFlag(D2DrlgGridStrc* pDrlgGrid, int nX, int nY, int nFlag, FlagOperation eOperation) { gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]], nFlag); } int* __fastcall DRLGGRID_GetGridFlagsPointer(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return &pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } int __fastcall DRLGGRID_GetGridFlags(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } void __fastcall DRLGGRID_AlterAllGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { for (int nY = 0; nY < pDrlgGrid->nHeight; ++nY) { for (int nX = 0; nX < pDrlgGrid->nWidth; ++nX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); } } } void __fastcall DRLGGRID_AlterEdgeGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { 
int* pFlagsFirstRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[0]]; int* pFlagsLastRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[pDrlgGrid->nHeight - 1]]; for (int i = 0; i < pDrlgGrid->nWidth; ++i) { gpfFlagOperations[eOperation](&pFlagsFirstRow[i], nFlag); gpfFlagOperations[eOperation](&pFlagsLastRow[i], nFlag); } for (int i = 1; i < pDrlgGrid->nHeight; ++i) { const int nCurRowOffset = pDrlgGrid->pCellsRowOffsets[i]; gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + 0 ], nFlag); gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + pDrlgGrid->nWidth - 1], nFlag); } } void __fastcall sub_6FD75DE0(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag, FlagOperation eOperation, BOOL bAlterNextVertex) { D2DrlgVertexStrc* pNext = pDrlgVertex->pNext; if (pDrlgVertex->nPosX == pNext->nPosX && pDrlgVertex->nPosY == pNext->nPosY) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); return; } int nEndX = 0; int nEndY = 0; int nX = 0; int nY = 0; if (pDrlgVertex->nPosX == pNext->nPosX) { nX = pDrlgVertex->nPosX; if (pDrlgVertex->nPosY >= pNext->nPosY) { nY = pNext->nPosY + 1; nEndY = pDrlgVertex->nPosY; } else { nY = pDrlgVertex->nPosY + 1; nEndY = pNext->nPosY; } while (nY != nEndY) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nY; } } else { nY = pDrlgVertex->nPosY; if (pDrlgVertex->nPosX >= pNext->nPosX) { nEndX = pDrlgVertex->nPosX; nX = pNext->nPosX + 1; } else { nEndX = pNext->nPosX; nX = pDrlgVertex->nPosX + 1; } while (nX != nEndX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nX; } } DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); if (bAlterNextVertex) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->pNext->nPosX, pDrlgVertex->pNext->nPosY, nFlag, eOperation); } } void __fastcall DRLGGRID_SetVertexGridFlags(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, 
int nFlag) { int nX = 0; int nY = 0; D2DrlgVertexStrc* pVertex = pDrlgVertex; while (pVertex) { nX = pVertex->nPosX; nY = pVertex->nPosY; pVertex = pVertex->pNext; if (nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, FLAG_OPERATION_OR); } } } void __fastcall sub_6FD75F60(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, D2DrlgCoordStrc* pDrlgCoord, int nFlag, FlagOperation eOperation, int nSize) { int nX = pDrlgVertex->nPosX; int nY = pDrlgVertex->nPosY; int nXDiff = pDrlgVertex->pNext->nPosX - nX; int nYDiff = pDrlgVertex->pNext->nPosY - nY; int nXInc = 0; if (nXDiff >= 0) { nXInc = 1; } else { nXDiff = -nXDiff; nXInc = -1; } int nYInc = 0; if (nYDiff >= 0) { nYInc = 1; } else { nYDiff = -nYDiff; nYInc = -1; } int nIndexX = nX - pDrlgCoord->nPosX; int nIndexY = nY - pDrlgCoord->nPosY; int nCheck = 0; if (nXDiff >= nYDiff) { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } for (int j = 0; j < nXDiff; ++j) { nX += nXInc; nCheck += nYDiff; if (nCheck > nXDiff) { nY += nYInc; nCheck -= nXDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } } } else { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } for (int j = 0; j < nYDiff; ++j) { nY += nYInc; nCheck += nXDiff; if (nCheck > nYDiff) { nX += nXInc; nCheck -= nYDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, 
nFlag, eOperation); } } } } } void __fastcall DRLGGRID_InitializeGridCells(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsRowOffsets = (int*)D2_CALLOC_SERVER(pMemPool, sizeof(int) * nHeight * (nWidth + 1)); pDrlgGrid->pCellsFlags = &pDrlgGrid->pCellsRowOffsets[nHeight]; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; }
void __fastcall DRLGGRID_FillNewCellFlags(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = (int32_t*)D2_ALLOC_SERVER(pMemPool, sizeof(int) * pDrlgCoord->nHeight); int nOffset = 0; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nOffset; nOffset += nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_AssignCellsOffsetsAndFlags(D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth, int* pCellFlags) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = pCellFlags; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = i * nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_FreeGrid(void* pMemPool, D2DrlgGridStrc* pDrlgGrid) { if (pDrlgGrid->pCellsRowOffsets) { D2_FREE_SERVER(pMemPool, pDrlgGrid->pCellsRowOffsets); } pDrlgGrid->pCellsFlags = NULL; pDrlgGrid->pCellsRowOffsets = NULL; } void __fastcall DRLGGRID_ResetGrid(D2DrlgGridStrc* pDrlgGrid) { pDrlgGrid->pCellsFlags = 0; pDrlgGrid->pCellsRowOffsets = 0; }
void __fastcall DRLGGRID_FillGrid(D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight, int* pCellPos, int* pCellRowOffsets) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsFlags = pCellPos; memset(pCellPos, 0x00, sizeof(int) * nHeight * nWidth); pDrlgGrid->pCellsRowOffsets = pCellRowOffsets; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; }
function_block-full_function
[ { "content": "\tuint8_t nInt;\t\t\t\t\t\t\t//0x32\n", "file_path": "source/D2Common/include/D2DataTbls.h", "rank": 0, "score": 99807.58277220109 }, { "content": "\tBOOL bReturn;\t\t\t\t\t\t\t//0x18\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 1, "sco...
Rust
rust/tests/integration_test.rs
dandyvica/rbf
449a99a30854ad8ca35032dbecfeb3af465a77aa
use rbf::reader::{Reader, ReaderLazyness}; use rbf::record::{AsciiMode, UTF8Mode}; use rbf::vector_of; pub mod setup { use rbf::layout::Layout; use rbf::record::{AsciiMode, UTF8Mode}; pub fn layout_load_layout_ascii() -> Layout<AsciiMode> { Layout::<AsciiMode>::new("./tests/test.xml").unwrap() } pub fn layout_load_layout_utf8() -> Layout<UTF8Mode> { Layout::<UTF8Mode>::new("./tests/test.xml").unwrap() } } #[test] fn record_filter() { let layout = setup::layout_load_layout_ascii(); let r_ll = layout.get("LL").unwrap(); assert_eq!(r_ll.calculated_length, 353); let types = ["A".to_string(), "N".to_string()]; for f in r_ll { assert!(f.len() <= 26); assert!(types.contains(&f.ftype.id)); assert!(f.name.len() <= 4); } let fields = r_ll.filter(|f| f.length >= 25); assert_eq!(fields.unwrap().len(), 2); let r_dup = layout.get("DP").unwrap(); let f_dup = r_dup.filter(|f| f.name == "F5").unwrap(); assert_eq!(f_dup.len(), 4); for (i, f) in f_dup.iter().enumerate() { assert_eq!(f.multiplicity, i); } } #[test] fn record_remove() { let mut layout = setup::layout_load_layout_ascii(); { let r_ll = layout.get_mut("LL").unwrap(); r_ll.remove(|f| f.index == 0); assert_eq!(r_ll[0].name, "W1"); assert_eq!(r_ll.count(), 26); r_ll.remove(|f| f.name.starts_with("W1")); assert_eq!(r_ll.count(), 15); r_ll.remove(|f| f.name != "W2"); assert_eq!(r_ll.count(), 1); } { let r_nb = layout.get_mut("NB").unwrap(); r_nb.remove(|f| !["N1", "N2"].contains(&&*f.name)); assert_eq!(r_nb.count(), 2); } } #[test] fn record_iterator() { let mut layout = setup::layout_load_layout_ascii(); { let r_ll = layout.get("LL").unwrap(); for f in r_ll { assert!(f.length < 27); } } { { let r_ll = layout.get_mut("LL").unwrap(); for f in r_ll { f.length = 10; } } let r_ll = layout.get("LL").unwrap(); let count = r_ll.count(); let sum: usize = vector_of!(r_ll, length).iter().sum(); assert_eq!(sum, 10 * count); } } #[test] fn field_multiplicity() { let layout = setup::layout_load_layout_ascii(); let r_dp = layout .get("DP") 
.unwrap() .filter(|f| f.name == "F5") .unwrap(); assert_eq!(r_dp.len(), 4); for (i, f) in r_dp.iter().enumerate() { assert_eq!(f.multiplicity, i); } } #[should_panic] #[allow(unused_variables)] #[test] fn reader_stringent() { let layout = setup::layout_load_layout_ascii(); let mut reader = Reader::<AsciiMode>::new("./tests/test_ascii.data", layout); reader.set_lazyness(ReaderLazyness::Strict); while let Some(rec) = reader.next() {} } #[test] fn reader_lazy() { let layout = setup::layout_load_layout_utf8(); let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout); let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; let digits = "123456789"; let greek = "αβγδεζηθικλμνξοπρστυφχψω"; while let Some((_stats, rec)) = reader.next() { match rec.name.as_ref() { "LL" => { assert_eq!(rec.get_value("ID"), "LL"); for (i, l) in letters.chars().enumerate() { let fname = format!("W{}", i + 1); assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1)); } } "NB" => { assert_eq!(rec.get_value("ID"), "NB"); for (i, n) in digits.chars().enumerate() { let fname = format!("N{}", i + 1); assert_eq!(rec.get_value(&fname), n.to_string().repeat(i + 1)); } } "GL" => { assert_eq!(rec.get_value("ID"), "GL"); for (i, l) in greek.chars().enumerate() { let fname = format!("G{}", i + 1); assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1)); } } "DP" => { assert_eq!(rec.get_value("ID"), "DP"); assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); } _ => panic!( "record name <{}> not found in file <{}>", rec.name, "./tests/test.data" ), } } }
use rbf::reader::{Reader, ReaderLazyness}; use rbf::record::{AsciiMode, UTF8Mode}; use rbf::vector_of; pub mod setup { use rbf::layout::Layout; use rbf::record::{AsciiMode, UTF8Mode}; pub fn layout_load_layout_ascii() -> Layout<AsciiMode> { Layout::<AsciiMode>::new("./tests/test.xml").unwrap() } pub fn layout_load_layout_utf8() -> Layout<UTF8Mode> { Layout::<UTF8Mode>::new("./tests/test.xml").unwrap() } } #[test] fn record_filter() { let layout = setup::layout_load_layout_ascii(); let r_ll = layout.get("LL").unwrap(); assert_eq!(r_ll.calculated_length, 353); let types = ["A".to_string(), "N".to_string()]; for f in r_ll { assert!(f.len() <= 26); assert!(types.contains(&f.ftype.id)); assert!(f.name.len() <= 4); } let fields = r_ll.filter(|f| f.length >= 25); assert_eq!(fields.unwrap().len(), 2); let r_dup = layout.get("DP").unwrap(); let f_dup = r_dup.filter(|f| f.name == "F5").unwrap(); assert_eq!(f_dup.len(), 4); for (i, f) in f_dup.iter().enumerate() { assert_eq!(f.multiplicity, i); } } #[test] fn record_remove() { let mut layout = setup::layout_load_layout_ascii(); { let r_ll = layout.get_mut("LL").unwrap(); r_ll.remove(|f| f.index == 0); assert_eq!(r_ll[0].name, "W1"); assert_eq!(r_ll.count(), 26); r_ll.remove(|f| f.name.starts_with("W1")); assert_eq!(r_ll.count(), 15); r_ll.remove(|f| f.name != "W2"); assert_eq!(r_ll.count(), 1); } { let r_nb = layout.get_mut("NB").unwrap(); r_nb.remove(|f| !["N1", "N2"].contains(&&*f.name)); assert_eq!(r_nb.count(), 2); } } #[test] fn record_iterator() { let mut layout = setup::layout_load_layout_ascii(); { let r_ll = layout.get("LL").unwrap(); for f in r_ll { assert!(f.length < 27); } } { { let r_ll = layout.get_mut("LL").unwrap(); for f in r_ll { f.length = 10; } } let r_ll = layout.get("LL").unwrap(); let count = r_ll.count(); let sum: usize = vector_of!(r_ll, length).iter().sum(); assert_eq!(sum, 10 * count); } } #[test] fn field_multiplicity() { let layout = setup::layout_load_layout_ascii(); let r_dp = layout .get("DP") 
.unwrap() .filter(|f| f.name == "F5") .unwrap(); assert_eq!(r_dp.len(), 4); for (i, f) in r_dp.iter().enumerate() { assert_eq!(f.multiplicity, i); } } #[should_panic] #[allow(unused_variables)] #[test] fn reader_stringent() { let layout = setup::layout_load_layout_ascii(); let mut reader = Reader::<AsciiMode>::new("./tests/test_ascii.data", layout); reader.set_lazyness(ReaderLazyness::Strict); while let Some(rec) = reader.next() {} } #[test] fn reader_lazy() { let layout = setup::layout_load_layout_utf8(); let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout); let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; let digits = "123456789"; let greek = "αβγδεζηθικλμνξοπρστυφχψω"; while let Some((_stats, rec)) = reader.next() { match rec.name.as_ref() { "LL" => { assert_eq!(rec.get_value("ID"), "LL"); for (i, l) in letters.chars().enumerate() { let fname = format!("W{}", i + 1); assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1)); } } "NB" => { assert_eq!(rec.get_value("ID"), "NB"); for (i, n) in digits.chars().enumerate() { let fname = format!("N{}", i + 1); assert_eq!(rec.get_value(&fname), n.to_string().repeat(i + 1)); } } "GL" => { assert_eq!(rec.get_value("ID"), "GL"); for (i, l) in greek.chars().enumerate() { let fname = format!("G{}", i + 1); assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1)); } } "DP" => { assert_eq!(rec.get_value("ID"), "DP"); assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); } _ =>
panic!( "record name <{}> not found in file <{}>", rec.name, "./tests/test.data" ), } } }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let mut nb_lines: usize = 0;\n\n let mut nb_records: HashMap<String, usize> = HashMap::new();\n\n\n\n // get arguments\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() == 1 {\n\n println!(\"Usage: {} layout_file data_file\", args[0]);\n...
Rust
query/src/pipelines/new/processors/transforms/hash_join/chaining_hash_table.rs
lichuang/databend
e6a76e22e4d30683a152bdd29b5d5b60c96a9c8f
use std::sync::Arc; use std::sync::Mutex; use std::sync::RwLock; use common_datablocks::DataBlock; use common_datavalues::Column; use common_datavalues::ColumnRef; use common_datavalues::ConstColumn; use common_datavalues::DataSchemaRef; use common_exception::Result; use common_planners::Expression; use crate::common::ExpressionEvaluator; use crate::pipelines::new::processors::transforms::hash_join::hash::HashUtil; use crate::pipelines::new::processors::transforms::hash_join::hash::HashVector; use crate::pipelines::new::processors::transforms::hash_join::row::compare_and_combine; use crate::pipelines::new::processors::transforms::hash_join::row::RowPtr; use crate::pipelines::new::processors::transforms::hash_join::row::RowSpace; use crate::pipelines::new::processors::HashJoinState; use crate::sessions::QueryContext; pub struct ChainingHashTable { ref_count: Mutex<usize>, is_finished: Mutex<bool>, build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, ctx: Arc<QueryContext>, hash_table: RwLock<Vec<Vec<RowPtr>>>, row_space: RowSpace, } impl ChainingHashTable { pub fn try_create( build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, build_data_schema: DataSchemaRef, _probe_data_schema: DataSchemaRef, ctx: Arc<QueryContext>, ) -> Result<Self> { Ok(Self { row_space: RowSpace::new(build_data_schema), ref_count: Mutex::new(0), is_finished: Mutex::new(false), build_expressions, probe_expressions, ctx, hash_table: RwLock::new(vec![]), }) } fn hash(&self, columns: &[ColumnRef], row_count: usize) -> Result<HashVector> { let hash_values = columns .iter() .map(HashUtil::compute_hash) .collect::<Result<Vec<HashVector>>>()?; Ok(HashUtil::combine_hashes(&hash_values, row_count)) } fn apply_bitmask(hash_vector: &HashVector, mask: u64) -> HashVector { let mut result = HashVector::with_capacity(hash_vector.len()); for hash in hash_vector { result.push(*hash & mask); } result } fn compute_hash_table_size(rows_count: usize) -> usize { let mut power = 
1; while power < rows_count { power <<= 1; } power } } impl HashJoinState for ChainingHashTable { fn build(&self, input: DataBlock) -> Result<()> { let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, &input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_values = self.hash(&build_keys, input.num_rows())?; self.row_space.push(input, hash_values)?; Ok(()) } fn probe(&self, input: &DataBlock) -> Result<Vec<DataBlock>> { let probe_keys = self .probe_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_table = self.hash_table.read().unwrap(); let hash_values = self.hash(&probe_keys, input.num_rows())?; let hash_values = ChainingHashTable::apply_bitmask(&hash_values, (hash_table.len() - 1) as u64); let mut results: Vec<DataBlock> = vec![]; for (i, hash_value) in hash_values.iter().enumerate().take(input.num_rows()) { let probe_result_ptrs = hash_table[*hash_value as usize].as_slice(); if probe_result_ptrs.is_empty() { continue; } let result_block = self.row_space.gather(probe_result_ptrs)?; let probe_block = DataBlock::block_take_by_indices(input, &[i as u32])?; let mut replicated_probe_block = DataBlock::empty(); for (i, col) in probe_block.columns().iter().enumerate() { let replicated_col = ConstColumn::new(col.clone(), result_block.num_rows()).arc(); replicated_probe_block = replicated_probe_block .add_column(replicated_col, probe_block.schema().field(i).clone())?; } let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval( self.ctx.try_get_function_context()?, expr, &result_block, ) }) .collect::<Result<Vec<ColumnRef>>>()?; let current_probe_keys: Vec<ColumnRef> = probe_keys .iter() .map(|col| { let column = col.slice(i, 1); ConstColumn::new(column, result_block.num_rows()).arc() }) .collect(); let output = compare_and_combine( 
replicated_probe_block, result_block, &build_keys, &current_probe_keys, self.ctx.clone(), )?; results.push(output); } Ok(results) } fn attach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count += 1; Ok(()) } fn detach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count -= 1; if *count == 0 { self.finish()?; let mut is_finished = self.is_finished.lock().unwrap(); *is_finished = true; Ok(()) } else { Ok(()) } } fn is_finished(&self) -> Result<bool> { Ok(*self.is_finished.lock().unwrap()) } fn finish(&self) -> Result<()> { let mut hash_table = self.hash_table.write().unwrap(); hash_table.resize( ChainingHashTable::compute_hash_table_size(self.row_space.num_rows()), Default::default(), ); { let chunks = self.row_space.chunks.write().unwrap(); for chunk_index in 0..chunks.len() { let chunk = &chunks[chunk_index]; let hash_values = ChainingHashTable::apply_bitmask( &chunk.hash_values, (hash_table.len() - 1) as u64, ); for (row_index, hash_value) in hash_values.iter().enumerate().take(chunk.num_rows()) { let ptr = RowPtr { chunk_index: chunk_index as u32, row_index: row_index as u32, }; hash_table[*hash_value as usize].push(ptr); } } } Ok(()) } }
use std::sync::Arc; use std::sync::Mutex; use std::sync::RwLock; use common_datablocks::DataBlock; use common_datavalues::Column; use common_datavalues::ColumnRef; use common_datavalues::ConstColumn; use common_datavalues::DataSchemaRef; use common_exception::Result; use common_planners::Expression; use crate::common::ExpressionEvaluator; use crate::pipelines::new::processors::transforms::hash_join::hash::HashUtil; use crate::pipelines::new::processors::transforms::hash_join::hash::HashVector; use crate::pipelines::new::processors::transforms::hash_join::row::compare_and_combine; use crate::pipelines::new::processors::transforms::hash_join::row::RowPtr; use crate::pipelines::new::processors::transforms::hash_join::row::RowSpace; use crate::pipelines::new::processors::HashJoinState; use crate::sessions::QueryContext; pub struct ChainingHashTable { ref_count: Mutex<usize>, is_finished: Mutex<bool>, build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, ctx: Arc<QueryContext>, hash_table: RwLock<Vec<Vec<RowPtr>>>, row_space: RowSpace, } impl ChainingHashTable { pub fn try_create( build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, build_data_schema: DataSchemaRef, _probe_data_schema: DataSchemaRef, ctx: Arc<QueryContext>, ) -> Result<Self> { Ok(Self { row_space: RowSpace::new(build_data_schema), ref_count: Mutex::new(0), is_finished: Mutex::new(false), build_expressions, probe_expressions, ctx, hash_table: RwLock::new(vec![]), }) } fn hash(&self, columns: &[ColumnRef
k(HashUtil::combine_hashes(&hash_values, row_count)) } fn apply_bitmask(hash_vector: &HashVector, mask: u64) -> HashVector { let mut result = HashVector::with_capacity(hash_vector.len()); for hash in hash_vector { result.push(*hash & mask); } result } fn compute_hash_table_size(rows_count: usize) -> usize { let mut power = 1; while power < rows_count { power <<= 1; } power } } impl HashJoinState for ChainingHashTable { fn build(&self, input: DataBlock) -> Result<()> { let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, &input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_values = self.hash(&build_keys, input.num_rows())?; self.row_space.push(input, hash_values)?; Ok(()) } fn probe(&self, input: &DataBlock) -> Result<Vec<DataBlock>> { let probe_keys = self .probe_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_table = self.hash_table.read().unwrap(); let hash_values = self.hash(&probe_keys, input.num_rows())?; let hash_values = ChainingHashTable::apply_bitmask(&hash_values, (hash_table.len() - 1) as u64); let mut results: Vec<DataBlock> = vec![]; for (i, hash_value) in hash_values.iter().enumerate().take(input.num_rows()) { let probe_result_ptrs = hash_table[*hash_value as usize].as_slice(); if probe_result_ptrs.is_empty() { continue; } let result_block = self.row_space.gather(probe_result_ptrs)?; let probe_block = DataBlock::block_take_by_indices(input, &[i as u32])?; let mut replicated_probe_block = DataBlock::empty(); for (i, col) in probe_block.columns().iter().enumerate() { let replicated_col = ConstColumn::new(col.clone(), result_block.num_rows()).arc(); replicated_probe_block = replicated_probe_block .add_column(replicated_col, probe_block.schema().field(i).clone())?; } let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval( 
self.ctx.try_get_function_context()?, expr, &result_block, ) }) .collect::<Result<Vec<ColumnRef>>>()?; let current_probe_keys: Vec<ColumnRef> = probe_keys .iter() .map(|col| { let column = col.slice(i, 1); ConstColumn::new(column, result_block.num_rows()).arc() }) .collect(); let output = compare_and_combine( replicated_probe_block, result_block, &build_keys, &current_probe_keys, self.ctx.clone(), )?; results.push(output); } Ok(results) } fn attach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count += 1; Ok(()) } fn detach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count -= 1; if *count == 0 { self.finish()?; let mut is_finished = self.is_finished.lock().unwrap(); *is_finished = true; Ok(()) } else { Ok(()) } } fn is_finished(&self) -> Result<bool> { Ok(*self.is_finished.lock().unwrap()) } fn finish(&self) -> Result<()> { let mut hash_table = self.hash_table.write().unwrap(); hash_table.resize( ChainingHashTable::compute_hash_table_size(self.row_space.num_rows()), Default::default(), ); { let chunks = self.row_space.chunks.write().unwrap(); for chunk_index in 0..chunks.len() { let chunk = &chunks[chunk_index]; let hash_values = ChainingHashTable::apply_bitmask( &chunk.hash_values, (hash_table.len() - 1) as u64, ); for (row_index, hash_value) in hash_values.iter().enumerate().take(chunk.num_rows()) { let ptr = RowPtr { chunk_index: chunk_index as u32, row_index: row_index as u32, }; hash_table[*hash_value as usize].push(ptr); } } } Ok(()) } }
], row_count: usize) -> Result<HashVector> { let hash_values = columns .iter() .map(HashUtil::compute_hash) .collect::<Result<Vec<HashVector>>>()?; O
function_block-random_span
[ { "content": "// No logical type is specified\n\n// Use Default options\n\npub fn default_column_cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {\n\n let func_ctx = FunctionContext::default();\n\n cast_with_type(\n\n column,\n\n &column.data_type(),\n\n data_t...
Rust
src/crawler.rs
Ayush1325/webcrawler-woc
086941224ead6f814bd02442021db8f742e5c874
/*! Module Containing the Crawler functions. */ use crate::extractors::links; use futures::{stream, StreamExt}; use links::Link; use reqwest::Url; use std::time::Duration; use std::{collections::HashSet, sync::Arc}; use tokio::sync::mpsc; fn init_reqwest_client(timeout: u64) -> Result<reqwest::Client, String> { let client_builder = reqwest::ClientBuilder::new().timeout(Duration::new(timeout, 0)); match client_builder.build() { Ok(x) => Ok(x), Err(_) => Err("Could not build http client".to_string()), } } fn init_dns_resolver() -> Result<trust_dns_resolver::TokioAsyncResolver, String> { match trust_dns_resolver::TokioAsyncResolver::tokio_from_system_conf() { Ok(x) => Ok(x), Err(_) => Err("Could not build dns resolver".to_string()), } } pub async fn crawl_with_depth( origin_url: Link, crawl_depth: usize, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url); for _ in 0..crawl_depth { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { 
crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } stream::iter(to_crawl) .map(|x| links::Link::new_from_url(&x)) .for_each_concurrent(task_limit, |x| async { let _ = tx_output.send(x).await; }) .await; Ok(()) } pub async fn crawl_no_depth( origin_url: Link, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url.clone()); let mut first_crawl = true; while !to_crawl.is_empty() { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); if first_crawl { let tx_clone = tx_cralwer.clone(); let client_clone = client.clone(); let url = origin_url.url.clone(); tokio::spawn(async move { crawl_sitemaps(url, tx_clone, task_limit, client_clone).await; }); first_crawl = false; } to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, 
word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } Ok(()) } async fn crawl_page( url: Url, client: reqwest::Client, tx: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, limit: usize, resolver: trust_dns_resolver::TokioAsyncResolver, word_list: Arc<HashSet<String>>, ) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => { link.crawled = true; let _ = tx.send(link.clone()).await; return; } }; link.update_from_response(&resp); if let Some(host) = &link.host { let host = host.to_string(); let ipv4 = links::resolve_ipv4(&resolver, &host).await; let ipv6 = links::resolve_ipv6(&resolver, &host).await; link.update_dns(ipv4, ipv6); }; let is_html = link.check_mime_from_list(&[mime::TEXT_HTML, mime::TEXT_HTML_UTF_8]); if is_html { let html = match resp.text().await { Ok(x) => x, Err(_) => { return; } }; if links::check_words_html(&html, word_list) { link.contains_words = true; let _ = tx_selenium.send(link.url.to_string()).await; } let links = links::get_links_from_html(&html, url.as_str()); let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async move { let _ = tx_ref.send(x).await; }) .await; } if let Err(_) = tx.send(link).await { return; } } async fn crawl_sitemaps(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut robottxt_url = url.clone(); robottxt_url.set_path("robots.txt"); 
let robottxt = match get_page(robottxt_url.as_str(), &client).await { Ok(x) => match x.text().await { Ok(x) => x, Err(_) => return, }, Err(_) => return, }; let url_str = url.to_string(); robottxt .lines() .filter(|x| x.contains("Sitemap")) .filter_map(|x| x[9..].split_whitespace().next()) .map(|x| x.trim()) .filter_map(|x| links::normalize_url(x, &url_str)) .for_each(|x| { let tx_clone = tx.clone(); let client_clone = client.clone(); let limit_clone = limit.clone(); tokio::spawn(async move { crawl_sitemap(x.url, tx_clone, limit_clone, client_clone).await; }); }); } async fn crawl_sitemap(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => return, }; link.update_from_response(&resp); let text = match resp.text().await { Ok(x) => x, Err(_) => return, }; let links = match link.content_type { Some(x) => match (x.type_(), x.subtype()) { (mime::TEXT, mime::PLAIN) => links::get_links_from_text(&text, url.as_str()), _ => return, }, None => return, }; let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async { let _ = tx_ref.send(x).await; }) .await; } async fn get_page( url: &str, client: &reqwest::Client, ) -> Result<reqwest::Response, reqwest::Error> { let resp = client.get(url).send().await?; resp.error_for_status() }
/*! Module Containing the Crawler functions. */ use crate::extractors::links; use futures::{stream, StreamExt}; use links::Link; use reqwest::Url; use std::time::Duration; use std::{collections::HashSet, sync::Arc}; use tokio::sync::mpsc; fn init_reqwest_client(timeout: u64) -> Result<reqwest::Client, String> { let client_builder = reqwest::ClientBuilder::new().timeout(Duration::new(timeout, 0)); match client_builder.build() { Ok(x) => Ok(x), Err(_) => Err("Could not build http client".to_string()), } } fn init_dns_resolver() -> Result<trust_dns_resolver::TokioAsyncResolver, String> { match trust_dns_resolver::TokioAsyncResolver::tokio_from_system_conf() { Ok(x) => Ok(x), Err(_) => Err("Could not build dns resolver".to_string()), } } pub async fn crawl_with_depth( origin_url: Link, crawl_depth: usize, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url); for _ in 0..crawl_depth { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { 
crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } stream::iter(to_crawl) .map(|x| links::Link::new_from_url(&x)) .for_each_concurrent(task_limit, |x| async { let _ = tx_output.send(x).await; }) .await; Ok(()) }
async fn crawl_page( url: Url, client: reqwest::Client, tx: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, limit: usize, resolver: trust_dns_resolver::TokioAsyncResolver, word_list: Arc<HashSet<String>>, ) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => { link.crawled = true; let _ = tx.send(link.clone()).await; return; } }; link.update_from_response(&resp); if let Some(host) = &link.host { let host = host.to_string(); let ipv4 = links::resolve_ipv4(&resolver, &host).await; let ipv6 = links::resolve_ipv6(&resolver, &host).await; link.update_dns(ipv4, ipv6); }; let is_html = link.check_mime_from_list(&[mime::TEXT_HTML, mime::TEXT_HTML_UTF_8]); if is_html { let html = match resp.text().await { Ok(x) => x, Err(_) => { return; } }; if links::check_words_html(&html, word_list) { link.contains_words = true; let _ = tx_selenium.send(link.url.to_string()).await; } let links = links::get_links_from_html(&html, url.as_str()); let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async move { let _ = tx_ref.send(x).await; }) .await; } if let Err(_) = tx.send(link).await { return; } } async fn crawl_sitemaps(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut robottxt_url = url.clone(); robottxt_url.set_path("robots.txt"); let robottxt = match get_page(robottxt_url.as_str(), &client).await { Ok(x) => match x.text().await { Ok(x) => x, Err(_) => return, }, Err(_) => return, }; let url_str = url.to_string(); robottxt .lines() .filter(|x| x.contains("Sitemap")) .filter_map(|x| x[9..].split_whitespace().next()) .map(|x| x.trim()) .filter_map(|x| links::normalize_url(x, &url_str)) .for_each(|x| { let tx_clone = tx.clone(); let client_clone = client.clone(); let limit_clone = limit.clone(); tokio::spawn(async move { crawl_sitemap(x.url, tx_clone, limit_clone, client_clone).await; }); }); } async fn crawl_sitemap(url: Url, tx: mpsc::Sender<Link>, 
limit: usize, client: reqwest::Client) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => return, }; link.update_from_response(&resp); let text = match resp.text().await { Ok(x) => x, Err(_) => return, }; let links = match link.content_type { Some(x) => match (x.type_(), x.subtype()) { (mime::TEXT, mime::PLAIN) => links::get_links_from_text(&text, url.as_str()), _ => return, }, None => return, }; let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async { let _ = tx_ref.send(x).await; }) .await; } async fn get_page( url: &str, client: &reqwest::Client, ) -> Result<reqwest::Response, reqwest::Error> { let resp = client.get(url).send().await?; resp.error_for_status() }
pub async fn crawl_no_depth( origin_url: Link, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url.clone()); let mut first_crawl = true; while !to_crawl.is_empty() { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); if first_crawl { let tx_clone = tx_cralwer.clone(); let client_clone = client.clone(); let url = origin_url.url.clone(); tokio::spawn(async move { crawl_sitemaps(url, tx_clone, task_limit, client_clone).await; }); first_crawl = false; } to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } Ok(()) }
function_block-full_function
[ { "content": "/// Helper function to parse url in a page.\n\n/// Converts relative urls to full urls.\n\n/// Also removes javascript urls and other false urls.\n\npub fn normalize_url(url: &str, base_url: &str) -> Option<Link> {\n\n if url.starts_with(\"#\") {\n\n // Checks for internal links.\n\n ...
Rust
rust/src/storage/gcs/client.rs
Smurphy000/delta-rs
9196ff49bc147b36339b4901624229473092b024
use super::{util, GCSClientError, GCSObject}; use futures::Stream; use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; use std::sync::Arc; use tame_gcs::objects::{self, Object}; use tame_oauth::gcp as oauth; use log::debug; pub struct GCSStorageBackend { pub client: reqwest::Client, pub cred_path: PathBuf, pub auth: Arc<oauth::ServiceAccountAccess>, } impl std::fmt::Debug for GCSStorageBackend { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { f.debug_struct("GCSStorageBackend {...}").finish() } } impl TryFrom<PathBuf> for GCSStorageBackend { type Error = GCSClientError; fn try_from(cred_path: PathBuf) -> Result<Self, Self::Error> { let client = reqwest::Client::builder().build()?; let cred_contents = std::fs::read_to_string(&cred_path)?; let svc_account_info = oauth::ServiceAccountInfo::deserialize(cred_contents)?; let svc_account_access = oauth::ServiceAccountAccess::new(svc_account_info)?; Ok(Self { client, cred_path, auth: std::sync::Arc::new(svc_account_access), }) } } impl GCSStorageBackend { pub async fn metadata<'a>( &self, path: GCSObject<'a>, ) -> Result<objects::Metadata, GCSClientError> { debug!("creating request"); let get_meta_request = Object::get(&path, None)?; debug!("executing request"); let response = util::execute::<_, objects::GetObjectResponse>(self, get_meta_request).await?; debug!("returning meta"); Ok(response.metadata) } pub async fn download<'a>(&self, path: GCSObject<'a>) -> Result<bytes::Bytes, GCSClientError> { let download_request = Object::download(&path, None)?; let response = util::execute::<_, objects::DownloadObjectResponse>(self, download_request) .await .map_err(util::check_object_not_found)?; Ok(response.consume()) } pub fn list<'a>( &'a self, uri: GCSObject<'a>, ) -> impl Stream<Item = Result<objects::Metadata, GCSClientError>> + 'a { let mut page_token: Option<String> = None; async_stream::try_stream! 
{ loop { let list_request_opts = Some(objects::ListOptional { prefix: Some(uri.path.as_ref()), page_token: page_token.as_deref(), standard_params: tame_gcs::common::StandardQueryParameters { fields: Some("items(name, updated"), ..Default::default() }, ..Default::default() }); let list_request = Object::list(&uri.bucket, list_request_opts)?; let list_response = util::execute::<_, objects::ListResponse>( self, list_request).await?; for object_meta in list_response.objects { yield object_meta } page_token = list_response.page_token; if page_token.is_none() { break; } } } } pub async fn insert<'a, 'b>( &self, uri: GCSObject<'a>, content: Vec<u8>, ) -> Result<(), GCSClientError> { let content_len = content.len().try_into().unwrap(); let content_body = std::io::Cursor::new(content); let insert_request = Object::insert_simple(&uri, content_body, content_len, None)?; let _response = util::execute::<_, objects::InsertResponse>(self, insert_request).await?; Ok(()) } pub async fn rename<'a>( &self, src: GCSObject<'a>, dst: GCSObject<'a>, ) -> Result<(), GCSClientError> { let mut rewrite_token = None; loop { let metadata = None; let precondition = Some(objects::RewriteObjectOptional { destination_conditionals: Some(tame_gcs::common::Conditionals { if_generation_match: Some(0), ..Default::default() }), ..Default::default() }); let rewrite_http_request = Object::rewrite(&src, &dst, rewrite_token, metadata, precondition)?; let response = util::execute::<_, objects::RewriteObjectResponse>(self, rewrite_http_request) .await .map_err(util::check_precondition_status)?; rewrite_token = response.rewrite_token; if rewrite_token.is_none() { break; } } self.delete(src).await } pub async fn delete<'a>(&self, uri: GCSObject<'a>) -> Result<(), GCSClientError> { let delete_request = Object::delete(&uri, None)?; let _response = util::execute::<_, objects::DeleteObjectResponse>(self, delete_request).await?; Ok(()) } }
use super::{util, GCSClientError, GCSObject}; use futures::Stream; use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; use std::sync::Arc; use tame_gcs::objects::{self, Object}; use tame_oauth::gcp as oauth; use log::debug; pub struct GCSStorageBackend { pub client: reqwest::Client, pub cred_path: PathBuf, pub auth: Arc<oauth::ServiceAccoun
..Default::default() }); let list_request = Object::list(&uri.bucket, list_request_opts)?; let list_response = util::execute::<_, objects::ListResponse>( self, list_request).await?; for object_meta in list_response.objects { yield object_meta } page_token = list_response.page_token; if page_token.is_none() { break; } } } } pub async fn insert<'a, 'b>( &self, uri: GCSObject<'a>, content: Vec<u8>, ) -> Result<(), GCSClientError> { let content_len = content.len().try_into().unwrap(); let content_body = std::io::Cursor::new(content); let insert_request = Object::insert_simple(&uri, content_body, content_len, None)?; let _response = util::execute::<_, objects::InsertResponse>(self, insert_request).await?; Ok(()) } pub async fn rename<'a>( &self, src: GCSObject<'a>, dst: GCSObject<'a>, ) -> Result<(), GCSClientError> { let mut rewrite_token = None; loop { let metadata = None; let precondition = Some(objects::RewriteObjectOptional { destination_conditionals: Some(tame_gcs::common::Conditionals { if_generation_match: Some(0), ..Default::default() }), ..Default::default() }); let rewrite_http_request = Object::rewrite(&src, &dst, rewrite_token, metadata, precondition)?; let response = util::execute::<_, objects::RewriteObjectResponse>(self, rewrite_http_request) .await .map_err(util::check_precondition_status)?; rewrite_token = response.rewrite_token; if rewrite_token.is_none() { break; } } self.delete(src).await } pub async fn delete<'a>(&self, uri: GCSObject<'a>) -> Result<(), GCSClientError> { let delete_request = Object::delete(&uri, None)?; let _response = util::execute::<_, objects::DeleteObjectResponse>(self, delete_request).await?; Ok(()) } }
tAccess>, } impl std::fmt::Debug for GCSStorageBackend { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { f.debug_struct("GCSStorageBackend {...}").finish() } } impl TryFrom<PathBuf> for GCSStorageBackend { type Error = GCSClientError; fn try_from(cred_path: PathBuf) -> Result<Self, Self::Error> { let client = reqwest::Client::builder().build()?; let cred_contents = std::fs::read_to_string(&cred_path)?; let svc_account_info = oauth::ServiceAccountInfo::deserialize(cred_contents)?; let svc_account_access = oauth::ServiceAccountAccess::new(svc_account_info)?; Ok(Self { client, cred_path, auth: std::sync::Arc::new(svc_account_access), }) } } impl GCSStorageBackend { pub async fn metadata<'a>( &self, path: GCSObject<'a>, ) -> Result<objects::Metadata, GCSClientError> { debug!("creating request"); let get_meta_request = Object::get(&path, None)?; debug!("executing request"); let response = util::execute::<_, objects::GetObjectResponse>(self, get_meta_request).await?; debug!("returning meta"); Ok(response.metadata) } pub async fn download<'a>(&self, path: GCSObject<'a>) -> Result<bytes::Bytes, GCSClientError> { let download_request = Object::download(&path, None)?; let response = util::execute::<_, objects::DownloadObjectResponse>(self, download_request) .await .map_err(util::check_object_not_found)?; Ok(response.consume()) } pub fn list<'a>( &'a self, uri: GCSObject<'a>, ) -> impl Stream<Item = Result<objects::Metadata, GCSClientError>> + 'a { let mut page_token: Option<String> = None; async_stream::try_stream! { loop { let list_request_opts = Some(objects::ListOptional { prefix: Some(uri.path.as_ref()), page_token: page_token.as_deref(), standard_params: tame_gcs::common::StandardQueryParameters { fields: Some("items(name, updated"), ..Default::default() },
random
[ { "content": "pub fn check_object_not_found(err: GCSClientError) -> GCSClientError {\n\n match err {\n\n GCSClientError::GCSError {\n\n source: tame_gcs::error::Error::HttpStatus(HttpStatusError(StatusCode::NOT_FOUND)),\n\n } => GCSClientError::NotFound,\n\n err => err,\n\n ...
Rust
src/query.rs
m42e/zsh-histdb-skim
385c039f4338963e4cada1ab9c3035e1d969a552
use crate::location::Location; use crate::environment::*; pub fn build_query_string(theloc: &Location, grouped: bool) -> String { let mut query = String::from("select history.id as id, commands.argv as cmd,"); if !grouped { query.push_str(" start_time") } else { query.push_str(" max(start_time)") } query.push_str(" as start, exit_status, duration,"); if !grouped { query.push_str(" 1") } else { query.push_str(" count()") } query.push_str(" as count, history.session as session, places.host as host, places.dir as dir"); query.push_str(" from history"); query.push_str(" left join commands on history.command_id = commands.id"); query.push_str(" left join places on history.place_id = places.id"); match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(" where"); } _ => {} }; match theloc { Location::Session => { query.push_str(&format!(" session == {} and", &get_current_session_id())); } Location::Directory => { query.push_str(&format!(" places.dir like '{}' and", &get_current_dir())); } _ => {} }; match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(&format!(" places.host == '{}'", &get_current_host())); } _ => {} }; if grouped { query.push_str(" group by history.command_id, history.place_id"); } query.push_str(" order by start desc"); return query; } #[cfg(test)] mod query { use super::*; use regex::Regex; #[test] fn has_select_fields() { for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(query.contains("history.id as id")); assert!(query.contains("exit_status")); assert!(query.contains("start")); assert!(query.contains("duration")); assert!(query.contains("count")); assert!(query.contains("history.session as session")); assert!(query.contains("places.dir")); } } #[test] fn contains_host() { let re_host = Regex::new(r"host == '.*'").unwrap(); for l in vec![Location::Session, 
Location::Directory, Location::Machine] { let query = build_query_string(&l, true); assert!(re_host.is_match(&query)); } let query = build_query_string(&Location::Everywhere, true); assert!(!re_host.is_match(&query)); } #[test] fn contains_grouping() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(re_group.is_match(&query)); } } #[test] fn contains_no_grouping_if_disabled() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); let re_only_group = Regex::new(r"group").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, false); assert!(!re_only_group.is_match(&query)); assert!(!re_group.is_match(&query)); } } #[test] fn for_session() { let query = build_query_string(&Location::Session, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_session.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_directory() { let query = build_query_string(&Location::Directory, false); let re_directory = Regex::new(r"places.dir like '.*' and").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_directory.is_match(&query)); assert!(!re_group.is_match(&query)); } #[test] fn for_machine() { let query = build_query_string(&Location::Machine, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); 
assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_everywhere() { let query = build_query_string(&Location::Everywhere, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(!re_host.is_match(&query)); assert!(re_group.is_match(&query)); } }
use crate::location::Location; use crate::environment::*; pub fn build_query_string(theloc: &Location, grouped: bool) -> String { let mut query = String::from("select history.id as id, commands.argv as cmd,"); if !grouped { query.push_str(" start_time") } else { query.push_str(" max(start_time)") } query.push_str(" as start, exit_status, duration,"); if !grouped { query.push_str(" 1") } else { query.push_str(" count()") } query.push_str(" as count, history.session as session, places.host as host, places.dir as dir"); query.push_str(" from history"); query.push_str(" left join commands on history.command_id = commands.id"); query.push_str(" left join places on history.place_id = places.id"); match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(" where"); } _ => {} }; match theloc { Location::Session => { query.push_str(&format!(" session == {} and", &get_current_session_id())); } Location::Directory => { query.push_str(&format!(" places.dir like '{}' and", &get_current_dir())); } _ => {} }; match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(&format!(" places.host == '{}'", &get_current_host())); } _ => {} }; if grouped { query.push_str(" group by history.command_id, history.place_id"); } query.push_str(" order by start desc"); return query; } #[cfg(test)] mod query { use super::*; use regex::Regex; #[test] fn has_select_fields() { for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(query.contains("history.id as id")); assert!(query.contains("exit_status")); assert!(query.contains("start")); assert!(query.contains("duration")); assert!(query.contains("count")); assert!(query.contains("history.session as session")); assert!(query.contains("places.dir")); } } #[test]
#[test] fn contains_grouping() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(re_group.is_match(&query)); } } #[test] fn contains_no_grouping_if_disabled() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); let re_only_group = Regex::new(r"group").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, false); assert!(!re_only_group.is_match(&query)); assert!(!re_group.is_match(&query)); } } #[test] fn for_session() { let query = build_query_string(&Location::Session, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_session.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_directory() { let query = build_query_string(&Location::Directory, false); let re_directory = Regex::new(r"places.dir like '.*' and").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_directory.is_match(&query)); assert!(!re_group.is_match(&query)); } #[test] fn for_machine() { let query = build_query_string(&Location::Machine, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_everywhere() { let query = 
build_query_string(&Location::Everywhere, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(!re_host.is_match(&query)); assert!(re_group.is_match(&query)); } }
fn contains_host() { let re_host = Regex::new(r"host == '.*'").unwrap(); for l in vec![Location::Session, Location::Directory, Location::Machine] { let query = build_query_string(&l, true); assert!(re_host.is_match(&query)); } let query = build_query_string(&Location::Everywhere, true); assert!(!re_host.is_match(&query)); }
function_block-full_function
[ { "content": "/// Get the histdb session from the environment\n\npub fn get_current_session_id() -> String {\n\n let key = \"HISTDB_SESSION\";\n\n let session_id = env::var(key).unwrap_or(String::from(\"\"));\n\n return session_id.to_string();\n\n}\n\n\n", "file_path": "src/environment.rs", "ra...
Rust
src/routes/mod.rs
koto-bank/zeph
bfb154678e2881a9584e4ea19d90c4dc29e9455b
use {DB,CONFIG}; use iron::prelude::*; use iron::status; use iron::mime::{Mime, TopLevel, SubLevel, Attr, Value}; use urlencoded::UrlEncodedQuery; use serde_json::to_value; pub mod image; pub mod user; pub mod admin; pub use image::*; pub use user::*; pub use admin::*; pub fn index_n_search(_req: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph" script src="/assets/js/main.js" {} div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } div#tags {} a href="/about" style="opacity: 0.5;" "About Zeph & Help" } div#images {} button#upload-button onclick="showUploadOrLogin()" "Login" div#login-or-upload-form / } }; Ok(Response::with((status::Ok, page))) } pub fn more(req: &mut Request) -> IronResult<Response> { let mut response = Response::new(); let q = match req.get_ref::<UrlEncodedQuery>() { Ok(hashmap) => hashmap, Err(_) => return Ok(Response::with((status::BadRequest, "No parameters"))) }; let offset = query!(q,"offset").unwrap_or(&"0".to_string()).parse::<usize>().unwrap(); let images = match query!(q,"q") { Some(x) => DB.lock().unwrap().by_tags(25, offset, &x.to_lowercase().split_whitespace().map(String::from).collect::<Vec<_>>()).unwrap(), None => DB.lock().unwrap().get_images(25, offset).unwrap() }; response .set_mut(Mime(TopLevel::Application, SubLevel::Json, vec![(Attr::Charset, Value::Utf8)])) .set_mut(to_value(&images).to_string()) .set_mut(status::Ok); Ok(response) } pub fn about(_: &mut Request) -> IronResult<Response> { let page = html! 
{ meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph - About" div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" / h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } } } div style="margin-left: 15%;" { {"Zeph is an open-source booru/imageboard written in " a href="https://www.rust-lang.org/" "Rust" } br / { "You can get source code to build Zeph yourself from " a href="https://github.com/koto-bank/zeph" "Github" } br / @if let Some(addr) = CONFIG.get("contact-email") { { "Contact e-mail adress: " a href={"mailto:" ( addr.as_str().unwrap()) } ( addr.as_str().unwrap() ) } } br h3 "Search options" table style="width: 50%;" { tr { th "Example" th "Meaning" } tr { td code "1girl" td "Search for a girl on her own" } tr { td code "1girl -fur" td "Search for a non-fluffy girl (exclude 'fur' tag)" } tr { td code "rating:s,q" td "Search for a safe and questionable images" } tr { td { code "*girls" "or" code "2girl*" } td "Search for anything that ends with 'girls' (or starts with '2girl')" } tr { td code "from:konachan" td "Search for images synchronized from konachan (full list in source code & easily extendable)" } tr { td code "uploader:random_dude" td "Images uploaded by random_dude, note that 'sync' are synchronized images" } tr { td code "sort:asc:score" td "Sort images by score from worst to best (ascending); desc is for descening" } tr { td code "1girl | 2girls" td "Search for images of girl on her own OR 2 girls" } tr { td code "1girl format:jpg,gif" td "Search for GIF and JPEG images" } } } }; Ok(Response::with((status::Ok, page))) }
use {DB,CONFIG}; use iron::prelude::*; use iron::status; use iron::mime::{Mime, TopLevel, SubLevel, Attr, Value}; use urlencoded::UrlEncodedQuery; use serde_json::to_value; pub mod image; pub mod user; pub mod admin; pub use image::*; pub use user::*; pub use admin::*; pub fn index_n_search(_req: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph" script src="/assets/js/main.js" {} div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } div#tags {} a href="/about" style="opacity: 0.5;" "About Zeph & Help" } div#images {} button#upload-button onclick="showUploadOrLogin()" "Login" div#login-or-upload-form / } }; Ok(Response::with((status::Ok, page))) } pub fn more(req: &mut Request) -> IronResult<Response> { let mut response = Response::new(); let q = match req.get_ref::<UrlEncodedQuery>() { Ok(hashmap) => hashmap, Err(_) => return Ok(Response::with((status::BadRequest, "No parameters"))) }; let offset = query!(q,"offset").unwrap_or(&"0".to_string()).parse::<usize>().unwrap(); let images = match query!(q,"q") { Some(x) => DB.lock().unwrap().by_tags(25, offset, &x.to_lowercase().split_whitespace().map(String::from).collect::<Vec<_>>()).unwrap(), None => DB.lock().unwrap().get_images(25, offset).unwrap() }; response .set_mut(Mime(TopLevel::Application, SubLevel::Json, vec![(Attr::Charset, Value::Utf8)])) .set_mut(to_value(&images).to_string()) .set_mut(status::Ok); Ok(response) } pub fn about(_: &mut Request) -> IronResult<Response> { let page = html! 
{ meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph - About" div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" / h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } } } div style="margin-left: 15%;" { {"Zeph is an open-source booru/imageboard written in " a href="https://www.rust-lang.org/" "Rust" } br / { "You can get source code to build Zeph yourself from " a href="https://github.com/koto-bank/zeph" "Github" } br / @if let Some(addr) = CONFIG.get("contact-email") { { "Contact e-mail adress: " a href={"mailto:" ( addr.as_str().unwrap()) } ( addr.as_str().unwrap() ) } } br h3 "Search options" table style="width: 50%;" { tr { th "Example" th "Meaning" } tr { td code "1girl" td "Search for a girl on her own" } tr { td code "1girl -fur" td "Search for a non-fluffy girl (exclude 'fur' tag)" } tr { td code "rating:s,q" td "Search for a safe and questionable images" } tr { td { code "*girls" "or" code "2girl*" } td "Search for anything that ends with 'girls' (or starts with '2girl')" } tr { td code "from:konachan" td "Search for images synchronized from konachan (full list in source code & easily extendable)" } tr { td code "uploader:random_dude" td "Images uploaded by random_dude, note that 'sync' are synchronized images" } tr { td code "sort:asc:score" td "Sort images by score from worst to best (ascending); desc is for descening" } tr { td code "1girl | 2girls" td "Search for images of girl on her own OR 2 girls" }
tr { td code "1girl format:jpg,gif" td "Search for GIF and JPEG images" } } } }; Ok(Response::with((status::Ok, page))) }
function_block-function_prefix_line
[ { "content": "pub fn login(req: &mut Request) -> IronResult<Response> {\n\n let mut response = Response::new();\n\n\n\n let body = match req.get::<UrlEncodedBody>() {\n\n Ok(data) => data,\n\n Err(_) => return Ok(Response::with(status::BadRequest))\n\n };\n\n\n\n if let (Some(login), ...
Rust
src/opus/imported_encode.rs
binast/range-encoding-rs
7dd51d3522a936b50b0ee02a351af5783d65e1e5
#![allow(dead_code, mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut)] use std; use std::io::Write; pub type int32_t = std::os::raw::c_int; pub type uint32_t = std::os::raw::c_uint; pub type __uint16_t = std::os::raw::c_ushort; pub type __uint32_t = std::os::raw::c_uint; pub type __int64_t = std::os::raw::c_longlong; pub type __uint64_t = std::os::raw::c_ulonglong; pub type __darwin_size_t = std::os::raw::c_ulong; pub type __darwin_off_t = __int64_t; pub type opus_int32 = int32_t; pub type opus_uint32 = uint32_t; pub type size_t = __darwin_size_t; pub struct ec_enc<W> where W: std::io::Write, { pub out: W, pub end_window: ec_window, pub nend_bits: std::os::raw::c_int, pub nbits_total: std::os::raw::c_int, pub offs: opus_uint32, pub rng: opus_uint32, pub val: opus_uint32, pub ext: opus_uint32, pub rem: std::os::raw::c_int, pub end_buffer: Vec<u8>, } #[derive(Copy, Clone)] #[repr(C)] pub union unnamed { __f: std::os::raw::c_double, __u: std::os::raw::c_ulonglong, } #[derive(Copy, Clone)] #[repr(C)] pub struct unnamed_0 { pub __m: std::os::raw::c_ulonglong, pub __sexp: std::os::raw::c_ushort, } pub type ec_window = opus_uint32; pub type fpos_t = __darwin_off_t; fn celt_udiv(mut n: opus_uint32, mut d: opus_uint32) -> opus_uint32 { return n.wrapping_div(d); } pub unsafe fn ec_encode<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _ft: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = celt_udiv((*_this).rng, _ft); if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } 
unsafe fn ec_enc_normalize<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { while (*_this).rng <= 1u32 << 32i32 - 1i32 >> 8i32 { ec_enc_carry_out( _this, ((*_this).val >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int, )?; (*_this).val = (*_this).val << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); (*_this).rng <<= 8i32; (*_this).nbits_total += 8i32 } Ok(()) } unsafe fn ec_enc_carry_out<W: Write>( mut _this: *mut ec_enc<W>, mut _c: std::os::raw::c_int, ) -> Result<(), std::io::Error> { if _c as std::os::raw::c_uint != (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint) { let mut carry = _c >> 8i32; if (*_this).rem >= 0i32 { ec_write_byte(_this, ((*_this).rem + carry) as std::os::raw::c_uint)?; } if (*_this).ext > 0i32 as std::os::raw::c_uint { let mut sym = (1u32 << 8i32) .wrapping_sub(1i32 as std::os::raw::c_uint) .wrapping_add(carry as std::os::raw::c_uint) & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint); loop { ec_write_byte(_this, sym)?; (*_this).ext = (*_this).ext.wrapping_sub(1); if !((*_this).ext > 0i32 as std::os::raw::c_uint) { break; } } } (*_this).rem = (_c as std::os::raw::c_uint & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint)) as std::os::raw::c_int } else { (*_this).ext = (*_this).ext.wrapping_add(1) }; Ok(()) } unsafe fn ec_write_byte<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).out.write_all(&[_value as u8])?; Ok(()) } pub unsafe fn ec_encode_bin<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _bits; if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = 
r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_bit_logp<W: Write>( mut _this: *mut ec_enc<W>, mut _val: std::os::raw::c_int, mut _logp: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng; let mut l = (*_this).val; let mut s = r >> _logp; r = (r as std::os::raw::c_uint).wrapping_sub(s) as opus_uint32 as opus_uint32; if 0 != _val { (*_this).val = l.wrapping_add(r) } (*_this).rng = if 0 != _val { s } else { r }; ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_icdf<W: Write>( mut _this: *mut ec_enc<W>, mut _s: std::os::raw::c_int, mut _icdf: *const std::os::raw::c_uchar, mut _ftb: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _ftb; if _s > 0i32 { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add((*_this).rng.wrapping_sub( r.wrapping_mul(*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_uint), )) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul( (*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_int - *_icdf.offset(_s as isize) as std::os::raw::c_int) as std::os::raw::c_uint, ) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(*_icdf.offset(_s as isize) as std::os::raw::c_uint)) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_uint<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _ft: opus_uint32, ) -> Result<(), std::io::Error> { assert!(_ft > 1); _ft = _ft.wrapping_sub(1); let mut ftb = ::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - _ft.leading_zeros() as i32; if ftb > 8i32 { ftb -= 8i32; let ft = (_ft >> ftb).wrapping_add(1i32 as std::os::raw::c_uint); let fl = _fl >> ftb; ec_encode(_this, fl, 
fl.wrapping_add(1i32 as std::os::raw::c_uint), ft)?; ec_enc_bits( _this, _fl & ((1i32 as opus_uint32) << ftb).wrapping_sub(1u32), ftb as std::os::raw::c_uint, )?; } else { ec_encode( _this, _fl, _fl.wrapping_add(1i32 as std::os::raw::c_uint), _ft.wrapping_add(1i32 as std::os::raw::c_uint), )?; } return Ok(()); } pub unsafe fn ec_enc_bits<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; assert!(_bits > 0); if (used as std::os::raw::c_uint).wrapping_add(_bits) > (::std::mem::size_of::<ec_window>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32) as std::os::raw::c_uint { loop { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; if !(used >= 8i32) { break; } } } window |= _fl << used; used = (used as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; (*_this).end_window = window; (*_this).nend_bits = used; (*_this).nbits_total = ((*_this).nbits_total as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; return Ok(()); } unsafe fn ec_write_byte_at_end<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).end_buffer.push(_value as u8); Ok(()) } pub unsafe fn ec_enc_done<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { let mut l = 32i32 - (::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - (*_this).rng.leading_zeros() as i32); let mut msk = (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint) >> l; let mut end = (*_this).val.wrapping_add(msk) & !msk; if end | msk >= (*_this).val.wrapping_add((*_this).rng) { l += 1; msk >>= 1i32; end = (*_this).val.wrapping_add(msk) & !msk; }; while l > 0i32 { ec_enc_carry_out(_this, 
(end >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int)?; end = end << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); l -= 8i32; } if (*_this).rem >= 0i32 || (*_this).ext > 0i32 as std::os::raw::c_uint { ec_enc_carry_out(_this, 0i32)?; }; let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; while used >= 8i32 { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; } if used > 0i32 { *(*_this).end_buffer.last_mut().unwrap() |= window as u8; }; for byte in (*_this).end_buffer.iter().rev() { ec_write_byte(_this, *byte as u32)?; } Ok(()) }
#![allow(dead_code, mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut)] use std; use std::io::Write; pub type int32_t = std::os::raw::c_int; pub type uint32_t = std::os::raw::c_uint; pub type __uint16_t = std::os::raw::c_ushort; pub type __uint32_t = std::os::raw::c_uint; pub type __int64_t = std::os::raw::c_longlong; pub type __uint64_t = std::os::raw::c_ulonglong; pub type __darwin_size_t = std::os::raw::c_ulong; pub type __darwin_off_t = __int64_t; pub type opus_int32 = int32_t; pub type opus_uint32 = uint32_t; pub type size_t = __darwin_size_t; pub struct ec_enc<W> where W: std::io::Write, { pub out: W, pub end_window: ec_window, pub nend_bits: std::os::raw::c_int, pub nbits_total: std::os::raw::c_int, pub offs: opus_uint32, pub rng: opus_uint32, pub val: opus_uint32, pub ext: opus_uint32, pub rem: std::os::raw::c_int, pub end_buffer: Vec<u8>, } #[derive(Copy, Clone)] #[repr(C)] pub union unnamed { __f: std::os::raw::c_double, __u: std::os::raw::c_ulonglong, } #[derive(Copy, Clone)] #[repr(C)] pub struct unnamed_0 { pub __m: std::os::raw::c_ulonglong, pub __sexp: std::os::raw::c_ushort, } pub type ec_window = opus_uint32; pub type fpos_t = __darwin_off_t; fn celt_udiv(mut n: opus_uint32, mut d: opus_uint32) -> opus_uint32 { return n.wrapping_div(d); } pub unsafe fn ec_encode<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _ft: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = celt_udiv((*_this).rng, _ft); if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } 
unsafe fn ec_enc_normalize<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { while (*_this).rng <= 1u32 << 32i32 - 1i32 >> 8i32 { ec_enc_carry_out( _this, ((*_this).val >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int, )?; (*_this).val = (*_this).val << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); (*_this).rng <<= 8i32; (*_this).nbits_total += 8i32 } Ok(()) } unsafe fn ec_enc_carry_out<W: Write>( mut _this: *mut ec_enc<W>, mut _c: std::os::raw::c_int, ) -> Result<(), std::io::Error> { if _c as std::os::raw::c_uint != (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint) { let mut carry = _c >> 8i32; if (*_this).rem >= 0i32 { ec_write_byte(_this, ((*_this).rem + carry) as std::os::raw::c_uint)?; } if (*_this).ext > 0i32 as std::os::raw::c_uint { let mut sym = (1u32 << 8i32)
this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).out.write_all(&[_value as u8])?; Ok(()) } pub unsafe fn ec_encode_bin<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _bits; if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_bit_logp<W: Write>( mut _this: *mut ec_enc<W>, mut _val: std::os::raw::c_int, mut _logp: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng; let mut l = (*_this).val; let mut s = r >> _logp; r = (r as std::os::raw::c_uint).wrapping_sub(s) as opus_uint32 as opus_uint32; if 0 != _val { (*_this).val = l.wrapping_add(r) } (*_this).rng = if 0 != _val { s } else { r }; ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_icdf<W: Write>( mut _this: *mut ec_enc<W>, mut _s: std::os::raw::c_int, mut _icdf: *const std::os::raw::c_uchar, mut _ftb: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _ftb; if _s > 0i32 { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add((*_this).rng.wrapping_sub( r.wrapping_mul(*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_uint), )) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul( (*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_int - *_icdf.offset(_s as isize) as std::os::raw::c_int) as std::os::raw::c_uint, ) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) 
.wrapping_sub(r.wrapping_mul(*_icdf.offset(_s as isize) as std::os::raw::c_uint)) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_uint<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _ft: opus_uint32, ) -> Result<(), std::io::Error> { assert!(_ft > 1); _ft = _ft.wrapping_sub(1); let mut ftb = ::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - _ft.leading_zeros() as i32; if ftb > 8i32 { ftb -= 8i32; let ft = (_ft >> ftb).wrapping_add(1i32 as std::os::raw::c_uint); let fl = _fl >> ftb; ec_encode(_this, fl, fl.wrapping_add(1i32 as std::os::raw::c_uint), ft)?; ec_enc_bits( _this, _fl & ((1i32 as opus_uint32) << ftb).wrapping_sub(1u32), ftb as std::os::raw::c_uint, )?; } else { ec_encode( _this, _fl, _fl.wrapping_add(1i32 as std::os::raw::c_uint), _ft.wrapping_add(1i32 as std::os::raw::c_uint), )?; } return Ok(()); } pub unsafe fn ec_enc_bits<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; assert!(_bits > 0); if (used as std::os::raw::c_uint).wrapping_add(_bits) > (::std::mem::size_of::<ec_window>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32) as std::os::raw::c_uint { loop { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; if !(used >= 8i32) { break; } } } window |= _fl << used; used = (used as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; (*_this).end_window = window; (*_this).nend_bits = used; (*_this).nbits_total = ((*_this).nbits_total as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; return Ok(()); } unsafe fn ec_write_byte_at_end<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> 
{ (*_this).end_buffer.push(_value as u8); Ok(()) } pub unsafe fn ec_enc_done<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { let mut l = 32i32 - (::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - (*_this).rng.leading_zeros() as i32); let mut msk = (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint) >> l; let mut end = (*_this).val.wrapping_add(msk) & !msk; if end | msk >= (*_this).val.wrapping_add((*_this).rng) { l += 1; msk >>= 1i32; end = (*_this).val.wrapping_add(msk) & !msk; }; while l > 0i32 { ec_enc_carry_out(_this, (end >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int)?; end = end << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); l -= 8i32; } if (*_this).rem >= 0i32 || (*_this).ext > 0i32 as std::os::raw::c_uint { ec_enc_carry_out(_this, 0i32)?; }; let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; while used >= 8i32 { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; } if used > 0i32 { *(*_this).end_buffer.last_mut().unwrap() |= window as u8; }; for byte in (*_this).end_buffer.iter().rev() { ec_write_byte(_this, *byte as u32)?; } Ok(()) }
.wrapping_sub(1i32 as std::os::raw::c_uint) .wrapping_add(carry as std::os::raw::c_uint) & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint); loop { ec_write_byte(_this, sym)?; (*_this).ext = (*_this).ext.wrapping_sub(1); if !((*_this).ext > 0i32 as std::os::raw::c_uint) { break; } } } (*_this).rem = (_c as std::os::raw::c_uint & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint)) as std::os::raw::c_int } else { (*_this).ext = (*_this).ext.wrapping_add(1) }; Ok(()) } unsafe fn ec_write_byte<W: Write>( mut _
random
[ { "content": "#[test]\n\nfn widths() {\n\n let widths = [1, 30, 5, 20];\n\n let probabilities = CumulativeDistributionFrequency::new(widths.iter().cloned().collect());\n\n\n\n let widths2: Vec<_> = probabilities.widths().collect();\n\n assert_eq!(widths2, widths)\n\n}\n\n\n", "file_path": "tests...
Rust
src/database/tcn_dao.rs
Co-Epi/app-backend-rust
b332afe7e92caf2703334c00dabf72fb4f279869
use crate::{
    byte_vec_to_16_byte_array,
    errors::{ServicesError},
    expect_log,
    reports_interval,
    tcn_recording::observed_tcn_processor::ObservedTcn,
};
use log::*;
use reports_interval::UnixTime;
use rusqlite::{params, Row, NO_PARAMS, types::Value};
use std::{
    sync::Arc,
    rc::Rc,
};
use tcn::TemporaryContactNumber;
use super::database::Database;

/// Data-access interface for observed TCNs (temporary contact numbers).
pub trait TcnDao: Send + Sync {
    /// Returns every stored TCN exposure row.
    fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError>;
    /// Returns only the stored rows whose TCN equals one of `with`.
    fn find_tcns(
        &self,
        with: Vec<TemporaryContactNumber>,
    ) -> Result<Vec<ObservedTcn>, ServicesError>;
    /// Replaces any stored rows having the same TCNs as `observed_tcns`
    /// with the provided entries (delete-then-insert in one transaction).
    fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError>;
}

/// SQLite-backed implementation of `TcnDao`.
pub struct TcnDaoImpl {
    db: Arc<Database>,
}

impl TcnDaoImpl {
    // Creates the `tcn` table on first use. The TCN itself is stored as a
    // hex-encoded text column (see db_tcn_str_to_tcn). Panics via
    // expect_log! if the DDL fails, since the DAO is unusable without it.
    fn create_table_if_not_exists(db: &Arc<Database>) {
        let res = db.execute_sql(
            "create table if not exists tcn( tcn text not null, contact_start integer not null, contact_end integer not null, min_distance real not null, avg_distance real not null, total_count integer not null )",
            params![],
        );
        expect_log!(res, "Couldn't create tcn table");
    }

    // Maps one result row — column order (tcn, contact_start, contact_end,
    // min_distance, avg_distance, total_count) — to an ObservedTcn.
    // Panics via expect_log! on a malformed row.
    fn to_tcn(row: &Row) -> ObservedTcn {
        let tcn: Result<String, _> = row.get(0);
        let tcn_value = expect_log!(tcn, "Invalid row: no TCN");
        let tcn = Self::db_tcn_str_to_tcn(tcn_value);

        let contact_start_res = row.get(1);
        let contact_start: i64 = expect_log!(contact_start_res, "Invalid row: no contact start");

        let contact_end_res = row.get(2);
        let contact_end: i64 = expect_log!(contact_end_res, "Invalid row: no contact end");

        let min_distance_res = row.get(3);
        let min_distance: f64 = expect_log!(min_distance_res, "Invalid row: no min distance");

        let avg_distance_res = row.get(4);
        let avg_distance: f64 = expect_log!(avg_distance_res, "Invalid row: no avg distance");

        let total_count_res = row.get(5);
        let total_count: i64 = expect_log!(total_count_res, "Invalid row: no total count");

        // SQLite stores signed integers; times are re-widened to u64 here,
        // distances narrowed to the f32 the domain type uses.
        ObservedTcn {
            tcn,
            contact_start: UnixTime {
                value: contact_start as u64,
            },
            contact_end: UnixTime {
                value: contact_end as u64,
            },
            min_distance: min_distance as f32,
            avg_distance: avg_distance as f32,
            total_count: total_count as usize,
        }
    }

    // Decodes the hex string stored in the db into a 16-byte TCN.
    // Panics via expect_log! if the stored value is not valid hex.
    fn db_tcn_str_to_tcn(str: String) -> TemporaryContactNumber {
        let tcn_value_bytes_vec_res = hex::decode(str);
        let tcn_value_bytes_vec = expect_log!(tcn_value_bytes_vec_res, "Invalid stored TCN format");
        let tcn_value_bytes = byte_vec_to_16_byte_array(tcn_value_bytes_vec);
        TemporaryContactNumber(tcn_value_bytes)
    }

    /// Builds the DAO, ensuring the backing table exists.
    pub fn new(db: Arc<Database>) -> TcnDaoImpl {
        Self::create_table_if_not_exists(&db);
        TcnDaoImpl { db }
    }
}

impl TcnDao for TcnDaoImpl {
    fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError> {
        self.db
            .query(
                "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn",
                NO_PARAMS,
                |row| Self::to_tcn(row),
            )
            .map_err(ServicesError::from)
    }

    fn find_tcns(
        &self,
        with: Vec<TemporaryContactNumber>,
    ) -> Result<Vec<ObservedTcn>, ServicesError> {
        // TCNs are matched by their hex encoding; rarray() binds the whole
        // list as a single table-valued parameter (rusqlite `array` feature —
        // presumably enabled on the connection elsewhere; verify).
        let tcn_strs: Vec<Value> = with.into_iter().map(|tcn|
            Value::Text(hex::encode(tcn.0))
        )
        .collect();

        self.db
            .query(
                "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn where tcn in rarray(?);",
                params![Rc::new(tcn_strs)],
                |row| Self::to_tcn(row),
            )
            .map_err(ServicesError::from)
    }

    fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError> {
        debug!("Overwriting db exposures with same TCNs, with: {:?}", observed_tcns);

        // Hex keys of the incoming entries; cloned because `observed_tcns`
        // is consumed again by the insert loop below.
        let tcn_strs: Vec<Value> = observed_tcns.clone().into_iter().map(|tcn|
            Value::Text(hex::encode(tcn.tcn.0))
        )
        .collect();

        // Delete + re-insert inside one transaction so a failed insert rolls
        // back the delete.
        self.db.transaction(|t| {
            let delete_res = t.execute("delete from tcn where tcn in rarray(?);", params![Rc::new(tcn_strs)]);
            if delete_res.is_err() {
                return Err(ServicesError::General("Delete TCNs failed".to_owned()))
            }

            for tcn in observed_tcns {
                let tcn_str = hex::encode(tcn.tcn.0);
                let insert_res = t.execute("insert into tcn(tcn, contact_start, contact_end, min_distance, avg_distance, total_count) values(?1, ?2, ?3, ?4, ?5, ?6)", params![
                    tcn_str,
                    tcn.contact_start.value as i64,
                    tcn.contact_end.value as i64,
                    tcn.min_distance as f64,
                    tcn.avg_distance as f64,
                    tcn.total_count as i64
                ]);
                if insert_res.is_err() {
                    return Err(ServicesError::General("Insert TCN failed".to_owned()))
                }
            }
            Ok(())
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use rusqlite::Connection;
    use crate::{tcn_recording::tcn_batches_manager::TcnBatchesManager, reports_update::exposure::ExposureGrouper};

    // Round-trips a single TCN through overwrite() + all().
    #[test]
    fn saves_and_loads_observed_tcn() {
        let database = Arc::new(Database::new(
            Connection::open_in_memory().expect("Couldn't create database!"),
        ));
        let tcn_dao = TcnDaoImpl::new(database);

        let observed_tcn = ObservedTcn {
            tcn: TemporaryContactNumber([
                24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173,
            ]),
            contact_start: UnixTime { value: 1590528300 },
            contact_end: UnixTime { value: 1590528301 },
            min_distance: 0.0,
            avg_distance: 0.0,
            total_count: 1,
        };

        let save_res = tcn_dao.overwrite(vec![observed_tcn.clone()]);
        assert!(save_res.is_ok());

        let loaded_tcns_res = tcn_dao.all();
        assert!(loaded_tcns_res.is_ok());

        let loaded_tcns = loaded_tcns_res.unwrap();

        assert_eq!(loaded_tcns.len(), 1);
        assert_eq!(loaded_tcns[0], observed_tcn);
    }

    // Saves three distinct TCNs in separate overwrite() calls and checks
    // that all() returns all of them.
    #[test]
    fn saves_and_loads_multiple_tcns() {
        let database = Arc::new(Database::new(
            Connection::open_in_memory().expect("Couldn't create database!"),
        ));
        let tcn_dao = TcnDaoImpl::new(database);

        let observed_tcn_1 = ObservedTcn {
            tcn: TemporaryContactNumber([
                24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173,
            ]),
            contact_start: UnixTime { value: 1590528300 },
            contact_end: UnixTime { value: 1590528301 },
            min_distance: 0.0,
            avg_distance: 0.0,
            total_count: 1,
        };
        let observed_tcn_2 = ObservedTcn {
            tcn: TemporaryContactNumber([
                43, 229, 125, 245, 98, 86, 100, 1, 172, 25, 0, 150, 123, 66, 34, 12,
            ]),
            contact_start: UnixTime { value: 1590518190 },
            contact_end: UnixTime { value: 1590518191 },
            min_distance: 0.0,
            avg_distance: 0.0,
            total_count: 1,
        };
        let observed_tcn_3 = ObservedTcn {
            tcn: TemporaryContactNumber([
                11, 246, 125, 123, 102, 86, 100, 1, 34, 25, 21, 150, 99, 66, 34, 0,
            ]),
            contact_start: UnixTime { value: 2230522104 },
            contact_end: UnixTime { value: 2230522105 },
            min_distance: 0.0,
            avg_distance: 0.0,
            total_count: 1,
        };

        let save_res_1 = tcn_dao.overwrite(vec![observed_tcn_1.clone()]);
        let save_res_2 = tcn_dao.overwrite(vec![observed_tcn_2.clone()]);
        let save_res_3 = tcn_dao.overwrite(vec![observed_tcn_3.clone()]);

        assert!(save_res_1.is_ok());
        assert!(save_res_2.is_ok());
        assert!(save_res_3.is_ok());

        let loaded_tcns_res = tcn_dao.all();
        assert!(loaded_tcns_res.is_ok());

        let loaded_tcns = loaded_tcns_res.unwrap();

        assert_eq!(loaded_tcns.len(), 3);
        assert_eq!(loaded_tcns[0], observed_tcn_1);
        assert_eq!(loaded_tcns[1], observed_tcn_2);
        assert_eq!(loaded_tcns[2], observed_tcn_3);
    }

    // find_tcns must return only rows whose TCN is in the query list.
    #[test]
    fn test_finds_tcn() {
        let database = Arc::new(Database::new(
            Connection::open_in_memory().expect("Couldn't create database!"),
        ));
        let tcn_dao = Arc::new(TcnDaoImpl::new(database));

        let stored_tcn1 = ObservedTcn {
            tcn: TemporaryContactNumber([0; 16]),
            contact_start: UnixTime { value: 1000 },
            contact_end: UnixTime { value: 6000 },
            min_distance: 0.4,
            avg_distance: 0.4,
            total_count: 1,
        };
        let stored_tcn2 = ObservedTcn {
            tcn: TemporaryContactNumber([1; 16]),
            contact_start: UnixTime { value: 2000 },
            contact_end: UnixTime { value: 3000 },
            min_distance: 1.8,
            avg_distance: 1.8,
            total_count: 1,
        };
        let stored_tcn3 = ObservedTcn {
            tcn: TemporaryContactNumber([2; 16]),
            contact_start: UnixTime { value: 1600 },
            contact_end: UnixTime { value: 2600 },
            min_distance: 2.3,
            avg_distance: 2.3,
            total_count: 1,
        };

        let save_res = tcn_dao.overwrite(vec![
            stored_tcn1.clone(),
            stored_tcn2.clone(),
            stored_tcn3.clone(),
        ]);
        assert!(save_res.is_ok());

        let res = tcn_dao.find_tcns(vec![
            TemporaryContactNumber([0; 16]),
            TemporaryContactNumber([2; 16]),
        ]);
        assert!(res.is_ok());

        let mut tcns = res.unwrap();

        // Ordering of find_tcns results is not guaranteed; sort for the
        // comparisons below.
        tcns.sort_by_key(|tcn| tcn.contact_start.value);

        assert_eq!(2, tcns.len());
        assert_eq!(stored_tcn1, tcns[0]);
        assert_eq!(stored_tcn3, tcns[1]);
    }

    // A new exposure within the grouper threshold of a stored one must be
    // merged into it (extended end, min/avg distance updated, count bumped)
    // while unrelated stored exposures stay unchanged.
    #[test]
    fn test_multiple_exposures_updated_correctly() {
        let database = Arc::new(Database::new(
            Connection::open_in_memory().expect("Couldn't create database!"),
        ));
        let tcn_dao = Arc::new(TcnDaoImpl::new(database));

        let batches_manager =
            TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 });

        let stored_tcn1 = ObservedTcn {
            tcn: TemporaryContactNumber([0; 16]),
            contact_start: UnixTime { value: 1000 },
            contact_end: UnixTime { value: 3000 },
            min_distance: 0.4,
            avg_distance: 0.4,
            total_count: 1,
        };
        let stored_tcn2 = ObservedTcn {
            tcn: TemporaryContactNumber([0; 16]),
            contact_start: UnixTime { value: 5000 },
            contact_end: UnixTime { value: 7000 },
            min_distance: 2.0,
            avg_distance: 2.0,
            total_count: 1,
        };
        let save_res = tcn_dao.overwrite(vec![stored_tcn1.clone(), stored_tcn2.clone()]);
        assert!(save_res.is_ok());

        let tcn = ObservedTcn {
            tcn: TemporaryContactNumber([0; 16]),
            contact_start: UnixTime { value: 7500 },
            contact_end: UnixTime { value: 9000 },
            min_distance: 1.0,
            avg_distance: 1.0,
            total_count: 1,
        };
        batches_manager.push(tcn.clone());

        let flush_res = batches_manager.flush();
        assert!(flush_res.is_ok());

        let loaded_tcns_res = tcn_dao.all();
        assert!(loaded_tcns_res.is_ok());

        let mut loaded_tcns = loaded_tcns_res.unwrap();
        assert_eq!(2, loaded_tcns.len());

        loaded_tcns.sort_by_key(|tcn| tcn.contact_start.value);

        assert_eq!(
            loaded_tcns[0],
            ObservedTcn {
                tcn: TemporaryContactNumber([0; 16]),
                contact_start: UnixTime { value: 1000 },
                contact_end: UnixTime { value: 3000 },
                min_distance: 0.4,
                avg_distance: 0.4,
                total_count: 1
            }
        );
        assert_eq!(
            loaded_tcns[1],
            ObservedTcn {
                tcn: TemporaryContactNumber([0; 16]),
                contact_start: UnixTime { value: 5000 },
                contact_end: UnixTime { value: 9000 },
                min_distance: 1.0,
                avg_distance: 1.5,
                total_count: 2
            }
        );
    }
}
use crate::{ byte_vec_to_16_byte_array, errors::{ServicesError}, expect_log, reports_interval, tcn_recording::observed_tcn_processor::ObservedTcn, }; use log::*; use reports_interval::UnixTime; use rusqlite::{params, Row, NO_PARAMS, types::Value}; use std::{ sync::Arc, rc::Rc, }; use tcn::TemporaryContactNumber; use super::database::Database; pub trait TcnDao: Send + Sync { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError>; fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError>; fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError>; } pub struct TcnDaoImpl { db: Arc<Database>, } impl TcnDaoImpl { fn create_table_if_not_exists(db: &Arc<Database>) { let res = db.execute_sql( "create table if not exists tcn( tcn text not null, contact_start integer not null, contact_end integer not null, min_distance real not null, avg_distance real not null, total_count integer not null )", params![], ); expect_log!(res, "Couldn't create tcn table"); } fn to_tcn(row: &Row) -> ObservedTcn { let tcn: Result<String, _> = row.get(0); let tcn_value = expect_log!(tcn, "Invalid row: no TCN"); let tcn = Self::db_tcn_str_to_tcn(tcn_value); let contact_start_res = row.get(1); let contact_start: i64 = expect_log!(contact_start_res, "Invalid row: no contact start"); let contact_end_res = row.get(2); let contact_end: i64 = expect_log!(contact_end_res, "Invalid row: no contact end"); let min_distance_res = row.get(3); let min_distance: f64 = expect_log!(min_distance_res, "Invalid row: no min distance"); let avg_distance_res = row.get(4); let avg_distance: f64 = expect_log!(avg_distance_res, "Invalid row: no avg distance"); let total_count_res = row.get(5); let total_count: i64 = expect_log!(total_count_res, "Invalid row: no total count"); ObservedTcn { tcn, contact_start: UnixTime { value: contact_start as u64, }, contact_end: UnixTime { value: contact_end as u64, }, min_distance: min_distance as f32, avg_distance: 
avg_distance as f32, total_count: total_count as usize, } } fn db_tcn_str_to_tcn(str: String) -> TemporaryContactNumber { let tcn_value_bytes_vec_res = hex::decode(str); let tcn_value_bytes_vec = expect_log!(tcn_value_bytes_vec_res, "Invalid stored TCN format"); let tcn_value_bytes = byte_vec_to_16_byte_array(tcn_value_bytes_vec); TemporaryContactNumber(tcn_value_bytes) } pub fn new(db: Arc<Database>) -> TcnDaoImpl { Self::create_table_if_not_exists(&db); TcnDaoImpl { db } } } impl TcnDao for TcnDaoImpl { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError> { self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn", NO_PARAMS, |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError> { let tcn_strs: Vec<Value> = with.into_iter().map(|tcn| Value::Text(hex::encode(tcn.0)) ) .collect(); self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn where tcn in rarray(?);", params![Rc::new(tcn_strs)], |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError> { debug!("Overwriting db exposures with same TCNs, with: {:?}", observed_tcns); let tcn_strs: Vec<Value> = observed_tcns.clone().into_iter().map(|tcn| Value::Text(hex::encode(tcn.tcn.0)) ) .collect(); self.db.transaction(|t| { let delete_res = t.execute("delete from tcn where tcn in rarray(?);", params![Rc::new(tcn_strs)]); if delete_res.is_err() { return Err(ServicesError::General("Delete TCNs failed".to_owned())) } for tcn in observed_tcns { let tcn_str = hex::encode(tcn.tcn.0); let insert_res = t.execute("insert into tcn(tcn, contact_start, contact_end, min_distance, avg_distance, total_count) values(?1, ?2, ?3, ?4, ?5, ?6)", params![ tcn_str, tcn.contact_start.value as i64, tcn.contact_end.value as i64, tcn.min_distance as f64, 
tcn.avg_distance as f64, tcn.total_count as i64 ]); if insert_res.is_err() { return Err(ServicesError::General("Insert TCN failed".to_owned())) } } Ok(()) }) } } #[cfg(test)] mod tests { use super::*; use rusqlite::Connection; use crate::{tcn_recording::tcn_batches_manager::TcnBatchesManager, reports_update::exposure::ExposureGrouper}; #[test] fn saves_and_loads_observed_tcn() { let database = Arc::new(Databas
stance: 2.0, avg_distance: 2.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![stored_tcn1.clone(), stored_tcn2.clone()]); assert!(save_res.is_ok()); let tcn = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 7500 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.0, total_count: 1, }; batches_manager.push(tcn.clone()); let flush_res = batches_manager.flush(); assert!(flush_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let mut loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(2, loaded_tcns.len()); loaded_tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!( loaded_tcns[0], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1 } ); assert_eq!( loaded_tcns[1], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.5, total_count: 2 } ); } }
e::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![observed_tcn.clone()]); assert!(save_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 1); assert_eq!(loaded_tcns[0], observed_tcn); } #[test] fn saves_and_loads_multiple_tcns() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn_1 = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_2 = ObservedTcn { tcn: TemporaryContactNumber([ 43, 229, 125, 245, 98, 86, 100, 1, 172, 25, 0, 150, 123, 66, 34, 12, ]), contact_start: UnixTime { value: 1590518190 }, contact_end: UnixTime { value: 1590518191 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_3 = ObservedTcn { tcn: TemporaryContactNumber([ 11, 246, 125, 123, 102, 86, 100, 1, 34, 25, 21, 150, 99, 66, 34, 0, ]), contact_start: UnixTime { value: 2230522104 }, contact_end: UnixTime { value: 2230522105 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res_1 = tcn_dao.overwrite(vec![observed_tcn_1.clone()]); let save_res_2 = tcn_dao.overwrite(vec![observed_tcn_2.clone()]); let save_res_3 = tcn_dao.overwrite(vec![observed_tcn_3.clone()]); assert!(save_res_1.is_ok()); 
assert!(save_res_2.is_ok()); assert!(save_res_3.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 3); assert_eq!(loaded_tcns[0], observed_tcn_1); assert_eq!(loaded_tcns[1], observed_tcn_2); assert_eq!(loaded_tcns[2], observed_tcn_3); } #[test] fn test_finds_tcn() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 6000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([1; 16]), contact_start: UnixTime { value: 2000 }, contact_end: UnixTime { value: 3000 }, min_distance: 1.8, avg_distance: 1.8, total_count: 1, }; let stored_tcn3 = ObservedTcn { tcn: TemporaryContactNumber([2; 16]), contact_start: UnixTime { value: 1600 }, contact_end: UnixTime { value: 2600 }, min_distance: 2.3, avg_distance: 2.3, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![ stored_tcn1.clone(), stored_tcn2.clone(), stored_tcn3.clone(), ]); assert!(save_res.is_ok()); let res = tcn_dao.find_tcns(vec![ TemporaryContactNumber([0; 16]), TemporaryContactNumber([2; 16]), ]); assert!(res.is_ok()); let mut tcns = res.unwrap(); tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!(2, tcns.len()); assert_eq!(stored_tcn1, tcns[0]); assert_eq!(stored_tcn3, tcns[1]); } #[test] fn test_multiple_exposures_updated_correctly() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let batches_manager = TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 }); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { 
value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 7000 }, min_di
random
[ { "content": "fn record_tcn(env: &JNIEnv, tcn: JString, distance: jfloat) -> Result<(), ServicesError> {\n\n let tcn_java_str = env.get_string(tcn)?;\n\n let tcn_str = tcn_java_str.to_str()?;\n\n\n\n dependencies()\n\n .observed_tcn_processor\n\n .save(tcn_str, distance as f32)\n\n}\n\n\n...
Rust
src/bits/mask.rs
feb29/cwt
5e757ca672887b584c75871bc54875295759f825
use std::{
    borrow::Cow,
    cmp::Ordering::{self, Equal, Greater, Less},
    iter::{empty, Peekable},
};

/// A lazily-evaluated sequence of `(index, block)` steps supporting bitwise
/// set combinators. Steps are assumed to be yielded in ascending index order
/// (presumed from the merge logic below — TODO confirm with callers).
pub trait Mask<'a>: Sized {
    /// The per-step payload (e.g. a bit block); borrowed or owned via `Cow`.
    type Block: 'a + ?Sized + ToOwned;
    /// Iterator over `(index, block)` steps.
    type Steps: Iterator<Item = (usize, Cow<'a, Self::Block>)>;

    fn into_steps(self) -> Self::Steps;

    /// Lazy intersection with `that`.
    fn and<Rhs: Mask<'a>>(self, that: Rhs) -> And<'a, Self, Rhs> {
        And::new(self, that)
    }
    /// Lazy union with `that`.
    fn or<Rhs: Mask<'a>>(self, that: Rhs) -> Or<'a, Self, Rhs> {
        Or::new(self, that)
    }
    /// Lazy difference (`self \ that`).
    fn and_not<Rhs: Mask<'a>>(self, that: Rhs) -> AndNot<'a, Self, Rhs> {
        AndNot::new(self, that)
    }
    /// Lazy symmetric difference with `that`.
    fn xor<Rhs: Mask<'a>>(self, that: Rhs) -> Xor<'a, Self, Rhs> {
        Xor::new(self, that)
    }
}

// Blanket impl: anything iterable as `(usize, Cow<Block>)` is a Mask.
impl<'a, I, T> Mask<'a> for I
where
    T: 'a + ?Sized + ToOwned,
    I: IntoIterator<Item = (usize, Cow<'a, T>)>,
{
    type Block = T;
    type Steps = I::IntoIter;
    fn into_steps(self) -> Self::Steps {
        self.into_iter()
    }
}

// Defines one combinator struct per operator; each just holds the two
// peekable step streams, and the Iterator impls below do the merging.
macro_rules! defops {
    ( $( $name:ident ),* ) => ($(
        #[must_use = "do nothing unless consumed"]
        pub struct $name<'a, L: Mask<'a>, R: Mask<'a>> {
            lhs: Peekable<L::Steps>,
            rhs: Peekable<R::Steps>,
        }

        impl<'a, L: Mask<'a>, R: Mask<'a>> $name<'a, L, R> {
            pub(crate) fn new(lhs: L, rhs: R) -> Self {
                $name {
                    lhs: lhs.into_steps().peekable(),
                    rhs: rhs.into_steps().peekable(),
                }
            }
        }
    )*);
}
defops!(And, AndNot, Or, Xor);

// Intersection: advance whichever side has the smaller index; a step is
// yielded only when both sides have the same index, with the payloads
// intersected in place.
impl<'a, L, R> Iterator for And<'a, L, R>
where
    L: Mask<'a>,
    R: Mask<'a, Block = L::Block>,
    <L::Block as ToOwned>::Owned: Intersection<L::Block>,
{
    type Item = (usize, Cow<'a, L::Block>);
    fn next(&mut self) -> Option<Self::Item> {
        let lhs = &mut self.lhs;
        let rhs = &mut self.rhs;
        loop {
            // None as soon as either side is exhausted.
            let compared = lhs
                .peek()
                .and_then(|(x, _)| rhs.peek().map(|(y, _)| x.cmp(y)));
            match compared {
                Some(Less) => {
                    lhs.next();
                }
                Some(Equal) => {
                    let (i, mut lhs) = lhs.next().expect("unreachable");
                    let (j, rhs) = rhs.next().expect("unreachable");
                    debug_assert_eq!(i, j);
                    lhs.to_mut().intersection(&rhs);
                    break Some((i, lhs));
                }
                Some(Greater) => {
                    rhs.next();
                }
                None => break None,
            }
        }
    }
}

// Union: yield the step with the smaller index as-is; on an index tie,
// union the payloads. An exhausted side just drains the other.
impl<'a, L, R> Iterator for Or<'a, L, R>
where
    L: Mask<'a>,
    R: Mask<'a, Block = L::Block>,
    <L::Block as ToOwned>::Owned: Union<L::Block>,
{
    type Item = (usize, Cow<'a, L::Block>);
    fn next(&mut self) -> Option<Self::Item> {
        let lhs = &mut self.lhs;
        let rhs = &mut self.rhs;
        // Sentinels: lhs exhausted -> Greater (take rhs); rhs exhausted ->
        // Less (take lhs).
        match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) {
            Less => lhs.next(),
            Equal => {
                let (i, mut lhs) = lhs.next().expect("unreachable");
                let (j, rhs) = rhs.next().expect("unreachable");
                debug_assert_eq!(i, j);
                lhs.to_mut().union(rhs.as_ref());
                Some((i, lhs))
            }
            Greater => rhs.next(),
        }
    }
}

// Difference (lhs \ rhs): lhs-only steps pass through; on a tie the rhs
// payload is subtracted; rhs-only steps are skipped. When lhs is exhausted
// the Less sentinel makes `lhs.next()` return None, ending iteration.
impl<'a, L, R> Iterator for AndNot<'a, L, R>
where
    L: Mask<'a>,
    R: Mask<'a, Block = L::Block>,
    <L::Block as ToOwned>::Owned: Difference<L::Block>,
{
    type Item = (usize, Cow<'a, L::Block>);
    fn next(&mut self) -> Option<Self::Item> {
        let lhs = &mut self.lhs;
        let rhs = &mut self.rhs;
        loop {
            match cmp_index(lhs.peek(), rhs.peek(), Less, Less) {
                Less => return lhs.next(),
                Equal => {
                    let (i, mut lhs) = lhs.next().expect("unreachable");
                    let (j, rhs) = rhs.next().expect("unreachable");
                    debug_assert_eq!(i, j);
                    lhs.to_mut().difference(rhs.as_ref());
                    return Some((i, lhs));
                }
                Greater => {
                    rhs.next();
                }
            };
        }
    }
}

// Symmetric difference: structurally identical to Or, but payloads on an
// index tie are combined with symmetric_difference.
impl<'a, L, R> Iterator for Xor<'a, L, R>
where
    L: Mask<'a>,
    R: Mask<'a, Block = L::Block>,
    <L::Block as ToOwned>::Owned: SymmetricDifference<L::Block>,
{
    type Item = (usize, Cow<'a, L::Block>);
    fn next(&mut self) -> Option<Self::Item> {
        let lhs = &mut self.lhs;
        let rhs = &mut self.rhs;
        match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) {
            Less => lhs.next(),
            Equal => {
                let (i, mut lhs) = lhs.next().expect("unreachable");
                let (j, rhs) = rhs.next().expect("unreachable");
                debug_assert_eq!(i, j);
                lhs.to_mut().symmetric_difference(rhs.as_ref());
                Some((i, lhs))
            }
            Greater => rhs.next(),
        }
    }
}

// Free-function spellings of the Mask combinators.
#[inline]
pub fn and<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> And<'a, L, R> {
    And::new(lhs, rhs)
}
#[inline]
pub fn or<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Or<'a, L, R> {
    Or::new(lhs, rhs)
}
#[inline]
pub fn and_not<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> AndNot<'a, L, R> {
    AndNot::new(lhs, rhs)
}
#[inline]
pub fn xor<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Xor<'a, L, R> {
    Xor::new(lhs, rhs)
}

// In-place payload operations the combinators rely on; implemented by the
// owned form of each block type elsewhere in the crate.
pub trait Intersection<T: ?Sized> {
    fn intersection(&mut self, data: &T);
}
pub trait Union<T: ?Sized> {
    fn union(&mut self, data: &T);
}
pub trait Difference<T: ?Sized> {
    fn difference(&mut self, data: &T);
}
pub trait SymmetricDifference<T: ?Sized> {
    fn symmetric_difference(&mut self, data: &T);
}

// Compares the head indices of two step streams; an exhausted lhs yields
// `none_x`, an exhausted rhs yields `none_y`, letting each combinator pick
// its own end-of-stream behavior.
fn cmp_index<T>(
    x: Option<&(usize, T)>,
    y: Option<&(usize, T)>,
    none_x: Ordering,
    none_y: Ordering,
) -> Ordering {
    match (x, y) {
        (None, _) => none_x,
        (_, None) => none_y,
        (Some((i, _)), Some((j, _))) => i.cmp(j),
    }
}

/// A type-erased left fold over many masks with one combinator, so that
/// chains like `a ∧ b ∧ c ∧ …` have a single nameable iterator type.
pub struct Fold<'a, T>(Box<dyn Iterator<Item = (usize, T)> + 'a>);

impl<'a, T: ?Sized> Fold<'a, Cow<'a, T>>
where
    T: 'a + ToOwned,
{
    // Folds `xs` with `f`, boxing each intermediate so the accumulator has
    // one fixed type. An empty input folds to an empty iterator.
    pub(crate) fn fold<A, B, F>(xs: impl IntoIterator<Item = A>, mut f: F) -> Fold<'a, Cow<'a, T>>
    where
        A: 'a + Mask<'a, Block = T>,
        B: 'a + Mask<'a, Block = T>,
        F: FnMut(Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A) -> B,
    {
        let mut xs = xs.into_iter();
        if let Some(head) = xs.next() {
            let init = Box::new(head.into_steps());
            Fold(xs.fold(init, |a, x| Box::new(f(a, x).into_steps())))
        } else {
            Fold(Box::new(empty()))
        }
    }

    /// Intersection of all masks in `xs`.
    pub fn and<A>(xs: impl IntoIterator<Item = A>) -> Self
    where
        A: Mask<'a, Block = T>,
        And<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>,
    {
        Self::fold(xs, And::new)
    }

    /// Union of all masks in `xs`.
    pub fn or<A>(xs: impl IntoIterator<Item = A>) -> Self
    where
        A: Mask<'a, Block = T>,
        Or<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>,
    {
        Self::fold(xs, Or::new)
    }

    /// Left-associated difference: first mask minus each of the rest.
    pub fn and_not<A>(xs: impl IntoIterator<Item = A>) -> Self
    where
        A: Mask<'a, Block = T>,
        AndNot<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>,
    {
        Self::fold(xs, AndNot::new)
    }

    /// Symmetric difference of all masks in `xs`.
    pub fn xor<A>(xs: impl IntoIterator<Item = A>) -> Self
    where
        A: Mask<'a, Block = T>,
        Xor<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>,
    {
        Self::fold(xs, Xor::new)
    }
}

// Fold is itself just an iterator over the folded steps.
impl<'a, T: 'a + ?Sized + ToOwned> Iterator for Fold<'a, Cow<'a, T>> {
    type Item = (usize, Cow<'a, T>);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
use std::{ borrow::Cow, cmp::Ordering::{self, Equal, Greater, Less}, iter::{empty, Peekable}, }; pub trait Mask<'a>: Sized { type Block: 'a + ?Sized + ToOwned; type Steps: Iterator<Item = (usize, Cow<'a, Self::Block>)>; fn into_steps(self) -> Self::Steps; fn and<Rhs: Mask<'a>>(self, that: Rhs) -> And<'a, Self, Rhs> { And::new(self, that) } fn or<Rhs: Mask<'a>>(self, that: Rhs) -> Or<'a, Self, Rhs> { Or::new(self, that) } fn and_not<Rhs: Mask<'a>>(self, that: Rhs) -> AndNot<'a, Self, Rhs> { AndNot::new(self, that) } fn xor<Rhs: Mask<'a>>(self, that: Rhs) -> Xor<'a, Self, Rhs> { Xor::new(self, that) } } impl<'a, I, T> Mask<'a> for I where T: 'a + ?Sized + ToOwned, I: IntoIterator<Item = (usize, Cow<'a, T>)>, { type Block = T; type Steps = I::IntoIter; fn into_steps(self) -> Self::Steps { self.into_iter() } } macro_rules! defops { ( $( $name:ident ),* ) => ($( #[must_use = "do nothing unless consumed"] pub struct $name<'a, L: Mask<'a>, R: Mask<'a>> { lhs: Peekable<L::Steps>, rhs: Peekable<R::Steps>, } impl<'a, L: Mask<'a>, R: Mask<'a>> $name<'a, L, R> { pub(crate) fn new(lhs: L, rhs: R) -> Self { $name { lhs: lhs.into_steps().peekable(), rhs: rhs.into_steps().peekable(), } } } )*); } defops!(And, AndNot, Or, Xor); impl<'a, L, R> Iterator for And<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Intersection<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { let compared = lhs .peek() .and_then(|(x, _)| rhs.peek().map(|(y, _)| x.cmp(y))); match compared { Some(Less) => { lhs.next(); } Some(Equal) => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().intersection(&rhs); break Some((i, lhs)); } Some(Greater) => { rhs.next(); } None => break None, } } } } impl<'a, L, R> Iterator for Or<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, 
<L::Block as ToOwned>::Owned: Union<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().union(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } impl<'a, L, R> Iterator for AndNot<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Difference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { match cmp_index(lhs.peek(), rhs.peek(), Less, Less) { Less => return lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().difference(rhs.as_ref()); return Some((i, lhs)); } Greater => { rhs.next(); } }; } } } impl<'a, L, R> Iterator for Xor<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: SymmetricDifference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().symmetric_difference(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } #[inline] pub fn and<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> And<'a, L, R> { And::new(lhs, rhs) } #[inline] pub fn or<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Or<'a, L, R> { Or::new(lhs, rhs) } #[inline] pub fn and_not<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> AndNot<'a, L, R> { 
AndNot::new(lhs, rhs) } #[inline] pub fn xor<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Xor<'a, L, R> { Xor::new(lhs, rhs) } pub trait Intersection<T: ?Sized> { fn intersection(&mut self, data: &T); } pub trait Union<T: ?Sized> { fn union(&mut self, data: &T); } pub trait Difference<T: ?Sized> { fn difference(&mut self, data: &T); } pub trait SymmetricDifference<T: ?Sized> { fn symmetric_difference(&mut self, data: &T); } fn cmp_index<T>( x: Option<&(usize, T)>, y: Option<&(usize, T)>, none_x: Ordering, none_y: Ordering, ) -> Ordering { match (x, y) { (None, _) => none_x, (_, None) => none_y, (Some((i, _)), Some((j, _))) => i.cmp(j), } } pub struct Fold<'a, T>(Box<dyn Iterator<Item = (usize, T)> + 'a>); impl<'a, T: ?Sized> Fold<'a, Cow<'a, T>> where T: 'a + ToOwned, {
pub fn and<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, And<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, And::new) } pub fn or<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Or<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Or::new) } pub fn and_not<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, AndNot<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, AndNot::new) } pub fn xor<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Xor<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Xor::new) } } impl<'a, T: 'a + ?Sized + ToOwned> Iterator for Fold<'a, Cow<'a, T>> { type Item = (usize, Cow<'a, T>); #[inline] fn next(&mut self) -> Option<Self::Item> { self.0.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } }
pub(crate) fn fold<A, B, F>(xs: impl IntoIterator<Item = A>, mut f: F) -> Fold<'a, Cow<'a, T>> where A: 'a + Mask<'a, Block = T>, B: 'a + Mask<'a, Block = T>, F: FnMut(Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A) -> B, { let mut xs = xs.into_iter(); if let Some(head) = xs.next() { let init = Box::new(head.into_steps()); Fold(xs.fold(init, |a, x| Box::new(f(a, x).into_steps()))) } else { Fold(Box::new(empty())) } }
function_block-full_function
[ { "content": "#[inline]\n\npub fn blocks<T: FixedBits>(n: usize) -> usize {\n\n blocks_by(n, T::SIZE)\n\n}\n\n\n\n/// Computes the minimum length of the sequence to store `n` bits.\n\n#[inline]\n\npub const fn blocks_by(n: usize, block_size: usize) -> usize {\n\n // If we want 17 bits, dividing by 32 will...
Rust
src/module.rs
accup/cargo-expunch
58b24319545a2874a3a3fc0127f745c00ef3bd60
use std::collections::HashMap; use std::path::PathBuf; use syn::{self, UseTree}; pub fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> { let mut full_parts = prefix.to_vec(); for part in suffix { match part { _ if part == "crate" || part == crate_name => { full_parts.clear(); full_parts.push(part.clone()); } _ if part == "super" => { full_parts.pop(); } _ if part == "self" => {} _ => { full_parts.push(part.clone()); } } } full_parts } #[derive(Debug, Clone)] pub enum ModuleItemAccessibility { Direct(ModuleItemPath), Indirect(ModuleItemPath), } #[derive(Debug, Clone)] pub enum ModuleItemPath { Dir(Vec<String>, PathBuf), File(Vec<String>, PathBuf), Insoluble(Vec<String>), } pub fn make_module_item_path( module_parts: &[String], package_name: &str, package_src_path: &PathBuf, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<ModuleItemPath, String> { let mut lib_file = None; let mut path_buf = PathBuf::new(); let resolved_parts = module_parts .iter() .filter_map(|module_part| match module_part { _ if module_part == "crate" => None, _ if module_part == "self" => None, _ => Some(String::from(module_part)), }) .collect(); for module_part in module_parts.iter() { lib_file = None; path_buf.push(match module_part { _ if module_part == "crate" => crate_path.clone(), _ if module_part == package_name => { lib_file = Some(package_src_path.join("lib.rs")); package_src_path.clone() } _ if module_part == "super" => self_path .parent() .ok_or_else(|| { format!( "Failed to get the parent directory of the {0} {1} より上の階層へ遡ろうとしました", self_path.to_str().unwrap_or("(undisplayable path)"), self_path.to_str().unwrap_or("(表示できないパス)"), ) })? 
.to_path_buf(), _ if module_part == "self" => { if path_buf.as_os_str().is_empty() { self_path.clone() } else { continue; } } _ => PathBuf::from(module_part), }); } let module_name_file = path_buf.with_extension("rs"); Ok( if let Some(lib_file) = lib_file.and_then(|file| if file.is_file() { Some(file) } else { None }) { ModuleItemPath::File(resolved_parts, lib_file) } else if module_name_file.is_file() { ModuleItemPath::File(resolved_parts, module_name_file) } else if path_buf.is_dir() { let mod_file = path_buf.join("mod.rs"); if mod_file.is_file() { ModuleItemPath::File(resolved_parts, mod_file) } else { ModuleItemPath::Dir(resolved_parts, path_buf) } } else { ModuleItemPath::Insoluble(resolved_parts) }, ) } pub fn collect_module_items( use_tree: &UseTree, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<Vec<ModuleItemAccessibility>, String> { let mut module_path_map = HashMap::new(); collect_module_items_impl( use_tree, &mut Vec::new(), package_name, package_src_path, crate_name, crate_path, self_path, &mut module_path_map, )?; Ok(module_path_map.values().cloned().collect()) } fn collect_module_items_impl( use_tree: &UseTree, module_parts: &mut Vec<String>, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, module_path_map: &mut HashMap<Vec<String>, ModuleItemAccessibility>, ) -> Result<(), String> { match use_tree { UseTree::Path(use_path) => { let name = use_path.ident.to_string(); module_parts.push(name); module_path_map.entry(module_parts.clone()).or_insert( ModuleItemAccessibility::Indirect(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?), ); collect_module_items_impl( &use_path.tree, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; module_parts.pop(); } UseTree::Name(use_name) => { let name = use_name.ident.to_string(); 
module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Rename(use_rename) => { let name = use_rename.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Group(use_group) => { for item in use_group.items.iter() { collect_module_items_impl( item, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; } } _ => (), }; Ok(()) }
use std::collections::HashMap; use std::path::PathBuf; use syn::{self, UseTree}; pub fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> {
ne() } _ if module_part == "super" => self_path .parent() .ok_or_else(|| { format!( "Failed to get the parent directory of the {0} {1} より上の階層へ遡ろうとしました", self_path.to_str().unwrap_or("(undisplayable path)"), self_path.to_str().unwrap_or("(表示できないパス)"), ) })? .to_path_buf(), _ if module_part == "self" => { if path_buf.as_os_str().is_empty() { self_path.clone() } else { continue; } } _ => PathBuf::from(module_part), }); } let module_name_file = path_buf.with_extension("rs"); Ok( if let Some(lib_file) = lib_file.and_then(|file| if file.is_file() { Some(file) } else { None }) { ModuleItemPath::File(resolved_parts, lib_file) } else if module_name_file.is_file() { ModuleItemPath::File(resolved_parts, module_name_file) } else if path_buf.is_dir() { let mod_file = path_buf.join("mod.rs"); if mod_file.is_file() { ModuleItemPath::File(resolved_parts, mod_file) } else { ModuleItemPath::Dir(resolved_parts, path_buf) } } else { ModuleItemPath::Insoluble(resolved_parts) }, ) } pub fn collect_module_items( use_tree: &UseTree, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<Vec<ModuleItemAccessibility>, String> { let mut module_path_map = HashMap::new(); collect_module_items_impl( use_tree, &mut Vec::new(), package_name, package_src_path, crate_name, crate_path, self_path, &mut module_path_map, )?; Ok(module_path_map.values().cloned().collect()) } fn collect_module_items_impl( use_tree: &UseTree, module_parts: &mut Vec<String>, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, module_path_map: &mut HashMap<Vec<String>, ModuleItemAccessibility>, ) -> Result<(), String> { match use_tree { UseTree::Path(use_path) => { let name = use_path.ident.to_string(); module_parts.push(name); module_path_map.entry(module_parts.clone()).or_insert( ModuleItemAccessibility::Indirect(make_module_item_path( module_parts, package_name, package_src_path, crate_path, 
self_path, )?), ); collect_module_items_impl( &use_path.tree, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; module_parts.pop(); } UseTree::Name(use_name) => { let name = use_name.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Rename(use_rename) => { let name = use_rename.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Group(use_group) => { for item in use_group.items.iter() { collect_module_items_impl( item, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; } } _ => (), }; Ok(()) }
let mut full_parts = prefix.to_vec(); for part in suffix { match part { _ if part == "crate" || part == crate_name => { full_parts.clear(); full_parts.push(part.clone()); } _ if part == "super" => { full_parts.pop(); } _ if part == "self" => {} _ => { full_parts.push(part.clone()); } } } full_parts } #[derive(Debug, Clone)] pub enum ModuleItemAccessibility { Direct(ModuleItemPath), Indirect(ModuleItemPath), } #[derive(Debug, Clone)] pub enum ModuleItemPath { Dir(Vec<String>, PathBuf), File(Vec<String>, PathBuf), Insoluble(Vec<String>), } pub fn make_module_item_path( module_parts: &[String], package_name: &str, package_src_path: &PathBuf, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<ModuleItemPath, String> { let mut lib_file = None; let mut path_buf = PathBuf::new(); let resolved_parts = module_parts .iter() .filter_map(|module_part| match module_part { _ if module_part == "crate" => None, _ if module_part == "self" => None, _ => Some(String::from(module_part)), }) .collect(); for module_part in module_parts.iter() { lib_file = None; path_buf.push(match module_part { _ if module_part == "crate" => crate_path.clone(), _ if module_part == package_name => { lib_file = Some(package_src_path.join("lib.rs")); package_src_path.clo
random
[ { "content": "/// Rustソースコードを解析して展開する\n\nfn expunch_file(source_code_path: &str) -> Result<(), String> {\n\n let source_code_path = PathBuf::from(source_code_path);\n\n let package_path = PathBuf::from(\".\");\n\n let metadata = MetadataCommand::new()\n\n .manifest_path(\"./Cargo.toml\")\n\n ...
Rust
src/resource.rs
pduval/rustic_hal
ca1651aaf2651d21d5268ba9b7edc016f4c4801f
use std::collections::btree_map::Entry; use std::collections::*; use std::vec::*; use serde::de::Error; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use super::link::HalLink; use super::{HalError, HalResult}; use serde_json::{from_value, to_value, Map, Value as JsonValue}; #[derive(Clone, Debug)] pub struct OneOrMany<T> { force_many: bool, content: Vec<T>, } impl<T> OneOrMany<T> where T: Sized + Clone, { pub fn new() -> OneOrMany<T> { OneOrMany { content: Vec::new(), force_many: false, } } pub fn force_many(mut self) -> Self { self.force_many = true; self } pub fn len(&self) -> usize { self.content.len() } pub fn is_empty(&self) -> bool { self.content.is_empty() } pub fn single(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self.content[0]) } } pub fn many(&self) -> &Vec<T> { &self.content } pub fn push(&mut self, newval: &T) { self.content.push(newval.clone()); } pub fn with(mut self, newval: &T) -> Self { self.content.push(newval.clone()); self } } impl<T> Serialize for OneOrMany<T> where T: Serialize + Clone, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if self.is_empty() && !self.force_many { ().serialize(serializer) } else if self.len() == 1 && !self.force_many { self.single().serialize(serializer) } else { self.content.serialize(serializer) } } } impl<'de, T> Deserialize<'de> for OneOrMany<T> where for<'d> T: Deserialize<'d> + Clone, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let value: JsonValue = Deserialize::deserialize(deserializer)?; let v2 = value.clone(); match v2 { JsonValue::Object(_) => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } JsonValue::Array(_) => { let obj: Vec<T> = match from_value(value) { Ok(v) => from_value(v).unwrap(), Err(e) => return Err(D::Error::custom(format!("JSON 
Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.content = obj; Ok(res) } _ => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))) }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } } } } #[derive(Clone, Serialize, Deserialize)] pub struct HalResource { #[serde(rename = "_links", default, skip_serializing_if = "BTreeMap::is_empty")] links: BTreeMap<String, OneOrMany<HalLink>>, #[serde( rename = "_embedded", default, skip_serializing_if = "BTreeMap::is_empty" )] embedded: BTreeMap<String, OneOrMany<HalResource>>, #[serde( rename = "_curies", default, skip_serializing_if = "BTreeMap::is_empty" )] curies: BTreeMap<String, HalLink>, #[serde(flatten)] data: Option<JsonValue>, } impl HalResource { pub fn new<T>(payload: T) -> HalResource where T: Serialize, { let val = match to_value(payload) { Ok(val) => match val { JsonValue::Object(_) => Some(val), _ => None, }, _ => None, }; HalResource { links: BTreeMap::new(), embedded: BTreeMap::new(), curies: BTreeMap::new(), data: val, } } pub fn with_link<S, L>(mut self, name: S, link: L) -> Self where S: Into<String>, L: Into<HalLink>, { let lk_name = name.into(); match self.links.entry(lk_name.clone()) { Entry::Vacant(entry) => { let mut lk = OneOrMany::new(); let mut lk = match lk_name.as_ref() { "curies" => lk.force_many(), _ => lk, }; lk.push(&(link.into())); entry.insert(lk); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&(link.into())); } } self } pub fn get_link(&self, name: &str) -> Option<&HalLink> { match self.links.get(name) { Some(link) => link.single(), None => None, } } pub fn get_self(&self) -> Option<&HalLink> { self.get_link("self") } pub fn get_links(&self, name: &str) -> Option<&Vec<HalLink>> { match self.links.get(name) { Some(link) => Some(link.many()), None => None, } } pub fn with_resource(mut self, name: &str, resource: HalResource) -> Self { match 
self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut resources = OneOrMany::new(); resources.push(&resource); entry.insert(resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&resource); } } self } pub fn with_resources(mut self, name: &str, resources: Vec<HalResource>) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut _resources = OneOrMany::new().force_many(); for resource in resources.iter() { _resources.push(resource) } entry.insert(_resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); for resource in resources.iter() { content.push(&resource); } } } self } pub fn with_curie(self, name: &str, href: &str) -> Self { self.with_link("curies", HalLink::new(href).templated(true).with_name(name)) } pub fn with_extra_data<V>(mut self, name: &str, value: V) -> Self where V: Serialize, { match self.data { Some(JsonValue::Object(ref mut m)) => { m.insert(name.to_string(), to_value(value).unwrap()); } _ => { let mut data = Map::<String, JsonValue>::new(); data.insert(name.to_string(), to_value(value).unwrap()); self.data = Some(JsonValue::Object(data)); } }; self } pub fn get_extra_data<V>(&self, name: &str) -> HalResult<V> where for<'de> V: Deserialize<'de>, { let data = match self.data { Some(JsonValue::Object(ref m)) => m, _ => return Err(HalError::Custom("Invalid payload".to_string())), }; match data.get(name) { Some(v) => from_value::<V>(v.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom(format!("Key {} missing in payload", name))), } } pub fn get_data<V>(&self) -> HalResult<V> where for<'de> V: Deserialize<'de>, { match self.data { Some(ref val) => from_value::<V>(val.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom("No value".to_owned())), } } } impl PartialEq for HalResource { fn eq(&self, other: &HalResource) -> bool { self.get_self() == other.get_self() } }
use std::collections::btree_map::Entry; use std::collections::*; use std::vec::*; use serde::de::Error; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use super::link::HalLink; use super::{HalError, HalResult}; use serde_json::{from_value, to_value, Map, Value as JsonValue}; #[derive(Clone, Debug)] pub struct OneOrMany<T> { force_many: bool, content: Vec<T>, } impl<T> OneOrMany<T> where T: Sized + Clone, { pub fn new() -> OneOrMany<T> { OneOrMany { content: Vec::new(), force_many: false, } } pub fn force_many(mut self) -> Self { self.force_many = true; self } pub fn len(&self) -> usize { self.content.len() } pub fn is_empty(&self) -> bool { self.content.is_empty() } pub fn single(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self.content[0]) } } pub fn many(&self) -> &Vec<T> { &self.content } pub fn push(&mut self, newval: &T) { self.content.push(newval.clone()); } pub fn with(mut self, newval: &T) -> Self { self.content.push(newval.clone()); self } } impl<T> Serialize for OneOrMany<T> where T: Serialize + Clone, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if self.is_empty() && !self.force_many { ().serialize(serializer) } else if self.len() == 1 && !self.force_many { self.single().serialize(serializer) } else { self.content.serialize(serializer) } } } impl<'de, T> Deserialize<'de> for OneOrMany<T> where for<'d> T: Deserialize<'d> + Clone, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let value: JsonValue = Deserialize::deserialize(deserializer)?; let v2 = value.clone(); match v2 { JsonValue::Object(_) => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } JsonValue::Array(_) => { let obj: Vec<T> = match from_value(value) { Ok(v) => from_value(v).unwrap(), Err(e) => return Err(D::Error::custom(format!("JSON 
Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.content = obj; Ok(res) } _ => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))) }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } } } } #[derive(Clone, Serialize, Deserialize)] pub struct HalResource { #[serde(rename = "_links", default, skip_serializing_if = "BTreeMap::is_empty")] links: BTreeMap<String, OneOrMany<HalLink>>, #[serde( rename = "_embedded", default, skip_serializing_if = "BTreeMap::is_empty" )] embedded: BTreeMap<String, OneOrMany<HalResource>>, #[serde( rename = "_curies", default, skip_serializing_if = "BTreeMap::is_empty" )] curies: BTreeMap<String, HalLink>, #[serde(flatten)] data: Option<JsonValue>, } impl HalResourc
h(&resource); } } } self } pub fn with_curie(self, name: &str, href: &str) -> Self { self.with_link("curies", HalLink::new(href).templated(true).with_name(name)) } pub fn with_extra_data<V>(mut self, name: &str, value: V) -> Self where V: Serialize, { match self.data { Some(JsonValue::Object(ref mut m)) => { m.insert(name.to_string(), to_value(value).unwrap()); } _ => { let mut data = Map::<String, JsonValue>::new(); data.insert(name.to_string(), to_value(value).unwrap()); self.data = Some(JsonValue::Object(data)); } }; self } pub fn get_extra_data<V>(&self, name: &str) -> HalResult<V> where for<'de> V: Deserialize<'de>, { let data = match self.data { Some(JsonValue::Object(ref m)) => m, _ => return Err(HalError::Custom("Invalid payload".to_string())), }; match data.get(name) { Some(v) => from_value::<V>(v.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom(format!("Key {} missing in payload", name))), } } pub fn get_data<V>(&self) -> HalResult<V> where for<'de> V: Deserialize<'de>, { match self.data { Some(ref val) => from_value::<V>(val.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom("No value".to_owned())), } } } impl PartialEq for HalResource { fn eq(&self, other: &HalResource) -> bool { self.get_self() == other.get_self() } }
e { pub fn new<T>(payload: T) -> HalResource where T: Serialize, { let val = match to_value(payload) { Ok(val) => match val { JsonValue::Object(_) => Some(val), _ => None, }, _ => None, }; HalResource { links: BTreeMap::new(), embedded: BTreeMap::new(), curies: BTreeMap::new(), data: val, } } pub fn with_link<S, L>(mut self, name: S, link: L) -> Self where S: Into<String>, L: Into<HalLink>, { let lk_name = name.into(); match self.links.entry(lk_name.clone()) { Entry::Vacant(entry) => { let mut lk = OneOrMany::new(); let mut lk = match lk_name.as_ref() { "curies" => lk.force_many(), _ => lk, }; lk.push(&(link.into())); entry.insert(lk); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&(link.into())); } } self } pub fn get_link(&self, name: &str) -> Option<&HalLink> { match self.links.get(name) { Some(link) => link.single(), None => None, } } pub fn get_self(&self) -> Option<&HalLink> { self.get_link("self") } pub fn get_links(&self, name: &str) -> Option<&Vec<HalLink>> { match self.links.get(name) { Some(link) => Some(link.many()), None => None, } } pub fn with_resource(mut self, name: &str, resource: HalResource) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut resources = OneOrMany::new(); resources.push(&resource); entry.insert(resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&resource); } } self } pub fn with_resources(mut self, name: &str, resources: Vec<HalResource>) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut _resources = OneOrMany::new().force_many(); for resource in resources.iter() { _resources.push(resource) } entry.insert(_resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); for resource in resources.iter() { content.pus
random
[ { "content": "fn is_not(b: &bool) -> bool {\n\n !*b\n\n}\n\nmacro_rules! chainable_string {\n\n ($x: ident, $y: ident) => {\n\n pub fn $y(mut self, $x: &str) -> Self {\n\n self.$x = Some($x.to_string());\n\n self\n\n }\n\n\n\n pub fn $x(&self) -> Option<String> {...
Rust
src/poly/comp.rs
HColeman127/algeo
bf2ac1514041ff00a1e571732ac01647329ece72
use std::marker::PhantomData; use itertools::{EitherOrBoth, Itertools}; use crate::core::num::Field; use super::elts::*; use super::mdeg::MultiDegree; use super::ord::MonomialOrder; pub struct Computer<F: Field, O: MonomialOrder> { _marker: PhantomData<(F, O)>, } impl<F: Field, O: MonomialOrder> Computer<F, O> { pub fn sort_terms(f: &Polynomial<F>) -> Polynomial<F> { Polynomial::new_unchecked( f.terms() .sorted_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) .cloned() .collect(), ) } pub fn leading_term(f: &Polynomial<F>) -> Option<&Term<F>> { f.terms().max_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) } pub fn leading_coef(f: &Polynomial<F>) -> Option<F> { Some(Self::leading_term(f)?.coef) } pub fn divide( f: &Polynomial<F>, divs: &[Polynomial<F>], ) -> (Polynomial<F>, Vec<Polynomial<F>>) { let m = divs.len(); let mut quotients = vec![Polynomial::<F>::zero(); m]; let mut remainder = Polynomial::<F>::zero(); let mut f = f.clone(); 'outer: while let Some(lt_f) = Self::leading_term(&f).cloned() { for (g, q) in divs.iter().zip(quotients.iter_mut()) { if let Some(lt_g) = Self::leading_term(&g) { if let Some(a) = lt_f.try_div(lt_g) { *q = &*q + &a; f = f - a * g; continue 'outer; } } } f.terms.sort_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)); if let Some(lt_f) = f.terms.pop() { remainder = remainder + lt_f; } } (remainder, quotients) } pub fn monic_lcm(s: &Term<F>, t: &Term<F>) -> Term<F> { if s.is_zero() || t.is_zero() { return Term::zero(); } Term::monic(MultiDegree( s.mdeg .degs() .zip_longest(t.mdeg.degs()) .map(|pair| match pair { EitherOrBoth::Both(a, b) => *a.max(b), EitherOrBoth::Left(a) => *a, EitherOrBoth::Right(b) => *b, }) .collect(), )) } pub fn try_reduce(f: &Polynomial<F>, g: &Polynomial<F>) -> Option<Polynomial<F>> { let lt_f = Self::leading_term(f)?; let lt_g = Self::leading_term(g)?; let lcm = Self::monic_lcm(&lt_f, &lt_g); Some(lcm.try_div(&lt_f)? * f - lcm.try_div(&lt_g)? 
* g) } pub fn buchberger_criterion(generators: &[Polynomial<F>]) -> bool { let m = generators.len(); for (i, j) in (0..m).cartesian_product(0..m).filter(|&(f, g)| f != g) { if let Some(h) = Self::try_reduce(&generators[i], &generators[j]) { if !Self::divide(&h, generators).0.is_zero() { return false; } } else { panic!("should not happen. yeah i know, joseph, 'parse don't validate'"); } } true } fn buchberger_extend(generators: &mut Vec<Polynomial<F>>) { let mut i = 0; 'outer: while i < generators.len() { for j in 0..i { let (r, _) = Self::divide( &Self::try_reduce(&generators[i], &generators[j]) .expect("should not happen. yeah i know, joseph, 'parse don't validate'"), generators, ); if let Some(lc_r) = Self::leading_coef(&r) { generators.push(&r / lc_r); i = 0; continue 'outer; } } i += 1; } } fn buchberger_minimize(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { if Self::leading_term(&generators[j]) .unwrap() .divides(&Self::leading_term(&generators[i]).unwrap()) { generators.remove(i); continue 'outer; } } }; } } fn buchberger_reduce(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { for t in generators[i].terms() { if Self::leading_term(&generators[j]).unwrap().divides(t) { let g = generators.remove(i); let (g_rem, _) = Self::divide(&g, &generators); generators.push(g_rem); continue 'outer; } } } }; } } pub fn buchberger_algorithm(generators: &mut Vec<Polynomial<F>>) { Self::buchberger_extend(generators); Self::buchberger_minimize(generators); Self::buchberger_reduce(generators); } } #[inline] fn pairs_iter(n: usize) -> impl Iterator<Item = (usize, usize)> { (0..n).cartesian_product(0..n).filter(|(i, j)| i != j) } #[cfg(test)] mod tests { use std::ops::Add; use super::*; use crate::{ core::num::Rational, poly::{elts::Polynomial, elts::Term}, }; use super::super::ord::Lex; type Poly = Polynomial<f64>; type Comp = Computer<f64, Lex>; #[allow(unused)] 
macro_rules! pp { ($poly:expr) => { println!("{} = {}", stringify!($poly), &$poly); }; } #[allow(unused)] macro_rules! pps { ($polys:expr) => { for i in 0..($polys.len()) { println!("{}[{}] = {}", stringify!($polys), i, &$polys[i]); } }; } #[allow(unused)] #[test] fn dbg_stuff() { let c = |coef: f64| Poly::from(coef); fn_vars! { f64: x y z } /* let f = y(1); let divs = [ Poly::zero(), y(1) + c(1.0), ]; println!("{} / [{}, {}]", &f, &divs[0], &divs[1]); let (r, q) = Computer::<Lex, f64>::divide(&f, &divs); pp!(r); pp!(q[0]); pp!(q[1]); pp!(&q[0] * &divs[0]); pp!(&q[1] * &divs[1]); */ let f = x(2) + x(3) + x(1) + x(2) + x(0) + x(3); pp!(f); } #[test] fn division() { let poly_iter = polys(2, 3, 1) .into_iter() .cartesian_product(polys(2, 1, 2).into_iter().cartesian_product(polys(2, 1, 2))); for (f, (g1, g2)) in poly_iter { test_result_equality(&f, &[g1, g2]); } } fn polys(vars: i32, max_deg: u8, max_coef: i32) -> Vec<Polynomial<f64>> { (0..vars) .map(|_| (0..=max_deg).rev()) .multi_cartesian_product() .map(MultiDegree::from_vec) .filter(|mdeg| mdeg.total_deg() <= max_deg) .map(|mdeg| { (0..=max_coef) .map(f64::from) .map(move |coef| Term::new(coef, mdeg.clone())) }) .multi_cartesian_product() .map(|v| Poly::new_unchecked(v.into_iter().filter(|t| !t.is_zero()).collect())) .collect() } #[cfg(test)] fn poly_assert_eq(f1: &Poly, f2: &Poly) { assert_eq!(Comp::sort_terms(f1).terms, Comp::sort_terms(f2).terms); } #[cfg(test)] fn test_result_equality(f: &Poly, g: &[Poly]) { let (r, q) = Comp::divide(f, g); let f2 = q.iter().zip_eq(g).map(|(qi, gi)| qi * gi).fold(r, Add::add); poly_assert_eq(f, &f2); } #[test] fn reduction() { let c = |coef| Polynomial::from(Rational::new_i64(coef, 1)); fn_vars! 
{ Rational: x y } type CompQ = Computer<Rational, Lex>; let f = c(5) * x(4) * y(3) + c(2) * x(2) * y(1) + c(3) * x(1) * y(2) + y(2) + c(3); let g = c(8) * x(5) * y(2) + x(3) + c(3) * x(2) * y(2) + y(4) + c(6); pp!(f); pp!(g); println!(); if let Some(h) = CompQ::try_reduce(&f, &g) { pp!(h); } else { println!("could not reduce"); } } #[test] fn buchberger() { let q = |a, b| Polynomial::from(Rational::new_i64(a, b)); let c = |coef| q(coef, 1); fn_vars! { Rational: x y } type CompQ = Computer<Rational, Lex>; fn print_buchberger(g: &[Polynomial<Rational>]) -> Vec<Polynomial<Rational>> { let mut g = Vec::from(g); println!("initial:"); pps!(g); CompQ::buchberger_algorithm(&mut g); println!("\nresult:"); pps!(g); println!("--------------------"); g } print_buchberger(&[x(3) * y(1) - x(1) * y(2) + c(1), x(2) * y(2) - y(3) - c(1)]); print_buchberger(&[ x(2) + x(1) * y(5) + y(4), x(1) * y(6) - x(1) * y(3) + y(5) - y(2), x(1) * y(5) - x(1) * y(2), ]); } }
use std::marker::PhantomData; use itertools::{EitherOrBoth, Itertools}; use crate::core::num::Field; use super::elts::*; use super::mdeg::MultiDegree; use super::ord::MonomialOrder; pub struct Computer<F: Field, O: MonomialOrder> { _marker: PhantomData<(F, O)>, } impl<F: Field, O: MonomialOrder> Computer<F, O> { pub fn sort_terms(f: &Polynomial<F>) -> Polynomial<F> { Polynomial::new_unchecked( f.terms() .sorted_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) .cloned() .collect(), ) } pub fn leading_term(f: &Polynomial<F>) -> Option<&Term<F>> { f.terms().max_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) } pub fn leading_coef(f: &Polynomial<F>) -> Option<F> { Some(Self::leading_term(f)?.coef) } pub fn divide( f: &Polynomial<F>, divs: &[Polynomial<F>], ) -> (Polynomial<F>, Vec<Polynomial<F>>) { let m = divs.len(); let mut quotients = vec![Polynomial::<F>::zero(); m]; let mut remainder = Polynomial::<F>::zero(); let mut f = f.clone(); 'outer: while let Some(lt_f) = Self::leading_term(&f).cloned() { for (g, q) in divs.iter().zip(quotients.iter_mut()) { if let Some(lt_g) = Self::leading_term(&g) { if let Some(a) = lt_f.try_div(lt_g) { *q = &*q + &a; f = f - a * g; continue 'outer; } } } f.terms.sort_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)); if let Some(lt_f) = f.terms.pop() { remainder = remainder + lt_f; } } (remainder, quotients) } pub fn monic_lcm(s: &Term<F>, t: &Term<F>) -> Term<F> { if s.is_zero() || t.is_zero() { return Term::zero(); } Term::monic(MultiDegree( s.mdeg .degs() .zip_longest(t.mdeg.degs()) .map(|pair| match pair { EitherOrBoth::Both(a, b) => *a.max(b), EitherOrBoth::Left(a) => *a, EitherOrBoth::Right(b) => *b, }) .collect(), )) } pub fn try_reduce(f: &Polynomial<F>, g: &Polynomial<F>) -> Option<Polynomial<F>> { let lt_f = Self::leading_term(f)?; let lt_g = Self::leading_term(g)?; let lcm = Self::monic_lcm(&lt_f, &lt_g); Some(lcm.try_div(&lt_f)? * f - lcm.try_div(&lt_g)? 
* g) } pub fn buchberger_criterion(generators: &[Polynomial<F>]) -> bool { let m = generators.len(); for (i, j) in (0..m).cartesian_product(0..m).filter(|&(f, g)| f != g) { if let Some(h) = Self::try_reduce(&generators[i], &generators[j]) { if !Self::divide(&h, generators).0.is_zero() { return false; } } else { panic!("should not happen. yeah i know, joseph, 'parse don't validate'"); } } true } fn buchberger_extend(generators: &mut Vec<Polynomial<F>>) { let mut i = 0; 'outer: while i < generators.len() { for j in 0..i { let (r, _) = Self::divide( &Self::try_reduce(&generators[i], &generators[j]) .expect("should not happen. yeah i know, joseph, 'parse don't validate'"), generators, ); if let Some(lc_r) = Self::leading_coef(&r) { generators.push(&r / lc_r); i = 0; continue 'outer; } } i += 1; } } fn buchberger_minimize(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { if Self::leading_term(&generators[j]) .unwrap() .divides(&Self::leading_term(&generators[i]).unwrap()) { generators.remove(i); continue 'outer; } } }; } } fn buchberger_reduce(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { for t in generators[i].terms() { if Self::leading_term(&generators[j]).unwrap().divides(t) { let g = generators.remove(i); let (g_rem, _) = Self::divide(&g, &generators); generators.push(g_rem); continue 'outer; } } } }; } } pub fn buchberger_algorithm(generators: &mut Vec<Polynomial<F>>) { Self::buchberger_extend(generators); Self::buchberger_minimize(generators); Self::buchberger_reduce(generators); } } #[inline] fn pairs_iter(n: usize) -> impl Iterator<Item = (usize, usize)> { (0..n).cartesian_product(0..n).filter(|(i, j)| i != j) } #[cfg(test)] mod tests { use std::ops::Add; use super::*; use crate::{ core::num::Rational, poly::{elts::Polynomial, elts::Term}, }; use super::super::ord::Lex; type Poly = Polynomial<f64>; type Comp = Computer<f64, Lex>; #[allow(unused)] 
macro_rules! pp { ($poly:expr) => { println!("{} = {}", stringify!($poly), &$poly); }; } #[allow(unused)] macro_rules! pps { ($polys:expr) => { for i in 0..($polys.len()) { println!("{}[{}] = {}", stringify!($polys), i, &$polys[i]); } }; } #[allow(unused)] #[test] fn dbg_stuff() { let c = |coef: f64| Poly::from(coef); fn_vars! { f64: x y z } /* let f = y(1); let divs = [ Poly::zero(), y(1) + c(1.0), ]; println!("{} / [{}, {}]", &f, &divs[0], &divs[1]); let (r, q) = Computer::<Lex, f64>::divide(&f, &divs); pp!(r); pp!(q[0]); pp!(q[1]); pp!(&q[0] * &divs[0]); pp!(&q[1] * &divs[1]); */ let f = x(2) + x(3) + x(1) + x(2) + x(0) + x(3); pp!(f); } #[test] fn division() { let poly_iter = polys(2, 3, 1) .into_iter() .cartesian_product(polys(2, 1, 2).into_iter().cartesian_product(polys(2, 1, 2))); for (f, (g1, g2)) in poly_iter { test_result_equality(&f, &[g1, g2]); } } fn polys(vars: i32, max_deg: u8, max_coef: i32) -> Vec<Polynomial<f64>> { (0..vars) .map(|_| (0..=max_deg).rev()) .multi_cartesian_product() .map(MultiDegree::from_vec) .filter(|mdeg| mdeg.total_deg() <= max_deg) .map(|mdeg| { (0..=max_coef) .map(f64::from) .map(move |coef| Term::new(coef, mdeg.clone())) }) .multi_cartesian_product() .map(|v| Poly::new_unchecked(v.into_iter().filter(|t| !t.is_zero()).collect())) .collect() } #[cfg(test)] fn poly_assert_eq(f1: &Poly, f2: &Poly) { assert_eq!(Comp::sort_terms(f1).terms, Comp::sort_terms(f2).terms); } #[cfg(test)] fn test_result_equality(f: &Poly, g: &[Poly]) { let (r, q) = Comp::divide(f, g); let f2 = q.iter().zip_eq(g).map(|(qi, gi)| qi * gi).fold(r, Add::add); poly_assert_eq(f, &f2); } #[test] fn reduction() { let c = |coef| Polynomial::from(Rational::new_i64(coef, 1)); fn_vars! 
{ Rational: x y } type CompQ = Computer<Rational, Lex>; let f = c(5) * x(4) * y(3) + c(2) * x(2) * y(1) + c(3) * x(1) * y(2) + y(2) + c(3); let g = c(8) * x(5) * y(2) + x(3) + c(3) * x(2) * y(2) + y(4) + c(6); pp!(f); pp!(g); println!(); if let Some(h) = CompQ::try_reduce(&f, &g) { pp!(h); } else { println!("could not reduce"); } } #[test] fn buchberger() { let q = |a, b| Polynomial::from(Rational::new_i64(a, b)); let c = |coef| q(coef, 1); fn_vars! { Rational: x y } type CompQ = Computer<Rational, Lex>; fn print_buchberger(g: &[Polynomial<Rational>]) -> Vec<Polynomial<Rational>> { let mut g = Vec::from(g); println!("initial:"); pps!(g); CompQ::buchberger_algorithm(&mut g); println!("\nresult:"); pps!(g); println!("--------------------"); g } print_buchberger(&[x(3) * y(1) - x(1) * y(2) + c(1), x(2) * y(2) - y(3) - c(1)]);
; } }
print_buchberger(&[ x(2) + x(1) * y(5) + y(4), x(1) * y(6) - x(1) * y(3) + y(5) - y(2), x(1) * y(5) - x(1) * y(2), ])
call_expression
[ { "content": "pub fn mat_iterator<'a, F: Field>(n: usize, m: usize, values: &'a [F])-> impl 'a + Iterator<Item=Mat<F>>{\n\n\tMatIterator {\n\n\t\tvalues,\n\n\t\tn,\n\n\t\tm,\n\n\t\tnum: 0\n\n\t}\n\n}\n\n\n\npub struct MatIterator<'a, F: Field> {\n\n\tvalues: &'a [F],\n\n\tn: usize,\n\n\tm: usize,\n\n\tnum: usiz...
Rust
tests/auth_test.rs
nappa85/rust-etcd
9fd9f3c33687623772abfff3e5a43579ab9aa677
extern crate etcd; extern crate futures; extern crate hyper; extern crate hyper_tls; extern crate native_tls; extern crate tokio_core; extern crate tokio_timer; use futures::future::Future; use tokio_core::reactor::Core; use etcd::{BasicAuth, Client}; use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate}; #[test] fn auth() { let mut core = Core::new().unwrap(); let client = Client::new(&["http://etcd:2379"], None).unwrap(); let basic_auth = BasicAuth { username: "root".into(), password: "secret".into(), }; let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap(); let root_user = NewUser::new("root", "secret"); let work: Box<Future<Item = (), Error = ()>> = Box::new( auth::status(&client) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, false); auth::create_user(&client, root_user) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data.name(), "root"); auth::enable(&client) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); let mut update_guest = RoleUpdate::new("guest"); update_guest.revoke_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(|res| { res.unwrap(); let mut rkt_role = Role::new("rkt"); rkt_role.grant_kv_read_permission("/rkt/*"); rkt_role.grant_kv_write_permission("/rkt/*"); auth::create_role(&authed_client, rkt_role) }) .then(|res| { res.unwrap(); let mut rkt_user = NewUser::new("rkt", "secret"); rkt_user.add_role("rkt"); auth::create_user(&authed_client, rkt_user) }) .then(|res| { let response = res.unwrap(); let rkt_user = response.data; assert_eq!(rkt_user.name(), "rkt"); let role_name = &rkt_user.role_names()[0]; assert_eq!(role_name, "rkt"); let mut update_rkt_user = UserUpdate::new("rkt"); update_rkt_user.update_password("secret2"); update_rkt_user.grant_role("root"); auth::update_user(&authed_client, update_rkt_user) }) .then(|res| { res.unwrap(); auth::get_role(&authed_client, "rkt") }) 
.then(|res| { let response = res.unwrap(); let role = response.data; assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned())); assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned())); auth::delete_user(&authed_client, "rkt") }) .then(|res| { res.unwrap(); auth::delete_role(&authed_client, "rkt") }) .then(|res| { res.unwrap(); let mut update_guest = RoleUpdate::new("guest"); update_guest.grant_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(|res| { res.unwrap(); auth::disable(&authed_client) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); Ok(()) }), ); assert!(core.run(work).is_ok()); }
extern crate etcd; extern crate futures; extern crate hyper; extern crate hyper_tls; extern crate native_tls; extern crate tokio_core; extern crate tokio_timer; use futures::future::Future; use tokio_core::reactor::Core; use etcd::{BasicAuth, Client}; use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate}; #[test] fn auth() { let mut core = Core::new().unwrap(); let client = Client::new(&["http://etcd:2379"], None).unwrap(); let basic_auth = BasicAuth { username: "root".into(), password: "secret".into(), }; let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap(); let root_user = NewUser::new("root", "secret"); let work: Box<Future<Item = (), Error = ()>> = Box::new( auth::status(&client) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, false); auth::create_user(&client, root_user) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data.name(), "root"); auth::enable(&client) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); let mut update_guest = RoleUpdate::new("guest"); update_guest.revoke_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(|res| { res.unwrap(); let mut rkt_role = Role::new("rkt"); rkt_role.grant_kv_read_permission("/rkt/*"); rkt_role.grant_kv_write_permission("/rkt/*"); auth::create_role(&authed_client, rkt_role) }) .then(|res| { res.unwrap(); let mut rkt_user = NewUser::new("rkt", "secret"); rkt_use
res.unwrap(); auth::delete_role(&authed_client, "rkt") }) .then(|res| { res.unwrap(); let mut update_guest = RoleUpdate::new("guest"); update_guest.grant_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(|res| { res.unwrap(); auth::disable(&authed_client) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); Ok(()) }), ); assert!(core.run(work).is_ok()); }
r.add_role("rkt"); auth::create_user(&authed_client, rkt_user) }) .then(|res| { let response = res.unwrap(); let rkt_user = response.data; assert_eq!(rkt_user.name(), "rkt"); let role_name = &rkt_user.role_names()[0]; assert_eq!(role_name, "rkt"); let mut update_rkt_user = UserUpdate::new("rkt"); update_rkt_user.update_password("secret2"); update_rkt_user.grant_role("root"); auth::update_user(&authed_client, update_rkt_user) }) .then(|res| { res.unwrap(); auth::get_role(&authed_client, "rkt") }) .then(|res| { let response = res.unwrap(); let role = response.data; assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned())); assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned())); auth::delete_user(&authed_client, "rkt") }) .then(|res| {
random
[ { "content": "/// Attempts to enable the auth system.\n\npub fn enable<C>(client: &Client<C>) -> Box<Future<Item = Response<AuthChange>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.e...
Rust
src/order_service.rs
alank976/restaurant-order
a623ebede260f11c324982d9831136f2a4e8254a
use std::collections::HashMap; use std::sync::{Arc, RwLock}; use crate::order_item::OrderItem; pub struct OrderService(Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>); impl OrderService { pub fn new() -> Self { OrderService(Arc::new(RwLock::new(HashMap::new()))) } fn new_for_test(m: Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) -> Self { OrderService(m) } pub fn add(&self, table_id: u8, item: OrderItem) -> Result<(), ()> { match table_id { 1..=100 => { let mut items_by_table_id = self.0 .write() .unwrap(); if let Some(items) = items_by_table_id.get_mut(&table_id) { items.push(item); Ok(()) } else { items_by_table_id.insert(table_id, vec![item]); Ok(()) } } _ => Err(()) } } pub fn get_items(&self, table_id: u8) -> Result<Vec<OrderItem>, ()> { match table_id { 1..=100 => Ok( self.0 .read() .unwrap() .get(&table_id) .map(|it| it.clone()) .unwrap_or(vec![]) ), _ => Err(()) } } pub fn cancel_item(&self, table_id: u8, item_name: String) -> Result<(), ()> { match table_id { 1..=100 => { if let Some(items) = self.0 .write() .unwrap() .get_mut(&table_id) { items.retain(|item| item_name != *item.name); } Ok(()) } _ => Err(()) } } } #[cfg(test)] mod tests { use super::*; fn new_service_and_inner_map() -> (OrderService, Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) { let m: HashMap<u8, Vec<OrderItem>> = HashMap::new(); let arc_rw = Arc::new(RwLock::new(m)); let svc = OrderService::new_for_test(arc_rw.clone()); (svc, arc_rw.clone()) } #[test] fn it_returns_items_from_hashmap() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(10, vec![OrderItem::new("sushi".to_string())]); let items = svc.get_items(10); assert!(items.is_ok()); let items = items.unwrap(); assert_eq!(1, items.len()); let item = items.get(0).unwrap(); assert_eq!("sushi".to_string(), item.name); } #[test] fn it_returns_nothing_when_no_table_id_found() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(2); assert!(items.is_ok()); assert!(items.unwrap().is_empty()); } #[test] 
fn it_saves_item_into_map() { let (svc, rw_map) = new_service_and_inner_map(); let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); let inner_map = rw_map .read() .unwrap(); let items = inner_map.get(&1); assert!(items.is_some()); let items = items.unwrap(); assert_eq!(1, items.len()); assert_eq!("french fries", items .first() .unwrap() .name() ); } #[test] fn it_saves_items_with_same_name() { let (svc, rw_map) = new_service_and_inner_map(); for _ in 0..2 { let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); } let inner_map = rw_map .read() .unwrap(); let mut items = inner_map.get(&1); assert!(items.is_some()); let items = items.as_mut().unwrap(); assert_eq!(2, items.len()); assert!(items.iter() .map(|item: &OrderItem| item.name()) .all(|name| "french fries" == name)); } #[test] fn it_deletes_item() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(1, vec![OrderItem::new("ramen".to_string())]); svc.cancel_item(1, "ramen".to_string()).unwrap(); assert!(rw_map .read() .unwrap() .get(&1) .unwrap() .is_empty()); } #[test] fn it_rejects_when_table_id_greater_100() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(200); assert!(items.is_err()); } }
use std::collections::HashMap; use std::sync::{Arc, RwLock}; use crate::order_item::OrderItem; pub struct OrderService(Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>); impl OrderService { pub fn new() -> Self { OrderService(Arc::new(RwLock::new(HashMap::new()))) } fn new_for_test(m: Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) -> Self { OrderService(m) } pub fn add(&self, table_id: u8, item: OrderItem) -> Result<(), ()> { match table_id { 1..=100 => { let mut items_by_table_id = self.0 .write() .unwrap(); if let Some(items) = items_by_table_id.get_mut(&table_id) { items.push(item); Ok(()) } else { items_by_table_id.insert(table_id, vec![item]); Ok(()) } } _ => Err(()) } } pub fn get_items(&self, table_id: u8) -> Result<Vec<OrderItem>, ()> { match table_id { 1..=100 => Ok( self.0 .read() .unwrap() .get(&table_id) .map(|it| it.clone()) .unwrap_or(vec![]) ), _ => Err(()) } } pub fn cancel_item(&self, table_id: u8, item_name: String) -> Result<(), ()> { match table_id { 1..=100 => { if let Some(items) = self.0 .write() .unwrap() .get_mut(&table_id) { items.retain(|item| item_name != *item.name); } Ok(()) } _ => Err(()) } } } #[cfg(test)] mod tests { use super::*; fn new_service_and_inner_map() -> (OrderService, Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) { let m: HashMap<u8, Vec<OrderItem>> = HashMap::new(); let arc_rw = Arc::new(RwLock::new(m)); let svc = OrderService::new_for_test(arc_rw.clone()); (svc, arc_rw.clone()) } #[test] fn it_returns_items_from_hashmap() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(10, vec![OrderItem::new("sushi".to_string())]); let items = svc.get_items(10); assert!(items.is_ok()); let items = items.unwrap(); assert_eq!(1, items.len()); let item = items.get(0).unwrap(); assert_eq!("sushi".to_string(), item.name); } #[test] fn it_returns_nothing_when_no_table_id_found() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(2); assert!(items.is_ok()); assert!(items.unwrap().is_empty()); } #[test] 
fn it_saves_item_into_map() { let (svc, rw_map) = new_service_and_inner_map(); let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); let inner_map = rw_map .read() .unwrap(); let items = inner_map.get(&1); assert!(items.is_some()); let items = items.unwrap(); assert_eq!(1, items.len()); assert_eq!("french fries", items .first() .unwrap() .name() ); } #[test] fn it_saves_items_with_same_name() { let (svc, rw_map) = new_service_and_inner_map(); for _ in 0..2 { let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); } let inner_map = rw_map .read() .unwrap(); let mut items = inner_map.get(&1); assert!(items.is_some()); let items = items.as_mut().unwrap(); assert_eq!(2, items.len()); assert!(items.iter() .map(|item: &OrderItem| item.name()) .all(|name| "french fries" == name)); } #[test] fn it_deletes_item() { let (s
em::new("ramen".to_string())]); svc.cancel_item(1, "ramen".to_string()).unwrap(); assert!(rw_map .read() .unwrap() .get(&1) .unwrap() .is_empty()); } #[test] fn it_rejects_when_table_id_greater_100() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(200); assert!(items.is_err()); } }
vc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(1, vec![OrderIt
function_block-random_span
[ { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::thread;\n\n use std::time::{Duration, Instant};\n\n\n\n use restaurant_order::*;\n\n use restaurant_order::clients::*;\n\n\n\n #[test]\n\n fn integration_test() {\n\n thread::spawn(move || WebServer::new().start(8001));\n\n\n\n ...
Rust
parser/src/lib.rs
codeanonorg/ULP
1412606cf2456a183a33f3ef5a5defe94675b41b
mod report; mod spanned; mod token; use crate::token::Token; use chumsky::{prelude::*, Stream}; use report::Report; use report::{report_of_char_error, report_of_token_error}; use token::lexer; #[derive(Clone, Debug, PartialEq)] pub enum Lit { Num(String), List(Vec<Self>), } #[derive(Clone, Debug, PartialEq)] pub enum Sym { CombS, CombK, CombD, CombI, Map, Iota, Len, Reduce, Filter, Neg, And, Or, Eq, Add, Literal(Lit), Var(u32), Lambda(Vec<Self>), } impl Sym { pub fn lambda<I: IntoIterator<Item = Option<Self>>>(inner: I) -> Option<Self> { Some(Self::Lambda(inner.into_iter().collect::<Option<Vec<_>>>()?)) } } fn literal() -> impl Parser<Token, Lit, Error = Simple<Token>> { use Token::*; use token::Dir::*; let int = filter_map(|span, tok| match tok { Num(n) => Ok(Lit::Num(n)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); recursive(|lit| lit.repeated().at_least(1).delimited_by(Bracket(L), Bracket(R)).map(Lit::List).or(int)) } fn parser() -> impl Parser<Token, Option<Vec<Sym>>, Error = Simple<Token>> { use token::Dir::*; use Token::*; let var = filter_map(|span, tok| match tok { Var(i) => Ok(Sym::Var(i)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); let lit = literal().map(Sym::Literal); recursive(|instr| { instr .delimited_by(Brace(L), Brace(R)) .map(|v| Sym::lambda(v)) .or(just(Ident("K".to_string())) .to(Sym::CombK) .or(just(Ident("S".to_string())).to(Sym::CombS)) .or(just(Ident("I".to_string())).to(Sym::CombI)) .or(just(Ident("D".to_string())).to(Sym::CombD)) .or(just(Op("i".to_string())).to(Sym::Iota)) .or(just(Op("$".to_string())).to(Sym::Map)) .or(just(Op("+".to_string())).to(Sym::Add)) .or(just(Op("#".to_string())).to(Sym::Len)) .or(just(Op("=".to_string())).to(Sym::Eq)) .or(just(Op("/".to_string())).to(Sym::Reduce)) .or(just(Op("&".to_string())).to(Sym::And)) .or(just(Op("|".to_string())).to(Sym::Or)) .or(just(Op("!".to_string())).to(Sym::Neg)) .or(just(Op("\\".to_string())).to(Sym::Filter)) .or(lit) .or(var) 
.map(Some)) .recover_with(nested_delimiters(Brace(L), Brace(R), [], |_| None)) .repeated() }) .map(|v| v.into_iter().collect::<Option<Vec<_>>>()) } pub fn parse(src_id: impl Into<String>, input: &str) -> (Option<Vec<Sym>>, Vec<Report>) { let src_id = src_id.into(); let slen = input.len(); let (tokens, tokerr) = lexer().then_ignore(end()).parse_recovery(input); let tokerr = tokerr.into_iter().map({ let src_id = src_id.clone(); move |err| report_of_char_error(src_id.clone(), err) }); if let Some(tokens) = tokens { let (instrs, err) = parser() .then_ignore(end()) .parse_recovery(Stream::from_iter( slen..slen + 1, tokens.into_iter().map(Into::into), )); let tokerr = tokerr .chain( err.into_iter() .map(move |err| report_of_token_error(src_id.clone(), err)), ) .collect(); if let Some(Some(instrs)) = instrs { (Some(instrs), tokerr) } else { (None, tokerr) } } else { (None, tokerr.collect()) } } #[cfg(test)] mod tests { use super::*; use Sym::*; macro_rules! assert_parse { ($input:expr, [$($e:expr),*]) => { { use ariadne::Source; let input = $input; let (res, err) = parse("<test>", input); for report in err { report.eprint(("<test>".into(), Source::from(input))).unwrap(); } assert_eq!(res, Some(vec![$($e),*])); } }; } #[test] fn test_ski() { assert_parse!("S K I", [CombS, CombK, CombI]); } #[test] fn test_lambda() { assert_parse!("{D w1 I}", [Lambda(vec![CombD, Var(1), CombI])]) } #[test] fn test_nested_lambda() { assert_parse!( "{D {+ w1 w2} I}", [Lambda(vec![ CombD, Lambda(vec![Add, Var(1), Var(2)]), CombI ])] ); } }
mod report; mod spanned; mod token; use crate::token::Token; use chumsky::{prelude::*, Stream}; use report::Report; use report::{report_of_char_error, report_of_token_error}; use token::lexer; #[derive(Clone, Debug, PartialEq)] pub enum Lit { Num(String), List(Vec<Self>), } #[derive(Clone, Debug, PartialEq)] pub enum Sym { CombS, CombK, CombD, CombI, Map, Iota, Len, Reduce, Filter, Neg, And, Or, Eq, Add, Literal(Lit), Var(u32), Lambda(Vec<Self>), } impl Sym { pub fn lambda<I: IntoIterator<Item = Option<Self>>>(inner: I) -> Option<Self> { Some(Self::Lambda(inner.into_iter().collect::<Option<Vec<_>>>()?)) } } fn literal() -> impl Parser<Token, Lit, Error = Simple<Token>> { use Token::*;
fn parser() -> impl Parser<Token, Option<Vec<Sym>>, Error = Simple<Token>> { use token::Dir::*; use Token::*; let var = filter_map(|span, tok| match tok { Var(i) => Ok(Sym::Var(i)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); let lit = literal().map(Sym::Literal); recursive(|instr| { instr .delimited_by(Brace(L), Brace(R)) .map(|v| Sym::lambda(v)) .or(just(Ident("K".to_string())) .to(Sym::CombK) .or(just(Ident("S".to_string())).to(Sym::CombS)) .or(just(Ident("I".to_string())).to(Sym::CombI)) .or(just(Ident("D".to_string())).to(Sym::CombD)) .or(just(Op("i".to_string())).to(Sym::Iota)) .or(just(Op("$".to_string())).to(Sym::Map)) .or(just(Op("+".to_string())).to(Sym::Add)) .or(just(Op("#".to_string())).to(Sym::Len)) .or(just(Op("=".to_string())).to(Sym::Eq)) .or(just(Op("/".to_string())).to(Sym::Reduce)) .or(just(Op("&".to_string())).to(Sym::And)) .or(just(Op("|".to_string())).to(Sym::Or)) .or(just(Op("!".to_string())).to(Sym::Neg)) .or(just(Op("\\".to_string())).to(Sym::Filter)) .or(lit) .or(var) .map(Some)) .recover_with(nested_delimiters(Brace(L), Brace(R), [], |_| None)) .repeated() }) .map(|v| v.into_iter().collect::<Option<Vec<_>>>()) } pub fn parse(src_id: impl Into<String>, input: &str) -> (Option<Vec<Sym>>, Vec<Report>) { let src_id = src_id.into(); let slen = input.len(); let (tokens, tokerr) = lexer().then_ignore(end()).parse_recovery(input); let tokerr = tokerr.into_iter().map({ let src_id = src_id.clone(); move |err| report_of_char_error(src_id.clone(), err) }); if let Some(tokens) = tokens { let (instrs, err) = parser() .then_ignore(end()) .parse_recovery(Stream::from_iter( slen..slen + 1, tokens.into_iter().map(Into::into), )); let tokerr = tokerr .chain( err.into_iter() .map(move |err| report_of_token_error(src_id.clone(), err)), ) .collect(); if let Some(Some(instrs)) = instrs { (Some(instrs), tokerr) } else { (None, tokerr) } } else { (None, tokerr.collect()) } } #[cfg(test)] mod tests { use super::*; use Sym::*; macro_rules! 
assert_parse { ($input:expr, [$($e:expr),*]) => { { use ariadne::Source; let input = $input; let (res, err) = parse("<test>", input); for report in err { report.eprint(("<test>".into(), Source::from(input))).unwrap(); } assert_eq!(res, Some(vec![$($e),*])); } }; } #[test] fn test_ski() { assert_parse!("S K I", [CombS, CombK, CombI]); } #[test] fn test_lambda() { assert_parse!("{D w1 I}", [Lambda(vec![CombD, Var(1), CombI])]) } #[test] fn test_nested_lambda() { assert_parse!( "{D {+ w1 w2} I}", [Lambda(vec![ CombD, Lambda(vec![Add, Var(1), Var(2)]), CombI ])] ); } }
use token::Dir::*; let int = filter_map(|span, tok| match tok { Num(n) => Ok(Lit::Num(n)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); recursive(|lit| lit.repeated().at_least(1).delimited_by(Bracket(L), Bracket(R)).map(Lit::List).or(int)) }
function_block-function_prefix_line
[ { "content": "/// Check that an ULP program is well formed and returns its associated\n\n/// computation tree\n\npub fn check(mut prog: Vec<Sym>) -> Result<ComputationTree, &'static str> {\n\n if prog.len() == 0 {\n\n Err(\"No symbols\")\n\n } else {\n\n prog.reverse();\n\n linear_che...
Rust
src/resolver/storage.rs
casper-ecosystem/caspiler
69874a86537fb6f1a138f03e382686f03e46751e
use num_bigint::BigInt; use num_traits::FromPrimitive; use num_traits::One; use num_traits::Zero; use super::cfg::{ControlFlowGraph, Instr, Vartable}; use super::expression::{cast, expression, Expression}; use output::Output; use parser::pt; use resolver; pub fn array_offset( loc: &pt::Loc, start: Expression, index: Expression, elem_ty: resolver::Type, ns: &resolver::Namespace, ) -> Expression { let elem_size = elem_ty.storage_slots(ns); if elem_size == BigInt::one() { Expression::Add(*loc, Box::new(start), Box::new(index)) } else if (elem_size.clone() & (elem_size.clone() - BigInt::one())) == BigInt::zero() { Expression::ShiftLeft( *loc, Box::new(start), Box::new(Expression::ShiftLeft( *loc, Box::new(index), Box::new(Expression::NumberLiteral( *loc, 256, BigInt::from_usize(elem_size.bits()).unwrap(), )), )), ) } else { Expression::Add( *loc, Box::new(start), Box::new(Expression::Multiply( *loc, Box::new(index), Box::new(Expression::NumberLiteral(*loc, 256, elem_size)), )), ) } } pub fn delete( loc: &pt::Loc, var: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?; let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, "cannot use ‘delete’ in constant expression".to_string(), )); return Err(()); } }; if let resolver::Type::StorageRef(ty) = &var_ty { if ty.is_mapping() { errors.push(Output::error( *loc, "‘delete’ cannot be applied to mapping type".to_string(), )); return Err(()); } cfg.writes_contract_storage = true; cfg.add( tab, Instr::ClearStorage { ty: ty.as_ref().clone(), storage: var_expr, }, ); } else { errors.push(Output::error( *loc, "argument to ‘delete’ should be storage reference".to_string(), )); return Err(()); } Ok((Expression::Poison, resolver::Type::Undef)) } pub fn 
array_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if args.len() > 1 { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let elem_ty = ty.storage_deref(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.writes_contract_storage = true; cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, length_pos), elem_ty.clone(), ns, ), }, ); if args.len() == 1 { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; let pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: pos, expr: cast( &args[0].loc(), val_expr, &val_ty, &elem_ty.deref(), true, ns, errors, )?, }, ); cfg.add( tab, Instr::SetStorage { ty: elem_ty.clone(), local: pos, storage: Expression::Variable(*loc, entry_pos), }, ); } let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Add( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); if args.is_empty() { Ok((Expression::Variable(*loc, entry_pos), elem_ty)) 
} else { Ok((Expression::Poison, resolver::Type::Undef)) } } pub fn array_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let empty_array = cfg.new_basic_block("empty_array".to_string()); let has_elements = cfg.new_basic_block("has_elements".to_string()); cfg.writes_contract_storage = true; cfg.add( tab, Instr::BranchCond { cond: Expression::Equal( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::zero())), ), true_: empty_array, false_: has_elements, }, ); cfg.set_basic_block(empty_array); cfg.add(tab, Instr::AssertFailure { expr: None }); cfg.set_basic_block(has_elements); let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Subtract( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); let elem_ty = ty.storage_deref().deref().clone(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, new_length), elem_ty.clone(), ns, ), }, ); let res_pos = 
tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: res_pos, expr: Expression::StorageLoad( *loc, elem_ty.clone(), Box::new(Expression::Variable(*loc, entry_pos)), ), }, ); cfg.add( tab, Instr::ClearStorage { ty: elem_ty.clone(), storage: Expression::Variable(*loc, entry_pos), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); Ok((Expression::Variable(*loc, res_pos), elem_ty)) } pub fn bytes_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; cfg.writes_contract_storage = true; let val = match args.len() { 0 => Expression::NumberLiteral(*loc, 8, BigInt::zero()), 1 => { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; cast( &args[0].loc(), val_expr, &val_ty, &resolver::Type::Bytes(1), true, ns, errors, )? 
} _ => { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } }; if args.is_empty() { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Bytes(1), )) } else { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Undef, )) } } pub fn bytes_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { cfg.writes_contract_storage = true; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } Ok(( Expression::StorageBytesPop(*loc, Box::new(var_expr)), resolver::Type::Bytes(1), )) } pub fn mapping_subscript( loc: &pt::Loc, mapping: Expression, mapping_ty: &resolver::Type, index: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (key_ty, value_ty) = match mapping_ty.deref() { resolver::Type::Mapping(k, v) => (k, v), _ => unreachable!(), }; let (index_expr, index_ty) = expression(index, cfg, contract_no, ns, vartab, errors)?; let index_expr = cast( &index.loc(), index_expr, &index_ty, key_ty, true, ns, errors, )?; let slot_ty = resolver::Type::Uint(256); let index_ty = if let resolver::Type::Enum(n) = index_ty { ns.enums[n].ty.clone() } else { index_ty }; let slot = Expression::Keccak256(*loc, vec![(mapping, slot_ty), (index_expr, index_ty)]); Ok((slot, resolver::Type::StorageRef(value_ty.clone()))) }
use num_bigint::BigInt; use num_traits::FromPrimitive; use num_traits::One; use num_traits::Zero; use super::cfg::{ControlFlowGraph, Instr, Vartable}; use super::expression::{cast, expression, Expression}; use output::Output; use parser::pt; use resolver; pub fn array_offset( loc: &pt::Loc, start: Expression, index: Expression, elem_ty: resolver::Type, ns: &resolver::Namespace, ) -> Expression { let elem_size = elem_ty.storage_slots(ns); if elem_size == BigInt::one() { Expression::Add(*loc, Box::new(start), Box::new(index)) } else if (elem_size.clone() & (elem_size.clone() - BigInt::one())) == BigInt::zero() { Expression::ShiftLeft( *loc, Box::new(start), Box::new(Expression::ShiftLeft( *loc, Box::new(index), Box::new(Expression::NumberLiteral( *loc, 256, BigInt::from_usize(elem_size.bits()).unwrap(), )), )), ) } else { Expression::Add( *loc, Box::new(start), Box::new(Expression::Multiply( *loc, Box::new(index), Box::new(Expression::NumberLiteral(*loc, 256, elem_size)), )), ) } } pub fn delete( loc: &pt::Loc, var: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?; let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, "cannot use ‘delete’ in constant expression".to_string(), )); return Err(()); } }; if let resolver::Type::StorageRef(ty) = &var_ty { if ty.is_mapping() { errors.push(Output::error( *loc, "‘delete’ cannot be applied to mapping type".to_string(), )); return Err(()); } cfg.writes_contract_storage = true; cfg.add( tab, Instr::ClearStorage { ty: ty.as_ref().clone(), storage: var_expr, }, ); } else { errors.push(Output::error( *loc, "argument to ‘delete’ should be storage reference".to_string(), )); return Err(()); } Ok((Expression::Poison, resolver::Type::Undef)) } pub fn 
array_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if args.len() > 1 { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let elem_ty = ty.storage_deref(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.writes_contract_storage = true; cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, length_pos), elem_ty.clone(), ns, ), }, ); if args.len() == 1 { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; let pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: pos, expr: cast( &args[0].loc(), val_expr, &val_ty, &elem_ty.deref(), true, ns, errors, )?, }, ); cfg.add( tab, Instr::SetStorage { ty: elem_ty.clone(), local: pos, storage: Expression::Variable(*loc, entry_pos), }, ); } let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Add( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); if args.is_empty() { Ok((Expression::Variable(*loc, entry_pos), elem_ty)) 
} else { Ok((Expression::Poison, resolver::Type::Undef)) } } pub fn array_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let empty_array = cfg.new_basic_block("empty_array".to_string()); let has_elements = cfg.new_basic_block("has_elements".to_string()); cfg.writes_contract_storage = true; cfg.add( tab, Instr::BranchCond { cond: Expression::Equal( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::zero())), ), true_: empty_array, false_: has_elements, }, ); cfg.set_basic_block(empty_array); cfg.add(tab, Instr::AssertFailure { expr: None }); cfg.set_basic_block(has_elements); let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Subtract( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); let elem_ty = ty.storage_deref().deref().clone(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, new_length), elem_ty.clone(), ns, ), }, ); let res_pos = 
tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: res_pos, expr: Expression::StorageLoad( *loc, elem_ty.clone(), Box::new(Expression::Variable(*loc, entry_pos)), ), }, ); cfg.add( tab, Instr::ClearStorage { ty: elem_ty.clone(), storage: Expression::Variable(*loc, entry_pos), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); Ok((Expression::Variable(*loc, res_pos), elem_ty)) } pub fn bytes_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; cfg.writes_contract_storage = true; let val = match args.len() { 0 => Expression::NumberLiteral(*loc, 8, BigInt::zero()), 1 => { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; cast( &args[0].loc(), val_expr, &val_ty, &resolver::Type::Bytes(1), true, ns, errors, )? } _ => { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } };
} pub fn bytes_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { cfg.writes_contract_storage = true; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } Ok(( Expression::StorageBytesPop(*loc, Box::new(var_expr)), resolver::Type::Bytes(1), )) } pub fn mapping_subscript( loc: &pt::Loc, mapping: Expression, mapping_ty: &resolver::Type, index: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (key_ty, value_ty) = match mapping_ty.deref() { resolver::Type::Mapping(k, v) => (k, v), _ => unreachable!(), }; let (index_expr, index_ty) = expression(index, cfg, contract_no, ns, vartab, errors)?; let index_expr = cast( &index.loc(), index_expr, &index_ty, key_ty, true, ns, errors, )?; let slot_ty = resolver::Type::Uint(256); let index_ty = if let resolver::Type::Enum(n) = index_ty { ns.enums[n].ty.clone() } else { index_ty }; let slot = Expression::Keccak256(*loc, vec![(mapping, slot_ty), (index_expr, index_ty)]); Ok((slot, resolver::Type::StorageRef(value_ty.clone()))) }
if args.is_empty() { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Bytes(1), )) } else { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Undef, )) }
if_condition
[ { "content": "/// Cast from one type to another, which also automatically derefs any Type::Ref() type.\n\n/// if the cast is explicit (e.g. bytes32(bar) then implicit should be set to false.\n\npub fn cast(\n\n loc: &pt::Loc,\n\n expr: Expression,\n\n from: &resolver::Type,\n\n to: &resolver::Type,\...
Rust
src/main.rs
drgmr/t8bar
eb0d8db31b5e7aba58ac439ae06cd6ca6dad01ca
#[macro_use] extern crate log; use std::env; use std::fs::File; use std::io::{copy, Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; use rubrail::ItemId; use rubrail::TTouchbar; use rubrail::Touchbar; use serde::Deserialize; #[derive(Debug, Deserialize)] struct Target { hostname: String, github: String, } fn main() { fruitbasket::create_logger(".t8bar.log", fruitbasket::LogDir::Home, 5, 2).unwrap(); let mut nsapp = fruitbasket::Trampoline::new("t8bar", "t8bar", "com.drgmr.t8bar") .version(env!("CARGO_PKG_VERSION")) .plist_key("LSBackgroundOnly", "1") .build(fruitbasket::InstallDir::Custom("target/".to_string())) .unwrap(); nsapp.set_activation_policy(fruitbasket::ActivationPolicy::Prohibited); let stopper = nsapp.stopper(); let mut touchbar = Touchbar::alloc("t8bar"); setup(&mut touchbar, stopper); nsapp .run(fruitbasket::RunPeriod::Forever) .expect("Failed to launch app"); } fn setup(touchbar: &mut Touchbar, stopper: fruitbasket::FruitStopper) { let targets = targets_from_config(); let mut root_bar = touchbar.create_bar(); let mut button_ids = Vec::<ItemId>::new(); let quit_stopper = stopper.clone(); let quit_button_id = touchbar.create_button( None, Some("Quit"), Box::new(move |_| { info!("Exit requested by user"); quit_stopper.stop(); }), ); button_ids.push(quit_button_id); let image_base_path = PathBuf::from(env::var_os("TMPDIR").unwrap()); for target in targets { info!("Building data for {} - {}", target.hostname, target.github); let filepath = image_base_path .clone() .join(format!("{}.png", target.github)); let mut image_file = File::create(filepath.clone()).unwrap(); let mut request = reqwest::get(&format!("https://github.com/{}.png", target.github)).unwrap(); copy(&mut request, &mut image_file).unwrap(); let image = touchbar.create_image_from_path(filepath.to_str().unwrap()); let hostname = target.hostname.clone(); let target_button_id = touchbar.create_button( Some(&image), None, Box::new(move |_| { info!("Button clicked - 
hostname: {}", hostname); let child = Command::new("osascript") .stdin(Stdio::piped()) .spawn() .unwrap(); info!("Spawned osascript"); let mut stdin = child.stdin.unwrap(); let script = format!( r#"tell application "Screen Sharing" activate tell application "System Events" keystroke "{}.local" keystroke return delay 1 tell application "System Events" click (radio button 1 of radio group 1 of window 1) of application process "Screen Sharing" keystroke return end tell end tell end tell"#, hostname); info!("Sending script"); stdin.write(&script.as_bytes()).unwrap(); info!("Done"); }), ); touchbar.update_button_width(&target_button_id, 50); button_ids.push(target_button_id); } info!("Done building data for buttons"); touchbar.add_items_to_bar(&mut root_bar, button_ids); touchbar.set_bar_as_root(root_bar); } fn targets_from_config() -> Vec<Target> { let home_path = env::var_os("HOME").unwrap(); let config_path = PathBuf::from(home_path) .join(".config") .join("t8bar") .join("config.json"); info!("Expected config path: {:?}", config_path); let mut file = File::open(config_path).unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); let result: Vec<Target> = serde_json::from_str(&contents).unwrap(); info!("Configuration acquired: {:#?}", result); result }
#[macro_use] extern crate log; use std::env; use std::fs::File; use std::io::{copy, Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; use rubrail::ItemId; use rubrail::TTouchbar; use rubrail::Touchbar; use serde::Deserialize; #[derive(Debug, Deserialize)] struct Target { hostname: String, github: String, } fn main() { fruitbasket::create_logger(".t8bar.log", fruitbasket::LogDir::Home, 5, 2).unwrap(); let mut nsapp = fruitbasket::Trampoline::new("t8bar", "t8bar", "com.drgmr.t8bar") .version(env!("CARGO_PKG_VERSION")) .plist_key("LSBackgroundOnly", "1") .build(fruitbasket::InstallDir::Custom("target/".to_string())) .unwrap(); nsapp.set_activation_policy(fruitbasket::ActivationPolicy::Prohibited); let stopper = nsapp.stopper(); let mut touchbar = Touchbar::alloc("t8bar"); setup(&mut touchbar, stopper); nsapp .run(fruitbasket::RunPeriod::Forever) .expect("Failed to launch app"); } fn setup(touchbar: &mut Touchbar, stopper: fruitbasket::FruitStopper) { let targets = targets_from_config(); let mut root_bar = touchbar.create_bar(); let mut button_ids = Vec::<ItemId>::new(); let quit_stopper = stopper.clone(); let quit_button_id = touchbar.create_button( None, Some("Quit"), Box::new(move |_| { info!("Exit requested by user"); quit_stopper.stop(); }), ); button_ids.push(quit_button_id); let image_base_path = PathBuf::from(env::var_os("TMPDIR").unwrap()); for target in targets { info!("Building data for {} - {}", target.hostname, target.github); let filepath = image_base_path .clone() .join(format!("{}.png", target.github)); let mut image_file = File::create(filepath.clone()).unwrap(); let mut request = reqwest::get(&format!("https://github.com/{}.png", target.github)).unwrap(); copy(&mut request, &mut image_file).unwrap(); let image = touchbar.create_image_from_path(filepath.to_str().unwrap()); let hostname = target.hostname.clone(); let target_button_id = touchbar.create_button( Some(&image), None, Box::new(move |_| { info!("Button clicked - 
hostname: {}", hostname); let child = Command::new("osascript") .stdin(Stdio::piped()) .spawn() .unwrap(); info!("Spawned osascript"); let mut stdin = child.stdin.unwrap(); let script = format!( r#"tell application "Screen Sharing" activate tell application "System Events" keystroke "{}.local" keystroke return delay 1 tell application "System Events" click (radio button 1 of radio group 1 of window 1) of application process "Screen Sharing" keystroke return end tell end tell end tell"#, hostname); info!("Sending script"); stdin.write(&script.as_bytes()).unwrap(); info!("Done"); }), ); touchbar.update_button_width(&target_button_id, 50); button_ids.push(target_button_id); } info!("Done building data for buttons"); touchbar.add_items_to_bar(&mut root_bar, button_ids); touchbar.set_bar_as_root(root_bar); } fn targets_from_config() -> Vec<Target> { let home_path = env::var_os("HOME").unwrap(); let config_path = PathBuf::from(home_path) .join(".config") .join("t8bar") .join("config.json"); info!("Expected config path: {:?}", config_path); let mut file = File::open(config_path).unwra
p(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); let result: Vec<Target> = serde_json::from_str(&contents).unwrap(); info!("Configuration acquired: {:#?}", result); result }
function_block-function_prefixed
[ { "content": "# t8bar\n\n\n\nA Screen Sharing touch bar utility.\n\n\n\n![Example Image](/media/screenshot.png)\n\n\n\n## Current State\n\n\n\nRelatively simple but arguably bad code is working - this was mostly an\n\nexperiment of building something that's actually useful using Rust. Code\n\nimprovements and b...
Rust
logger/src/lib.rs
graham/trillium
eac4620156275b5eec41744bd2015c26208a3bab
#![forbid(unsafe_code)] #![warn( rustdoc::missing_crate_level_docs, missing_docs, nonstandard_style, unused_qualifications )] /*! Welcome to the trillium logger! */ pub use crate::formatters::{apache_combined, apache_common, dev_formatter}; use std::fmt::Display; use trillium::{async_trait, Conn, Handler, Info}; /** Components with which common log formats can be constructed */ pub mod formatters; /** A configuration option that determines if format will be colorful. The default is [`ColorMode::Auto`], which only enables color if stdout is detected to be a shell terminal (tty). If this detection is incorrect, you can explicitly set it to [`ColorMode::On`] or [`ColorMode::Off`] **Note**: The actual colorization of output is determined by the log formatters, so it is possible for this to be correctly enabled but for the output to have no colored components. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum ColorMode { Auto, On, Off, } impl ColorMode { pub(crate) fn is_enabled(&self) -> bool { match self { ColorMode::Auto => atty::is(atty::Stream::Stdout), ColorMode::On => true, ColorMode::Off => false, } } } impl Default for ColorMode { fn default() -> Self { Self::Auto } } /** Specifies where the logger output should be sent The default is [`Target::Stdout`]. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum Target { /** Send trillium logger output to a log crate backend. 
See [`log`] for output options */ Logger(log::Level), /** Send trillium logger output to stdout */ Stdout, } impl Target { pub(crate) fn write(&self, data: impl Display) { match self { Target::Logger(level) => { log::log!(*level, "{}", data); } Target::Stdout => { println!("{}", data); } } } } impl Default for Target { fn default() -> Self { Self::Stdout } } /** The interface to format a &[`Conn`] as a [`Display`]-able output In general, the included loggers provide a mechanism for composing these, so top level formats like [`dev_formatter`], [`apache_common`] and [`apache_combined`] are composed in terms of component formatters like [`formatters::method`], [`formatters::ip`], [`formatters::timestamp`], and many others (see [`formatters`] for a full list) When implementing this trait, note that [`Display::fmt`] is called on [`LogFormatter::Output`] _after_ the response has been fully sent, but that the [`LogFormatter::format`] is called _before_ the response has been sent. If you need to perform timing-sensitive calculations that represent the full http cycle, move whatever data is needed to make the calculation into a new type that implements Display, ensuring that it is calculated at the right time. ## Implementations ### Tuples LogFormatter is implemented for all tuples of other LogFormatter types, from 2-26 formatters long. The output of these formatters is concatenated with no space between. ### `&'static str` LogFormatter is implemented for &'static str, allowing for interspersing spaces and other static formatting details into tuples. ```rust use trillium_logger::{Logger, formatters}; let handler = Logger::new() .with_formatter(("-> ", formatters::method, " ", formatters::url)); ``` ### `Fn(&Conn, bool) -> impl Display` LogFormatter is implemented for all functions that conform to this signature. 
```rust # use trillium_logger::{Logger, dev_formatter}; # use trillium::Conn; # use std::borrow::Cow; # struct User(String); impl User { fn name(&self) -> &str { &self.0 } } fn user(conn: &Conn, color: bool) -> Cow<'static, str> { match conn.state::<User>() { Some(user) => String::from(user.name()).into(), None => "guest".into() } } let handler = Logger::new().with_formatter((dev_formatter, " ", user)); ``` */ pub trait LogFormatter: Send + Sync + 'static { /** The display type for this formatter For a simple formatter, this will likely be a String, or even better, a lightweight type that implements Display. */ type Output: Display + Send + Sync + 'static; /** Extract Output from this Conn */ fn format(&self, conn: &Conn, color: bool) -> Self::Output; } /** The trillium handler for this crate, and the core type */ pub struct Logger<F> { format: F, color_mode: ColorMode, target: Target, } impl Logger<()> { /** Builds a new logger Defaults: * formatter: [`dev_formatter`] * color mode: [`ColorMode::Auto`] * target: [`Target::Stdout`] */ pub fn new() -> Logger<impl LogFormatter> { Logger { format: dev_formatter, color_mode: ColorMode::Auto, target: Target::Stdout, } } } impl<T> Logger<T> { /** replace the formatter with any type that implements [`LogFormatter`] see the trait documentation for [`LogFormatter`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_color_mode`] ``` use trillium_logger::{Logger, apache_common}; Logger::new().with_formatter(apache_common("-", "-")); ``` */ pub fn with_formatter<Formatter: LogFormatter>( self, formatter: Formatter, ) -> Logger<Formatter> { Logger { format: formatter, color_mode: self.color_mode, target: self.target, } } } impl<F: LogFormatter> Logger<F> { /** specify the color mode for this logger. see [`ColorMode`] for more details. 
note that this can be chained with [`Logger::with_target`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, ColorMode}; Logger::new().with_color_mode(ColorMode::On); ``` */ pub fn with_color_mode(mut self, color_mode: ColorMode) -> Self { self.color_mode = color_mode; self } /** specify the logger target see [`Target`] for more details. note that this can be chained with [`Logger::with_color_mode`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, Target}; Logger::new().with_target(Target::Logger(log::Level::Info)); ``` */ pub fn with_target(mut self, target: Target) -> Self { self.target = target; self } } struct LoggerWasRun; #[async_trait] impl<F> Handler for Logger<F> where F: LogFormatter, { async fn init(&mut self, info: &mut Info) { self.target.write(&format!( " 🌱🦀🌱 {} started Listening at {}{} Control-C to quit", info.server_description(), info.listener_description(), info.tcp_socket_addr() .map(|s| format!(" (bound as tcp://{})", s)) .unwrap_or_default(), )); } async fn run(&self, conn: Conn) -> Conn { conn.with_state(LoggerWasRun) } async fn before_send(&self, mut conn: Conn) -> Conn { if conn.state::<LoggerWasRun>().is_some() { let target = self.target; let output = self.format.format(&conn, self.color_mode.is_enabled()); conn.inner_mut().after_send(move |_| target.write(output)); } conn } } pub fn logger() -> Logger<impl LogFormatter> { Logger::new() }
#![forbid(unsafe_code)] #![warn( rustdoc::missing_crate_level_docs, missing_docs, nonstandard_style, unused_qualifications )] /*! Welcome to the trillium logger! */ pub use crate::formatters::{apache_combined, apache_common, dev_formatter}; use std::fmt::Display; use trillium::{async_trait, Conn, Handler, Info}; /** Components with which common log formats can be constructed */ pub mod formatters; /** A configuration option that determines if format will be colorful. The default is [`ColorMode::Auto`], which only enables color if stdout is detected to be a shell terminal (tty). If this detection is incorrect, you can explicitly set it
Self { self.color_mode = color_mode; self } /** specify the logger target see [`Target`] for more details. note that this can be chained with [`Logger::with_color_mode`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, Target}; Logger::new().with_target(Target::Logger(log::Level::Info)); ``` */ pub fn with_target(mut self, target: Target) -> Self { self.target = target; self } } struct LoggerWasRun; #[async_trait] impl<F> Handler for Logger<F> where F: LogFormatter, { async fn init(&mut self, info: &mut Info) { self.target.write(&format!( " 🌱🦀🌱 {} started Listening at {}{} Control-C to quit", info.server_description(), info.listener_description(), info.tcp_socket_addr() .map(|s| format!(" (bound as tcp://{})", s)) .unwrap_or_default(), )); } async fn run(&self, conn: Conn) -> Conn { conn.with_state(LoggerWasRun) } async fn before_send(&self, mut conn: Conn) -> Conn { if conn.state::<LoggerWasRun>().is_some() { let target = self.target; let output = self.format.format(&conn, self.color_mode.is_enabled()); conn.inner_mut().after_send(move |_| target.write(output)); } conn } } pub fn logger() -> Logger<impl LogFormatter> { Logger::new() }
to [`ColorMode::On`] or [`ColorMode::Off`] **Note**: The actual colorization of output is determined by the log formatters, so it is possible for this to be correctly enabled but for the output to have no colored components. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum ColorMode { Auto, On, Off, } impl ColorMode { pub(crate) fn is_enabled(&self) -> bool { match self { ColorMode::Auto => atty::is(atty::Stream::Stdout), ColorMode::On => true, ColorMode::Off => false, } } } impl Default for ColorMode { fn default() -> Self { Self::Auto } } /** Specifies where the logger output should be sent The default is [`Target::Stdout`]. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum Target { /** Send trillium logger output to a log crate backend. See [`log`] for output options */ Logger(log::Level), /** Send trillium logger output to stdout */ Stdout, } impl Target { pub(crate) fn write(&self, data: impl Display) { match self { Target::Logger(level) => { log::log!(*level, "{}", data); } Target::Stdout => { println!("{}", data); } } } } impl Default for Target { fn default() -> Self { Self::Stdout } } /** The interface to format a &[`Conn`] as a [`Display`]-able output In general, the included loggers provide a mechanism for composing these, so top level formats like [`dev_formatter`], [`apache_common`] and [`apache_combined`] are composed in terms of component formatters like [`formatters::method`], [`formatters::ip`], [`formatters::timestamp`], and many others (see [`formatters`] for a full list) When implementing this trait, note that [`Display::fmt`] is called on [`LogFormatter::Output`] _after_ the response has been fully sent, but that the [`LogFormatter::format`] is called _before_ the response has been sent. If you need to perform timing-sensitive calculations that represent the full http cycle, move whatever data is needed to make the calculation into a new type that implements Display, ensuring that it is calculated at the right time. 
## Implementations ### Tuples LogFormatter is implemented for all tuples of other LogFormatter types, from 2-26 formatters long. The output of these formatters is concatenated with no space between. ### `&'static str` LogFormatter is implemented for &'static str, allowing for interspersing spaces and other static formatting details into tuples. ```rust use trillium_logger::{Logger, formatters}; let handler = Logger::new() .with_formatter(("-> ", formatters::method, " ", formatters::url)); ``` ### `Fn(&Conn, bool) -> impl Display` LogFormatter is implemented for all functions that conform to this signature. ```rust # use trillium_logger::{Logger, dev_formatter}; # use trillium::Conn; # use std::borrow::Cow; # struct User(String); impl User { fn name(&self) -> &str { &self.0 } } fn user(conn: &Conn, color: bool) -> Cow<'static, str> { match conn.state::<User>() { Some(user) => String::from(user.name()).into(), None => "guest".into() } } let handler = Logger::new().with_formatter((dev_formatter, " ", user)); ``` */ pub trait LogFormatter: Send + Sync + 'static { /** The display type for this formatter For a simple formatter, this will likely be a String, or even better, a lightweight type that implements Display. */ type Output: Display + Send + Sync + 'static; /** Extract Output from this Conn */ fn format(&self, conn: &Conn, color: bool) -> Self::Output; } /** The trillium handler for this crate, and the core type */ pub struct Logger<F> { format: F, color_mode: ColorMode, target: Target, } impl Logger<()> { /** Builds a new logger Defaults: * formatter: [`dev_formatter`] * color mode: [`ColorMode::Auto`] * target: [`Target::Stdout`] */ pub fn new() -> Logger<impl LogFormatter> { Logger { format: dev_formatter, color_mode: ColorMode::Auto, target: Target::Stdout, } } } impl<T> Logger<T> { /** replace the formatter with any type that implements [`LogFormatter`] see the trait documentation for [`LogFormatter`] for more details. 
note that this can be chained with [`Logger::with_target`] and [`Logger::with_color_mode`] ``` use trillium_logger::{Logger, apache_common}; Logger::new().with_formatter(apache_common("-", "-")); ``` */ pub fn with_formatter<Formatter: LogFormatter>( self, formatter: Formatter, ) -> Logger<Formatter> { Logger { format: formatter, color_mode: self.color_mode, target: self.target, } } } impl<F: LogFormatter> Logger<F> { /** specify the color mode for this logger. see [`ColorMode`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, ColorMode}; Logger::new().with_color_mode(ColorMode::On); ``` */ pub fn with_color_mode(mut self, color_mode: ColorMode) ->
random
[ { "content": "pub fn dev_formatter(conn: &Conn, color: bool) -> impl Display + Send + 'static {\n\n (method, \" \", url, \" \", response_time, \" \", status).format(conn, color)\n\n}\n\n\n\n/**\n\nformatter for the peer ip address of the connection\n\n\n\n**note**: this can be modified by handlers prior to l...
Rust
src/parser/record.rs
natir/needletail
3756d79cd3452f178a657387c26114bbbe7a5650
use std::borrow::Cow; use std::io::Write; use memchr::memchr; use crate::errors::ParseError; use crate::parser::fasta::BufferPosition as FastaBufferPosition; use crate::parser::fastq::BufferPosition as FastqBufferPosition; use crate::parser::utils::{Format, LineEnding, Position}; use crate::Sequence; #[derive(Debug, Clone)] enum BufferPositionKind<'a> { Fasta(&'a FastaBufferPosition), Fastq(&'a FastqBufferPosition), } #[derive(Debug, Clone)] pub struct SequenceRecord<'a> { buffer: &'a [u8], buf_pos: BufferPositionKind<'a>, position: &'a Position, line_ending: LineEnding, } impl<'a> SequenceRecord<'a> { pub(crate) fn new_fasta( buffer: &'a [u8], buf_pos: &'a FastaBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fasta(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } pub(crate) fn new_fastq( buffer: &'a [u8], buf_pos: &'a FastqBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fastq(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } #[inline] pub fn format(&self) -> Format { match self.buf_pos { BufferPositionKind::Fasta(_) => Format::Fasta, BufferPositionKind::Fastq(_) => Format::Fastq, } } #[inline] pub fn id(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.id(&self.buffer), BufferPositionKind::Fastq(bp) => bp.id(&self.buffer), } } #[inline] pub fn raw_seq(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.raw_seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer), } } pub fn seq(&self) -> Cow<[u8]> { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer).into(), } } #[inline] pub fn qual(&self) -> Option<&[u8]> { match self.buf_pos { BufferPositionKind::Fasta(_) => None, BufferPositionKind::Fastq(bp) => 
Some(bp.qual(&self.buffer)), } } #[inline] pub fn all(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.all(&self.buffer), BufferPositionKind::Fastq(bp) => bp.all(&self.buffer), } } #[inline] pub fn num_bases(&self) -> usize { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.num_bases(&self.buffer), BufferPositionKind::Fastq(bp) => bp.num_bases(&self.buffer), } } pub fn start_line_number(&self) -> u64 { self.position.line } pub fn line_ending(&self) -> LineEnding { self.line_ending } pub fn write( &self, writer: &mut dyn Write, forced_line_ending: Option<LineEnding>, ) -> Result<(), ParseError> { match self.buf_pos { BufferPositionKind::Fasta(_) => write_fasta( self.id(), self.raw_seq(), writer, forced_line_ending.unwrap_or(self.line_ending), ), BufferPositionKind::Fastq(_) => write_fastq( self.id(), self.raw_seq(), self.qual(), writer, forced_line_ending.unwrap_or(self.line_ending), ), } } } impl<'a> Sequence<'a> for SequenceRecord<'a> { fn sequence(&'a self) -> &'a [u8] { self.raw_seq() } } pub fn mask_header_tabs(id: &[u8]) -> Option<Vec<u8>> { memchr(b'\t', id).map(|_| { id.iter() .map(|x| if *x == b'\t' { b'|' } else { *x }) .collect() }) } pub fn mask_header_utf8(id: &[u8]) -> Option<Vec<u8>> { match String::from_utf8_lossy(id) { Cow::Owned(s) => Some(s.into_bytes()), Cow::Borrowed(_) => None, } } pub fn write_fasta( id: &[u8], seq: &[u8], writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b">")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; Ok(()) } pub fn write_fastq( id: &[u8], seq: &[u8], qual: Option<&[u8]>, writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b"@")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; writer.write_all(b"+")?; 
writer.write_all(&ending)?; if let Some(qual) = qual { writer.write_all(&qual)?; } else { writer.write_all(&vec![b'I'; seq.len()])?; } writer.write_all(&ending)?; Ok(()) }
use std::borrow::Cow; use std::io::Write; use memchr::memchr; use crate::errors::ParseError; use crate::parser::fasta::BufferPosition as FastaBufferPosition; use crate::parser::fastq::BufferPosition as FastqBufferPosition; use crate::parser::utils::{Format, LineEnding, Position}; use crate::Sequence; #[derive(Debug, Clone)] enum BufferPositionKind<'a> { Fasta(&'a FastaBufferPosition), Fastq(&'a FastqBufferPosition), } #[derive(Debug, Clone)] pub struct SequenceRecord<'a> { buffer: &'a [u8], buf_pos: BufferPositionKind<'a>, position: &'a Position, line_ending: LineEnding, } impl<'a> SequenceRecord<'a> { pub(crate) fn new_fasta( buffer: &'a [u8], buf_pos: &'a FastaBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fasta(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } pub(crate) fn new_fastq( buffer: &'a [u8], buf_pos: &'a FastqBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fastq(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } #[inline] pub fn format(&self) -> Format { match self.buf_pos { BufferPositionKind::Fasta(_) => Format::Fasta, BufferPositionKind::Fastq(_) => Format::Fastq, } } #[inline] pub fn id(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.id(&self.buffer), BufferPositionKind::Fastq(bp) => bp.id(&self.buffer), } } #[inline] pub fn raw_seq(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.raw_seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer), } } pub fn seq(&self) -> Cow<[u8]> { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer).into(), } } #[inline] pub fn qual(&self) -> Option<&[u8]> { match self.buf_pos { BufferPositionKind::Fasta(_) => None, BufferPositionKind::Fastq(bp) => 
Some(bp.qual(&self.buffer)), } } #[inline] pub fn all(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.all(&self.buffer), BufferPositionKind::Fastq(bp) => bp.all(&self.buffer), } } #[inline] pub fn num_bases(&self) -> usize {
} pub fn start_line_number(&self) -> u64 { self.position.line } pub fn line_ending(&self) -> LineEnding { self.line_ending } pub fn write( &self, writer: &mut dyn Write, forced_line_ending: Option<LineEnding>, ) -> Result<(), ParseError> { match self.buf_pos { BufferPositionKind::Fasta(_) => write_fasta( self.id(), self.raw_seq(), writer, forced_line_ending.unwrap_or(self.line_ending), ), BufferPositionKind::Fastq(_) => write_fastq( self.id(), self.raw_seq(), self.qual(), writer, forced_line_ending.unwrap_or(self.line_ending), ), } } } impl<'a> Sequence<'a> for SequenceRecord<'a> { fn sequence(&'a self) -> &'a [u8] { self.raw_seq() } } pub fn mask_header_tabs(id: &[u8]) -> Option<Vec<u8>> { memchr(b'\t', id).map(|_| { id.iter() .map(|x| if *x == b'\t' { b'|' } else { *x }) .collect() }) } pub fn mask_header_utf8(id: &[u8]) -> Option<Vec<u8>> { match String::from_utf8_lossy(id) { Cow::Owned(s) => Some(s.into_bytes()), Cow::Borrowed(_) => None, } } pub fn write_fasta( id: &[u8], seq: &[u8], writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b">")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; Ok(()) } pub fn write_fastq( id: &[u8], seq: &[u8], qual: Option<&[u8]>, writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b"@")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; writer.write_all(b"+")?; writer.write_all(&ending)?; if let Some(qual) = qual { writer.write_all(&qual)?; } else { writer.write_all(&vec![b'I'; seq.len()])?; } writer.write_all(&ending)?; Ok(()) }
match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.num_bases(&self.buffer), BufferPositionKind::Fastq(bp) => bp.num_bases(&self.buffer), }
if_condition
[ { "content": "/// Find the lexigraphically smallest substring of `seq` of length `length`\n\n///\n\n/// There's probably a faster algorithm for this somewhere...\n\npub fn minimizer(seq: &[u8], length: usize) -> Cow<[u8]> {\n\n let reverse_complement: Vec<u8> = seq.iter().rev().map(|n| complement(*n)).collec...
Rust
client/src/utils/hd.rs
huhn511/stronghold.rs
85920e55eb05e50520795a67c533ec52f08fd10b
use crypto::{ed25519::SecretKey, macs::hmac::HMAC_SHA512}; use std::convert::TryFrom; #[derive(Debug)] pub enum Error { NotSupported, InvalidLength(usize), CryptoError(crypto::Error), } pub struct Seed(Vec<u8>); impl Seed { pub fn from_bytes(bs: &[u8]) -> Self { Self(bs.to_vec()) } pub fn to_master_key(&self) -> Key { let mut I = [0; 64]; HMAC_SHA512(&self.0, b"ed25519 seed", &mut I); Key(I) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { self.to_master_key().derive(chain) } } type ChainCode = [u8; 32]; #[derive(Copy, Clone, Debug)] pub struct Key([u8; 64]); impl Key { pub fn secret_key(&self) -> Result<SecretKey, Error> { let mut I_l = [0; 32]; I_l.copy_from_slice(&self.0[..32]); SecretKey::from_le_bytes(I_l).map_err(Error::CryptoError) } pub fn chain_code(&self) -> ChainCode { let mut I_r = [0; 32]; I_r.copy_from_slice(&self.0[32..]); I_r } pub fn child_key(&self, segment: &Segment) -> Result<Key, Error> { if !segment.hardened { return Err(Error::NotSupported); } let mut data = [0u8; 1 + 32 + 4]; data[1..1 + 32].copy_from_slice(&self.0[..32]); data[1 + 32..1 + 32 + 4].copy_from_slice(&segment.bs); let mut I = [0; 64]; HMAC_SHA512(&data, &self.0[32..], &mut I); Ok(Self(I)) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { let mut k = *self; for c in &chain.0 { k = k.child_key(c)?; } Ok(k) } } impl TryFrom<&[u8]> for Key { type Error = Error; fn try_from(bs: &[u8]) -> Result<Self, Self::Error> { if bs.len() != 64 { return Err(Error::InvalidLength(bs.len())); } let mut ds = [0; 64]; ds.copy_from_slice(bs); Ok(Self(ds)) } } #[derive(Debug, Clone)] pub struct Segment { hardened: bool, bs: [u8; 4], } impl Segment { pub fn from_u32(i: u32) -> Self { Self { hardened: i >= Self::HARDEN_MASK, bs: i.to_be_bytes(), } } pub const HARDEN_MASK: u32 = 1 << 31; } #[derive(Default, Debug, Clone)] pub struct Chain(Vec<Segment>); impl Chain { pub fn empty() -> Self { Self(vec![]) } pub fn from_u32<I: IntoIterator<Item = u32>>(is: I) -> Self { 
Self(is.into_iter().map(Segment::from_u32).collect()) } pub fn from_u32_hardened<I: IntoIterator<Item = u32>>(is: I) -> Self { Self::from_u32(is.into_iter().map(|i| Segment::HARDEN_MASK | i)) } } impl Into<Vec<u8>> for Key { fn into(self) -> Vec<u8> { self.0.to_vec() } } #[cfg(test)] mod tests { use super::*; struct TestChain { chain: Chain, chain_code: &'static str, private_key: &'static str, } struct TestVector { seed: &'static str, master_chain_code: &'static str, master_private_key: &'static str, chains: Vec<TestChain>, } #[test] fn ed25519_test_vectors() -> Result<(), Error> { let tvs = [ TestVector { seed: "000102030405060708090a0b0c0d0e0f", master_chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", master_private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", chains: vec![ TestChain { chain: Chain::empty(), chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", }, TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "8b59aa11380b624e81507a27fedda59fea6d0b779a778918a2fd3590e16e9c69", private_key: "68e0fe46dfb67e368c75379acec591dad19df3cde26e63b93a8e704f1dade7a3", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1]), chain_code: "a320425f77d1b5c2505a6b1b27382b37368ee640e3557c315416801243552f14", private_key: "b1d0bad404bf35da785a64ca1ac54b2617211d2777696fbffaf208f746ae84f2", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2]), chain_code: "2e69929e00b5ab250f49c3fb1c12f252de4fed2c1db88387094a0f8c4c9ccd6c", private_key: "92a5b23c0b8a99e37d07df3fb9966917f5d06e02ddbd909c7e184371463e9fc9", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), 
chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2, 1000000000]), chain_code: "68789923a0cac2cd5a29172a475fe9e0fb14cd6adb5ad98a3fa70333e7afa230", private_key: "8f94d394a8e8fd6b1bc2f3f49f5c47e385281d5c17e65324b0f62483e37e8793", }, ], }, TestVector { seed: "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542", master_chain_code: "ef70a74db9c3a5af931b5fe73ed8e1a53464133654fd55e7a66f8570b8e33c3b", master_private_key: "171cb88b1b3c1db25add599712e36245d75bc65a1a5c9e18d76f9f2b1eab4012", chains: vec![ TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "0b78a3226f915c082bf118f83618a618ab6dec793752624cbeb622acb562862d", private_key: "1559eb2bbec5790b0c65d8693e4d0875b1747f4970ae8b650486ed7470845635", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647]), chain_code: "138f0b2551bcafeca6ff2aa88ba8ed0ed8de070841f0c4ef0165df8181eaad7f", private_key: "ea4f5bfe8694d8bb74b7b59404632fd5968b774ed545e810de9c32a4fb4192f4", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1]), chain_code: "73bd9fff1cfbde33a1b846c27085f711c0fe2d66fd32e139d3ebc28e5a4a6b90", private_key: "3757c7577170179c7868353ada796c839135b3d30554bbb74a4b1e4a5a58505c", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646]), chain_code: "0902fe8a29f9140480a00ef244bd183e8a13288e4412d8389d140aac1794825a", private_key: "5837736c89570de861ebc173b1086da4f505d4adb387c6a1b1342d5e4ac9ec72", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646, 2]), chain_code: "5d70af781f3a37b829f0d060924d5e960bdc02e85423494afc0b1a41bbe196d4", private_key: "551d333177df541ad876a60ea71f00447931c0a9da16f227c11ea080d7391b8d", }, ], } ]; for tv in &tvs { let seed = 
Seed::from_bytes(&hex::decode(tv.seed).unwrap()); let m = seed.to_master_key(); let mut expected_master_chain_code = [0u8; 32]; hex::decode_to_slice(&tv.master_chain_code, &mut expected_master_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_master_chain_code, m.chain_code()); let mut expected_master_private_key = [0u8; 32]; hex::decode_to_slice(&tv.master_private_key, &mut expected_master_private_key as &mut [u8]).unwrap(); assert_eq!(expected_master_private_key, m.secret_key()?.to_le_bytes()); for c in tv.chains.iter() { let ck = seed.derive(&c.chain)?; let mut expected_chain_code = [0u8; 32]; hex::decode_to_slice(&c.chain_code, &mut expected_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_chain_code, ck.chain_code()); let mut expected_private_key = [0u8; 32]; hex::decode_to_slice(&c.private_key, &mut expected_private_key as &mut [u8]).unwrap(); assert_eq!(expected_private_key, ck.secret_key()?.to_le_bytes()); } } Ok(()) } }
use crypto::{ed25519::SecretKey, macs::hmac::HMAC_SHA512}; use std::convert::TryFrom; #[derive(Debug)] pub enum Error { NotSupported, InvalidLength(usize), CryptoError(crypto::Error), } pub struct Seed(Vec<u8>); impl Seed { pub fn from_bytes(bs: &[u8]) -> Self { Self(bs.to_vec()) } pub fn to_master_key(&self) -> Key { let mut I = [0; 64]; HMAC_SHA512(&self.0, b"ed25519 seed", &mut I); Key(I) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { self.to_master_key().derive(chain) } } type ChainCode = [u8; 32]; #[derive(Copy, Clone, Debug)] pub struct Key([u8; 64]); impl Key { pub fn secret_key(&self) -> Result<SecretKey, Error> { let mut I_l = [0; 32]; I_l.copy_from_slice(&self.0[..32]); SecretKey::from_le_bytes(I_l).map_err(Error::CryptoError) } pub fn chain_code(&self) -> ChainCode { let mut I_r = [0; 32]; I_r.copy_from_slice(&self.0[32..]); I_r } pub fn child_key(&self, segment: &Segment) -> Result<Key, Error> { if !segment.hardened { return Err(Error::NotSupported); } let mut data = [0u8; 1 + 32 + 4]; data[1..1 + 32].copy_from_slice(&self.0[..32]); data[1 + 32..1 + 32 + 4].copy_from_slice(&segment.bs); let mut I = [0; 64]; HMAC_SHA512(&data, &self.0[32..], &mut I); Ok(Self(I)) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { let mut k = *self; for c in &chain.0 { k = k.child_key(c)?; } Ok(k) } } impl TryFrom<&[u8]> for Key { type Error = Error; fn try_from(bs: &[u8]) -> Result<Self, Self::Error> { if bs.len() != 64 { return Err(Error::InvalidLength(bs.len())); } let mut ds = [0; 64]; ds.copy_from_slice(bs); Ok(Self(ds)) } } #[derive(Debug, Clone)] pub struct Segment { hardened: bool, bs: [u8; 4], } impl Segment { pub fn from_u32(i: u32) -> Self { Self { hardened: i >= Self::HARDEN_MASK, bs: i.to_be_bytes(), } } pub const HARDEN_MASK: u32 = 1 << 31; } #[derive(Default, Debug, Clone)] pub struct Chain(Vec<Segment>); impl Chain { pub fn empty() -> Self { Self(vec![]) } pub fn from_u32<I: IntoIterator<Item = u32>>(is: I) -> Self { 
Self(is.into_iter().map(Segment::from_u32).collect()) } pub fn from_u32_hardened<I: IntoIterator<Item = u32>>(is: I) -> Self { Self::from_u32(is.into_iter().map(|i| Segment::HARDEN_MASK | i)) } } impl Into<Vec<u8>> for Key { fn into(self) -> Vec<u8> { self.0.to_vec() } } #[cfg(test)] mod tests { use super::*; struct TestChain { chain: Chain, chain_code: &'static str, private_key: &'static str, } struct TestVector { seed: &'static str, master_chain_code: &'static str, master_private_key: &'static str, chains: Vec<TestChain>, } #[test] fn ed25519_test_vectors() -> Result<(), Error> { let tvs = [ TestVector { seed: "000102030405060708090a0b0c0d0e0f", master_chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", master_private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", chains: vec![ TestChain { chain: Chain::empty(), chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", }, TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "8b59aa11380b624e81507a27fedda59fea6d0b779a778918a2fd3590e16e9c69", private_key: "68e0fe46dfb67e368c75379acec591dad19df3cde26e63b93a8e704f1dade7a3", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1]), chain_code: "a320425f77d1b5c2505a6b1b27382b37368ee640e3557c315416801243552f14",
}
private_key: "b1d0bad404bf35da785a64ca1ac54b2617211d2777696fbffaf208f746ae84f2", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2]), chain_code: "2e69929e00b5ab250f49c3fb1c12f252de4fed2c1db88387094a0f8c4c9ccd6c", private_key: "92a5b23c0b8a99e37d07df3fb9966917f5d06e02ddbd909c7e184371463e9fc9", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2, 1000000000]), chain_code: "68789923a0cac2cd5a29172a475fe9e0fb14cd6adb5ad98a3fa70333e7afa230", private_key: "8f94d394a8e8fd6b1bc2f3f49f5c47e385281d5c17e65324b0f62483e37e8793", }, ], }, TestVector { seed: "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542", master_chain_code: "ef70a74db9c3a5af931b5fe73ed8e1a53464133654fd55e7a66f8570b8e33c3b", master_private_key: "171cb88b1b3c1db25add599712e36245d75bc65a1a5c9e18d76f9f2b1eab4012", chains: vec![ TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "0b78a3226f915c082bf118f83618a618ab6dec793752624cbeb622acb562862d", private_key: "1559eb2bbec5790b0c65d8693e4d0875b1747f4970ae8b650486ed7470845635", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647]), chain_code: "138f0b2551bcafeca6ff2aa88ba8ed0ed8de070841f0c4ef0165df8181eaad7f", private_key: "ea4f5bfe8694d8bb74b7b59404632fd5968b774ed545e810de9c32a4fb4192f4", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1]), chain_code: "73bd9fff1cfbde33a1b846c27085f711c0fe2d66fd32e139d3ebc28e5a4a6b90", private_key: 
"3757c7577170179c7868353ada796c839135b3d30554bbb74a4b1e4a5a58505c", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646]), chain_code: "0902fe8a29f9140480a00ef244bd183e8a13288e4412d8389d140aac1794825a", private_key: "5837736c89570de861ebc173b1086da4f505d4adb387c6a1b1342d5e4ac9ec72", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646, 2]), chain_code: "5d70af781f3a37b829f0d060924d5e960bdc02e85423494afc0b1a41bbe196d4", private_key: "551d333177df541ad876a60ea71f00447931c0a9da16f227c11ea080d7391b8d", }, ], } ]; for tv in &tvs { let seed = Seed::from_bytes(&hex::decode(tv.seed).unwrap()); let m = seed.to_master_key(); let mut expected_master_chain_code = [0u8; 32]; hex::decode_to_slice(&tv.master_chain_code, &mut expected_master_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_master_chain_code, m.chain_code()); let mut expected_master_private_key = [0u8; 32]; hex::decode_to_slice(&tv.master_private_key, &mut expected_master_private_key as &mut [u8]).unwrap(); assert_eq!(expected_master_private_key, m.secret_key()?.to_le_bytes()); for c in tv.chains.iter() { let ck = seed.derive(&c.chain)?; let mut expected_chain_code = [0u8; 32]; hex::decode_to_slice(&c.chain_code, &mut expected_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_chain_code, ck.chain_code()); let mut expected_private_key = [0u8; 32]; hex::decode_to_slice(&c.private_key, &mut expected_private_key as &mut [u8]).unwrap(); assert_eq!(expected_private_key, ck.secret_key()?.to_le_bytes()); } } Ok(()) }
function_block-function_prefix_line
[ { "content": "/// Read ciphertext from the input, decrypts it using the specified key and the associated data\n\n/// specified during encryption and returns the plaintext\n\npub fn read<I: Read>(input: &mut I, key: &Key, associated_data: &[u8]) -> crate::Result<Vec<u8>> {\n\n // check the header\n\n check...
Rust
src/binary_heap.rs
acodercat/rust-algorithms
06db2526fa8709886a2baf2467f720c62076720d
use std::fmt::Debug; use std::cmp::PartialOrd; #[derive(Debug)] pub struct BinaryHeap <T> { container: Vec<T>, } impl <T: Debug + PartialOrd> BinaryHeap <T> { pub fn new() -> Self { return BinaryHeap { container: Vec::new() }; } pub fn from(vec: Vec<T>) -> BinaryHeap<T> { let mut heap = BinaryHeap { container: vec }; for i in (0 ..= heap.calculate_parent_index_of_tail()).rev() { heap.shift_down(i); } return heap; } pub fn push(&mut self, element: T) { self.container.push(element); self.shift_up(self.tail_index()); } pub fn len(&self)-> usize { return self.container.len(); } pub fn tail_index(&self)-> usize { return self.len() - 1; } pub fn capacity(self) -> usize { return self.container.capacity(); } pub fn is_empty(self) -> bool { return self.container.is_empty(); } pub fn peek(&self) -> Option<&T> { return self.container.first(); } pub fn extract(&mut self) -> Option<T> { let tail_index = self.tail_index(); let head_index = 0; self.container.swap(head_index, tail_index); let root= self.container.pop(); self.shift_down(head_index); return root; } fn shift_down(&mut self, mut current_index: usize) { let mut found_child_index; let mut left_child_index_of_current = Self::calculate_left_child_index(current_index); while left_child_index_of_current < self.len() { let right_child_index_of_current = Self::calculate_right_child_index(current_index); if right_child_index_of_current < self.len() && self.container.get(right_child_index_of_current) > self.container.get(left_child_index_of_current) { found_child_index = right_child_index_of_current; } else { found_child_index = left_child_index_of_current; } if self.container.get(current_index) >= self.container.get(found_child_index) { break; } self.container.swap(current_index, found_child_index); current_index = found_child_index; left_child_index_of_current = Self::calculate_left_child_index(current_index); } } fn shift_up(&mut self, mut current_index: usize) { let mut parent_index_of_current = 
Self::calculate_parent_index(current_index); while (current_index > 0) && (self.container.get(current_index) > self.container.get(parent_index_of_current)) { self.container.swap(current_index, parent_index_of_current); current_index = parent_index_of_current; parent_index_of_current = Self::calculate_parent_index(current_index); } } fn calculate_left_child_index(index: usize) -> usize { return index * 2 + 1; } fn calculate_right_child_index(index: usize) -> usize { return Self::calculate_left_child_index(index) + 1; } fn calculate_parent_index(index: usize) -> usize { let parent_index = ((index as f32 - 1.0) / 2.0).floor() as usize; if parent_index <= 0 { return 0; } return parent_index; } fn calculate_parent_index_of_tail(&self) -> usize { let tail_index = self.len() - 1; return Self::calculate_parent_index(tail_index); } } #[test] fn test_binary_heap() { let mut heap1:BinaryHeap<i32> = BinaryHeap::from(vec![1,3,4,5]); heap1.push(-21); heap1.push(1); heap1.push(3); heap1.push(190); assert_eq!(heap1.peek(), Some(&190)); assert_eq!(heap1.extract(), Some(190)); assert_eq!(heap1.peek(), Some(&5)); let mut heap2:BinaryHeap<i32> = BinaryHeap::new(); heap2.push(-21); heap2.push(1); heap2.push(3); heap2.push(190); assert_eq!(heap2.peek(), Some(&190)); assert_eq!(heap2.extract(), Some(190)); assert_eq!(heap2.peek(), Some(&3)); }
use std::fmt::Debug; use std::cmp::PartialOrd; #[derive(Debug)] pub struct BinaryHeap <T> { container: Vec<T>, } impl <T: Debug + PartialOrd> BinaryHeap <T> { pub fn new() -> Self { return BinaryHeap { container: Vec::new() }; } pub fn from(vec: Vec<T>) -> BinaryHeap<T> { let mut heap = BinaryHeap { container: vec }; for i in (0 ..= heap.calculate_parent_index_of_tail()).rev() { heap.shift_down(i); } return heap; } pub fn push(&mut self, element: T) { self.container.push(element); self.shift_up(self.tail_index()); } pub fn len(&self)-> usize { return self.container.len(); } pub fn tail_index(&self)-> usize { return self.len() - 1; } pub fn capacity(self) -> usize { return self.container.capacity(); } pub fn is_empty(self) -> bool { return self.container.is_empty(); } pub fn peek(&self) -> Option<&T> { return self.container.first(); } pub fn extract(&mut self) -> Option<T> { let tail_index = self.tail_index(); let head_index = 0; self.container.swap(head_index, tail_index); let root= self.container.pop(); self.shift_down(head_index); return root; } fn shift_down(&mut self, mut current_index: usize) { let mut found_child_index; let mut left_child_index_of_current = Self::calculate_left_child_index(current_index); while left_child_index_of_current < self.len() { let right_child_index_of_current = Self::calculate_right_child_index(current_index); if right_child_index_of_current < self.len() && self.container.get(right_child_index_of_current) > self.container.get(left_child_index_of_current) { found_child_index = right_child_index_of_current; } else { found_child_index = left_child_index_of_current; } if self.container.get(current_index) >= self.container.get(found_child_index) { break; } self.container.swap(current_index, found_child_index); current_index = found_child_index; left_child_index_of_current = Self::calculate_left_child_index(current_index); } } fn shift_up(&mut self, mut current_index: usize) { let mut parent_index_of_current = 
Self::calculate_parent_index(current_index); while (current_index > 0) && (self.container.get(current_index) > self.container.get(parent_index_of_current)) { self.container.swap(current_index, parent_index_of_current); current_index = parent_index_of_current; parent_index_of_current = Self::calculate_parent_index(current_index); } } fn calculate_left_child_index(index: usize) -> usize { return index * 2 + 1; } fn calculate_right_child_index(index: usize) -> usize { return Self::calculate_left_child_index(index) + 1; } fn calculate_parent_index(index: usize) -> usize { let parent_index = ((index as f32 - 1.0) / 2.0).floor() as usize; if parent_index <= 0 { return 0; } return parent_index; } fn calculate_parent_index_of_tail(&self) -> usize { let tail_index = self.len() - 1; return Self::calculate_parent_index(tail_index); } } #[test] fn test_binary_heap() { let mut heap1:BinaryHeap<i32> = BinaryHeap::from(vec![1,3,4,5]); heap1.push(-21); heap1.push(1); heap1.push(3); heap1.push(190); assert_eq!(heap1.peek(), Some(&190)); assert_eq!(heap1.extract(), Some(190)); assert_eq!(heap1.peek(), Some(&5)); let mut heap2:BinaryHeap<i32> = BinaryHeap::new(); heap2.push(-21);
heap2.push(1); heap2.push(3); heap2.push(190); assert_eq!(heap2.peek(), Some(&190)); assert_eq!(heap2.extract(), Some(190)); assert_eq!(heap2.peek(), Some(&3)); }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let mut heap:BinaryHeap<i32> = BinaryHeap::from(vec![1, 2, 3, 4]);\n\n heap.push(-21);\n\n heap.push(1);\n\n heap.push(3);\n\n heap.push(190);\n\n heap.push(4);\n\n heap.push(90);\n\n heap.extract();\n\n heap.extract();\n\n println!(\"{}\", heap.peek()...
Rust
tests/issuer.rs
evannetwork/vade-evan
7d37225a4756c7595b03a4901a5e3805654f2f7d
/* Copyright (c) 2018-present evan GmbH. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ extern crate env_logger; extern crate log; extern crate vade_evan; mod test_data; use std::{collections::HashMap, error::Error}; use test_data::{ accounts::local::{ISSUER_ADDRESS, ISSUER_DID, ISSUER_PRIVATE_KEY}, did::{EXAMPLE_DID_1, EXAMPLE_DID_DOCUMENT_1}, vc_zkp::EXAMPLE_CREDENTIAL_SCHEMA, }; use vade_evan::{ application::{ datatypes::{CredentialSchema, SchemaProperty}, issuer::Issuer, }, crypto::crypto_utils::check_assertion_proof, signing::{LocalSigner, Signer}, }; #[tokio::test] async fn can_create_schema() -> Result<(), Box<dyn Error>> { match env_logger::try_init() { Ok(_) | Err(_) => (), }; let did_document = serde_json::to_value(&EXAMPLE_DID_DOCUMENT_1)?; let mut required_properties: Vec<String> = Vec::new(); let mut test_properties: HashMap<String, SchemaProperty> = HashMap::new(); test_properties.insert( "test_property_string".to_owned(), SchemaProperty { r#type: "string".to_owned(), format: None, items: None, }, ); required_properties.push("test_property_string".to_owned()); let signer: Box<dyn Signer> = Box::new(LocalSigner::new()); let schema: CredentialSchema = Issuer::create_credential_schema( EXAMPLE_DID_1, ISSUER_DID, "test_schema", "Test description", test_properties, required_properties, false, &did_document["publicKey"][0]["id"].to_string(), &ISSUER_PRIVATE_KEY, &signer, ) .await?; assert_eq!(&schema.author, &ISSUER_DID); assert_eq!(schema.additional_properties, false); let result_property: 
&SchemaProperty = &schema.properties.get("test_property_string").unwrap(); let expected: SchemaProperty = SchemaProperty { r#type: "string".to_owned(), format: None, items: None, }; assert_eq!( serde_json::to_string(&result_property).unwrap(), serde_json::to_string(&expected).unwrap(), ); let serialized = serde_json::to_string(&schema).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) } #[tokio::test] async fn can_create_credential_definition() -> Result<(), Box<dyn Error>> { let schema: CredentialSchema = serde_json::from_str(&EXAMPLE_CREDENTIAL_SCHEMA).unwrap(); let signer: Box<dyn Signer> = Box::new(LocalSigner::new()); let (definition, _) = Issuer::create_credential_definition( &EXAMPLE_DID_1, &ISSUER_DID, &schema, "did:evan:testcore:0x0f737d1478ea29df0856169f25ca9129035d6fd1#key-1", &ISSUER_PRIVATE_KEY, &signer, None, None, ) .await?; assert_eq!( serde_json::to_string(&definition.issuer).unwrap(), serde_json::to_string(&ISSUER_DID).unwrap(), ); assert_eq!( serde_json::to_string(&definition.schema).unwrap(), serde_json::to_string(&schema.id).unwrap() ); assert_eq!(&definition.id, EXAMPLE_DID_1); let serialized = serde_json::to_string(&definition).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) }
/* Copyright (c) 2018-present evan GmbH. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ extern crate env_logger; extern crate log; extern crate vade_evan; mod test_data; use std::{collections::HashMap, error::Error}; use test_data::{ accounts::local::{ISSUER_ADDRESS, ISSUER_DID, ISSUER_PRIVATE_KEY}, did::{EXAMPLE_DID_1, EXAMPLE_DID_DOCUMENT_1}, vc_zkp::EXAMPLE_CREDENTIAL_SCHEMA, }; use vade_evan::{ application::{ datatypes::{CredentialSchema, SchemaProperty}, issuer::Issuer, }, crypto::crypto_utils::check_assertion_proof, signing::{LocalSigner, Signer}, }; #[tokio::test] async fn can_create_schema() -> Result<(), Box<dyn Error>> { match env_logger::try_init() { Ok(_) | Err(_) => (), }; let did_document = serde_json::to_value(&EXAMPLE_DID_DOCUMENT_1)?; let mut required_properties: Vec<String> = Vec::new(); let mut test_properties: HashMap<String, SchemaProperty> = HashMap::new(); test_properties.insert( "test_property_string".to_owned(), SchemaProperty { r#type: "string".to_owned(), format: None,
ocalSigner::new()); let schema: CredentialSchema = Issuer::create_credential_schema( EXAMPLE_DID_1, ISSUER_DID, "test_schema", "Test description", test_properties, required_properties, false, &did_document["publicKey"][0]["id"].to_string(), &ISSUER_PRIVATE_KEY, &signer, ) .await?; assert_eq!(&schema.author, &ISSUER_DID); assert_eq!(schema.additional_properties, false); let result_property: &SchemaProperty = &schema.properties.get("test_property_string").unwrap(); let expected: SchemaProperty = SchemaProperty { r#type: "string".to_owned(), format: None, items: None, }; assert_eq!( serde_json::to_string(&result_property).unwrap(), serde_json::to_string(&expected).unwrap(), ); let serialized = serde_json::to_string(&schema).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) } #[tokio::test] async fn can_create_credential_definition() -> Result<(), Box<dyn Error>> { let schema: CredentialSchema = serde_json::from_str(&EXAMPLE_CREDENTIAL_SCHEMA).unwrap(); let signer: Box<dyn Signer> = Box::new(LocalSigner::new()); let (definition, _) = Issuer::create_credential_definition( &EXAMPLE_DID_1, &ISSUER_DID, &schema, "did:evan:testcore:0x0f737d1478ea29df0856169f25ca9129035d6fd1#key-1", &ISSUER_PRIVATE_KEY, &signer, None, None, ) .await?; assert_eq!( serde_json::to_string(&definition.issuer).unwrap(), serde_json::to_string(&ISSUER_DID).unwrap(), ); assert_eq!( serde_json::to_string(&definition.schema).unwrap(), serde_json::to_string(&schema.id).unwrap() ); assert_eq!(&definition.id, EXAMPLE_DID_1); let serialized = serde_json::to_string(&definition).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) }
items: None, }, ); required_properties.push("test_property_string".to_owned()); let signer: Box<dyn Signer> = Box::new(L
function_block-random_span
[ { "content": "fn get_vade_evan(matches: &ArgMatches) -> Result<VadeEvan> {\n\n let target = get_argument_value(&matches, \"target\", Some(DEFAULT_TARGET));\n\n let signer = get_argument_value(&matches, \"signer\", Some(DEFAULT_SIGNER));\n\n return Ok(VadeEvan::new(VadeEvanConfig { target, signer })?);\...
Rust
src/identbimap/mod.rs
sozysozbot/wenyan-to-rust
2129edbb87116b899875b14fee2e0785f8a7d7b3
use crate::parse; use big_s::S; use bimap_plus_map::BiMapPlusMap; use std::collections::HashMap; type Table = HashMap<String, String>; fn to_pinyin(ident: parse::Identifier, conversion_table: &Table) -> String { let parse::Identifier(i) = ident; let vec = i .chars() .map(|c| match conversion_table.get(&format!("{:X}", c as u32)) { None => S("_"), Some(a) => a.split(' ').collect::<Vec<_>>()[0].to_string(), }) .collect::<Vec<_>>(); vec.join("") } type Hanzi = parse::Identifier; type Ascii = String; #[derive(Debug, Clone, PartialEq, Eq)] pub enum Type { Mutable, } pub struct IdentBiMap(BiMapPlusMap<Hanzi, Ascii, Option<Type>>); impl IdentBiMap { pub fn translate_from_hanzi(&self, id: &parse::Identifier) -> Ascii { self.0.bimap_get_by_left(id).unwrap().to_string() } pub fn is_mutable(&self, id: &parse::Identifier) -> bool { let typ = self.0.hashmap_get_by_left(id).unwrap(); *typ == Some(Type::Mutable) } pub fn new(parsed: &[parse::Statement], conversion_table: &Table) -> Self { let mut ans = IdentBiMap(BiMapPlusMap::new()); for st in parsed { ans.insert_stmt(&st, &conversion_table); } eprintln!("{:?}", ans.0); ans } fn insert_ident(&mut self, ident: &parse::Identifier, conversion_table: &Table) { if self.0.bimap_get_by_left(&ident).is_some() { return; } let mut candidate: Ascii = to_pinyin(ident.clone(), &conversion_table); loop { if self.0.bimap_get_by_right(&candidate).is_some() { candidate.push('_'); } else { self.0.insert(ident.clone(), candidate, None); break; } } } fn insert_stmts(&mut self, statements: &[parse::Statement], conversion_table: &Table) { for s in statements { self.insert_stmt(&s, &conversion_table) } } fn insert_dat(&mut self, dat: &parse::Data, conversion_table: &Table) { if let parse::Data::Identifier(id) = dat { self.insert_ident(&id, &conversion_table) } } fn insert_data_or_qi2(&mut self, dat: &parse::OrQi2<parse::Data>, conversion_table: &Table) { if let parse::OrQi2::NotQi2(d1) = dat { self.insert_dat(d1, &conversion_table); } } fn 
insert_rvaluenoqi2(&mut self, val: &parse::Value<parse::Data>, conversion_table: &Table) { match val { parse::Value::Index(data, _) | parse::Value::Simple(data) | parse::Value::Length(data) => self.insert_dat(data, &conversion_table), parse::Value::IndexByIdent(data, ident) => { self.insert_dat(data, &conversion_table); self.insert_ident(ident, &conversion_table) } } } fn insert_unaryifexpr(&mut self, unary: &parse::UnaryIfExpr, conversion_table: &Table) { match unary { parse::UnaryIfExpr::Simple(data) => self.insert_data_or_qi2(data, &conversion_table), parse::UnaryIfExpr::Complex(val) => self.insert_rvaluenoqi2(val, &conversion_table), } } fn insert_ifexpr(&mut self, ifexpr: &parse::IfCond, conversion_table: &Table) { match ifexpr { parse::IfCond::Binary(data1, _, data2) => { self.insert_unaryifexpr(data1, &conversion_table); self.insert_unaryifexpr(data2, &conversion_table); } parse::IfCond::Unary(data) => { self.insert_unaryifexpr(data, &conversion_table); } parse::IfCond::NotQi2 => {} } } fn insert_math(&mut self, math: &parse::MathKind, conversion_table: &Table) { match math { parse::MathKind::ArithUnaryMath(data) => { self.insert_data_or_qi2(data, &conversion_table) } parse::MathKind::ArithBinaryMath(_, data1, _, data2) | parse::MathKind::ModMath(_, data1, _, data2) => { self.insert_data_or_qi2(data1, &conversion_table); self.insert_data_or_qi2(data2, &conversion_table); } parse::MathKind::BooleanAlgebra(ident1, ident2, _) => { self.insert_ident(&ident1, &conversion_table); self.insert_ident(&ident2, &conversion_table); } } } fn insert_rvalue( &mut self, rv: &parse::Value<parse::OrQi2<parse::Data>>, conversion_table: &Table, ) { match rv { parse::Value::Index(data, _) | parse::Value::Length(data) | parse::Value::Simple(data) => self.insert_data_or_qi2(data, &conversion_table), parse::Value::IndexByIdent(data, ident) => { self.insert_data_or_qi2(data, &conversion_table); self.insert_ident(ident, &conversion_table) } } } fn insert_idents(&mut self, idents: 
&[parse::Identifier], conversion_table: &Table) { for ident in idents { self.insert_ident(&ident, &conversion_table) } } fn insert_dats(&mut self, data_arr: &[parse::Data], conversion_table: &Table) { for dat in data_arr { self.insert_dat(dat, &conversion_table); } } fn insert_stmt(&mut self, st: &parse::Statement, conversion_table: &Table) { use parse::Statement::*; match st { ReferenceWhatIsLeft { data } => { self.insert_dat(&data, &conversion_table); } ForArr { list, elem, stmts } => { self.insert_ident(&list, &conversion_table); self.insert_ident(&elem, &conversion_table); self.insert_stmts(&stmts, &conversion_table) } ArrayCat { append_to, elems } => { self.insert_data_or_qi2(&parse::OrQi2::from(append_to), &conversion_table); self.insert_idents(&elems, &conversion_table) } ArrayFill { what_to_fill, elems, } => { self.insert_data_or_qi2(&parse::OrQi2::from(what_to_fill), &conversion_table); if let parse::OrQi2::NotQi2(ident) = what_to_fill { let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); } self.insert_dats(&elems, &conversion_table); } If { ifcase: (ifexpr, ifcase), elseifcases, elsecase, } => { self.insert_ifexpr(ifexpr, &conversion_table); self.insert_stmts(&ifcase, &conversion_table); for (elseifexpr, elseifcase) in elseifcases { self.insert_ifexpr(elseifexpr, &conversion_table); self.insert_stmts(&elseifcase, &conversion_table) } self.insert_stmts(&elsecase, &conversion_table) } Reference { rvalue } => self.insert_rvaluenoqi2(rvalue, &conversion_table), NameMulti { idents } => self.insert_idents(&idents, &conversion_table), Math { math } => self.insert_math(math, &conversion_table), Assignment { lvalue: parse::Lvalue::Simple(ident), rvalue, } | Assignment { lvalue: parse::Lvalue::Index(ident, _), rvalue, } => { self.insert_ident(&ident, &conversion_table); let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); 
self.insert_rvalue(rvalue, &conversion_table) } Assignment { lvalue: parse::Lvalue::IndexByIdent(ident, index), rvalue, } => { self.insert_ident(&ident, &conversion_table); let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); self.insert_ident(&index, &conversion_table); self.insert_rvalue(rvalue, &conversion_table) } Print | Flush | Break | Continue => {} ForEnum { statements, num: _ } | Loop { statements } => { self.insert_stmts(&statements, &conversion_table) } Declare(parse::DeclareStatement { how_many_variables: _, type_: _, data_arr, }) => self.insert_dats(data_arr, &conversion_table), InitDefine { name, type_: _, data: dat, } => { self.insert_dat(dat, &conversion_table); self.insert_ident(&name, &conversion_table) } ForEnumIdent { ident, statements } => { if let parse::OrQi2::NotQi2(i) = ident { self.insert_ident(&i, &conversion_table); } self.insert_stmts(&statements, &conversion_table) } Define { idents, decl: parse::DeclareStatement { how_many_variables: _, type_: _, data_arr, }, } => { self.insert_dats(data_arr, &conversion_table); self.insert_idents(&idents, &conversion_table) } } } }
use crate::parse; use big_s::S; use bimap_plus_map::BiMapPlusMap; use std::collections::HashMap; type Table = HashMap<String, String>; fn to_pinyin(ident: parse::Identifier, conversion_table: &Table) -> String {
type Hanzi = parse::Identifier; type Ascii = String; #[derive(Debug, Clone, PartialEq, Eq)] pub enum Type { Mutable, } pub struct IdentBiMap(BiMapPlusMap<Hanzi, Ascii, Option<Type>>); impl IdentBiMap { pub fn translate_from_hanzi(&self, id: &parse::Identifier) -> Ascii { self.0.bimap_get_by_left(id).unwrap().to_string() } pub fn is_mutable(&self, id: &parse::Identifier) -> bool { let typ = self.0.hashmap_get_by_left(id).unwrap(); *typ == Some(Type::Mutable) } pub fn new(parsed: &[parse::Statement], conversion_table: &Table) -> Self { let mut ans = IdentBiMap(BiMapPlusMap::new()); for st in parsed { ans.insert_stmt(&st, &conversion_table); } eprintln!("{:?}", ans.0); ans } fn insert_ident(&mut self, ident: &parse::Identifier, conversion_table: &Table) { if self.0.bimap_get_by_left(&ident).is_some() { return; } let mut candidate: Ascii = to_pinyin(ident.clone(), &conversion_table); loop { if self.0.bimap_get_by_right(&candidate).is_some() { candidate.push('_'); } else { self.0.insert(ident.clone(), candidate, None); break; } } } fn insert_stmts(&mut self, statements: &[parse::Statement], conversion_table: &Table) { for s in statements { self.insert_stmt(&s, &conversion_table) } } fn insert_dat(&mut self, dat: &parse::Data, conversion_table: &Table) { if let parse::Data::Identifier(id) = dat { self.insert_ident(&id, &conversion_table) } } fn insert_data_or_qi2(&mut self, dat: &parse::OrQi2<parse::Data>, conversion_table: &Table) { if let parse::OrQi2::NotQi2(d1) = dat { self.insert_dat(d1, &conversion_table); } } fn insert_rvaluenoqi2(&mut self, val: &parse::Value<parse::Data>, conversion_table: &Table) { match val { parse::Value::Index(data, _) | parse::Value::Simple(data) | parse::Value::Length(data) => self.insert_dat(data, &conversion_table), parse::Value::IndexByIdent(data, ident) => { self.insert_dat(data, &conversion_table); self.insert_ident(ident, &conversion_table) } } } fn insert_unaryifexpr(&mut self, unary: &parse::UnaryIfExpr, conversion_table: &Table) { 
match unary { parse::UnaryIfExpr::Simple(data) => self.insert_data_or_qi2(data, &conversion_table), parse::UnaryIfExpr::Complex(val) => self.insert_rvaluenoqi2(val, &conversion_table), } } fn insert_ifexpr(&mut self, ifexpr: &parse::IfCond, conversion_table: &Table) { match ifexpr { parse::IfCond::Binary(data1, _, data2) => { self.insert_unaryifexpr(data1, &conversion_table); self.insert_unaryifexpr(data2, &conversion_table); } parse::IfCond::Unary(data) => { self.insert_unaryifexpr(data, &conversion_table); } parse::IfCond::NotQi2 => {} } } fn insert_math(&mut self, math: &parse::MathKind, conversion_table: &Table) { match math { parse::MathKind::ArithUnaryMath(data) => { self.insert_data_or_qi2(data, &conversion_table) } parse::MathKind::ArithBinaryMath(_, data1, _, data2) | parse::MathKind::ModMath(_, data1, _, data2) => { self.insert_data_or_qi2(data1, &conversion_table); self.insert_data_or_qi2(data2, &conversion_table); } parse::MathKind::BooleanAlgebra(ident1, ident2, _) => { self.insert_ident(&ident1, &conversion_table); self.insert_ident(&ident2, &conversion_table); } } } fn insert_rvalue( &mut self, rv: &parse::Value<parse::OrQi2<parse::Data>>, conversion_table: &Table, ) { match rv { parse::Value::Index(data, _) | parse::Value::Length(data) | parse::Value::Simple(data) => self.insert_data_or_qi2(data, &conversion_table), parse::Value::IndexByIdent(data, ident) => { self.insert_data_or_qi2(data, &conversion_table); self.insert_ident(ident, &conversion_table) } } } fn insert_idents(&mut self, idents: &[parse::Identifier], conversion_table: &Table) { for ident in idents { self.insert_ident(&ident, &conversion_table) } } fn insert_dats(&mut self, data_arr: &[parse::Data], conversion_table: &Table) { for dat in data_arr { self.insert_dat(dat, &conversion_table); } } fn insert_stmt(&mut self, st: &parse::Statement, conversion_table: &Table) { use parse::Statement::*; match st { ReferenceWhatIsLeft { data } => { self.insert_dat(&data, &conversion_table); } 
ForArr { list, elem, stmts } => { self.insert_ident(&list, &conversion_table); self.insert_ident(&elem, &conversion_table); self.insert_stmts(&stmts, &conversion_table) } ArrayCat { append_to, elems } => { self.insert_data_or_qi2(&parse::OrQi2::from(append_to), &conversion_table); self.insert_idents(&elems, &conversion_table) } ArrayFill { what_to_fill, elems, } => { self.insert_data_or_qi2(&parse::OrQi2::from(what_to_fill), &conversion_table); if let parse::OrQi2::NotQi2(ident) = what_to_fill { let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); } self.insert_dats(&elems, &conversion_table); } If { ifcase: (ifexpr, ifcase), elseifcases, elsecase, } => { self.insert_ifexpr(ifexpr, &conversion_table); self.insert_stmts(&ifcase, &conversion_table); for (elseifexpr, elseifcase) in elseifcases { self.insert_ifexpr(elseifexpr, &conversion_table); self.insert_stmts(&elseifcase, &conversion_table) } self.insert_stmts(&elsecase, &conversion_table) } Reference { rvalue } => self.insert_rvaluenoqi2(rvalue, &conversion_table), NameMulti { idents } => self.insert_idents(&idents, &conversion_table), Math { math } => self.insert_math(math, &conversion_table), Assignment { lvalue: parse::Lvalue::Simple(ident), rvalue, } | Assignment { lvalue: parse::Lvalue::Index(ident, _), rvalue, } => { self.insert_ident(&ident, &conversion_table); let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); self.insert_rvalue(rvalue, &conversion_table) } Assignment { lvalue: parse::Lvalue::IndexByIdent(ident, index), rvalue, } => { self.insert_ident(&ident, &conversion_table); let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone(); self.0.insert(ident.clone(), ascii, Some(Type::Mutable)); self.insert_ident(&index, &conversion_table); self.insert_rvalue(rvalue, &conversion_table) } Print | Flush | Break | Continue => {} ForEnum { statements, num: _ } | Loop { 
statements } => { self.insert_stmts(&statements, &conversion_table) } Declare(parse::DeclareStatement { how_many_variables: _, type_: _, data_arr, }) => self.insert_dats(data_arr, &conversion_table), InitDefine { name, type_: _, data: dat, } => { self.insert_dat(dat, &conversion_table); self.insert_ident(&name, &conversion_table) } ForEnumIdent { ident, statements } => { if let parse::OrQi2::NotQi2(i) = ident { self.insert_ident(&i, &conversion_table); } self.insert_stmts(&statements, &conversion_table) } Define { idents, decl: parse::DeclareStatement { how_many_variables: _, type_: _, data_arr, }, } => { self.insert_dats(data_arr, &conversion_table); self.insert_idents(&idents, &conversion_table) } } } }
let parse::Identifier(i) = ident; let vec = i .chars() .map(|c| match conversion_table.get(&format!("{:X}", c as u32)) { None => S("_"), Some(a) => a.split(' ').collect::<Vec<_>>()[0].to_string(), }) .collect::<Vec<_>>(); vec.join("") }
function_block-function_prefix_line
[ { "content": "pub fn compile(parsed: &[parse::Statement], conversion_table: &HashMap<String, String>) -> String {\n\n let mut ans = vec![(0, S(\"fn main() {\"))];\n\n let mut env = Env {\n\n ans_counter: 0,\n\n rand_counter: 0,\n\n indent_level: 1,\n\n variables_not_yet_named: ...
Rust
rust/leetcode/src/hash_table/valid_sudoku.rs
zayfen/LeetCode
8efdb380b79355e463f5a8b01105275ac699e08c
struct Solution (); impl Solution { pub fn is_valid_cell_in_row (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for num in &board[row] { if *num == '.' { continue; } let number = num.to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } return true; } pub fn is_valid_cell_in_column (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for r in 0..9 { let n = board[r][col]; if n == '.' { continue; } let number = n.to_digit(10).unwrap(); if numbers[(number - 1) as usize] == '.' { return false; } else { numbers[(number - 1) as usize] = '.' } } return true; } pub fn is_valid_cell_in_block (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); for i in 0..3 { for j in 0..3 { if board[cell_index_x + i][cell_index_y + j] == '.' { continue; } let number = board[cell_index_x + i][cell_index_y + j].to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } } return true; } pub fn is_valid_sudoku (board: Vec<Vec<char>>) -> bool { for row in 0..9 { for col in 0..9 { if board[row][col] != '.' { let pass = Solution::is_valid_cell_in_row(&board, row, col) && Solution::is_valid_cell_in_column(&board, row, col) && Solution::is_valid_cell_in_block(&board, row, col); if !pass { return false; } } } } return true; } pub fn is_valid_sudoku_v2 (board: Vec<Vec<char>>) -> bool { let mut hash_map = std::collections::HashMap::new(); for row in 0..9 { for col in 0..9 { let ch = board[row][col]; if ch != '.' 
{ let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); let row_key = format!("{}r{}", ch, row); let col_key = format!("{}c{}", ch, col); let block_key = format!("{}b{:?}", ch, (cell_index_x, cell_index_y)); if hash_map.insert(row_key, "") == None && hash_map.insert(col_key, "") == None && hash_map.insert(block_key, "") == None { continue; } return false; } } } return true; } } #[cfg(test)] mod hash_table { use super::*; #[test] fn test_is_valid_sudoku () { let board = vec!( vec!('5','3','.','.','7','.','.','.','.'), vec!('6','.','.','1','9','5','.','.','.'), vec!('.','9','8','.','.','.','.','6','.'), vec!('8','.','.','.','6','.','.','.','3'), vec!('4','.','.','8','.','3','.','.','1'), vec!('7','.','.','.','2','.','.','.','6'), vec!('.','6','.','.','.','.','2','8','.'), vec!('.','.','.','4','1','9','.','.','5'), vec!('.','.','.','.','8','.','.','7','9') ); assert_eq!(Solution::is_valid_sudoku_v2(board), true); } }
struct Solution (); impl Solution { pub fn is_valid_cell_in_row (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for num in &board[row] { if *num == '.' { continue; } let number = num.to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } return true; } pub fn is_valid_cell_in_column (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for r in 0..9 { let n = board[r][col]; if n == '.' { continue; } let number = n.to_digit(10).unwrap(); if numbers[(number - 1) as usize] == '.' { return false; } else { numbers[(number - 1) as usize] = '.' } } return true; } pub fn is_valid_cell_in_block (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); for i in 0..3 { for j in 0..3 { if board[cell_index_x + i][cell_index_y + j] == '.' { continue; } let number = board[cell_index_x + i][cell_index_y + j].to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } } return true; } pub fn is_valid_sudoku (board: Vec<Vec<char>>) -> bool { for row in 0..9 { for col in 0..9 { if board[row][col] != '.' { let pass = Solution::is_valid_cell_in_row(&board, row, col) && Solution::is_valid_cell_in_column(&board, row, col) && Solution::is_valid_cell_in_block(&board, row, col); if !pass { return false; } } } } return true; } pub fn is_valid_sudoku_v2 (board: Vec<Vec<char>>) -> bool { let mut hash_map = std::collections::HashMap::new(); for row in 0..9 { for col in 0..9 { let ch = board[row][col]; if ch != '.' 
{ let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); let row_key = format!("{}r{}", ch, row); let col_key = format!("{}c{}", ch, col); let block_key = format!("{}b{:?}", ch, (cell_index_x, cell_index_y)); if hash_map.insert(row_key, "") == None && hash_map.insert(col_key, "") == None && hash_map.insert(block_key, "") == None { continue; } return false; } } } return true; } } #[cfg(test)] mod hash_table { use super::*; #[test]
}
fn test_is_valid_sudoku () { let board = vec!( vec!('5','3','.','.','7','.','.','.','.'), vec!('6','.','.','1','9','5','.','.','.'), vec!('.','9','8','.','.','.','.','6','.'), vec!('8','.','.','.','6','.','.','.','3'), vec!('4','.','.','8','.','3','.','.','1'), vec!('7','.','.','.','2','.','.','.','6'), vec!('.','6','.','.','.','.','2','8','.'), vec!('.','.','.','4','1','9','.','.','5'), vec!('.','.','.','.','8','.','.','7','9') ); assert_eq!(Solution::is_valid_sudoku_v2(board), true); }
function_block-full_function
[ { "content": "pub fn switch (nums: &mut Vec<i32>, left: usize, right: usize) {\n\n let tmp = nums[left];\n\n nums[left] = nums[right];\n\n nums[right] = tmp;\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 0, "score": 166054.35345015902 }, { ...
Rust
src/drive_operations.rs
EndaHallahan/Scrit
10431c049fef5f587b506f0e65b4f55443b3f91c
use map_operations::*; use push::ScritFile; use hyper; use hyper::net::HttpsConnector; use hyper::Client; use hyper_native_tls::NativeTlsClient; use yup_oauth2::{Authenticator, FlowType, ApplicationSecret, DiskTokenStorage, DefaultAuthenticatorDelegate, parse_application_secret}; use google_drive3::{Drive, File}; use std::io::Cursor; use std::fs; use client_info::CLIENT_SECRET; fn read_client_secret(client_info: &'static str) -> ApplicationSecret { parse_application_secret(&client_info.to_string()).unwrap() } pub fn get_hub() -> Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> { let secret = read_client_secret(CLIENT_SECRET); let client = hyper::Client::with_connector( HttpsConnector::new(NativeTlsClient::new().unwrap())); let authenticator = Authenticator::new(&secret, DefaultAuthenticatorDelegate, client, DiskTokenStorage::new(&"Scrit/token_store.json".to_string()) .unwrap(), Some(FlowType::InstalledInteractive)); let client = hyper::Client::with_connector( HttpsConnector::new(NativeTlsClient::new().unwrap()) ); Drive::new(client, authenticator) } pub fn make_document(name: &String, contents: &String, dir_id: &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut doc = File::default(); doc.name = Some(name.to_string()); doc.mime_type = Some("application/vnd.google-apps.document".to_string()); doc.parents = Some(vec![dir_id.clone()]); match hub.files().create(doc) .param("fields", "id") .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully uploaded '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! 
{:?}",x);} } } pub fn update_document(name: &String, contents: &String, dir_id: &String, file_id: &str, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut doc = File::default(); doc.mime_type = Some("application/vnd.google-apps.document".to_string()); match hub.files().update(doc, file_id) .param("fields", "id") .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully updated '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! {:?}",x);} } } pub fn make_directory(name: String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut dir = File::default(); dir.name = Some(name.to_string()); dir.mime_type = Some("application/vnd.google-apps.folder".to_string()); match hub.files().create(dir) .param("fields", "id") .upload(Cursor::new(name.as_bytes()), "application/vnd.google-apps.folder".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully created directory '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! 
{:?}",x)} } } pub fn check_file_in_folder(file_id: &str, folder_id : &String, file_name: &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> bool { match hub.files().get(file_id).param("fields", "parents, trashed").doit() { Ok((_, y)) => { match y.parents { Some(parents) => { if !y.trashed.unwrap() && parents.contains(folder_id) { true } else { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false } } None => { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false } } }, Err(_) => { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false } } } pub fn check_folder(folder_id : &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> bool { match hub.files().get(folder_id).param("fields", "trashed").doit() { Ok((_, y)) => { if !y.trashed.unwrap() { true } else { println!("Couldn't find project folder, creating new directory..."); false } }, Err(x) => { println!("Couldn't find project folder, creating new directory..."); false } } } pub fn upload(compiled_set: &mut Vec<ScritFile>) { let hub = get_hub(); let mut map = get_map(); let title = get_title_text(&map); let mut dir_id = get_directory_id(&map); if dir_id.is_empty() || !check_folder(&dir_id, &hub) { dir_id = make_directory(title.to_string(), &hub); set_directory_id(&mut map, &dir_id); } for scrit_file in compiled_set { let mut file_id: String; match check_existing_files(&mut map, scrit_file.title()) { Some(ele) => { if check_file_in_folder(ele.attr("id").unwrap(), &dir_id, scrit_file.title(), &hub) { file_id = update_document(scrit_file.title(), scrit_file.body(), &dir_id, ele.attr("id").unwrap(), &hub); } else { file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub); replace_file(ele, &file_id, scrit_file.title()); } }, None => {file_id = 
make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);} } scrit_file.set_id(file_id); } map.write_to(&mut fs::File::create("Scrit/scrit_map.xml").unwrap()); } pub fn download() { let hub = get_hub(); }
use map_operations::*; use push::ScritFile; use hyper; use hyper::net::HttpsConnector; use hyper::Client; use hyper_native_tls::NativeTlsClient; use yup_oauth2::{Authenticator, FlowType, ApplicationSecret, DiskTokenStorage, DefaultAuthenticatorDelegate, parse_application_secret}; use google_drive3::{Drive, File}; use std::io::Cursor; use std::fs; use client_info::CLIENT_SECRET; fn read_client_secret(client_info: &'static str) -> ApplicationSecret { parse_application_secret(&client_info.to_string()).unwrap() } pub fn get_hub() -> Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> { let secret = read_client_secret(CLIENT_SECRET); let client = hyper::Client::with_connector( HttpsConnector::new(NativeTlsClient::new().unwrap())); let authenticator = Authenticator::new(&secret, DefaultAuthenticatorDelegate, client, DiskTokenStorage::new(&"Scrit/token_store.json".to_string()) .unwrap(), Some(FlowType::InstalledInteractive)); let client = hyper::Client::with_connector( HttpsConnector::new(NativeTlsClient::new().unwrap()) ); Drive::new(client, authenticator) } pub fn make_document(name: &String, contents: &String, dir_id: &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut doc = File::default(); doc.name = Some(name.to_string()); doc.mime_type = Some("application/vnd.google-apps.document".to_string()); doc.parents = Some(vec![dir_id.clone()]); match hub.files().create(doc) .param("fields", "id") .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully uploaded '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! 
{:?}",x);} } } pub fn update_document(name: &String, contents: &String, dir_id: &String, file_id: &str, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut doc = File::default(); doc.mime_type = Some("application/vnd.google-apps.document".to_string()); match hub.files().update(doc, file_id) .param("fields", "id") .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully updated '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! {:?}",x);} } } pub fn make_directory(name: String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> String { let mut dir = File::default(); dir.name = Some(name.to_string()); dir.mime_type = Some("application/vnd.google-apps.folder".to_string()); match hub.files().create(dir) .param("fields", "id") .upload(Cursor::new(name.as_bytes()), "application/vnd.google-apps.folder".parse().unwrap()) { Ok((_, y)) => { println!("OK! Successfully created directory '{}'.", name); y.id.unwrap() }, Err(x) => {panic!("ERROR! {:?}",x)} } } pub fn check_file_in_folder(file_id: &str, folder_id : &String, file_name: &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> bool { match hub.files().get(file_id).param("fields", "parents, trashed").doit() { Ok((_, y)) => { match y.parents { Some(parents) => {
} None => { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false } } }, Err(_) => { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false } } } pub fn check_folder(folder_id : &String, hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> ) -> bool { match hub.files().get(folder_id).param("fields", "trashed").doit() { Ok((_, y)) => { if !y.trashed.unwrap() { true } else { println!("Couldn't find project folder, creating new directory..."); false } }, Err(x) => { println!("Couldn't find project folder, creating new directory..."); false } } } pub fn upload(compiled_set: &mut Vec<ScritFile>) { let hub = get_hub(); let mut map = get_map(); let title = get_title_text(&map); let mut dir_id = get_directory_id(&map); if dir_id.is_empty() || !check_folder(&dir_id, &hub) { dir_id = make_directory(title.to_string(), &hub); set_directory_id(&mut map, &dir_id); } for scrit_file in compiled_set { let mut file_id: String; match check_existing_files(&mut map, scrit_file.title()) { Some(ele) => { if check_file_in_folder(ele.attr("id").unwrap(), &dir_id, scrit_file.title(), &hub) { file_id = update_document(scrit_file.title(), scrit_file.body(), &dir_id, ele.attr("id").unwrap(), &hub); } else { file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub); replace_file(ele, &file_id, scrit_file.title()); } }, None => {file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);} } scrit_file.set_id(file_id); } map.write_to(&mut fs::File::create("Scrit/scrit_map.xml").unwrap()); } pub fn download() { let hub = get_hub(); }
if !y.trashed.unwrap() && parents.contains(folder_id) { true } else { println!("Couldn't find file '{}' in project folder, creating new file...", file_name); false }
if_condition
[ { "content": "pub fn replace_file(ele: &mut Element, in_id: &str, in_title: &String) {\n\n\tele.set_attr(\"id\", in_id);\n\n\tele.set_attr(\"title\", in_title);\n\n\tele.delete_children();\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 3, "score": 224990.306815553 }, { "content":...
Rust
vk_tracer/src/render/renderer.rs
icanwalkonwater/rs-vk-tracer
424524af7e66de2b60137ec4aab80848e076efb5
use crate::{ command_recorder::QueueType, errors::{HandleType, Result}, render::{RenderablePipelineHandle, VkRecordable}, RenderPlanHandle, RenderTargetHandle, RendererHandle, VkTracerApp, }; use ash::{ version::{DeviceV1_0, DeviceV1_2}, vk, }; impl VkTracerApp { pub fn new_renderer_from_plan( &mut self, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, ) -> RendererBuilder { RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass: vec![Vec::with_capacity(1)], pipelines_amount: 0, } } pub fn recreate_renderer( &mut self, renderer: RendererHandle, render_target: RenderTargetHandle, ) -> Result<()> { let (render_plan, pipelines_by_subpass, pipelines_amount) = { let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); unsafe { let pool = self.command_pools.get(&QueueType::Graphics).unwrap().1; self.device .free_command_buffers(pool, &[renderer.main_commands]); self.device .free_command_buffers(pool, &renderer.secondary_commands); self.device.destroy_fence(renderer.render_fence, None); } ( renderer.render_plan, std::mem::take(&mut renderer.pipelines_by_subpass), renderer.pipelines_amount, ) }; let builder = RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass, pipelines_amount, }; let ((main_commands, secondary_commands), fence) = builder.inner_build()?; let pipelines_by_subpass = builder.pipelines_by_subpass; let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); renderer.pipelines_by_subpass = pipelines_by_subpass; renderer.main_commands = main_commands; renderer.secondary_commands = secondary_commands; renderer.render_fence = fence; Ok(()) } } pub(crate) struct Renderer { pub(crate) main_commands: vk::CommandBuffer, secondary_commands: Box<[vk::CommandBuffer]>, pub(crate) render_fence: vk::Fence, render_plan: RenderPlanHandle, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, 
pipelines_amount: u32, } pub struct RendererBuilder<'app> { app: &'app mut VkTracerApp, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, current_subpass: usize, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } type RendererData = ((vk::CommandBuffer, Box<[vk::CommandBuffer]>), vk::Fence); impl RendererBuilder<'_> { pub fn execute_pipeline(mut self, pipeline: RenderablePipelineHandle) -> Self { self.pipelines_by_subpass[self.current_subpass].push(pipeline); self.pipelines_amount += 1; self } pub fn next_subpass(mut self) -> Self { self.pipelines_by_subpass.push(Vec::with_capacity(1)); self.current_subpass += 1; self } fn inner_build(&self) -> Result<RendererData> { let render_plan = storage_access!( self.app.render_plan_storage, self.render_plan, HandleType::RenderPlan ); let render_target = storage_access!( self.app.render_target_storage, self.render_target, HandleType::RenderTarget ); let device = &self.app.device; let pool = self.app.command_pools.get(&QueueType::Graphics).unwrap(); let commands = unsafe { let mut secondary_commands_by_subpass = { let mut command_pool = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::SECONDARY) .command_buffer_count(self.pipelines_amount as u32), )?; let mut commands_by_subpass = Vec::with_capacity(self.pipelines_by_subpass.len()); for (i, subpass) in self.pipelines_by_subpass.iter().enumerate() { let mut subpass_commands = Vec::with_capacity(subpass.len()); for pipeline in subpass.iter().copied() { let commands = command_pool.pop().unwrap(); device.begin_command_buffer( commands, &vk::CommandBufferBeginInfo::builder() .flags(vk::CommandBufferUsageFlags::RENDER_PASS_CONTINUE) .inheritance_info( &vk::CommandBufferInheritanceInfo::builder() .render_pass(render_plan.render_pass) .subpass(i as u32) .framebuffer(render_target.framebuffer), ), )?; match pipeline { RenderablePipelineHandle::Forward(handle) 
=> { let pipeline = storage_access!( self.app.forward_pipeline_storage, handle, HandleType::ForwardPipeline ); pipeline.record_commands( self.app, render_target.extent, commands, )?; } } device.end_command_buffer(commands)?; subpass_commands.push(commands); } commands_by_subpass.push(subpass_commands); } commands_by_subpass }; let top_level_commands = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::PRIMARY) .command_buffer_count(1), )?[0]; device .begin_command_buffer(top_level_commands, &vk::CommandBufferBeginInfo::default())?; device.cmd_begin_render_pass2( top_level_commands, &vk::RenderPassBeginInfo::builder() .render_pass(render_plan.render_pass) .framebuffer(render_target.framebuffer) .render_area( vk::Rect2D::builder() .offset(vk::Offset2D::default()) .extent(render_target.extent) .build(), ) .clear_values(&render_plan.clear_values), &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), ); let mut secondary_commands = Vec::with_capacity(self.pipelines_amount as usize); loop { let subpass_commands = secondary_commands_by_subpass.pop().unwrap(); device.cmd_execute_commands(top_level_commands, &subpass_commands); secondary_commands.extend(subpass_commands); if secondary_commands_by_subpass.is_empty() { break; } device.cmd_next_subpass2( top_level_commands, &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), &vk::SubpassEndInfo::default(), ); } device.cmd_end_render_pass2(top_level_commands, &vk::SubpassEndInfo::default()); device.end_command_buffer(top_level_commands)?; (top_level_commands, secondary_commands.into_boxed_slice()) }; let render_fence = unsafe { device.create_fence( &vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED), None, )? 
}; Ok((commands, render_fence)) } pub fn build(self) -> Result<RendererHandle> { let (commands, render_fence) = self.inner_build()?; Ok(self.app.renderer_storage.insert(Renderer { main_commands: commands.0, secondary_commands: commands.1, render_fence, render_plan: self.render_plan, pipelines_by_subpass: self.pipelines_by_subpass, pipelines_amount: self.pipelines_amount, })) } }
use crate::{ command_recorder::QueueType, errors::{HandleType, Result}, render::{RenderablePipelineHandle, VkRecordable}, RenderPlanHandle, RenderTargetHandle, RendererHandle, VkTracerApp, }; use ash::{ version::{DeviceV1_0, DeviceV1_2}, vk, }; impl VkTracerApp { pub fn new_renderer_from_plan( &mut self, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, ) -> RendererBuilder { RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass: vec![Vec::with_capacity(1)], pipelines_amount: 0, } } pub fn recreate_renderer( &mut self, renderer: RendererHandle, render_target: RenderTargetHandle, ) -> Result<()> { let (render_plan, pipelines_by_subpass, pipelines_amount) = { let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); unsafe { let pool = self.command_pools.get(&QueueType::Graphics).unwrap().1; self.device .free_command_buffers(pool, &[renderer.main_commands]); self.device .free_command_buffers(pool, &renderer.secondary_commands); self.device.destroy_fence(renderer.render_fence, None); } ( renderer.render_plan, std::mem::take(&mut renderer.pipelines_by_subpass), renderer.pipelines_amount, ) }; let builder = RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass, pipelines_amount, }; let ((main_commands, secondary_commands), fence) = builder.inner_build()?; let pipelines_by_subpass = builder.pipelines_by_subpass; let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); renderer.pipelines_by_subpass = pipelines_by_subpass; renderer.main_commands = main_commands; renderer.secondary_commands = secondary_commands; renderer.render_fence = fence; Ok(()) } } pub(crate) struct Renderer { pub(crate) main_commands: vk::CommandBuffer, secondary_commands: Box<[vk::CommandBuffer]>, pub(crate) render_fence: vk::Fence, render_plan: RenderPlanHandle, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, 
pipelines_amount: u32, } pub struct RendererBuilder<'app> { app: &'app mut VkTracerApp, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, current_subpass: usize, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } type RendererData = ((vk::CommandBuffer, Box<[vk::CommandBuffer]>), vk::Fence); impl RendererBuilder<'_> { pub fn execute_pipeline(mut self, pipeline: RenderablePipelineHandle) -> Self { self.pipelines_by_subpass[self.current_subpass].push(pipeline); self.pipelines_amount += 1; self } pub fn next_subpass(mut self) -> Self { self.pipelines_by_subpass.push(Vec::with_capacity(1)); self.current_subpass += 1; self } fn inner_build(&self) -> Result<RendererData> { let render_plan = storage_access!( self.app.render_plan_storage, self.render_plan, HandleType::RenderPlan ); let render_target = storage_access!( self.app.render_target_storage, self.render_target, HandleType::RenderTarget ); let device = &self.app.device; let pool = self.app.command_pools.get(&QueueType::Graphics).unwrap(); let commands = unsafe { let mut secondary_commands_by_subpass = { let mut command_pool = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::SECONDARY) .command_buffer_count(self.pipelines_amount as u32), )?; let mut commands_by_subpass = Vec::with_capacity(self.pipelines_by_subpass.len()); for (i, subpass) in self.pipelines_by_subpass.iter().enumerate() { let mut subpass_commands = Vec::with_capacity(subpass.len()); for pipeline in subpass.iter().copied() { let commands = command_pool.pop().unwrap(); device.begin_command_buffer( commands, &vk::CommandBufferBeginInfo::builder() .flags(vk::CommandBufferUsageFlags::RENDER_PASS_CONTINUE) .inheritance_info( &vk::CommandBufferInheritanceInfo::builder() .render_pass(render_plan.render_pass) .subpass(i as u32) .framebuffer(render_target.framebuffer), ), )?; match pipeline { RenderablePipelineHandle::Forward(handle) 
=> { let pipeline = storage_access!( self.app.forward_pipeline_storage, handle, HandleType::ForwardPipeline ); pipeline.record_commands( self.app, render_target.extent, commands, )?; } } device.end_command_buffer(commands)?; subpass_commands.push(commands); } commands_by_subpass.push(subpass_commands); } commands_by_subpass }; let top_level_commands = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::PRIMARY) .command_buffer_count(1), )?[0]; device .begin_command_buffer(top_level_commands, &vk::CommandBufferBeginInfo::default())?; device.cmd_begin_render_pass2( top_level_commands, &vk::RenderPassBeginInfo::builder() .render_pass(render_plan.render_pass) .framebuffer(render_target.framebuffer) .render_area( vk::Rect2D::builder() .offset(vk::Offset2D::default()) .extent(render_target.extent) .build(), ) .clear_values(&render_plan.clear_values), &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), ); let mut secondary_commands = Vec::with_capacity(self.pipelines_amount as usize); loop { let subpass_commands = secondary_commands_by_subpass.pop().unwrap(); device.cmd_execute_commands(top_level_commands, &subpass_commands); secondary_commands.extend(subpass_commands); if secondary_commands_by_subpass.is_empty() { break; } device.cmd_next_subpass2( top_level_commands, &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), &vk::SubpassEndInfo::default(), ); } device.cmd_end_render_pass2(top_level_commands, &vk::SubpassEndInfo::default()); device.end_command_buffer(top_level_commands)?; (top_level_commands, secondary_commands.into_boxed_slice()) }; let render_fence = unsafe { device.create_fence( &vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED), None, )? }; Ok((commands, render_fence)) } pub fn build(self) -> Result<RendererHandle> { let (commands, render_fence) = self.inner_build()?;
} }
Ok(self.app.renderer_storage.insert(Renderer { main_commands: commands.0, secondary_commands: commands.1, render_fence, render_plan: self.render_plan, pipelines_by_subpass: self.pipelines_by_subpass, pipelines_amount: self.pipelines_amount, }))
call_expression
[ { "content": "pub fn dump_vma_stats(app: &VkTracerApp) {\n\n let stats = app.vma.build_stats_string(true).unwrap();\n\n let mut f = File::create(\"vma_stats.json\").unwrap();\n\n f.write_all(stats.as_bytes()).unwrap();\n\n}\n", "file_path": "vk_tracer/src/utils.rs", "rank": 1, "score": 1528...
Rust
src/main.rs
gourlaysama/girouette
c82e9f2b4eb8b684e46c54f0f11ab1bee6034141
use anyhow::{anyhow, Context, Result}; use env_logger::{Builder, Env}; use girouette::{ cli::ProgramOptions, config::ProgramConfig, show, Girouette, Location, WeatherClient, }; use log::*; use std::{env, time::Duration}; use structopt::StructOpt; use termcolor::*; use tokio::runtime; static DEFAULT_CONFIG: &str = include_str!("../config.yml"); const DEFAULT_TIMEOUT_SEC: u64 = 10; const LOG_ENV_VAR: &str = "GIROUETTE_LOG"; fn main() -> Result<(), Box<dyn std::error::Error>> { let options = ProgramOptions::from_args(); let mut b = Builder::default(); b.format_timestamp(None); b.filter_level(LevelFilter::Warn); b.parse_env(Env::from(LOG_ENV_VAR)); if let Some(level) = options.log_level_with_default(2) { b.filter_level(level); }; b.try_init()?; let rt = runtime::Builder::new_current_thread() .enable_all() .build()?; std::process::exit(match rt.block_on(run_async()) { Ok(()) => 0, Err(e) => { let causes = e.chain().skip(1); if causes.len() != 0 { if log_enabled!(Level::Info) { show!("Error: {}", e); for cause in e.chain().skip(1) { info!("cause: {}", cause); } } else { show!("Error: {}; rerun with '-v' for more information", e); } } else { show!("Error: {}", e); } 1 } }) } async fn run_async() -> Result<()> { let options_matches = ProgramOptions::clap().get_matches(); let options = ProgramOptions::from_clap(&options_matches); if options.version { let i = options_matches .index_of("version") .ok_or_else(|| anyhow!("should never happen: version set yet no version flag"))?; if std::env::args().nth(i).unwrap_or_default() == "-V" { print_version(false); } else { print_version(true); } return Ok(()); } if options.clean_cache { return WeatherClient::clean_cache(); } if options.print_default_config { print!("{}", DEFAULT_CONFIG); return Ok(()); } let conf = make_config(&options)?; let cache_length = match conf.cache { Some(c) => Some( humantime::parse_duration(&c) .context("failed to parse cache length: not a valid duration")?, ), None => None, }; let timeout = match 
conf.timeout { Some(c) => humantime::parse_duration(&c) .context("failed to parse timeout: not a valid duration")?, None => Duration::from_secs(DEFAULT_TIMEOUT_SEC), }; let location = match conf.location { Some(loc) => loc, None => find_location(timeout).await?, }; let key = conf.key.clone().ok_or_else(|| { anyhow!( "no API key for OpenWeather was found you can get a key over at https://openweathermap.org/appid", ) })?; let lib = Girouette::new( conf.display_config, cache_length, timeout, key, conf.language, ); let mut stdout = StandardStream::stdout(ColorChoice::Auto); lib.display(&location, &mut stdout).await } #[cfg(feature = "geoclue")] async fn find_location(timeout: Duration) -> Result<Location> { info!("no location to query, trying geoclue"); girouette::geoclue::get_location(timeout) .await .map_err(|e| { e.context("geoclue couldn't report your location; use `-l/--location' argument`") }) } #[cfg(not(feature = "geoclue"))] async fn find_location(_timeout: Duration) -> Result<Location> { use anyhow::bail; info!("no location to query, trying geoclue"); bail!("geolocalization unsupported: set a location with '-l/--location' or in the config file") } fn make_config(options: &ProgramOptions) -> Result<ProgramConfig> { let mut empty = false; let mut conf = config::Config::default(); if let Some(path) = &options.config { debug!("looking for config file '{}'", path.display()); conf.merge(config::File::from(path.as_ref()))?; info!("using config from '{}'", path.canonicalize()?.display()); } else if let Some(p) = WeatherClient::directories() { let f = p.config_dir().join("config.yml"); debug!("looking for config file '{}'", f.display()); if f.exists() { info!("using config from '{}'", f.canonicalize()?.display()); conf.merge(config::File::from(f))?; } else { empty = true; } }; if empty { warn!("no config file found, using fallback"); conf.merge(config::File::from_str( DEFAULT_CONFIG, config::FileFormat::Yaml, ))?; }; fn set_conf_from_options( conf: &mut 
config::Config, option: &Option<String>, key: &str, ) -> Result<()> { if let Some(value) = option { conf.set(key, Some(value.as_str()))?; } Ok(()) } set_conf_from_options(&mut conf, &options.key, "key")?; set_conf_from_options(&mut conf, &options.location, "location")?; set_conf_from_options(&mut conf, &options.cache, "cache")?; set_conf_from_options(&mut conf, &options.language, "language")?; set_conf_from_options(&mut conf, &options.units, "units")?; if let Some(cache) = conf.get::<Option<String>>("cache").unwrap_or(None) { if cache == "none" { conf.set::<Option<String>>("cache", None)?; } } match conf.get::<Option<Location>>("location").unwrap_or(None) { Some(Location::Place(loc)) if loc == "auto" => { conf.set::<Option<String>>("location", None)?; } _ => {} }; let conf: ProgramConfig = conf.try_into()?; trace!("full config: {:#?}", conf); Ok(conf) } fn print_version(long: bool) { if long { println!( "{} {} ({})", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), option_env!("BUILD_ID").unwrap_or("unknown") ); println!("rustc {} ({})", env!("BUILD_RUSTC"), env!("BUILD_INFO")); if let Some(p) = WeatherClient::directories() { println!( "\nconfig location: {}", p.config_dir().join("config.yml").display() ); println!("cache location: {}", p.cache_dir().display()); } if cfg!(feature = "geoclue") { println!("features: geoclue") } } else { println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); } }
use anyhow::{anyhow, Context, Result}; use env_logger::{Builder, Env}; use girouette::{ cli::ProgramOptions, config::ProgramConfig, show, Girouette, Location, WeatherClient, }; use log::*; use std::{env, time::Duration}; use structopt::StructOpt; use termcolor::*; use tokio::runtime; static DEFAULT_CONFIG: &str = include_str!("../config.yml"); const DEFAULT_TIMEOUT_SEC: u64 = 10; const LOG_ENV_VAR: &str = "GIROUETTE_LOG"; fn main() -> Result<(), Box<dyn std::error::Error>> { let options = ProgramOptions::from_args(); let mut b = Builder::default(); b.format_timestamp(None); b.filter_level(LevelFilter::Warn); b.parse_env(Env::from(LOG_ENV_VAR)); if let Some(level) = options.log_level_with_default(2) { b.filter_level(level); }; b.try_init()?; let rt = runtime::Builder::new_current_thread() .enable_all() .build()?; std::process::exit(match rt.block_on(run_async()) { Ok(()) => 0, Err(e) => { let causes = e.chain().skip(1); if causes.len() != 0 { if log_enabled!(Level::Info) { show!("Error: {}", e); for cause in e.chain().skip(1) { info!("cause: {}", cause); } } else { show!("Error: {}; rerun with '-v' for more information", e); } } else { show!("Error: {}", e); } 1 } }) } async fn run_async() -> Result<()> { let options_matches = ProgramOptions::clap().get_matches(); let options = ProgramOptions::from_clap(&options_matches); if options.version { let i = options_matches .index_of("version") .ok_or_else(|| anyhow!("should never happen: version set yet no version flag"))?; if std::env::args().nth(i).unwrap_or_default() == "-V" { print_version(false); } else { print_version(true); } return Ok(()); } if options.clean_cache { return WeatherClient::clean_cache(); } if options.print_default_config { print!("{}", DEFAULT_CONFIG); return Ok(()); } let conf = make_config(&options)?; let cache_length = match conf.cache { Some(c) => Some( humantime::parse_duration(&c) .context("failed to parse cache length: not a valid duration")?, ), None => None, }; let timeout = match 
conf.timeout { Some(c) => humantime::parse_duration(&c) .context("failed to parse timeout: not a valid duration")?, None => Duration::from_secs(DEFAULT_TIMEOUT_SEC), }; let location = match conf.location { Some(loc) => loc, None => find_location(
#[cfg(feature = "geoclue")] async fn find_location(timeout: Duration) -> Result<Location> { info!("no location to query, trying geoclue"); girouette::geoclue::get_location(timeout) .await .map_err(|e| { e.context("geoclue couldn't report your location; use `-l/--location' argument`") }) } #[cfg(not(feature = "geoclue"))] async fn find_location(_timeout: Duration) -> Result<Location> { use anyhow::bail; info!("no location to query, trying geoclue"); bail!("geolocalization unsupported: set a location with '-l/--location' or in the config file") } fn make_config(options: &ProgramOptions) -> Result<ProgramConfig> { let mut empty = false; let mut conf = config::Config::default(); if let Some(path) = &options.config { debug!("looking for config file '{}'", path.display()); conf.merge(config::File::from(path.as_ref()))?; info!("using config from '{}'", path.canonicalize()?.display()); } else if let Some(p) = WeatherClient::directories() { let f = p.config_dir().join("config.yml"); debug!("looking for config file '{}'", f.display()); if f.exists() { info!("using config from '{}'", f.canonicalize()?.display()); conf.merge(config::File::from(f))?; } else { empty = true; } }; if empty { warn!("no config file found, using fallback"); conf.merge(config::File::from_str( DEFAULT_CONFIG, config::FileFormat::Yaml, ))?; }; fn set_conf_from_options( conf: &mut config::Config, option: &Option<String>, key: &str, ) -> Result<()> { if let Some(value) = option { conf.set(key, Some(value.as_str()))?; } Ok(()) } set_conf_from_options(&mut conf, &options.key, "key")?; set_conf_from_options(&mut conf, &options.location, "location")?; set_conf_from_options(&mut conf, &options.cache, "cache")?; set_conf_from_options(&mut conf, &options.language, "language")?; set_conf_from_options(&mut conf, &options.units, "units")?; if let Some(cache) = conf.get::<Option<String>>("cache").unwrap_or(None) { if cache == "none" { conf.set::<Option<String>>("cache", None)?; } } match 
conf.get::<Option<Location>>("location").unwrap_or(None) { Some(Location::Place(loc)) if loc == "auto" => { conf.set::<Option<String>>("location", None)?; } _ => {} }; let conf: ProgramConfig = conf.try_into()?; trace!("full config: {:#?}", conf); Ok(conf) } fn print_version(long: bool) { if long { println!( "{} {} ({})", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), option_env!("BUILD_ID").unwrap_or("unknown") ); println!("rustc {} ({})", env!("BUILD_RUSTC"), env!("BUILD_INFO")); if let Some(p) = WeatherClient::directories() { println!( "\nconfig location: {}", p.config_dir().join("config.yml").display() ); println!("cache location: {}", p.cache_dir().display()); } if cfg!(feature = "geoclue") { println!("features: geoclue") } } else { println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); } }
timeout).await?, }; let key = conf.key.clone().ok_or_else(|| { anyhow!( "no API key for OpenWeather was found you can get a key over at https://openweathermap.org/appid", ) })?; let lib = Girouette::new( conf.display_config, cache_length, timeout, key, conf.language, ); let mut stdout = StandardStream::stdout(ColorChoice::Auto); lib.display(&location, &mut stdout).await }
function_block-function_prefixed
[ { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return,\n\n Some(outdir) => outdir,\n\n };\n\n let mut app = ProgramOptions::clap();\n\n\n\n app.gen_completions(\"girouette\", Shell::Bash, &outdir);\n\n\n\n app.gen_completions(\"girouette\", S...
Rust
src/operator/helmert.rs
busstoptaktik/geodesy
2718c094001b0a2168deb4fbaaa09c4ab9f78a7c
#![allow(non_snake_case)] use crate::operator_construction::*; use crate::Context; use crate::CoordinateTuple; use crate::GeodesyError; #[derive(Debug)] pub struct Helmert { R: [[f64; 3]; 3], T0: [f64; 3], R0: [f64; 3], dR: [f64; 3], dT: [f64; 3], t_epoch: f64, t_obs: f64, scale: f64, dscale: f64, exact: bool, position_vector: bool, rotation: bool, inverted: bool, args: OperatorArgs, } fn rotation_matrix(rx: f64, ry: f64, rz: f64, exact: bool, position_vector: bool) -> [[f64; 3]; 3] { let rx = (rx / 3600.).to_radians(); let ry = (ry / 3600.).to_radians(); let rz = (rz / 3600.).to_radians(); let (mut sx, mut sy, mut sz) = (rx, ry, rz); let (mut cx, mut cy, mut cz) = (1.0, 1.0, 1.0); if exact { let scx = rx.sin_cos(); let scy = ry.sin_cos(); let scz = rz.sin_cos(); sx = scx.0; cx = scx.1; sy = scy.0; cy = scy.1; sz = scz.0; cz = scz.1; } let r11 = cy * cz; let mut r12 = cx * sz; let mut r13 = -cx * sy * cz; let r21 = -cy * sz; let mut r22 = cx * cz; let mut r23 = sx * cz; let r31 = sy; let r32 = -sx * cy; let r33 = cx * cy; if exact { r12 += sx * sy * cz; r13 += sx * sz; r22 -= sx * sy * sz; r23 += cx * sy * sz; } if position_vector { return [[r11, r21, r31], [r12, r22, r32], [r13, r23, r33]]; } [[r11, r12, r13], [r21, r22, r23], [r31, r32, r33]] } impl Helmert { fn new(args: &mut OperatorArgs) -> Result<Helmert, GeodesyError> { let x = args.numeric_value("x", 0.0)?; let y = args.numeric_value("y", 0.0)?; let z = args.numeric_value("z", 0.0)?; let rx = args.numeric_value("rx", 0.0)?; let ry = args.numeric_value("ry", 0.0)?; let rz = args.numeric_value("rz", 0.0)?; let dx = args.numeric_value("dx", 0.0)?; let dy = args.numeric_value("dy", 0.0)?; let dz = args.numeric_value("dz", 0.0)?; let drx = args.numeric_value("drx", 0.0)?; let dry = args.numeric_value("dry", 0.0)?; let drz = args.numeric_value("drz", 0.0)?; let t_epoch = args.numeric_value("t_epoch", std::f64::NAN)?; let t_obs = args.numeric_value("t_obs", std::f64::NAN)?; let scale = args.numeric_value("s", 
0.0)?; let dscale = args.numeric_value("ds", 0.0)? * 1e-6; let convention = args.value("convention", ""); let exact = args.flag("exact"); let rotation = !((rx, ry, rz) == (0., 0., 0.) && (drx, dry, drz) == (0., 0., 0.)); if rotation { if convention.is_empty() { return Err(GeodesyError::General( "Helmert: Need value for convention when rotating", )); } if convention != "position_vector" && convention != "coordinate_frame" { return Err(GeodesyError::General( "Helmert: value for convention must be one of {position_vector, coordinate_frame}", )); } } let inverted = args.flag("inv"); let argsc = args.clone(); let scale = 1.0 + scale * 1e-6; let T0 = [x, y, z]; let dT = [dx, dy, dz]; let R0 = [rx, ry, rz]; let dR = [drx, dry, drz]; let position_vector = convention == "position_vector"; let R = rotation_matrix(rx, ry, rz, exact, position_vector); Ok(Helmert { R, R0, dR, T0, dT, scale, dscale, t_epoch, t_obs, exact, position_vector, rotation, inverted, args: argsc, }) } pub(crate) fn operator(args: &mut OperatorArgs) -> Result<Operator, GeodesyError> { let op = crate::operator::helmert::Helmert::new(args)?; Ok(Operator(Box::new(op))) } } impl OperatorCore for Helmert { fn fwd(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; #[allow(clippy::float_cmp)] if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } if self.rotation { let x = c[0] * R[0][0] + c[1] * R[0][1] + c[2] * R[0][2]; let y = c[0] * R[1][0] + c[1] * R[1][1] + c[2] * R[1][2]; 
let z = c[0] * R[2][0] + c[1] * R[2][1] + c[2] * R[2][2]; c[0] = scale * x + T[0]; c[1] = scale * y + T[1]; c[2] = scale * z + T[2]; continue; } c[0] = scale * c[0] + T[0]; c[1] = scale * c[1] + T[1]; c[2] = scale * c[2] + T[2]; } true } fn inv(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { #[allow(clippy::float_cmp)] if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } let x = (c[0] - T[0]) / scale; let y = (c[1] - T[1]) / scale; let z = (c[2] - T[2]) / scale; if self.rotation { c[0] = x * R[0][0] + y * R[1][0] + z * R[2][0]; c[1] = x * R[0][1] + y * R[1][1] + z * R[2][1]; c[2] = x * R[0][2] + y * R[1][2] + z * R[2][2]; } else { c[0] = x; c[1] = y; c[2] = z; } } true } fn name(&self) -> &'static str { "helmert" } fn is_inverted(&self) -> bool { self.inverted } fn args(&self, _step: usize) -> &OperatorArgs { &self.args } } #[cfg(test)] mod tests { use crate::operator::operator_factory; #[test] fn helmert() { use super::*; let mut ctx = Context::new(); let mut args = OperatorArgs::new(); args.name("helmert"); args.insert("x", "foo"); args.insert("y", "-96"); args.insert("z", "-120"); let h = operator_factory(&mut args, &mut ctx, 0); assert!(h.is_err()); args.insert("x", "-87"); assert_eq!(args.value("x", ""), "-87"); assert_eq!(args.value("y", ""), "-96"); assert_eq!(args.value("z", ""), "-120"); let h = operator_factory(&mut args, &mut ctx, 0).unwrap(); let mut operands = [CoordinateTuple::origin()]; h.fwd(&mut ctx, 
operands.as_mut()); assert_eq!(operands[0].first(), -87.); assert_eq!(operands[0].second(), -96.); assert_eq!(operands[0].third(), -120.); h.inv(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), 0.); assert_eq!(operands[0].second(), 0.); assert_eq!(operands[0].third(), 0.); let definition = "helmert: { convention: coordinate_frame, x: 0.06155, rx: -0.0394924, y: -0.01087, ry: -0.0327221, z: -0.04019, rz: -0.0328979, s: -0.009994 }"; let op = ctx.operation(definition).unwrap(); let GDA94 = CoordinateTuple([-4052051.7643, 4212836.2017, -2545106.0245, 0.0]); let GDA2020 = CoordinateTuple([-4052052.7379, 4212835.9897, -2545104.5898, 0.0]); let mut operands = [GDA94]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 75e-6); ctx.inv(op, &mut operands); assert!(GDA94.hypot3(&operands[0]) < 75e-7); let definition = "helmert: { exact: true, convention: coordinate_frame, x: 0, rx: 0, dx: 0, drx: 0.00150379, y: 0, ry: 0, dy: 0, dry: 0.00118346, z: 0, rz: 0, dz: 0, drz: 0.00120716, s: 0, ds: 0, t_epoch: 2020.0 }"; let op = ctx.operation(definition).unwrap(); let ITRF2014 = CoordinateTuple([-4052052.6588, 4212835.9938, -2545104.6946, 2018.0]); let GDA2020 = CoordinateTuple([-4052052.7373, 4212835.9835, -2545104.5867, 2020.0]); let mut operands = [ITRF2014]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 40e-6); ctx.inv(op, &mut operands); assert!(ITRF2014.hypot3(&operands[0]) < 40e-8); } }
#![allow(non_snake_case)] use crate::operator_construction::*; use crate::Context; use crate::CoordinateTuple; use crate::GeodesyError; #[derive(Debug)] pub struct Helmert { R: [[f64; 3]; 3], T0: [f64; 3], R0: [f64; 3], dR: [f64; 3], dT: [f64; 3], t_epoch: f64, t_obs: f64, scale: f64, dscale: f64, exact: bool, position_vector: bool, rotation: bool, inverted: bool, args: OperatorArgs, } fn rotation_matrix(rx: f64, ry: f64, rz: f64, exact: bool, position_vector: bool) -> [[f64; 3]; 3] { let rx = (rx / 3600.).to_radians(); let ry = (ry / 3600.).to_radians(); let rz = (rz / 3600.).to_radians(); let (mut sx, mut sy, mut sz) = (rx, ry, rz); let (mut cx, mut cy, mut cz) = (1.0, 1.0, 1.0); if exact { let scx = rx.sin_cos(); let scy = ry.sin_cos(); let scz = rz.sin_cos(); sx = scx.0; cx = scx.1; sy = scy.0; cy = scy.1; sz = scz.0; cz = scz.1; } let r11 = cy * cz; let mut r12 = cx * sz; let mut r13 = -cx * sy * cz; let r21 = -cy * sz; let mut r22 = cx * cz; let mut r23 = sx * cz; let r31 = sy; let r32 = -sx * cy; let r33 = cx * cy; if exact { r12 += sx * sy * cz; r13 += sx * sz; r22 -= sx * sy * sz; r23 += cx * sy * sz; } if position_vector { return [[r11, r21, r31], [r12, r22, r32], [r13, r23, r33]]; } [[r11, r12, r13], [r21, r22, r23], [r31, r32, r33]] } impl Helmert { fn new(args: &mut OperatorArgs) -> Result<Helmert, GeodesyError> { let x = args.numeric_value("x", 0.0)?; let y = args.numeric_value("y", 0.0)?; let z = args.numeric_value("z", 0.0)?; let rx = args.numeric_value("rx", 0.0)?; let ry = args.numeric_value("ry", 0.0)?; let rz = args.numeric_value("rz", 0.0)?; let dx = args.numeric_value("dx", 0.0)?; let dy = args.numeric_value("dy", 0.0)?; let dz = args.numeric_value("dz", 0.0)?; let drx = args.numeric_value("drx", 0.0)?; let dry = args.numeric_value("dry", 0.0)?; let drz = args.numeric_value("drz", 0.0)?; let t_epoch = args.numeric_value("t_epoch", std::f64::NAN)?; let t_obs = args.numeric_value("t_obs", std::f64::NAN)?; let scale = args.numeric_value("s", 
0.0)?; let dscale = args.numeric_value("ds", 0.0)? * 1e-6; let convention = args.value("convention", ""); let exact = args.flag("exact"); let rotation = !((rx, ry, rz) == (0., 0., 0.) && (drx, dry, drz) == (0., 0., 0.)); if rotation { if convention.is_empty() { return Err(GeodesyError::General( "Helmert: Need value for convention when rotating", )); } if convention != "position_vector" && convention != "coordinate_frame" { return Err(GeodesyError::General( "Helmert: value for convention must be one of {position_vector, coordinate_frame}", )); } } let inverted = args.flag("inv"); let argsc = args.clone(); let scale = 1.0 + scale * 1e-6; let T0 = [x, y, z]; let dT = [dx, dy, dz]; let R0 = [rx, ry, rz]; let dR = [drx, dry, drz]; let position_vector = convention == "position_vector"; let R = rotation_matrix(rx, ry, rz, exact, position_vector); Ok(Helmert { R, R0, dR, T0, dT, scale, dscale, t_epoch, t_obs, exact, position_vector, rotation, inverted, args: argsc, }) } pub(crate) fn operator(args: &mut OperatorArgs) -> Result<Operator, GeodesyError> { let op = crate::operator::helmert::Helmert::new(args)?; Ok(Operator(Box::new(op))) } } impl OperatorCore for Helmert { fn fwd(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; #[allow(clippy::float_cmp)] if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } if self.rotation { let x = c[0] * R[0][0] + c[1] * R[0][1] + c[2] * R[0][2]; let y = c[0] * R[1][0] + c[1] * R[1][1] + c[2] * R[1][2]; 
let z = c[0] * R[2][0] + c[1] * R[2][1] + c[2] * R[2][2]; c[0] = scale * x + T[0]; c[1] = scale * y + T[1]; c[2] = scale * z + T[2]; continue; } c[0] = scale * c[0] + T[0]; c[1] = scale * c[1] + T[1]; c[2] = scale * c[2] + T[2]; } true } fn inv(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { #[allow(clippy::float_cmp)] if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2];
scale += dt * self.dscale; } } let x = (c[0] - T[0]) / scale; let y = (c[1] - T[1]) / scale; let z = (c[2] - T[2]) / scale; if self.rotation { c[0] = x * R[0][0] + y * R[1][0] + z * R[2][0]; c[1] = x * R[0][1] + y * R[1][1] + z * R[2][1]; c[2] = x * R[0][2] + y * R[1][2] + z * R[2][2]; } else { c[0] = x; c[1] = y; c[2] = z; } } true } fn name(&self) -> &'static str { "helmert" } fn is_inverted(&self) -> bool { self.inverted } fn args(&self, _step: usize) -> &OperatorArgs { &self.args } } #[cfg(test)] mod tests { use crate::operator::operator_factory; #[test] fn helmert() { use super::*; let mut ctx = Context::new(); let mut args = OperatorArgs::new(); args.name("helmert"); args.insert("x", "foo"); args.insert("y", "-96"); args.insert("z", "-120"); let h = operator_factory(&mut args, &mut ctx, 0); assert!(h.is_err()); args.insert("x", "-87"); assert_eq!(args.value("x", ""), "-87"); assert_eq!(args.value("y", ""), "-96"); assert_eq!(args.value("z", ""), "-120"); let h = operator_factory(&mut args, &mut ctx, 0).unwrap(); let mut operands = [CoordinateTuple::origin()]; h.fwd(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), -87.); assert_eq!(operands[0].second(), -96.); assert_eq!(operands[0].third(), -120.); h.inv(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), 0.); assert_eq!(operands[0].second(), 0.); assert_eq!(operands[0].third(), 0.); let definition = "helmert: { convention: coordinate_frame, x: 0.06155, rx: -0.0394924, y: -0.01087, ry: -0.0327221, z: -0.04019, rz: -0.0328979, s: -0.009994 }"; let op = ctx.operation(definition).unwrap(); let GDA94 = CoordinateTuple([-4052051.7643, 4212836.2017, -2545106.0245, 0.0]); let GDA2020 = CoordinateTuple([-4052052.7379, 4212835.9897, -2545104.5898, 0.0]); let mut operands = [GDA94]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 75e-6); ctx.inv(op, &mut operands); assert!(GDA94.hypot3(&operands[0]) < 75e-7); let definition = "helmert: { exact: true, convention: 
coordinate_frame, x: 0, rx: 0, dx: 0, drx: 0.00150379, y: 0, ry: 0, dy: 0, dry: 0.00118346, z: 0, rz: 0, dz: 0, drz: 0.00120716, s: 0, ds: 0, t_epoch: 2020.0 }"; let op = ctx.operation(definition).unwrap(); let ITRF2014 = CoordinateTuple([-4052052.6588, 4212835.9938, -2545104.6946, 2018.0]); let GDA2020 = CoordinateTuple([-4052052.7373, 4212835.9835, -2545104.5867, 2020.0]); let mut operands = [ITRF2014]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 40e-6); ctx.inv(op, &mut operands); assert!(ITRF2014.hypot3(&operands[0]) < 40e-8); } }
if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); }
if_condition
[ { "content": "/// Handle instantiation of built-in operators.\n\nfn builtins(ctx: &mut Context, args: &mut OperatorArgs) -> Result<Operator, GeodesyError> {\n\n // Pipelines are not characterized by the name \"pipeline\", but simply by containing steps.\n\n if let Ok(steps) = args.numeric_value(\"_nsteps\...
Rust
ecap/src/common/name.rs
Mark-Simulacrum/ecap-rs
842f70a5bded587308c329ffbf7eacf3289a380c
use std::borrow::Cow; use std::cell::Cell; use std::sync::atomic::{AtomicUsize, Ordering}; static LAST_ID: AtomicUsize = AtomicUsize::new(0); #[derive(Debug, Clone)] pub struct Name<'a> { image: Option<Cow<'a, [u8]>>, id: Id, host_id: Cell<Option<u32>>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Id { Unknown, Unidentified, Id(u32), } impl<'a> Name<'a> { pub fn id(&self) -> Id { self.id } pub fn to_owned(self) -> Name<'static> { Name { id: self.id, host_id: self.host_id, image: match self.image { Some(cow) => Some(Cow::from(cow.into_owned())), None => None, }, } } pub fn from_raw<I: Into<Cow<'a, [u8]>>>(image: I, id: Id, host_id: Option<u32>) -> Self { let image = image.into(); Name { image: if image.is_empty() { None } else { Some(image) }, id, host_id: Cell::new(host_id), } } pub fn unknown() -> Name<'static> { Name { image: None, id: Id::Unknown, host_id: Cell::new(None), } } pub fn new_known<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Unidentified, host_id: Cell::new(None), } } pub fn new_identified<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Id(LAST_ID.fetch_add(1, Ordering::Relaxed) as u32), host_id: Cell::new(None), } } pub fn identified(&self) -> bool { if let Id::Id(_) = self.id { true } else { false } } pub fn known(&self) -> bool { if let Id::Unknown = self.id { false } else { true } } pub fn image(&self) -> Option<&[u8]> { self.image.as_ref().map(|s| s.as_ref()) } pub fn host_id(&self) -> Option<u32> { self.host_id.get() } pub fn assign_host_id(&self, id: u32) { assert_eq!(self.host_id.replace(Some(id)), None); } } impl<'a> PartialEq for Name<'a> { fn eq(&self, other: &Self) -> bool { self.known() && if self.identified() { self.id == other.id } else { self.image == other.image } } }
use std::borrow::Cow; use std::cell::Cell; use std::sync::atomic::{AtomicUsize, Ordering}; static LAST_ID: AtomicUsize = AtomicUsize::new(0); #[derive(Debug, Clone)] pub struct Name<'a> { image: Option
elf.id { false } else { true } } pub fn image(&self) -> Option<&[u8]> { self.image.as_ref().map(|s| s.as_ref()) } pub fn host_id(&self) -> Option<u32> { self.host_id.get() } pub fn assign_host_id(&self, id: u32) { assert_eq!(self.host_id.replace(Some(id)), None); } } impl<'a> PartialEq for Name<'a> { fn eq(&self, other: &Self) -> bool { self.known() && if self.identified() { self.id == other.id } else { self.image == other.image } } }
<Cow<'a, [u8]>>, id: Id, host_id: Cell<Option<u32>>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Id { Unknown, Unidentified, Id(u32), } impl<'a> Name<'a> { pub fn id(&self) -> Id { self.id } pub fn to_owned(self) -> Name<'static> { Name { id: self.id, host_id: self.host_id, image: match self.image { Some(cow) => Some(Cow::from(cow.into_owned())), None => None, }, } } pub fn from_raw<I: Into<Cow<'a, [u8]>>>(image: I, id: Id, host_id: Option<u32>) -> Self { let image = image.into(); Name { image: if image.is_empty() { None } else { Some(image) }, id, host_id: Cell::new(host_id), } } pub fn unknown() -> Name<'static> { Name { image: None, id: Id::Unknown, host_id: Cell::new(None), } } pub fn new_known<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Unidentified, host_id: Cell::new(None), } } pub fn new_identified<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Id(LAST_ID.fetch_add(1, Ordering::Relaxed) as u32), host_id: Cell::new(None), } } pub fn identified(&self) -> bool { if let Id::Id(_) = self.id { true } else { false } } pub fn known(&self) -> bool { if let Id::Unknown = s
random
[ { "content": "/// Reading of `(Name, Area)` pairs across the adapter/host boundary.\n\n///\n\n/// This is used to share configuration information and transaction meta-information.\n\n///\n\n/// FIXME: \"Options objects and individual option values may be temporary. They must not\n\n/// be used beyond the method...
Rust
choseong-pullup/src/lib.rs
y15un/korean-stuff
e31c92ac364ca1cff88e3f5561fbbbc4cb9b7b49
use std::convert::TryFrom; use unicode_korean_multitool::{Choseong, Jongseong, Syllable}; const RULESET: [(Option<Jongseong>, Choseong, Jongseong, bool); 28] = [ (None, Choseong::Kiyeok, Jongseong::Kiyeok, false), (None, Choseong::SsangKiyeok, Jongseong::SsangKiyeok, false), (None, Choseong::Nieun, Jongseong::Nieun, false), (None, Choseong::Tikeut, Jongseong::Tikeut, false), (None, Choseong::Rieul, Jongseong::Rieul, false), (None, Choseong::Mieum, Jongseong::Mieum, false), (None, Choseong::Pieup, Jongseong::Pieup, false), (None, Choseong::Sios, Jongseong::Sios, false), (None, Choseong::SsangSios, Jongseong::SsangSios, false), (None, Choseong::Cieuc, Jongseong::Cieuc, false), (None, Choseong::Chieuch, Jongseong::Chieuch, false), (None, Choseong::Khieukh, Jongseong::Khieukh, false), (None, Choseong::Thieuth, Jongseong::Thieuth, false), (None, Choseong::Phieuph, Jongseong::Phieuph, false), (None, Choseong::Hieuh, Jongseong::Hieuh, true), ( Some(Jongseong::Kiyeok), Choseong::Kiyeok, Jongseong::SsangKiyeok, true, ), ( Some(Jongseong::Kiyeok), Choseong::Sios, Jongseong::KiyeokSios, false, ), ( Some(Jongseong::Nieun), Choseong::Cieuc, Jongseong::NieunCieuc, false, ), ( Some(Jongseong::Nieun), Choseong::Hieuh, Jongseong::NieunHieuh, true, ), ( Some(Jongseong::Rieul), Choseong::Kiyeok, Jongseong::RieulKiyeok, false, ), ( Some(Jongseong::Rieul), Choseong::Mieum, Jongseong::RieulMieum, false, ), ( Some(Jongseong::Rieul), Choseong::Pieup, Jongseong::RieulPieup, false, ), ( Some(Jongseong::Rieul), Choseong::Sios, Jongseong::RieulSios, false, ), ( Some(Jongseong::Rieul), Choseong::Thieuth, Jongseong::RieulThieuth, false, ), ( Some(Jongseong::Rieul), Choseong::Phieuph, Jongseong::RieulPhieuph, false, ), ( Some(Jongseong::Rieul), Choseong::Hieuh, Jongseong::RieulHieuh, true, ), ( Some(Jongseong::Pieup), Choseong::Sios, Jongseong::PieupSios, false, ), ( Some(Jongseong::Sios), Choseong::Sios, Jongseong::SsangSios, true, ), ]; pub fn pullup_choseong(source: &str) -> String { 
pullup_choseong_config(source, false) } pub fn pullup_choseong_config(source: &str, extended_flag: bool) -> String { let mut destination = String::with_capacity(source.len()); let mut characters = source.chars().peekable(); let mut choseong_pulled = false; while let Some(current) = characters.next() { if !Syllable::is_one_of_us(current) { destination.push(current); continue; } let mut current_syllable = Syllable::try_from(current).unwrap(); if choseong_pulled { current_syllable.choseong = Choseong::Ieung; choseong_pulled = false; } if let Some(&next) = characters.peek() { if !Syllable::is_one_of_us(next) { destination.push(char::from(current_syllable)); continue; } let next_syllable = Syllable::try_from(next).unwrap(); for &( current_jongseong_match, next_choseong_match, current_jongseong_to_be, is_extended, ) in RULESET.iter() { if current_jongseong_match == current_syllable.jongseong && next_choseong_match == next_syllable.choseong && (is_extended <= extended_flag) { current_syllable.jongseong = Some(current_jongseong_to_be); choseong_pulled = true; break; } } } destination.push(char::from(current_syllable)); } destination } #[cfg(test)] mod tests { #[test] fn test_pullup_choseong() { assert_eq!( super::pullup_choseong("초성 올려 쓰기"), "촛엉 올려 쓱이".to_owned() ); assert_eq!( super::pullup_choseong("이불 밖은 위험해!"), "입울 밖은 위험해!".to_owned() ); assert_eq!( super::pullup_choseong("버터치킨 최고야!"), "벝엋잌인 쵝오야!".to_owned() ); assert_eq!( super::pullup_choseong("이 얼마나 무시무시한 생각이니"), "이 얾안아 뭇임웃이한 생각인이".to_owned() ); assert_eq!( super::pullup_choseong_config("이불 밖은 위험해!", true), "입울 밖은 윟엄해!".to_owned() ); assert_eq!( super::pullup_choseong_config("이 얼마나 무시무시한 생각이니", true), "이 얾안아 뭇임웃잏안 생각인이".to_owned() ); } }
use std::convert::TryFrom; use unicode_korean_multitool::{Choseong, Jongseong, Syllable}; const RULESET: [(Option<Jongseong>, Choseong, Jongseong, bool); 28] = [ (None, Choseong::Kiyeok, Jongseong::Kiyeok, false), (None, Choseong::SsangKiyeok, Jongseong::SsangKiyeok, false), (None, Choseong::Nieun, Jongseong::Nieun, false), (None, Choseong::Tikeut, Jongseong::Tikeut, false), (None, Choseong::Rieul, Jongseong::Rieul, false), (None, Choseong::Mieum, Jongseong::Mieum, false), (None, Choseong::Pieup, Jongseong::Pieup, false), (None, Choseong::Sios, Jongseong::Sios, false), (None, Choseong::SsangSios, Jongseong::SsangSios, false), (None, Choseong::Cieuc, Jongseong::Cieuc, false), (None, Choseong::Chieuch, Jongseong::Chieuch, false), (None, Choseong::Khieukh, Jongseong::Khieukh, false), (None, Choseong::Thieuth, Jongseong::Thieuth, false), (None, Choseong::Phieuph, Jongseong::Phieuph, false), (None, Choseong::Hieuh, Jongseong::Hieuh, true), ( Some(Jongseong::Kiyeok), Choseong::Kiyeok, Jongseong::SsangKiyeok, true, ), ( Some(Jongseong::Kiyeok), Choseong::Sios, Jongseong::KiyeokSios, false, ), ( Some(Jongseong::Nieun), Choseong::Cieuc, Jongseong::NieunCieuc, false, ), ( Some(Jongseong::Nieun), Choseong::Hieuh, Jongseong::NieunHieuh, true, ), ( Some(Jongseong::Rieul), Choseong::Kiyeok, Jongseong::RieulKiyeok, false, ), ( Some(Jongseong::Rieul), Choseong::Mieum, Jongseong::RieulMieum, false, ), ( Some(Jongseong::Rieul), Choseong::Pieup, Jongseong::RieulPieup, false, ), ( Some(Jongseong::Rieul), Choseong::Sios, Jongseong::RieulSios, false, ), ( Some(Jongseong::Rieul), Choseong::Thieuth, Jongseong::RieulThieuth, false, ), ( Some(Jongseong::Rieul), Choseong::Phieuph, Jongseong::RieulPhieuph, false, ), ( Some(Jongseong::Rieul), Choseong::Hieuh, Jongseong::RieulHieuh, true, ), ( Some(Jongseong::Pieup), Choseong::Sios, Jongseong::PieupSios, false, ), ( Some(Jongseong::Sios), Choseong::Sios, Jongseong::SsangSios, true, ), ]; pub fn pullup_choseong(source: &str) -> String { 
pullup_choseong_config(source, false) }
#[cfg(test)] mod tests { #[test] fn test_pullup_choseong() { assert_eq!( super::pullup_choseong("초성 올려 쓰기"), "촛엉 올려 쓱이".to_owned() ); assert_eq!( super::pullup_choseong("이불 밖은 위험해!"), "입울 밖은 위험해!".to_owned() ); assert_eq!( super::pullup_choseong("버터치킨 최고야!"), "벝엋잌인 쵝오야!".to_owned() ); assert_eq!( super::pullup_choseong("이 얼마나 무시무시한 생각이니"), "이 얾안아 뭇임웃이한 생각인이".to_owned() ); assert_eq!( super::pullup_choseong_config("이불 밖은 위험해!", true), "입울 밖은 윟엄해!".to_owned() ); assert_eq!( super::pullup_choseong_config("이 얼마나 무시무시한 생각이니", true), "이 얾안아 뭇임웃잏안 생각인이".to_owned() ); } }
pub fn pullup_choseong_config(source: &str, extended_flag: bool) -> String { let mut destination = String::with_capacity(source.len()); let mut characters = source.chars().peekable(); let mut choseong_pulled = false; while let Some(current) = characters.next() { if !Syllable::is_one_of_us(current) { destination.push(current); continue; } let mut current_syllable = Syllable::try_from(current).unwrap(); if choseong_pulled { current_syllable.choseong = Choseong::Ieung; choseong_pulled = false; } if let Some(&next) = characters.peek() { if !Syllable::is_one_of_us(next) { destination.push(char::from(current_syllable)); continue; } let next_syllable = Syllable::try_from(next).unwrap(); for &( current_jongseong_match, next_choseong_match, current_jongseong_to_be, is_extended, ) in RULESET.iter() { if current_jongseong_match == current_syllable.jongseong && next_choseong_match == next_syllable.choseong && (is_extended <= extended_flag) { current_syllable.jongseong = Some(current_jongseong_to_be); choseong_pulled = true; break; } } } destination.push(char::from(current_syllable)); } destination }
function_block-full_function
[ { "content": "pub fn pushdown_jongseong_config(source: &str, extended_flag: bool) -> String {\n\n let mut destination = String::with_capacity(source.len());\n\n\n\n let mut buffer: [u8; 4] = [0, 0, 0, 0];\n\n let mut characters = source.chars().peekable();\n\n let mut new_choseong = None;\n\n\n\n ...
Rust
gui/draw-cube/src/007-synchronization/main.rs
Shub1427/rustschool
fe45848a2101ac0cf48311e9926fe26f509059bd
use gfx_hal::{ command, format::{self as hal_format, Aspects, Swizzle}, image::{Layout, SubresourceRange, ViewKind}, pass::{Attachment, AttachmentOps, SubpassDesc}, pool::CommandPoolCreateFlags, prelude::*, window as hal_window, Backend, Features, Instance, }; use std::mem::ManuallyDrop; use std::ptr; use winit::{ dpi::{LogicalSize, PhysicalSize}, event, event_loop, window, }; #[cfg(feature = "dx12")] use gfx_backend_dx12 as back; #[cfg(feature = "metal")] use gfx_backend_metal as back; #[cfg(feature = "vulkan")] use gfx_backend_vulkan as back; use log::debug; use log4rs; const APP_NAME: &'static str = "Show Window"; const WINDOW_SIZE: [u32; 2] = [1280, 768]; pub struct Renderer<B: Backend> { instance: B::Instance, surface: ManuallyDrop<B::Surface>, device: B::Device, command_pool: Option<B::CommandPool>, swapchain: Option<B::Swapchain>, image_views: Vec<B::ImageView>, render_pass: Option<B::RenderPass>, framebuffers: Vec<B::Framebuffer>, image_available_semaphores: Vec<B::Semaphore>, render_complete_semaphores: Vec<B::Semaphore>, submission_complete_fence: Vec<B::Fence>, } impl<B: Backend> Renderer<B> { pub fn new( instance: B::Instance, mut surface: B::Surface, init_extent: hal_window::Extent2D, ) -> Result<Self, &'static str> { let mut adapters = instance.enumerate_adapters(); let (memory_types, limits, adapter) = { let adapter = adapters.remove(0); ( adapter.physical_device.memory_properties().memory_types, adapter.physical_device.limits(), adapter, ) }; let (device, queues, supported_family) = { let supported_family = adapter .queue_families .iter() .find(|family| { surface.supports_queue_family(family) && family.queue_type().supports_graphics() }) .unwrap(); let mut gpu = unsafe { adapter .physical_device .open(&[(supported_family, &[1.0])], Features::empty()) .unwrap() }; ( gpu.device, gpu.queue_groups.pop().unwrap(), supported_family, ) }; let (command_pool, mut command_buffer) = unsafe { let mut command_pool = device .create_command_pool(queues.family, 
CommandPoolCreateFlags::empty()) .expect("Out of memory"); let command_buffer = command_pool.allocate_one(command::Level::Primary); (command_pool, command_buffer) }; let (swapchain, backbuffer, image_extent, format) = { let caps = surface.capabilities(&adapter.physical_device); let supported_formats = surface.supported_formats(&adapter.physical_device); let format = supported_formats.map_or(hal_format::Format::Rgba8Srgb, |formats| { formats .iter() .find(|format| format.base_format().1 == hal_format::ChannelType::Srgb) .map(|format| *format) .unwrap_or(formats[0]) }); let swap_config = hal_window::SwapchainConfig::from_caps(&caps, format, init_extent); let image_extent = swap_config.extent.to_extent(); let (swapchain, backbuffer) = unsafe { device .create_swapchain(&mut surface, swap_config, None) .expect("Can't create swapchain") }; (swapchain, backbuffer, image_extent, format) }; let image_views = backbuffer .into_iter() .map(|image| unsafe { device .create_image_view( &image, ViewKind::D2, format, Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image_view for the image!") }) .collect::<Result<Vec<B::ImageView>, &str>>()?; let render_pass = { let color_attachment = Attachment { format: Some(format), samples: 1, ops: AttachmentOps::INIT, stencil_ops: AttachmentOps::DONT_CARE, layouts: Layout::Undefined..Layout::Present, }; let subpass = SubpassDesc { colors: &[(0, Layout::ColorAttachmentOptimal)], depth_stencil: None, inputs: &[], resolves: &[], preserves: &[], }; unsafe { device .create_render_pass(&[color_attachment], &[subpass], &[]) .expect("Out of memory") } }; let framebuffers = image_views .iter() .map(|image_view| unsafe { device .create_framebuffer(&render_pass, vec![image_view], image_extent) .map_err(|_| "Couldn't create the framebuffer for the image_view!") }) .collect::<Result<Vec<B::Framebuffer>, &str>>()?; let (image_available_semaphores, render_complete_semaphores, 
submission_complete_fence) = { let mut image_available_semaphores: Vec<B::Semaphore> = vec![]; let mut render_finished_semaphores: Vec<B::Semaphore> = vec![]; let mut submission_complete_fence: Vec<B::Fence> = vec![]; for _ in 0..image_views.len() { image_available_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create image_available_semaphores semaphore!")?, ); render_finished_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create render_finished_semaphores semaphore!")?, ); submission_complete_fence.push( device .create_fence(true) .map_err(|_| "Could not create submission_complete_fence fence!")?, ); } ( image_available_semaphores, render_finished_semaphores, submission_complete_fence, ) }; Ok(Renderer { instance, surface: ManuallyDrop::new(surface), device, command_pool: Some(command_pool), swapchain: Some(swapchain), image_views, render_pass: Some(render_pass), framebuffers, image_available_semaphores, render_complete_semaphores, submission_complete_fence, }) } } impl<B: Backend> Drop for Renderer<B> { fn drop(&mut self) { unsafe { for image_available in self.image_available_semaphores.drain(..) { self.device.destroy_semaphore(image_available); } for render_complete in self.render_complete_semaphores.drain(..) { self.device.destroy_semaphore(render_complete); } for submission_complete in self.submission_complete_fence.drain(..) { self.device.destroy_fence(submission_complete); } for framebuffer in self.framebuffers.drain(..) { self.device.destroy_framebuffer(framebuffer); } for image_view in self.image_views.drain(..) 
{ self.device.destroy_image_view(image_view); } self.device .destroy_render_pass(self.render_pass.take().unwrap()); self.device .destroy_swapchain(self.swapchain.take().unwrap()); self.device .destroy_command_pool(self.command_pool.take().unwrap()); let surface = ManuallyDrop::into_inner(ptr::read(&self.surface)); self.instance.destroy_surface(surface); } } } fn create_backend( wb: window::WindowBuilder, ev_loop: &event_loop::EventLoop<()>, ) -> (back::Instance, back::Surface, window::Window) { let window = wb.build(ev_loop).unwrap(); let instance = back::Instance::create(APP_NAME, 1).expect("Failed to create an instance!"); let surface = unsafe { instance .create_surface(&window) .expect("Failed to create a surface!") }; (instance, surface, window) } fn build_window( ev_loop: &event_loop::EventLoop<()>, ) -> (window::WindowBuilder, hal_window::Extent2D) { let (logical_window_size, physical_window_size) = { let dpi = ev_loop.primary_monitor().scale_factor(); let logical: LogicalSize<u32> = WINDOW_SIZE.into(); let physical: PhysicalSize<u32> = logical.to_physical(dpi); (logical, physical) }; let window_builder = window::WindowBuilder::new() .with_title(APP_NAME) .with_inner_size(logical_window_size); ( window_builder, hal_window::Extent2D { width: physical_window_size.width, height: physical_window_size.height, }, ) } fn main() { log4rs::init_file("log4rs.yml", Default::default()).unwrap(); let ev_loop = event_loop::EventLoop::new(); let (window_builder, extent) = build_window(&ev_loop); let (instance, surface, window) = create_backend(window_builder, &ev_loop); let renderer = Renderer::<back::Backend>::new(instance, surface, extent); ev_loop.run(move |event, _, control_flow| { *control_flow = event_loop::ControlFlow::Wait; match event { event::Event::WindowEvent { event, .. 
} => { #[allow(unused_variables)] match event { event::WindowEvent::CloseRequested => { *control_flow = event_loop::ControlFlow::Exit } event::WindowEvent::Resized(dims) => { debug!("RESIZE EVENT"); } event::WindowEvent::ScaleFactorChanged { new_inner_size, .. } => { debug!("Scale Factor Change"); } _ => (), } } event::Event::MainEventsCleared => { debug!("MainEventsCleared"); } event::Event::RedrawRequested(_) => { debug!("RedrawRequested"); } event::Event::RedrawEventsCleared => { debug!("RedrawEventsCleared"); } _ => (), } }); }
use gfx_hal::{ command, format::{self as hal_format, Aspects, Swizzle}, image::{Layout, SubresourceRange, ViewKind}, pass::{Attachment, AttachmentOps, SubpassDesc}, pool::CommandPoolCreateFlags, prelude::*, window as hal_window, Backend, Features, Instance, }; use std::mem::ManuallyDrop; use std::ptr; use winit::{ dpi::{LogicalSize, PhysicalSize}, event, event_loop, window, }; #[cfg(feature = "dx12")] use gfx_backend_dx12 as back; #[cfg(feature = "metal")] use gfx_backend_metal as back; #[cfg(feature = "vulkan")] use gfx_backend_vulkan as back; use log::debug; use log4rs; const APP_NAME: &'static str = "Show Window"; const WINDOW_SIZE: [u32; 2] = [1280, 768]; pub struct Renderer<B: Backend> { instance: B::Instance, surface: ManuallyDrop<B::Surface>, device: B::Device, command_pool: Option<B::CommandPool>, swapchain: Option<B::Swapchain>, image_views: Vec<B::ImageView>, render_pass: Option<B::RenderPass>, framebuffers: Vec<B::Framebuffer>, image_available_semaphores: Vec<B::Semaphore>, render_complete_semaphores: Vec<B::Semaphore>, submission_complete_fence: Vec<B::Fence>, } impl<B: Backend> Renderer<B> { pub fn new( instance: B::Instance, mut surface: B::Surface, init_extent: hal_window::Extent2D, ) -> Result<Self, &'static str> { let mut adapters = instance.enumerate_adapters(); let (memory_types, limits, adapter) = { let adapter = adapters.remove(0); ( adapter.physical_device.memory_properties().memory_types, adapter.physical_device.limits(), adapter, ) }; let (device, queues, supported_family) = { let supported_family = adapter .queue_families .iter() .find(|family| { surface.supports_queue_family(family) && family.queue_type().supports_graphics() }) .unwrap(); let mut gpu = unsafe { adapter .physical_device .open(&[(supported_family, &[1.0])], Features::empty()) .unwrap() }; ( gpu.device, gpu.queue_groups.pop().unwrap(), supported_family, ) }; let (command_pool, mut command_buffer) = unsafe { let mut command_pool = device .create_command_pool(queues.family, 
CommandPoolCreateFlags::empty()) .expect("Out of memory"); let command_buffer = command_pool.allocate_one(command::Level::Primary); (command_pool, command_buffer) }; let (swapchain, backbuffer, image_extent, format) = { let caps = surface.capabilities(&adapter.physical_device); let supported_formats = surface.supported_formats(&adapter.physical_device); let format = supported_formats.map_or(hal_format::Format::Rgba8Srgb, |formats| { formats .iter() .find(|format| format.base_format().1 == hal_format::ChannelType::Srgb) .map(|format| *format) .unwrap_or(formats[0]) }); let swap_config = hal_window::SwapchainConfig::from_caps(&caps, format, init_extent); let image_extent = swap_config.extent.to_extent(); let (swapchain, backbuffer) = unsafe { device .create_swapchain(&mut surface, swap_config, None) .expect("Can't create swapchain") }; (swapchain, backbuffer, image_extent, format) }; let image_views = backbuffer .into_iter() .map(|image| unsafe { device .create_image_view( &image, ViewKind::D2, format, Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image_view for the image!") }) .collect::<Result<Vec<B::ImageView>, &str>>()?; let render_pass = { let color_attachment = Attachment { format: Some(format), samples: 1, ops: AttachmentOps::INIT, stencil_ops: AttachmentOps::DONT_CARE, layouts: Layout::Undefined..Layout::Present, }; let subpass = SubpassDesc { colors: &[(0, Layout::ColorAttachmentOptimal)], depth_stencil: None, inputs: &[], resolves: &[], preserves: &[], }; unsafe { device .create_render_pass(&[color_attachment], &[subpass], &[]) .expect("Out of memory") } }; let framebuffers = image_views .iter() .map(|image_view| unsafe { device .create_framebuffer(&render_pass, vec![image_view], image_extent) .map_err(|_| "Couldn't create the framebuffer for the image_view!") }) .collect::<Result<Vec<B::Framebuffer>, &str>>()?; let (image_available_semaphores, render_complete_semaphores, 
submission_complete_fence) = { let mut image_available_semaphores: Vec<B::Semaphore> = vec![]; let mut render_finished_semaphores: Vec<B::Semaphore> = vec![]; let mut submission_complete_fence: Vec<B::Fence> = vec![]; for _ in 0..image_views.len() { image_available_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create image_available_semaphores semaphore!")?, ); render_finished_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create render_finished_semaphores semaphore!")?, ); submission_complete_fence.push( device .create_fence(true) .map_err(|_| "Could not create submission_complete_fence fence!")?, ); } ( image_available_semaphores, render_finished_semaphores, submission_complete_fence, ) };
} } impl<B: Backend> Drop for Renderer<B> { fn drop(&mut self) { unsafe { for image_available in self.image_available_semaphores.drain(..) { self.device.destroy_semaphore(image_available); } for render_complete in self.render_complete_semaphores.drain(..) { self.device.destroy_semaphore(render_complete); } for submission_complete in self.submission_complete_fence.drain(..) { self.device.destroy_fence(submission_complete); } for framebuffer in self.framebuffers.drain(..) { self.device.destroy_framebuffer(framebuffer); } for image_view in self.image_views.drain(..) { self.device.destroy_image_view(image_view); } self.device .destroy_render_pass(self.render_pass.take().unwrap()); self.device .destroy_swapchain(self.swapchain.take().unwrap()); self.device .destroy_command_pool(self.command_pool.take().unwrap()); let surface = ManuallyDrop::into_inner(ptr::read(&self.surface)); self.instance.destroy_surface(surface); } } } fn create_backend( wb: window::WindowBuilder, ev_loop: &event_loop::EventLoop<()>, ) -> (back::Instance, back::Surface, window::Window) { let window = wb.build(ev_loop).unwrap(); let instance = back::Instance::create(APP_NAME, 1).expect("Failed to create an instance!"); let surface = unsafe { instance .create_surface(&window) .expect("Failed to create a surface!") }; (instance, surface, window) } fn build_window( ev_loop: &event_loop::EventLoop<()>, ) -> (window::WindowBuilder, hal_window::Extent2D) { let (logical_window_size, physical_window_size) = { let dpi = ev_loop.primary_monitor().scale_factor(); let logical: LogicalSize<u32> = WINDOW_SIZE.into(); let physical: PhysicalSize<u32> = logical.to_physical(dpi); (logical, physical) }; let window_builder = window::WindowBuilder::new() .with_title(APP_NAME) .with_inner_size(logical_window_size); ( window_builder, hal_window::Extent2D { width: physical_window_size.width, height: physical_window_size.height, }, ) } fn main() { log4rs::init_file("log4rs.yml", Default::default()).unwrap(); let ev_loop = 
event_loop::EventLoop::new(); let (window_builder, extent) = build_window(&ev_loop); let (instance, surface, window) = create_backend(window_builder, &ev_loop); let renderer = Renderer::<back::Backend>::new(instance, surface, extent); ev_loop.run(move |event, _, control_flow| { *control_flow = event_loop::ControlFlow::Wait; match event { event::Event::WindowEvent { event, .. } => { #[allow(unused_variables)] match event { event::WindowEvent::CloseRequested => { *control_flow = event_loop::ControlFlow::Exit } event::WindowEvent::Resized(dims) => { debug!("RESIZE EVENT"); } event::WindowEvent::ScaleFactorChanged { new_inner_size, .. } => { debug!("Scale Factor Change"); } _ => (), } } event::Event::MainEventsCleared => { debug!("MainEventsCleared"); } event::Event::RedrawRequested(_) => { debug!("RedrawRequested"); } event::Event::RedrawEventsCleared => { debug!("RedrawEventsCleared"); } _ => (), } }); }
Ok(Renderer { instance, surface: ManuallyDrop::new(surface), device, command_pool: Some(command_pool), swapchain: Some(swapchain), image_views, render_pass: Some(render_pass), framebuffers, image_available_semaphores, render_complete_semaphores, submission_complete_fence, })
call_expression
[ { "content": "// The &'static here means the return type has a static lifetime.\n\n// This is a Rust feature that you don't need to worry about now.\n\npub fn hello() -> &'static str {\n\n \"Hello, World!\"\n\n}\n", "file_path": "exercism/rust/hello-world/src/lib.rs", "rank": 0, "score": 250871.1...
Rust
rmqtt/src/settings/mod.rs
phial3/rmqtt
8c29529e273007178fd0af73dccb6b0bf6729339
use std::fmt; use std::net::SocketAddr; use std::ops::{Deref, DerefMut}; use std::sync::Arc; use std::time::Duration; use config::{Config, ConfigError, File}; use parking_lot::RwLock; use serde::de::{Deserialize, Deserializer}; use crate::{NodeId, Result}; use self::listener::Listeners; use self::log::Log; pub mod listener; pub mod log; #[derive(Clone)] pub struct Settings(Arc<Inner>); #[derive(Debug, Clone, Deserialize)] pub struct Inner { #[serde(default = "inner_api_addr_default", deserialize_with = "deserialize_addr")] pub inner_api_addr: SocketAddr, #[serde(default)] pub node: Node, #[serde(default)] pub rpc: Rpc, #[serde(default)] pub log: Log, #[serde(rename = "listener")] #[serde(default)] pub listeners: Listeners, #[serde(default)] pub plugins: Plugins, #[serde(default)] pub mqtt: Mqtt, } fn inner_api_addr_default() -> SocketAddr { ([0, 0, 0, 0], 6063).into() } impl Deref for Settings { type Target = Inner; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl Settings { pub fn new() -> Result<Self, ConfigError> { let mut s = Config::new(); if let Ok(cfg_filename) = std::env::var("RMQTT-CONFIG-FILENAME") { s.merge(File::with_name(&cfg_filename).required(false))?; } s.merge(File::with_name("/etc/rmqtt/rmqtt").required(false))?; s.merge(File::with_name("/etc/rmqtt").required(false))?; s.merge(File::with_name("rmqtt").required(false))?; let mut inner: Inner = match s.try_into() { Ok(c) => c, Err(e) => { return Err(e); } }; inner.listeners.init(); if inner.listeners.tcps.is_empty() && inner.listeners.tlss.is_empty() { return Err(ConfigError::Message( "Settings::new() error, listener.tcp or listener.tls is not exist".into(), )); } Ok(Self(Arc::new(inner))) } } impl fmt::Debug for Settings { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Settings ...")?; Ok(()) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Node { #[serde(default)] pub id: NodeId, #[serde(default = "Node::cookie_default")] pub cookie: String, 
#[serde(default = "Node::crash_dump_default")] pub crash_dump: String, } impl Node { fn cookie_default() -> String { "rmqttsecretcookie".into() } fn crash_dump_default() -> String { "/var/log/rmqtt/crash.dump".into() } } #[derive(Debug, Clone, Deserialize)] pub struct Rpc { #[serde(default = "Rpc::mode_default")] pub mode: String, #[serde(default = "Rpc::server_addr_default", deserialize_with = "deserialize_addr")] pub server_addr: SocketAddr, #[serde(default = "Rpc::server_workers_default")] pub server_workers: usize, #[serde(default = "Rpc::client_concurrency_limit_default")] pub client_concurrency_limit: usize, #[serde(default = "Rpc::client_timeout_default", deserialize_with = "deserialize_duration")] pub client_timeout: Duration, #[serde(default = "Rpc::batch_size_default")] pub batch_size: usize, } impl Default for Rpc { #[inline] fn default() -> Self { Self { mode: Self::mode_default(), batch_size: Self::batch_size_default(), server_addr: Self::server_addr_default(), server_workers: Self::server_workers_default(), client_concurrency_limit: Self::client_concurrency_limit_default(), client_timeout: Self::client_timeout_default(), } } } impl Rpc { fn mode_default() -> String { "async".into() } fn batch_size_default() -> usize { 128 } fn server_addr_default() -> SocketAddr { ([0, 0, 0, 0], 5363).into() } fn server_workers_default() -> usize { 4 } fn client_concurrency_limit_default() -> usize { 128 } fn client_timeout_default() -> Duration { Duration::from_secs(5) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Plugins { #[serde(default = "Plugins::dir_default")] pub dir: String, #[serde(default)] pub default_startups: Vec<String>, } impl Plugins { fn dir_default() -> String { "./plugins/".into() } pub fn load_config<'de, T: serde::Deserialize<'de>>(&self, name: &str) -> Result<T, ConfigError> { let dir = self.dir.trim_end_matches(|c| c == '/' || c == '\\'); let mut s = Config::new(); s.merge(File::with_name(&format!("{}/{}", dir, 
name)).required(true))?; s.try_into::<T>() } } #[derive(Debug, Clone, Default, Deserialize)] pub struct Mqtt {} #[derive(Debug, Clone)] pub struct ValueMut<T>(Arc<RwLock<T>>); impl<T> ValueMut<T> where T: Copy, { #[inline] pub fn new(v: T) -> Self { Self(Arc::new(RwLock::new(v))) } #[inline] pub fn get(&self) -> T { *self.0.read() } #[inline] pub fn set(&self, v: T) { *self.0.write() = v; } } impl<'de, T: serde::Deserialize<'de> + Copy> Deserialize<'de> for ValueMut<T> { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = T::deserialize(deserializer)?; Ok(ValueMut::new(v)) } } #[derive(Debug, Clone)] pub struct Bytesize(usize); impl Bytesize { #[inline] pub fn as_u32(&self) -> u32 { self.0 as u32 } } impl Deref for Bytesize { type Target = usize; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for Bytesize { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<'de> Deserialize<'de> for Bytesize { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = to_bytesize(&String::deserialize(deserializer)?); Ok(Bytesize(v)) } } #[inline] pub fn to_bytesize(text: &str) -> usize { let text = text.to_uppercase().replace("GB", "G").replace("MB", "M").replace("KB", "K"); text.split_inclusive(|x| x == 'G' || x == 'M' || x == 'K' || x == 'B') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<usize>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'B' => v, 'K' => v * 1024, 'M' => v * 1048576, 'G' => v * 1073741824, _ => 0, } }) .sum() } #[inline] pub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { let v = String::deserialize(deserializer)?; Ok(to_duration(&v)) } #[inline] pub fn to_duration(text: &str) -> Duration { let text = text.to_lowercase().replace("ms", "Y"); let ms: u64 = text 
.split_inclusive(|x| x == 's' || x == 'm' || x == 'h' || x == 'd' || x == 'w' || x == 'f' || x == 'Y') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<u64>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'Y' => v, 's' => v * 1000, 'm' => v * 60000, 'h' => v * 3600000, 'd' => v * 86400000, 'w' => v * 604800000, 'f' => v * 1209600000, _ => 0, } }) .sum(); Duration::from_millis(ms) } #[inline] pub fn deserialize_addr<'de, D>(deserializer: D) -> Result<SocketAddr, D::Error> where D: Deserializer<'de>, { let addr = String::deserialize(deserializer)? .parse::<std::net::SocketAddr>() .map_err(serde::de::Error::custom)?; Ok(addr) }
use std::fmt; use std::net::SocketAddr; use std::ops::{Deref, DerefMut}; use std::sync::Arc; use std::time::Duration; use config::{Config, ConfigError, File}; use parking_lot::RwLock; use serde::de::{Deserialize, Deserializer}; use crate::{NodeId, Result}; use self::listener::Listeners; use self::log::Log; pub mod listener; pub mod log; #[derive(Clone)] pub struct Settings(Arc<Inner>); #[derive(Debug, Clone, Deserialize)] pub struct Inner { #[serde(default = "inner_api_addr_default", deserialize_with = "deserialize_addr")] pub inner_api_addr: SocketAddr, #[serde(default)] pub node: Node, #[serde(default)] pub rpc: Rpc, #[serde(default)] pub log: Log, #[serde(rename = "listener")] #[serde(default)] pub listeners: Listeners, #[serde(default)] pub plugins: Plugins, #[serde(default)] pub mqtt: Mqtt, } fn inner_api_addr_default() -> SocketAddr { ([0, 0, 0, 0], 6063).into() } impl Deref for Settings { type Target = Inner; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl Settings { pub fn new() -> Result<Self, ConfigError> { let mut s = Config::new(); if let Ok(cfg_filename) = std::env::var("RMQTT-CONFIG-FILENAME") { s.merge(File::with_name(&cfg_filename).required(false))?; } s.merge(File::with_name("/etc/rmqtt/rmqtt").required(false))?; s.merge(File::with_name("/etc/rmqtt").required(false))?; s.merge(File::with_name("rmqtt").required(false))?; let mut inner: Inner = match s.try_into() { Ok(c) => c, Err(e) => { return Err(e); } }; inner.listeners.init(); if inner.listeners.tcps.is_empty() && inner.listeners.tlss.is_empty() { return Err(ConfigError::Message( "Settings::new() error, listener.tcp or listener.tls is not exist".into(), )); } Ok(Self(Arc::new(inner))) } } impl fmt::Debug for Settings { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Settings ...")?; Ok(()) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Node { #[serde(default)] pub id: NodeId, #[serde(default = "Node::cookie_default")] pub cookie: String, 
#[serde(default = "Node::crash_dump_default")] pub crash_dump: String, } impl Node { fn cookie_default() -> String { "rmqttsecretcookie".into() } fn crash_dump_default() -> String { "/var/log/rmqtt/crash.dump".into() } } #[derive(Debug, Clone, Deserialize)] pub struct Rpc { #[serde(default = "Rpc::mode_default")] pub mode: String, #[serde(default = "Rpc::server_addr_default", deserialize_with = "deserialize_addr")] pub server_addr: SocketAddr, #[serde(default = "Rpc::server_workers_default")] pub server_workers: usize, #[serde(default = "Rpc::client_concurrency_limit_default")] pub client_concurrency_limit: usize, #[serde(default = "Rpc::client_timeout_default", deserialize_with = "deserialize_duration")] pub client_timeout: Duration, #[serde(default = "Rpc::batch_size_default")] pub batch_size: usize, } impl Default for Rpc { #[inline] fn default() -> Self { Self { mode: Self::mode_default(), batch_size: Self::batch_size_default(), server_addr: Self::server_addr_default(), server_workers: Self::server_workers_default(), client_concurrency_limit: Self::client_concurrency_limit_default(), client_timeout: Self::client_timeout_default(), } } } impl Rpc { fn mode_default() -> String { "async".into() } fn batch_size_default() -> usize { 128 } fn server_addr_default() -> SocketAddr { ([0, 0, 0, 0], 5363).into() } fn server_workers_default() -> usize { 4 } fn client_concurrency_limit_default() -> usize { 128 } fn client_timeout_default() -> Duration { Duration::from_secs(5) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Plugins { #[serde(default = "Plugins::dir_default")] pub dir: String, #[serde(default)] pub default_startups: Vec<String>, } impl Plugins { fn dir_default() -> String { "./plugins/".into() } pub fn load_config<'de, T: serde::Deserialize<'de>>(&self, name: &str) -> Result<T, ConfigError> { let dir = self.dir.trim_end_matches(|c| c == '/' || c == '\\'); let mut s = Config::new(); s.merge(File::with_name(&format!("{}/{}", dir, 
name)).required(true))?; s.try_into::<T>() } } #[derive(Debug, Clone, Default, Deserialize)] pub struct Mqtt {} #[derive(Debug, Clone)] pub struct ValueMut<T>(Arc<RwLock<T>>); impl<T> ValueMut<T> where T: Copy, { #[inline] pub fn new(v: T) -> Self { Self(Arc::new(RwLock::new(v))) } #[inline] pub fn get(&self) -> T { *self.0.read() } #[inline] pub fn set(&self, v: T) { *self.0.write() = v; } } impl<'de, T: serde::Deserialize<'de> + Copy> Deserialize<'de> for ValueMut<T> { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = T::deserialize(deserializer)?; Ok(ValueMut::new(v)) } } #[derive(Debug, Clone)] pub struct Bytesize(usize); impl Bytesize { #[inline] pub fn as_u32(&self) -> u32 { self.0 as u32 } } impl Deref for Bytesize { type Target = usize; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for Bytesize { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<'de> Deserialize<'de> for Bytesize { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = to_bytesize(&String::deserialize(deserializer)?); Ok(Bytesize(v)) } } #[inline] pub fn to_bytesize(text: &str) -> usize { let text = text.to_uppercase().replace("GB", "G").replace("MB", "M").replace("KB", "K"); text.split_inclusive(|x| x == 'G' || x == 'M' || x == 'K' || x == 'B') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<usize>() { Err(_e) => return 0, Ok(v) => v, };
}) .sum() } #[inline] pub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { let v = String::deserialize(deserializer)?; Ok(to_duration(&v)) } #[inline] pub fn to_duration(text: &str) -> Duration { let text = text.to_lowercase().replace("ms", "Y"); let ms: u64 = text .split_inclusive(|x| x == 's' || x == 'm' || x == 'h' || x == 'd' || x == 'w' || x == 'f' || x == 'Y') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<u64>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'Y' => v, 's' => v * 1000, 'm' => v * 60000, 'h' => v * 3600000, 'd' => v * 86400000, 'w' => v * 604800000, 'f' => v * 1209600000, _ => 0, } }) .sum(); Duration::from_millis(ms) } #[inline] pub fn deserialize_addr<'de, D>(deserializer: D) -> Result<SocketAddr, D::Error> where D: Deserializer<'de>, { let addr = String::deserialize(deserializer)? .parse::<std::net::SocketAddr>() .map_err(serde::de::Error::custom)?; Ok(addr) }
match u { 'B' => v, 'K' => v * 1024, 'M' => v * 1048576, 'G' => v * 1073741824, _ => 0, }
if_condition
[ { "content": "fn open_file(filename: &str) -> Result<File> {\n\n OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(filename)\n\n .map_err(|e| MqttError::from(format!(\"logger file config error, filename: {}, {:?}\", filename, e)))\n\n}\n", ...
Rust
geo/src/algorithm/relate/geomgraph/topology_position.rs
phreeheeler/geo
ab0fe46cec04ebe358d66455a6b73415c925b18d
use super::{CoordPos, Direction}; use std::fmt; #[derive(Copy, Clone)] pub(crate) enum TopologyPosition { Area { on: Option<CoordPos>, left: Option<CoordPos>, right: Option<CoordPos>, }, LineOrPoint { on: Option<CoordPos>, }, } impl fmt::Debug for TopologyPosition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt_position(position: &Option<CoordPos>, f: &mut fmt::Formatter) -> fmt::Result { match position { Some(CoordPos::Inside) => write!(f, "i"), Some(CoordPos::OnBoundary) => write!(f, "b"), Some(CoordPos::Outside) => write!(f, "e"), None => write!(f, "_"), } } match self { Self::LineOrPoint { on } => fmt_position(on, f)?, Self::Area { on, left, right } => { fmt_position(left, f)?; fmt_position(on, f)?; fmt_position(right, f)?; } } Ok(()) } } impl TopologyPosition { pub fn area(on: CoordPos, left: CoordPos, right: CoordPos) -> Self { Self::Area { on: Some(on), left: Some(left), right: Some(right), } } pub fn empty_area() -> Self { Self::Area { on: None, left: None, right: None, } } pub fn line_or_point(on: CoordPos) -> Self { Self::LineOrPoint { on: Some(on) } } pub fn empty_line_or_point() -> Self { Self::LineOrPoint { on: None } } pub fn get(&self, direction: Direction) -> Option<CoordPos> { match (direction, self) { (Direction::Left, Self::Area { left, .. }) => *left, (Direction::Right, Self::Area { right, .. }) => *right, (Direction::On, Self::LineOrPoint { on }) | (Direction::On, Self::Area { on, .. }) => { *on } (_, Self::LineOrPoint { .. }) => { panic!("LineOrPoint only has a position for `Direction::On`") } } } pub fn is_empty(&self) -> bool { matches!( self, Self::LineOrPoint { on: None } | Self::Area { on: None, left: None, right: None, } ) } pub fn is_any_empty(&self) -> bool { !matches!( self, Self::LineOrPoint { on: Some(_) } | Self::Area { on: Some(_), left: Some(_), right: Some(_), } ) } pub fn is_area(&self) -> bool { matches!(self, Self::Area { .. }) } pub fn is_line(&self) -> bool { matches!(self, Self::LineOrPoint { .. 
}) } pub fn flip(&mut self) { match self { Self::LineOrPoint { .. } => {} Self::Area { left, right, .. } => { std::mem::swap(left, right); } } } pub fn set_all_positions(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } => { *on = Some(position); } Self::Area { on, left, right } => { *on = Some(position); *left = Some(position); *right = Some(position); } } } pub fn set_all_positions_if_empty(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } => { if on.is_none() { *on = Some(position); } } Self::Area { on, left, right } => { if on.is_none() { *on = Some(position); } if left.is_none() { *left = Some(position); } if right.is_none() { *right = Some(position); } } } } pub fn set_position(&mut self, direction: Direction, position: CoordPos) { match (direction, self) { (Direction::On, Self::LineOrPoint { on }) => *on = Some(position), (_, Self::LineOrPoint { .. }) => { panic!("invalid assignment dimensions for Self::Line") } (Direction::On, Self::Area { on, .. }) => *on = Some(position), (Direction::Left, Self::Area { left, .. }) => *left = Some(position), (Direction::Right, Self::Area { right, .. }) => *right = Some(position), } } pub fn set_on_position(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } | Self::Area { on, .. } => { *on = Some(position); } } } pub fn set_locations(&mut self, new_on: CoordPos, new_left: CoordPos, new_right: CoordPos) { match self { Self::LineOrPoint { .. } => { error!("invalid assignment dimensions for {:?}", self); debug_assert!(false, "invalid assignment dimensions for {:?}", self); } Self::Area { on, left, right } => { *on = Some(new_on); *left = Some(new_left); *right = Some(new_right); } } } }
use super::{CoordPos, Direction}; use std::fmt; #[derive(Copy, Clone)] pub(crate) enum TopologyPosition { Area { on: Option<CoordPos>, left: Option<CoordPos>, right: Option<CoordPos>, }, LineOrPoint { on: Option<CoordPos>, }, } impl fmt::Debug for TopologyPosition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt_position(position: &Option<CoordPos>, f: &mut fmt::Formatter) -> fmt::Result { match position { Some(CoordPos::Inside) => write!(f, "i"), Some(CoordPos::OnBoundary) => write!(f, "b"), Some(CoordPos::Outside) => write!(f, "e"), None => write!(f, "_"), } } match self { Self::LineOrPoint { on } => fmt_position(on, f)?, Self::Area { on, left, right } => { fmt_position(left, f)?; fmt_position(on, f)?; fmt_position(right, f)?; } } Ok(()) } } impl TopologyPosition { pub fn area(on: CoordPos, left: CoordPos, right: CoordPos) -> Self { Self::Area { on: Some(on), left: Some(left), right: Some(right), } } pub fn empty_area() -> Self { Self::Area { on: None, left: None, right: None, } } pub fn line_or_point(on: CoordPos) -> Self { Self::LineOrPoint { on: Some(on) } } pub fn empty_line_or_point() -> Self { Self::LineOrPoint { on: None } } pub fn get(&self, direction: Direction) -> Option<CoordPos> { match (direction, self) { (Direction::Left, Self::Area { left, .. }) => *left, (Direction::Right, Self::Area { right, .. }) => *right, (Direction::On, Self::LineOrPoint { on }) | (Direction::On, Self::Area { on, .. }) => { *on } (_, Self::LineOrPoint { .. }) => { panic!("LineOrPoint only has a position for `Direction::On`") } } } pub fn is_empty(&self) -> bool { matches!( self, Self::LineOrPoint { on: None } | Self::Area { on: None, left: None, right: None, } ) } pub fn is_any_empty(&self) -> bool { !matches!( self, Self::LineOrPoint { on: Some(_) } | Self::Area { on: Some(_), left: Some(_), right: Some(_), } ) } pub fn is_area(&self) -> bool { matches!(self, Self::Area { .. }) } pub fn is_line(&self) -> bool { matches!(self, Self::LineOrPoint { .. 
}) } pub fn flip(&mut self) { match self { Self::LineOrPoint { .. } => {} Self::Area { left, right, .. } => { std::mem::swap(left, right); } } }
pub fn set_all_positions_if_empty(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } => { if on.is_none() { *on = Some(position); } } Self::Area { on, left, right } => { if on.is_none() { *on = Some(position); } if left.is_none() { *left = Some(position); } if right.is_none() { *right = Some(position); } } } } pub fn set_position(&mut self, direction: Direction, position: CoordPos) { match (direction, self) { (Direction::On, Self::LineOrPoint { on }) => *on = Some(position), (_, Self::LineOrPoint { .. }) => { panic!("invalid assignment dimensions for Self::Line") } (Direction::On, Self::Area { on, .. }) => *on = Some(position), (Direction::Left, Self::Area { left, .. }) => *left = Some(position), (Direction::Right, Self::Area { right, .. }) => *right = Some(position), } } pub fn set_on_position(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } | Self::Area { on, .. } => { *on = Some(position); } } } pub fn set_locations(&mut self, new_on: CoordPos, new_left: CoordPos, new_right: CoordPos) { match self { Self::LineOrPoint { .. } => { error!("invalid assignment dimensions for {:?}", self); debug_assert!(false, "invalid assignment dimensions for {:?}", self); } Self::Area { on, left, right } => { *on = Some(new_on); *left = Some(new_left); *right = Some(new_right); } } } }
pub fn set_all_positions(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } => { *on = Some(position); } Self::Area { on, left, right } => { *on = Some(position); *left = Some(position); *right = Some(position); } } }
function_block-full_function
[]
Rust
crates/tor-chanmgr/src/event.rs
Mohsen7s/arti
1dbb23982fa36d431f2dd2b501cfc043438d9de5
#![allow(dead_code, unreachable_pub)] use futures::{Stream, StreamExt}; use postage::watch; use std::{ fmt, time::{Duration, Instant}, }; #[derive(Default, Debug, Clone)] pub struct ConnStatus { online: Option<bool>, tls_works: Option<bool>, } #[derive(Debug, Clone, Eq, PartialEq, derive_more::Display)] #[non_exhaustive] pub enum ConnBlockage { #[display(fmt = "unable to connect to the internet")] NoTcp, #[display(fmt = "our internet connection seems to be filtered")] NoHandshake, } impl ConnStatus { fn eq(&self, other: &ConnStatus) -> bool { self.online == other.online && self.tls_works == other.tls_works } pub fn usable(&self) -> bool { self.online == Some(true) && self.tls_works == Some(true) } pub fn frac(&self) -> f32 { match self { Self { online: Some(true), tls_works: Some(true), } => 1.0, Self { online: Some(true), .. } => 0.5, _ => 0.0, } } pub fn blockage(&self) -> Option<ConnBlockage> { match self { Self { online: Some(false), .. } => Some(ConnBlockage::NoTcp), Self { tls_works: Some(false), .. } => Some(ConnBlockage::NoHandshake), _ => None, } } } impl fmt::Display for ConnStatus { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ConnStatus { online: None, .. } => write!(f, "connecting to the internet"), ConnStatus { online: Some(false), .. } => write!(f, "unable to connect to the internet"), ConnStatus { tls_works: None, .. } => write!(f, "handshaking with Tor relays"), ConnStatus { tls_works: Some(false), .. 
} => write!(f, "unable to handshake with Tor relays"), ConnStatus { online: Some(true), tls_works: Some(true), } => write!(f, "connecting successfully"), } } } #[derive(Clone)] pub struct ConnStatusEvents { inner: watch::Receiver<ConnStatus>, } impl fmt::Debug for ConnStatusEvents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ConnStatusEvents").finish_non_exhaustive() } } impl Stream for ConnStatusEvents { type Item = ConnStatus; fn poll_next( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Option<Self::Item>> { self.inner.poll_next_unpin(cx) } } #[derive(Debug, Clone)] struct ChanMgrStatus { startup: Instant, n_attempts: usize, last_tcp_success: Option<Instant>, last_tls_success: Option<Instant>, last_chan_success: Option<Instant>, } impl ChanMgrStatus { fn new_at(now: Instant) -> ChanMgrStatus { ChanMgrStatus { startup: now, n_attempts: 0, last_tcp_success: None, last_tls_success: None, last_chan_success: None, } } fn conn_status_at(&self, now: Instant) -> ConnStatus { const MIN_DURATION: Duration = Duration::from_secs(60); const MIN_ATTEMPTS: usize = 6; let early = now < self.startup + MIN_DURATION || self.n_attempts < MIN_ATTEMPTS; let online = match (self.last_tcp_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; let tls_works = match (self.last_chan_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; ConnStatus { online, tls_works } } fn record_attempt(&mut self) { self.n_attempts += 1; } fn record_tcp_success(&mut self, now: Instant) { self.last_tcp_success = Some(now); } fn record_tls_finished(&mut self, now: Instant) { self.last_tls_success = Some(now); } fn record_handshake_done(&mut self, now: Instant) { self.last_chan_success = Some(now); } } pub(crate) struct ChanMgrEventSender { last_conn_status: ConnStatus, mgr_status: ChanMgrStatus, sender: watch::Sender<ConnStatus>, } 
impl ChanMgrEventSender { fn push_at(&mut self, now: Instant) { let status = self.mgr_status.conn_status_at(now); if !status.eq(&self.last_conn_status) { self.last_conn_status = status.clone(); let mut b = self.sender.borrow_mut(); *b = status; } } pub(crate) fn record_attempt(&mut self) { self.mgr_status.record_attempt(); self.push_at(Instant::now()); } pub(crate) fn record_tcp_success(&mut self) { let now = Instant::now(); self.mgr_status.record_tcp_success(now); self.push_at(now); } pub(crate) fn record_tls_finished(&mut self) { let now = Instant::now(); self.mgr_status.record_tls_finished(now); self.push_at(now); } pub(crate) fn record_handshake_done(&mut self) { let now = Instant::now(); self.mgr_status.record_handshake_done(now); self.push_at(now); } } pub(crate) fn channel() -> (ChanMgrEventSender, ConnStatusEvents) { let (sender, receiver) = watch::channel(); let receiver = ConnStatusEvents { inner: receiver }; let sender = ChanMgrEventSender { last_conn_status: ConnStatus::default(), mgr_status: ChanMgrStatus::new_at(Instant::now()), sender, }; (sender, receiver) } #[cfg(test)] #[allow(clippy::unwrap_used, clippy::cognitive_complexity)] mod test { use super::*; use float_eq::assert_float_eq; const TOL: f32 = 0.00001; #[test] fn status_basics() { let s1 = ConnStatus::default(); assert_eq!(s1.to_string(), "connecting to the internet"); assert_float_eq!(s1.frac(), 0.0, abs <= TOL); assert!(s1.eq(&s1)); assert!(s1.blockage().is_none()); let s2 = ConnStatus { online: Some(false), tls_works: None, }; assert_eq!(s2.to_string(), "unable to connect to the internet"); assert_float_eq!(s2.frac(), 0.0, abs <= TOL); assert!(s2.eq(&s2)); assert!(!s2.eq(&s1)); assert_eq!(s2.blockage(), Some(ConnBlockage::NoTcp)); assert_eq!( s2.blockage().unwrap().to_string(), "unable to connect to the internet" ); let s3 = ConnStatus { online: Some(true), tls_works: None, }; assert_eq!(s3.to_string(), "handshaking with Tor relays"); assert_float_eq!(s3.frac(), 0.5, abs <= TOL); 
assert_eq!(s3.blockage(), None); assert!(!s3.eq(&s1)); let s4 = ConnStatus { online: Some(true), tls_works: Some(false), }; assert_eq!(s4.to_string(), "unable to handshake with Tor relays"); assert_float_eq!(s4.frac(), 0.5, abs <= TOL); assert_eq!(s4.blockage(), Some(ConnBlockage::NoHandshake)); assert_eq!( s4.blockage().unwrap().to_string(), "our internet connection seems to be filtered" ); assert!(!s4.eq(&s1)); assert!(!s4.eq(&s2)); assert!(!s4.eq(&s3)); assert!(s4.eq(&s4)); let s5 = ConnStatus { online: Some(true), tls_works: Some(true), }; assert_eq!(s5.to_string(), "connecting successfully"); assert_float_eq!(s5.frac(), 1.0, abs <= TOL); assert!(s5.blockage().is_none()); assert!(s5.eq(&s5)); assert!(!s5.eq(&s4)); } #[test] fn derive_status() { let start = Instant::now(); let sec = Duration::from_secs(1); let hour = Duration::from_secs(3600); let mut ms = ChanMgrStatus::new_at(start); let s0 = ms.conn_status_at(start); assert!(s0.online.is_none()); assert!(s0.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert!(s.eq(&s0)); for _ in 0..10 { ms.record_attempt(); } let s = ms.conn_status_at(start); assert!(s.eq(&s0)); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(false)); assert_eq!(s.tls_works, Some(false)); ms.record_tcp_success(start + sec); let s = ms.conn_status_at(start + sec * 2); assert_eq!(s.online, Some(true)); assert!(s.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(false)); ms.record_handshake_done(start + sec * 2); let s = ms.conn_status_at(start + sec * 3); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(true)); } #[test] fn sender() { let (mut snd, rcv) = channel(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 0.0, abs <= TOL); } snd.record_attempt(); snd.record_tcp_success(); snd.record_tls_finished(); snd.record_handshake_done(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 
1.0, abs <= TOL); } } }
#![allow(dead_code, unreachable_pub)] use futures::{Stream, StreamExt}; use postage::watch; use std::{ fmt, time::{Duration, Instant}, }; #[derive(Default, Debug, Clone)] pub struct ConnStatus { online: Option<bool>, tls_works: Option<bool>, } #[derive(Debug, Clone, Eq, PartialEq, derive_more::Display)] #[non_exhaustive] pub enum ConnBlockage { #[display(fmt = "unable to connect to the internet")] NoTcp, #[display(fmt = "our internet connection seems to be filtered")] NoHandshake, } impl ConnStatus { fn eq(&self, other: &ConnStatus) -> bool { self.online == other.online && self.tls_works == other.tls_works } pub fn usable(&self) -> bool { self.online == Some(true) && self.tls_works == Some(true) } pub fn frac(&self) -> f32 { match self { Self { online: Some(true), tls_works: Some(true), } => 1.0, Self { online: Some(true), .. } => 0.5, _ => 0.0, } } pub fn blockage(&self) -> Option<ConnBlockage> { match self { Self { online: Some(false), .. } => Some(ConnBlockage::NoTcp), Self { tls_works: Some(false), .. } => Some(ConnBlockage::NoHandshake), _ => None, } } } impl fmt::Display for ConnStatus { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ConnStatus { online: None, .. } => write!(f, "connecting to the internet"), ConnStatus { online: Some(false), .. } => write!(f, "unable to connect to the internet"), ConnStatus { tls_works: None, .. } => write!(f, "handshaking with Tor relays"), ConnStatus { tls_works: Some(false), .. 
} => write!(f, "unable to handshake with Tor relays"), ConnStatus { online: Some(true), tls_works: Some(true), } => write!(f, "connecting successfully"), } } } #[derive(Clone)] pub struct ConnStatusEvents { inner: watch::Receiver<ConnStatus>, } impl fmt::Debug for ConnStatusEvents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ConnStatusEvents").finish_non_exhaustive() } } impl Stream for ConnStatusEvents { type Item = ConnStatus; fn poll_next( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Option<Self::Item>> { self.inner.poll_next_unpin(cx) } } #[derive(Debug, Clone)] struct ChanMgrStatus { startup: Instant, n_attempts: usize, last_tcp_success: Option<Instant>, last_tls_success: Option<Instant>, last_chan_success: Option<Instant>, } impl ChanMgrStatus { fn new_at(now: Instant) -> ChanMgrStatus { ChanMgrStatus { startup: now, n_attempts: 0, last_tcp_success: None, last_tls_success: None, last_chan_success: None, } } fn conn_status_at(&self, now: Instant) -> ConnStatus { const MIN_DURATION: Duration = Duration::from_secs(60); const MIN_ATTEMPTS: usize = 6; let early = now < self.startup + MIN_DURATION || self.n_attempts < MIN_ATTEMPTS; let online = match (self.last_tcp_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; let tls_works =
; ConnStatus { online, tls_works } } fn record_attempt(&mut self) { self.n_attempts += 1; } fn record_tcp_success(&mut self, now: Instant) { self.last_tcp_success = Some(now); } fn record_tls_finished(&mut self, now: Instant) { self.last_tls_success = Some(now); } fn record_handshake_done(&mut self, now: Instant) { self.last_chan_success = Some(now); } } pub(crate) struct ChanMgrEventSender { last_conn_status: ConnStatus, mgr_status: ChanMgrStatus, sender: watch::Sender<ConnStatus>, } impl ChanMgrEventSender { fn push_at(&mut self, now: Instant) { let status = self.mgr_status.conn_status_at(now); if !status.eq(&self.last_conn_status) { self.last_conn_status = status.clone(); let mut b = self.sender.borrow_mut(); *b = status; } } pub(crate) fn record_attempt(&mut self) { self.mgr_status.record_attempt(); self.push_at(Instant::now()); } pub(crate) fn record_tcp_success(&mut self) { let now = Instant::now(); self.mgr_status.record_tcp_success(now); self.push_at(now); } pub(crate) fn record_tls_finished(&mut self) { let now = Instant::now(); self.mgr_status.record_tls_finished(now); self.push_at(now); } pub(crate) fn record_handshake_done(&mut self) { let now = Instant::now(); self.mgr_status.record_handshake_done(now); self.push_at(now); } } pub(crate) fn channel() -> (ChanMgrEventSender, ConnStatusEvents) { let (sender, receiver) = watch::channel(); let receiver = ConnStatusEvents { inner: receiver }; let sender = ChanMgrEventSender { last_conn_status: ConnStatus::default(), mgr_status: ChanMgrStatus::new_at(Instant::now()), sender, }; (sender, receiver) } #[cfg(test)] #[allow(clippy::unwrap_used, clippy::cognitive_complexity)] mod test { use super::*; use float_eq::assert_float_eq; const TOL: f32 = 0.00001; #[test] fn status_basics() { let s1 = ConnStatus::default(); assert_eq!(s1.to_string(), "connecting to the internet"); assert_float_eq!(s1.frac(), 0.0, abs <= TOL); assert!(s1.eq(&s1)); assert!(s1.blockage().is_none()); let s2 = ConnStatus { online: Some(false), 
tls_works: None, }; assert_eq!(s2.to_string(), "unable to connect to the internet"); assert_float_eq!(s2.frac(), 0.0, abs <= TOL); assert!(s2.eq(&s2)); assert!(!s2.eq(&s1)); assert_eq!(s2.blockage(), Some(ConnBlockage::NoTcp)); assert_eq!( s2.blockage().unwrap().to_string(), "unable to connect to the internet" ); let s3 = ConnStatus { online: Some(true), tls_works: None, }; assert_eq!(s3.to_string(), "handshaking with Tor relays"); assert_float_eq!(s3.frac(), 0.5, abs <= TOL); assert_eq!(s3.blockage(), None); assert!(!s3.eq(&s1)); let s4 = ConnStatus { online: Some(true), tls_works: Some(false), }; assert_eq!(s4.to_string(), "unable to handshake with Tor relays"); assert_float_eq!(s4.frac(), 0.5, abs <= TOL); assert_eq!(s4.blockage(), Some(ConnBlockage::NoHandshake)); assert_eq!( s4.blockage().unwrap().to_string(), "our internet connection seems to be filtered" ); assert!(!s4.eq(&s1)); assert!(!s4.eq(&s2)); assert!(!s4.eq(&s3)); assert!(s4.eq(&s4)); let s5 = ConnStatus { online: Some(true), tls_works: Some(true), }; assert_eq!(s5.to_string(), "connecting successfully"); assert_float_eq!(s5.frac(), 1.0, abs <= TOL); assert!(s5.blockage().is_none()); assert!(s5.eq(&s5)); assert!(!s5.eq(&s4)); } #[test] fn derive_status() { let start = Instant::now(); let sec = Duration::from_secs(1); let hour = Duration::from_secs(3600); let mut ms = ChanMgrStatus::new_at(start); let s0 = ms.conn_status_at(start); assert!(s0.online.is_none()); assert!(s0.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert!(s.eq(&s0)); for _ in 0..10 { ms.record_attempt(); } let s = ms.conn_status_at(start); assert!(s.eq(&s0)); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(false)); assert_eq!(s.tls_works, Some(false)); ms.record_tcp_success(start + sec); let s = ms.conn_status_at(start + sec * 2); assert_eq!(s.online, Some(true)); assert!(s.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, 
Some(false)); ms.record_handshake_done(start + sec * 2); let s = ms.conn_status_at(start + sec * 3); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(true)); } #[test] fn sender() { let (mut snd, rcv) = channel(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 0.0, abs <= TOL); } snd.record_attempt(); snd.record_tcp_success(); snd.record_tls_finished(); snd.record_handshake_done(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 1.0, abs <= TOL); } } }
match (self.last_chan_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }
if_condition
[ { "content": "pub fn create_runtime() -> std::io::Result<impl Runtime> {\n\n PreferredRuntime::create()\n\n}\n\n\n\n/// Helpers for test_with_all_runtimes\n\npub mod testing__ {\n", "file_path": "crates/tor-rtcompat/src/lib.rs", "rank": 0, "score": 283516.07034823194 }, { "content": "/// ...
Rust
tornado-kernel/src/user/space.rs
HUST-OS/tornado-os
e086b451a0836c00bc13ab940f6a4fa55c1855c6
use crate::memory::{AddressSpaceId, PhysicalAddress, PhysicalPageNumber, PAGE_SIZE}; use alloc::boxed::Box; use async_mutex::AsyncMutex; use lazy_static::lazy_static; #[cfg(feature = "qemu")] const BASE: usize = 0x8400_0000; #[cfg(feature = "k210")] const BASE: usize = 0x8050_0000; lazy_static! { pub static ref USER_SPACE: AsyncMutex<UserSpaceManager<2000, BASE>> = AsyncMutex::new(UserSpaceManager::new()); } pub struct UserSpaceManager<const N: usize, const B: usize> { used: ListNode<AddressSpaceId>, free: ListNode<AddressSpaceId>, len: usize, } impl<const N: usize, const B: usize> UserSpaceManager<N, B> { pub fn new() -> Self { let used = ListNode { id: 0, val: unsafe { AddressSpaceId::from_raw(0) }, next: None, }; let mut free = used.clone(); for i in 0..N { let prev = free.next.take(); let node = ListNode { id: N - 1 - i, val: unsafe { AddressSpaceId::from_raw(0) }, next: prev, }; free.next = Some(Box::new(node)); } Self { used, free, len: 0 } } pub fn alloc(&mut self, pages: usize, _asid: AddressSpaceId) -> Option<PhysicalPageNumber> { assert!(PAGE_SIZE % 2 == 0); if pages > N - self.len { None } else { let base = self.free.next.as_ref().unwrap().id * PAGE_SIZE + B; let base = PhysicalPageNumber::floor(PhysicalAddress(base)); for _ in 0..pages { let mut node = self.free.next.take().unwrap(); self.free.next = node.next.take(); let prev = self.used.next.take(); node.next = prev; self.used.next = Some(node); } self.len += pages; Some(base) } } #[allow(unused)] pub fn dealloc(&mut self, asid: AddressSpaceId) -> Option<(PhysicalPageNumber, usize)> { let mut prev = &mut self.used; loop { if prev.next.is_none() { break; } if prev.next.as_ref().unwrap().val == asid { let mut num = 0; let base = prev.next.as_ref().unwrap().id * PAGE_SIZE + B; let base = PhysicalPageNumber::floor(PhysicalAddress(base)); while prev.next.as_ref().is_some() && prev.next.as_ref().unwrap().val == asid { let mut node = prev.next.take().unwrap(); prev.next = node.next.take(); let temp = 
self.free.next.take(); node.next = temp; self.free.next = Some(node); num += 1; } self.len -= num; return Some((base, num)); } else { prev = prev.next.as_mut().unwrap(); } } None } } #[derive(PartialEq, Eq, Clone, Debug)] pub struct ListNode<T> { pub id: usize, pub val: T, pub next: Option<Box<ListNode<T>>>, }
use crate::memory::{AddressSpaceId, PhysicalAddress, PhysicalPageNumber, PAGE_SIZE}; use alloc::boxed::Box; use async_mutex::AsyncMutex; use lazy_static::lazy_static; #[cfg(feature = "qemu")] const BASE: usize = 0x8400_0000; #[cfg(feature = "k210")] const BASE: usize = 0x8050_0000; lazy_static! { pub static ref USER_SPACE: AsyncMutex<UserSpaceManager<2000, BASE>> = AsyncMutex::new(UserSpaceManager::new()); } pub struct UserSpaceManager<const N: usize, const B: usize> { used: ListNode<AddressSpaceId>, free: ListNode<AddressSpaceId>, len: usize, } impl<const N: usize, const B: usize> UserSpaceManager<N, B> { pub fn new() -> Self { let used = ListNode { id: 0, val: unsafe { AddressSpaceId::from_raw(0) }, next: None, }; let mut free = used.clone(); for i in 0..N { let prev = free.next.take(); let node = ListNode { id: N - 1 - i, val: unsafe { AddressSpaceId::from_raw(0) }, next: prev, }; free.next = Some(Box::new(node)); } Self { used, free, len: 0 } } pub fn alloc(&mut self, pages: usize, _asid: AddressSpaceId) -> Option<PhysicalPageNumber> { assert!(PAGE_SIZE % 2 == 0);
} #[allow(unused)] pub fn dealloc(&mut self, asid: AddressSpaceId) -> Option<(PhysicalPageNumber, usize)> { let mut prev = &mut self.used; loop { if prev.next.is_none() { break; } if prev.next.as_ref().unwrap().val == asid { let mut num = 0; let base = prev.next.as_ref().unwrap().id * PAGE_SIZE + B; let base = PhysicalPageNumber::floor(PhysicalAddress(base)); while prev.next.as_ref().is_some() && prev.next.as_ref().unwrap().val == asid { let mut node = prev.next.take().unwrap(); prev.next = node.next.take(); let temp = self.free.next.take(); node.next = temp; self.free.next = Some(node); num += 1; } self.len -= num; return Some((base, num)); } else { prev = prev.next.as_mut().unwrap(); } } None } } #[derive(PartialEq, Eq, Clone, Debug)] pub struct ListNode<T> { pub id: usize, pub val: T, pub next: Option<Box<ListNode<T>>>, }
if pages > N - self.len { None } else { let base = self.free.next.as_ref().unwrap().id * PAGE_SIZE + B; let base = PhysicalPageNumber::floor(PhysicalAddress(base)); for _ in 0..pages { let mut node = self.free.next.take().unwrap(); self.free.next = node.next.take(); let prev = self.used.next.take(); node.next = prev; self.used.next = Some(node); } self.len += pages; Some(base) }
if_condition
[ { "content": "pub fn bounded<T, const N: usize>() -> (Sender<T, N>, Receiver<T, N>) {\n\n let buf = Arc::new(AsyncMutex::new(ChannelBuf::new()));\n\n let tx_event = Arc::new(Event::new());\n\n let rx_event = Arc::new(Event::new());\n\n let sender = Sender {\n\n buf: Arc::clone(&buf),\n\n ...
Rust
src/spv/logical/parser/parser.rs
Trark/magmaflow
c5ebdbedfa576b63aa54f29b8bafcf46b5716b1c
use spv::op::*; use spv::types::*; use spv::raw::*; use spv::logical::*; use super::OpByBlock; #[derive(Clone, Debug, PartialEq)] pub enum ValidationError { MemoryModelMissing, ExpectedOpFunction(usize), ExpectedOpFunctionEnd(usize), ExpectedBranch(usize), UnexpectedInstruction(usize), } pub type ValidationResult<T> = Result<T, ValidationError>; pub fn validate(raw: RawModule) -> ValidationResult<LogicalModule> { let group = Into::<OpByBlock>::into; let insts_storage = raw.instructions.into_iter().map(group).collect::<Vec<_>>(); let insts = OpSlice::new(&insts_storage); let (capabilities, insts) = try!(read_many(insts, read_capability)); let (extensions, insts) = try!(read_many(insts, read_extension)); let (ext_inst_imports, insts) = try!(read_many(insts, read_ext_inst_import)); let (memory_model, insts) = try!(read_memory_model(insts)); let (entry_points, insts) = try!(read_many(insts, read_entry_point)); let (execution_modes, insts) = try!(read_many(insts, read_execution_mode)); let (debug, insts) = try!(read_many(insts, read_debug)); let (annotations, insts) = try!(read_many(insts, read_annotation)); let (globals, insts) = try!(read_many(insts, read_global)); let (function_declarations, insts) = try!(read_many(insts, read_function_declaration)); let (function_definitions, insts) = try!(read_many(insts, read_function_definition)); if insts.get_remaining() > 0 { return Err(ValidationError::UnexpectedInstruction(insts.get_slot())); } Ok(LogicalModule { capabilities: capabilities, extensions: extensions, ext_inst_imports: ext_inst_imports, memory_model: memory_model, entry_points: entry_points, execution_modes: execution_modes, debug: debug, annotations: annotations, globals: globals, function_declarations: function_declarations, function_definitions: function_definitions, }) } #[derive(Clone, Copy)] struct OpSlice<'a> { insts: &'a [OpByBlock], index: usize, } impl<'a> OpSlice<'a> { fn new(insts: &'a [OpByBlock]) -> OpSlice { OpSlice { insts: insts, index: 0, } } fn 
first(&self) -> Option<&OpByBlock> { if self.index < self.insts.len() { Some(&self.insts[self.index]) } else { None } } fn advance(self) -> OpSlice<'a> { assert!(self.index < self.insts.len()); OpSlice { insts: self.insts, index: self.index + 1, } } fn get_slot(&self) -> usize { self.index } fn get_remaining(&self) -> usize { self.insts.len() - self.index } } enum PhaseResult<'a, T> { Ok(T, OpSlice<'a>), Next(OpSlice<'a>), Err(ValidationError), } fn read_many<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>) -> ValidationResult<(Vec<T>, OpSlice)> { fn read_rest<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>, mut output: Vec<T>) -> ValidationResult<(Vec<T>, OpSlice)> { match f(insts) { PhaseResult::Ok(item, next) => { output.push(item); read_rest(next, f, output) } PhaseResult::Next(next) => Ok((output, next)), PhaseResult::Err(err) => Err(err), } } read_rest(insts, f, Vec::new()) } fn read_capability(insts: OpSlice) -> PhaseResult<Capability> { if let Some(&OpByBlock::OpCapability(ref op)) = insts.first() { PhaseResult::Ok(op.capability.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_extension(insts: OpSlice) -> PhaseResult<String> { if let Some(&OpByBlock::OpExtension(ref op)) = insts.first() { PhaseResult::Ok(op.name.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_ext_inst_import(insts: OpSlice) -> PhaseResult<OpExtInstImport> { if let Some(&OpByBlock::OpExtInstImport(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_memory_model(insts: OpSlice) -> ValidationResult<(OpMemoryModel, OpSlice)> { if let Some(&OpByBlock::OpMemoryModel(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::MemoryModelMissing) } } fn read_entry_point(insts: OpSlice) -> PhaseResult<OpEntryPoint> { if let Some(&OpByBlock::OpEntryPoint(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { 
PhaseResult::Next(insts) } } fn read_execution_mode(insts: OpSlice) -> PhaseResult<OpExecutionMode> { if let Some(&OpByBlock::OpExecutionMode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_debug(insts: OpSlice) -> PhaseResult<GroupDebug> { if let Some(&OpByBlock::GroupDebug(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_annotation(insts: OpSlice) -> PhaseResult<GroupAnnotation> { if let Some(&OpByBlock::GroupAnnotation(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_global(insts: OpSlice) -> PhaseResult<GroupGlobal> { if let Some(&OpByBlock::GroupGlobal(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_parameter(insts: OpSlice) -> PhaseResult<OpFunctionParameter> { if let Some(&OpByBlock::OpFunctionParameter(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_declaration(insts: OpSlice) -> PhaseResult<FunctionDeclaration> { let start_insts = insts; if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let decl = FunctionDeclaration { function: op.clone(), parameters: params, }; PhaseResult::Ok(decl, insts.advance()) } else { PhaseResult::Next(start_insts) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } } fn read_code(insts: OpSlice) -> PhaseResult<GroupCode> { if let Some(&OpByBlock::GroupCode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn 
read_branch(insts: OpSlice) -> ValidationResult<(GroupBranch, OpSlice)> { if let Some(&OpByBlock::GroupBranch(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::ExpectedBranch(insts.get_slot())) } } fn read_merge(insts: OpSlice) -> (Option<GroupMerge>, OpSlice) { if let Some(&OpByBlock::GroupMerge(ref op)) = insts.first() { (Some(op.clone()), insts.advance()) } else { (None, insts) } } fn read_basic_block(insts: OpSlice) -> PhaseResult<BasicBlock> { if let Some(&OpByBlock::OpLabel(ref op)) = insts.first() { let label = op.clone(); let (code, insts) = match read_many(insts.advance(), read_code) { Ok((code, insts)) => (code, insts), Err(err) => return PhaseResult::Err(err), }; let (merge, insts) = read_merge(insts); let (branch, insts) = match read_branch(insts) { Ok((branch, insts)) => (branch, insts), Err(err) => return PhaseResult::Err(err), }; let block = BasicBlock { label: label, code: code, merge: merge, branch: branch, }; PhaseResult::Ok(block, insts) } else { PhaseResult::Next(insts) } } fn read_function_definition(insts: OpSlice) -> PhaseResult<FunctionDefinition> { if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { let (blocks, insts) = match read_many(insts, read_basic_block) { Ok((blocks, insts)) => (blocks, insts), Err(err) => return PhaseResult::Err(err), }; if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let insts = insts.advance(); let def = FunctionDefinition { function: op.clone(), parameters: params, blocks: blocks, }; PhaseResult::Ok(def, insts) } else { PhaseResult::Err(ValidationError::ExpectedOpFunctionEnd(insts.get_slot())) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } }
use spv::op::*; use spv::types::*; use spv::raw::*; use spv::logical::*; use super::OpByBlock; #[derive(Clone, Debug, PartialEq)] pub enum ValidationError { MemoryModelMissing, ExpectedOpFunction(usize), ExpectedOpFunctionEnd(usize), ExpectedBranch(usize), UnexpectedInstruction(usize), } pub type ValidationResult<T> = Result<T, ValidationError>; pub fn validate(raw: RawModule) -> ValidationResult<LogicalModule> { let group = Into::<OpByBlock>::into; let insts_storage = raw.instructions.into_iter().map(group).collect::<Vec<_>>(); let insts = OpSlice::new(&insts_storage); let (capabilities, insts) = try!(read_many(insts, read_capability)); let (extensions, insts) = try!(read_many(insts, read_extension)); let (ext_inst_imports, insts) = try!(read_many(insts, read_ext_inst_import)); let (memory_model, insts) = try!(read_memory_model(insts)); let (entry_points, insts) = try!(read_many(insts, read_entry_point)); let (execution_modes, insts) = try!(read_many(insts, read_execution_mode)); let (debug, insts) = try!(read_many(insts, read_debug)); let (annotations, insts) = try!(read_many(insts, read_annotation)); let (globals, insts) = try!(read_many(insts, read_global)); let (function_declarations, insts) = try!(read_many(insts, read_function_declaration)); let (function_definitions, insts) = try!(read_many(insts, read_function_definition)); if insts.get_remaining() > 0 { return Err(ValidationError::UnexpectedInstruction(insts.get_slot())); } Ok(LogicalModule { capabilities: capabilities, extensions: extensions, ext_inst_imports: ext_inst_imports, memory_model: memory_model, entry_points: entry_points, execution_modes: execution_modes, debug: debug, annotations: annotations, globals: globals, function_declarations: function_declarations, function_definitions: function_definitions, }) } #[derive(Clone, Copy)] struct OpSlice<'a> { insts: &'a [OpByBlock], index: usize, } impl<'a> OpSlice<'a> { fn new(insts: &'a [OpByBlock]) -> OpSlice { OpSlice { insts: insts, index: 0, } } fn 
first(&self) -> Option<&OpByBlock> { if self.index < self.insts.len() { Some(&self.insts[self.index]) } else { None } } fn advance(self) -> OpSlice<'a> { assert!(self.index < self.insts.len()); OpSlice { insts: self.insts, index: self.index + 1, } } fn get_slot(&self) -> usize { self.index } fn get_remaining(&self) -> usize { self.insts.len() - self.index } } enum PhaseResult<'a, T> { Ok(T, OpSlice<'a>), Next(OpSlice<'a>), Err(ValidationError), } fn read_many<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>) -> ValidationResult<(Vec<T>, OpSlice)> { fn read_rest<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>, mut output: Vec<T>) -> ValidationResult<(Vec<T>, OpSlice)> { match f(insts) { PhaseResult::Ok(item, next) => { output.push(item); read_rest(next, f
locks, insts), Err(err) => return PhaseResult::Err(err), }; if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let insts = insts.advance(); let def = FunctionDefinition { function: op.clone(), parameters: params, blocks: blocks, }; PhaseResult::Ok(def, insts) } else { PhaseResult::Err(ValidationError::ExpectedOpFunctionEnd(insts.get_slot())) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } }
, output) } PhaseResult::Next(next) => Ok((output, next)), PhaseResult::Err(err) => Err(err), } } read_rest(insts, f, Vec::new()) } fn read_capability(insts: OpSlice) -> PhaseResult<Capability> { if let Some(&OpByBlock::OpCapability(ref op)) = insts.first() { PhaseResult::Ok(op.capability.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_extension(insts: OpSlice) -> PhaseResult<String> { if let Some(&OpByBlock::OpExtension(ref op)) = insts.first() { PhaseResult::Ok(op.name.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_ext_inst_import(insts: OpSlice) -> PhaseResult<OpExtInstImport> { if let Some(&OpByBlock::OpExtInstImport(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_memory_model(insts: OpSlice) -> ValidationResult<(OpMemoryModel, OpSlice)> { if let Some(&OpByBlock::OpMemoryModel(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::MemoryModelMissing) } } fn read_entry_point(insts: OpSlice) -> PhaseResult<OpEntryPoint> { if let Some(&OpByBlock::OpEntryPoint(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_execution_mode(insts: OpSlice) -> PhaseResult<OpExecutionMode> { if let Some(&OpByBlock::OpExecutionMode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_debug(insts: OpSlice) -> PhaseResult<GroupDebug> { if let Some(&OpByBlock::GroupDebug(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_annotation(insts: OpSlice) -> PhaseResult<GroupAnnotation> { if let Some(&OpByBlock::GroupAnnotation(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_global(insts: OpSlice) -> PhaseResult<GroupGlobal> { if let Some(&OpByBlock::GroupGlobal(ref op)) = 
insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_parameter(insts: OpSlice) -> PhaseResult<OpFunctionParameter> { if let Some(&OpByBlock::OpFunctionParameter(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_declaration(insts: OpSlice) -> PhaseResult<FunctionDeclaration> { let start_insts = insts; if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let decl = FunctionDeclaration { function: op.clone(), parameters: params, }; PhaseResult::Ok(decl, insts.advance()) } else { PhaseResult::Next(start_insts) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } } fn read_code(insts: OpSlice) -> PhaseResult<GroupCode> { if let Some(&OpByBlock::GroupCode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_branch(insts: OpSlice) -> ValidationResult<(GroupBranch, OpSlice)> { if let Some(&OpByBlock::GroupBranch(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::ExpectedBranch(insts.get_slot())) } } fn read_merge(insts: OpSlice) -> (Option<GroupMerge>, OpSlice) { if let Some(&OpByBlock::GroupMerge(ref op)) = insts.first() { (Some(op.clone()), insts.advance()) } else { (None, insts) } } fn read_basic_block(insts: OpSlice) -> PhaseResult<BasicBlock> { if let Some(&OpByBlock::OpLabel(ref op)) = insts.first() { let label = op.clone(); let (code, insts) = match read_many(insts.advance(), read_code) { Ok((code, insts)) => (code, insts), Err(err) => return PhaseResult::Err(err), }; let (merge, insts) = read_merge(insts); let (branch, insts) = match 
read_branch(insts) { Ok((branch, insts)) => (branch, insts), Err(err) => return PhaseResult::Err(err), }; let block = BasicBlock { label: label, code: code, merge: merge, branch: branch, }; PhaseResult::Ok(block, insts) } else { PhaseResult::Next(insts) } } fn read_function_definition(insts: OpSlice) -> PhaseResult<FunctionDefinition> { if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { let (blocks, insts) = match read_many(insts, read_basic_block) { Ok((blocks, insts)) => (b
random
[ { "content": "/// An instruction from an extended instruction set\n\n///\n\n/// This is expected to itself be an enum variant in the instruction set\n\npub trait ExtInst: Any + ExtInstClone + Debug + Display {\n\n /// Returns the op in the extended instruction\n\n fn get_op(&self) -> &Op;\n\n\n\n fn as...
Rust
examples/src/bin/smoltcp.rs
akiles/ppproto
b13f98d99b5afb2ffee18cebe6bef6ffdc340c84
#[path = "../serial_port.rs"] mod serial_port; use as_slice::{AsMutSlice, AsSlice}; use clap::Clap; use std::fmt::Write as _; use std::io::{Read, Write}; use std::marker::PhantomData; use std::ops::Range; use std::os::unix::io::AsRawFd; use std::path::Path; use std::str; use log::*; use smoltcp::iface::InterfaceBuilder; use smoltcp::phy::wait as phy_wait; use smoltcp::phy::{Device, DeviceCapabilities, Medium, RxToken, TxToken}; use smoltcp::socket::SocketSet; use smoltcp::socket::{TcpSocket, TcpSocketBuffer}; use smoltcp::socket::{UdpPacketMetadata, UdpSocket, UdpSocketBuffer}; use smoltcp::time::{Duration, Instant}; use smoltcp::wire::{IpCidr, Ipv4Address}; use smoltcp::Result; use ppproto::{Config, PPPoS, PPPoSAction}; use serial_port::SerialPort; #[derive(Clap)] struct Opts { #[clap(short, long)] device: String, } const MTU: usize = 1520; struct Buf(Box<[u8; MTU]>); impl Buf { pub fn new() -> Self { Self(Box::new([0; MTU])) } } impl AsSlice for Buf { type Element = u8; fn as_slice(&self) -> &[Self::Element] { &*self.0 } } impl AsMutSlice for Buf { fn as_mut_slice(&mut self) -> &mut [Self::Element] { &mut *self.0 } } type PPP = PPPoS<'static, Buf>; struct PPPDevice { ppp: PPP, port: SerialPort, } impl PPPDevice { fn new(ppp: PPP, port: SerialPort) -> Self { Self { ppp, port } } } impl<'a> Device<'a> for PPPDevice { type RxToken = PPPRxToken<'a>; type TxToken = PPPTxToken<'a>; fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.port.set_nonblocking(true).unwrap(); let mut tx_buf = [0; 2048]; let mut read_buf = [0; 2048]; let mut data: &[u8] = &[]; loop { match self.ppp.poll(&mut tx_buf) { PPPoSAction::None => {} PPPoSAction::Transmit(n) => self.port.write_all(&tx_buf[..n]).unwrap(), PPPoSAction::Received(buf, range) => { self.ppp.put_rx_buf(Buf::new()); return Some(( PPPRxToken { buf, range, _phantom: PhantomData, }, PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }, )); } } if data.len() == 0 { let n = match self.port.read(&mut 
read_buf) { Ok(n) => n, Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => return None, Err(e) => panic!("error reading: {:?}", e), }; data = &read_buf[..n]; } let n = self.ppp.consume(data); data = &data[n..]; } } fn transmit(&'a mut self) -> Option<Self::TxToken> { Some(PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }) } fn capabilities(&self) -> DeviceCapabilities { let mut caps: DeviceCapabilities = Default::default(); caps.max_transmission_unit = 1500; caps.medium = Medium::Ip; caps } } struct PPPRxToken<'a> { buf: Buf, range: Range<usize>, _phantom: PhantomData<&'a mut PPP>, } impl<'a> RxToken for PPPRxToken<'a> { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.buf.0[self.range]) } } struct PPPTxToken<'a> { port: &'a mut SerialPort, ppp: &'a mut PPP, } impl<'a> TxToken for PPPTxToken<'a> { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut pkt_buf = [0; 2048]; let pkt = &mut pkt_buf[..len]; let r = f(pkt)?; let mut tx_buf = [0; 2048]; let n = self.ppp.send(pkt, &mut tx_buf).unwrap(); self.port.set_nonblocking(false).unwrap(); self.port.write_all(&tx_buf[..n]).unwrap(); Ok(r) } } fn main() { env_logger::init(); let opts: Opts = Opts::parse(); let port = SerialPort::new(Path::new(&opts.device)).unwrap(); let fd = port.as_raw_fd(); let config = Config { username: b"myuser", password: b"mypass", }; let mut ppp = PPPoS::new(config); ppp.put_rx_buf(Buf::new()); ppp.open().unwrap(); let device = PPPDevice::new(ppp, port); let udp_rx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 64]); let udp_tx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 128]); let udp_socket = UdpSocket::new(udp_rx_buffer, udp_tx_buffer); let tcp1_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp1_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp1_socket = 
TcpSocket::new(tcp1_rx_buffer, tcp1_tx_buffer); let tcp2_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp2_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp2_socket = TcpSocket::new(tcp2_rx_buffer, tcp2_tx_buffer); let tcp3_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_socket = TcpSocket::new(tcp3_rx_buffer, tcp3_tx_buffer); let tcp4_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_socket = TcpSocket::new(tcp4_rx_buffer, tcp4_tx_buffer); let ip_addrs = [IpCidr::new(Ipv4Address::UNSPECIFIED.into(), 0)]; let mut iface = InterfaceBuilder::new(device).ip_addrs(ip_addrs).finalize(); let mut sockets = SocketSet::new(vec![]); let udp_handle = sockets.add(udp_socket); let tcp1_handle = sockets.add(tcp1_socket); let tcp2_handle = sockets.add(tcp2_socket); let tcp3_handle = sockets.add(tcp3_socket); let tcp4_handle = sockets.add(tcp4_socket); let mut tcp_6970_active = false; loop { let timestamp = Instant::now(); match iface.poll(&mut sockets, timestamp) { Ok(_) => {} Err(e) => { debug!("poll error: {}", e); } } let status = iface.device().ppp.status(); if let Some(ipv4) = status.ipv4 { if let Some(want_addr) = ipv4.address { iface.update_ip_addrs(|addrs| { let addr = &mut addrs[0]; if addr.address() != want_addr.into() { *addr = IpCidr::new(want_addr.into(), 0); info!("Assigned a new IPv4 address: {}", want_addr); } }); } } { let mut socket = sockets.get::<UdpSocket>(udp_handle); if !socket.is_open() { socket.bind(6969).unwrap() } let client = match socket.recv() { Ok((data, endpoint)) => { debug!( "udp:6969 recv data: {:?} from {}", str::from_utf8(data.as_ref()).unwrap(), endpoint ); Some(endpoint) } Err(_) => None, }; if let Some(endpoint) = client { let data = b"hello\n"; debug!( "udp:6969 send data: {:?}", str::from_utf8(data.as_ref()).unwrap() ); socket.send_slice(data, endpoint).unwrap(); } } { let mut socket = 
sockets.get::<TcpSocket>(tcp1_handle); if !socket.is_open() { socket.listen(6969).unwrap(); } if socket.can_send() { debug!("tcp:6969 send greeting"); write!(socket, "hello\n").unwrap(); debug!("tcp:6969 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp2_handle); if !socket.is_open() { socket.listen(6970).unwrap() } if socket.is_active() && !tcp_6970_active { debug!("tcp:6970 connected"); } else if !socket.is_active() && tcp_6970_active { debug!("tcp:6970 disconnected"); } tcp_6970_active = socket.is_active(); if socket.may_recv() { let data = socket .recv(|buffer| { let recvd_len = buffer.len(); let mut data = buffer.to_owned(); if data.len() > 0 { debug!( "tcp:6970 recv data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); data = data.split(|&b| b == b'\n').collect::<Vec<_>>().concat(); data.reverse(); data.extend(b"\n"); } (recvd_len, data) }) .unwrap(); if socket.can_send() && data.len() > 0 { debug!( "tcp:6970 send data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); socket.send_slice(&data[..]).unwrap(); } } else if socket.may_send() { debug!("tcp:6970 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp3_handle); if !socket.is_open() { socket.listen(6971).unwrap(); socket.set_keep_alive(Some(Duration::from_millis(1000))); socket.set_timeout(Some(Duration::from_millis(2000))); } if socket.may_recv() { socket .recv(|buffer| { if buffer.len() > 0 { debug!("tcp:6971 recv {:?} octets", buffer.len()); } (buffer.len(), ()) }) .unwrap(); } else if socket.may_send() { socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp4_handle); if !socket.is_open() { socket.listen(6972).unwrap() } if socket.may_send() { socket .send(|data| { if data.len() > 0 { debug!("tcp:6972 send {:?} octets", data.len()); for (i, b) in data.iter_mut().enumerate() { *b = (i % 256) as u8; } } (data.len(), ()) }) .unwrap(); } } phy_wait(fd, iface.poll_delay(&sockets, timestamp)).expect("wait 
error"); } }
#[path = "../serial_port.rs"] mod serial_port; use as_slice::{AsMutSlice, AsSlice}; use clap::Clap; use std::fmt::Write as _; use std::io::{Read, Write}; use std::marker::PhantomData; use std::ops::Range; use std::os::unix::io::AsRawFd; use std::path::Path; use std::str; use log::*; use smoltcp::iface::InterfaceBuilder; use smoltcp::phy::wait as phy_wait; use smoltcp::phy::{Device, DeviceCapabilities, Medium, RxToken, TxToken}; use smoltcp::socket::SocketSet; use smoltcp::socket::{TcpSocket, TcpSocketBuffer}; use smoltcp::socket::{UdpPacketMetadata, UdpSocket, UdpSocketBuffer}; use smoltcp::time::{Duration, Instant}; use smoltcp::wire::{IpCidr, Ipv4Address}; use smoltcp::Result; use ppproto::{Config, PPPoS, PPPoSAction}; use serial_port::SerialPort; #[derive(Clap)] struct Opts { #[clap(short, long)] device: String, } const MTU: usize = 1520; struct Buf(Box<[u8; MTU]>); impl Buf { pub fn new() -> Self { Self(Box::new([0; MTU])) } } impl AsSlice for Buf { type Element = u8; fn as_slice(&self) -> &[Self::Element] { &*self.0 } } impl AsMutSlice for Buf { fn as_mut_slice(&mut self) -> &mut [Self::Element] { &mut *self.0 } } type PPP = PPPoS<'static, Buf>; struct PPPDevice { ppp: PPP, port: SerialPort, } impl PPPDevice { fn new(ppp: PPP, port: SerialPort) -> Self { Self { ppp, port } } } impl<'a> Device<'a> for PPPDevice { type RxToken = PPPRxToken<'a>; type TxToken = PPPTxToken<'a>; fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.port.set_nonblocking(true).unwrap(); let mut tx_buf = [0; 2048]; let mut read_buf = [0; 2048]; let mut data: &[u8] = &[]; loop { match self.ppp.poll(&mut tx_buf) { PPPoSAction::None => {} PPPoSAction::Transmit(n) => self.port.write_all(&tx_buf[..n]).unwrap(), PPPoSAction::Received(buf, range) => { self.ppp.put_rx_buf(Buf::new()); return Some(( PPPRxToken { buf, range, _phantom: PhantomData, }, PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }, )); } } if data.len() == 0 { let n = match self.port.read(&mut 
read_buf) { Ok(n) => n, Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => return None, Err(e) => panic!("error reading: {:?}", e), }; data = &read_buf[..n]; } let n = self.ppp.consume(data); data = &data[n..]; } } fn transmit(&'a mut self) -> Option<Self::TxToken> { Some(PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }) } fn capabilities(&self) -> DeviceCapabilities { let mut caps: DeviceCapabilities = Default::default(); caps.max_transmission_unit = 1500; caps.medium = Medium::Ip; caps } } struct PPPRxToken<'a> { buf: Buf, range: Range<usize>, _phantom: PhantomData<&'a mut PPP>, } impl<'a> RxToken for PPPRxToken<'a> { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.buf.0[self.range]) } } struct PPPTxToken<'a> { port: &'a mut SerialPort, ppp: &'a mut PPP, } impl<'a> TxToken for PPPTxToken<'a> { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut pkt_buf = [0; 2048]; let pkt = &mut pkt_buf[..len]; let r = f(pkt)?; let mut tx_buf = [0; 2048]; let n = self.ppp.send(pkt, &mut tx_buf).unwrap(); self.port.set_nonblocking(false).unwrap(); self.port.write_all(&tx_buf[..n]).unwrap(); Ok(r) } } fn main() { env_logger::init(); let opts: Opts = Opts::parse(); let port = SerialPort::new(Path::new(&opts.device)).unwrap(); let fd = port.as_raw_fd(); let config = Config { username: b"myuser", password: b"mypass", }; let mut ppp = PPPoS::new(config); ppp.put_rx_buf(Buf::new()); ppp.open().unwrap(); let device = PPPDevice::new(ppp, port); let udp_rx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 64]); let udp_tx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 128]); let udp_socket = UdpSocket::new(udp_rx_buffer, udp_tx_buffer); let tcp1_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp1_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp1_socket = 
TcpSocket::new(tcp1_rx_buffer, tcp1_tx_buffer); let tcp2_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp2_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp2_socket = TcpSocket::new(tcp2_rx_buffer, tcp2_tx_buffer); let tcp3_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_socket = TcpSocket::new(tcp3_rx_buffer, tcp3_tx_buffer); let tcp4_rx_buffer = TcpSocketBuffer::ne
w(vec![0; 65535]); let tcp4_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_socket = TcpSocket::new(tcp4_rx_buffer, tcp4_tx_buffer); let ip_addrs = [IpCidr::new(Ipv4Address::UNSPECIFIED.into(), 0)]; let mut iface = InterfaceBuilder::new(device).ip_addrs(ip_addrs).finalize(); let mut sockets = SocketSet::new(vec![]); let udp_handle = sockets.add(udp_socket); let tcp1_handle = sockets.add(tcp1_socket); let tcp2_handle = sockets.add(tcp2_socket); let tcp3_handle = sockets.add(tcp3_socket); let tcp4_handle = sockets.add(tcp4_socket); let mut tcp_6970_active = false; loop { let timestamp = Instant::now(); match iface.poll(&mut sockets, timestamp) { Ok(_) => {} Err(e) => { debug!("poll error: {}", e); } } let status = iface.device().ppp.status(); if let Some(ipv4) = status.ipv4 { if let Some(want_addr) = ipv4.address { iface.update_ip_addrs(|addrs| { let addr = &mut addrs[0]; if addr.address() != want_addr.into() { *addr = IpCidr::new(want_addr.into(), 0); info!("Assigned a new IPv4 address: {}", want_addr); } }); } } { let mut socket = sockets.get::<UdpSocket>(udp_handle); if !socket.is_open() { socket.bind(6969).unwrap() } let client = match socket.recv() { Ok((data, endpoint)) => { debug!( "udp:6969 recv data: {:?} from {}", str::from_utf8(data.as_ref()).unwrap(), endpoint ); Some(endpoint) } Err(_) => None, }; if let Some(endpoint) = client { let data = b"hello\n"; debug!( "udp:6969 send data: {:?}", str::from_utf8(data.as_ref()).unwrap() ); socket.send_slice(data, endpoint).unwrap(); } } { let mut socket = sockets.get::<TcpSocket>(tcp1_handle); if !socket.is_open() { socket.listen(6969).unwrap(); } if socket.can_send() { debug!("tcp:6969 send greeting"); write!(socket, "hello\n").unwrap(); debug!("tcp:6969 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp2_handle); if !socket.is_open() { socket.listen(6970).unwrap() } if socket.is_active() && !tcp_6970_active { debug!("tcp:6970 connected"); } else if !socket.is_active() && 
tcp_6970_active { debug!("tcp:6970 disconnected"); } tcp_6970_active = socket.is_active(); if socket.may_recv() { let data = socket .recv(|buffer| { let recvd_len = buffer.len(); let mut data = buffer.to_owned(); if data.len() > 0 { debug!( "tcp:6970 recv data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); data = data.split(|&b| b == b'\n').collect::<Vec<_>>().concat(); data.reverse(); data.extend(b"\n"); } (recvd_len, data) }) .unwrap(); if socket.can_send() && data.len() > 0 { debug!( "tcp:6970 send data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); socket.send_slice(&data[..]).unwrap(); } } else if socket.may_send() { debug!("tcp:6970 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp3_handle); if !socket.is_open() { socket.listen(6971).unwrap(); socket.set_keep_alive(Some(Duration::from_millis(1000))); socket.set_timeout(Some(Duration::from_millis(2000))); } if socket.may_recv() { socket .recv(|buffer| { if buffer.len() > 0 { debug!("tcp:6971 recv {:?} octets", buffer.len()); } (buffer.len(), ()) }) .unwrap(); } else if socket.may_send() { socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp4_handle); if !socket.is_open() { socket.listen(6972).unwrap() } if socket.may_send() { socket .send(|data| { if data.len() > 0 { debug!("tcp:6972 send {:?} octets", data.len()); for (i, b) in data.iter_mut().enumerate() { *b = (i % 256) as u8; } } (data.len(), ()) }) .unwrap(); } } phy_wait(fd, iface.poll_delay(&sockets, timestamp)).expect("wait error"); } }
function_block-function_prefixed
[ { "content": "fn parse_options(mut pkt: &[u8], mut f: impl FnMut(u8, &[u8])) -> Result<(), MalformedError> {\n\n while pkt.len() != 0 {\n\n if pkt.len() < 2 {\n\n return Err(MalformedError);\n\n }\n\n\n\n let code = pkt[0];\n\n let len = pkt[1] as usize;\n\n\n\n ...
Rust
rav1e-worker/src/main.rs
rust-av/rav1e-by-gop
2fad32cd518dcf95e88fffbd2f3c8f66ab1ab41d
use std::{collections::BTreeMap, env, net::SocketAddrV4, path::PathBuf, time::Duration}; use clap::{App, Arg}; use lazy_static::lazy_static; use log::{debug, log_enabled}; use parking_lot::RwLock; use rand::Rng; use rav1e_by_gop::{EncodeOptions, EncodeState, VideoDetails}; use server::*; use tokio::time::sleep; use uuid::{v1::Context, Uuid}; use worker::*; mod server; mod worker; #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; lazy_static! { pub static ref ENCODER_QUEUE: RwLock<BTreeMap<Uuid, RwLock<EncodeItem>>> = RwLock::new(BTreeMap::new()); pub static ref UUID_CONTEXT: Context = Context::new(0); pub static ref UUID_NODE_ID: Box<[u8]> = { let mut id = Vec::with_capacity(6); let mut rng = rand::thread_rng(); for _ in 0..6 { id.push(rng.gen()); } id.into_boxed_slice() }; } pub struct EncodeItem { pub state: EncodeState, pub options: EncodeOptions, pub video_info: VideoDetails, } impl EncodeItem { fn new(options: EncodeOptions, video_info: VideoDetails) -> Self { EncodeItem { state: EncodeState::Enqueued, options, video_info, } } } impl std::fmt::Debug for EncodeItem { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.state { EncodeState::Enqueued => f.write_str("Enqueued"), EncodeState::AwaitingInfo { .. } => f.write_str("Awaiting Segment Info"), EncodeState::AwaitingData { .. } => f.write_str("Awaiting Data"), EncodeState::Ready { ref raw_frames, .. } => f.write_fmt(format_args!( "Ready to encode {} frames", raw_frames.frame_count() )), EncodeState::InProgress { ref progress } => f.write_fmt(format_args!( "Encoding {} of {} frames", progress.frame_info.len(), progress.total_frames )), EncodeState::EncodingDone { ref encoded_data, .. 
} => f.write_fmt(format_args!("Done encoding {} bytes", encoded_data.len())), } } } #[tokio::main] async fn main() { env::var("SERVER_PASSWORD").expect("SERVER_PASSWORD env var MUST be set!"); if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "rav1e_worker=info"); } env_logger::init(); let matches = App::new("rav1e-worker") .arg( Arg::with_name("LISTEN_IP") .help("Select which IP to listen on") .long("ip") .visible_alias("host") .default_value("0.0.0.0") .takes_value(true), ) .arg( Arg::with_name("LISTEN_PORT") .help("Select which port to listen on") .long("port") .short("p") .default_value("13415") .takes_value(true), ) .arg( Arg::with_name("MAX_THREADS") .help( "Limit the number of threads that can be used for workers [default: num cpus]", ) .long("threads") .takes_value(true), ) .arg( Arg::with_name("TEMP_DIR") .help( "Store input segments in temp files in the specified directory; by default \ stores in memory", ) .long("temp-dir") .takes_value(true), ) .get_matches(); let server_ip = SocketAddrV4::new( matches.value_of("LISTEN_IP").unwrap().parse().unwrap(), matches.value_of("LISTEN_PORT").unwrap().parse().unwrap(), ); let mut threads = num_cpus::get(); if let Some(thread_setting) = matches .value_of("MAX_THREADS") .and_then(|val| val.parse().ok()) { threads = threads.min(thread_setting); } let temp_dir = if let Some(temp_dir) = matches.value_of("TEMP_DIR") { let dir = PathBuf::from(temp_dir); if !dir.is_dir() { panic!("Specified temp dir does not exist or is not a directory"); } if dir.metadata().unwrap().permissions().readonly() { panic!("Specified temp dir is not writeable"); } Some(dir) } else { None }; start_listener(server_ip, temp_dir, threads).await; start_workers(threads).await; loop { if log_enabled!(log::Level::Debug) { let queue_handle = ENCODER_QUEUE.read(); let mut items = Vec::with_capacity(queue_handle.len()); for (key, item) in queue_handle.iter() { items.push((key, item.read())); } debug!("Items in queue: {:?}", items); } 
sleep(Duration::from_secs(5)).await; } }
use std::{collections::BTreeMap, env, net::SocketAddrV4, path::PathBuf, time::Duration}; use clap::{App, Arg}; use lazy_static::lazy_static; use log::{debug, log_enabled}; use parking_lot::RwLock; use rand::Rng; use rav1e_by_gop::{EncodeOptions, EncodeState, VideoDetails}; use server::*; use tokio::time::sleep; use uuid::{v1::Context, Uuid}; use worker::*; mod server; mod worker; #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; lazy_static! { pub static ref ENCODER_QUEUE: RwLock<BTreeMap<Uuid, RwLock<EncodeItem>>> = RwLock::new(BTreeMap::new()); pub static ref UUID_CONTEXT: Context = Context::new(0); pub static ref UUID_NODE_ID: Box<[u8]> = { let mut id = Vec::with_capacity(6); let mut rng = rand::thread_rng(); for _ in 0..6 { id.push(rng.gen()); } id.into_boxed_slice() }; } pub struct EncodeItem { pub state: EncodeState, pub options: EncodeOptions, pub video_info: VideoDetails, } impl EncodeItem { fn new(options: EncodeOptions, video_info: VideoDetails) -> Self { EncodeItem { state: EncodeState::Enqueued, options, video_info, } } } impl std::fmt::Debug for EncodeItem { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.state { EncodeState::Enqueued => f.write_str("Enqueued"), EncodeState::AwaitingInfo { .. } => f.write_str("Awaiting Segment Info"), EncodeState::AwaitingData { .. } => f.write_str("Awaiting Data"), EncodeState::Ready { ref raw_frames, .. } => f.write_fmt(format_args!( "Ready to encode {} frames", raw_frames.frame_count() )), EncodeState::InProgress { ref progress } => f.write_fmt(format_args!( "Encoding {} of {} frames", progress.frame_info.len(), progress.total_frames )), EncodeState::EncodingDone { ref encoded_data, .. } => f.write_fmt(format_args!("Done encoding {} bytes", encoded_data.len())), } } } #[tokio::main]
async fn main() { env::var("SERVER_PASSWORD").expect("SERVER_PASSWORD env var MUST be set!"); if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "rav1e_worker=info"); } env_logger::init(); let matches = App::new("rav1e-worker") .arg( Arg::with_name("LISTEN_IP") .help("Select which IP to listen on") .long("ip") .visible_alias("host") .default_value("0.0.0.0") .takes_value(true), ) .arg( Arg::with_name("LISTEN_PORT") .help("Select which port to listen on") .long("port") .short("p") .default_value("13415") .takes_value(true), ) .arg( Arg::with_name("MAX_THREADS") .help( "Limit the number of threads that can be used for workers [default: num cpus]", ) .long("threads") .takes_value(true), ) .arg( Arg::with_name("TEMP_DIR") .help( "Store input segments in temp files in the specified directory; by default \ stores in memory", ) .long("temp-dir") .takes_value(true), ) .get_matches(); let server_ip = SocketAddrV4::new( matches.value_of("LISTEN_IP").unwrap().parse().unwrap(), matches.value_of("LISTEN_PORT").unwrap().parse().unwrap(), ); let mut threads = num_cpus::get(); if let Some(thread_setting) = matches .value_of("MAX_THREADS") .and_then(|val| val.parse().ok()) { threads = threads.min(thread_setting); } let temp_dir = if let Some(temp_dir) = matches.value_of("TEMP_DIR") { let dir = PathBuf::from(temp_dir); if !dir.is_dir() { panic!("Specified temp dir does not exist or is not a directory"); } if dir.metadata().unwrap().permissions().readonly() { panic!("Specified temp dir is not writeable"); } Some(dir) } else { None }; start_listener(server_ip, temp_dir, threads).await; start_workers(threads).await; loop { if log_enabled!(log::Level::Debug) { let queue_handle = ENCODER_QUEUE.read(); let mut items = Vec::with_capacity(queue_handle.len()); for (key, item) in queue_handle.iter() { items.push((key, item.read())); } debug!("Items in queue: {:?}", items); } sleep(Duration::from_secs(5)).await; } }
function_block-full_function
[ { "content": "pub fn encode_segment(\n\n opts: EncodeOptions,\n\n video_info: VideoDetails,\n\n data: SegmentData,\n\n thread_pool: &mut ThreadPool,\n\n rayon_pool: Arc<rayon::ThreadPool>,\n\n progress_sender: ProgressSender,\n\n segment_output_file: Output,\n\n) -> Result<()> {\n\n let ...
Rust
rust-runtime/aws-smithy-http-server/src/routing/tiny_map.rs
eduardomourar/smithy-rs
817bf68e69da1d1ef14f8e79a27ec39a6d92bbad
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ use std::{borrow::Borrow, collections::HashMap, hash::Hash}; #[derive(Clone, Debug)] pub struct TinyMap<K, V, const CUTOFF: usize> { inner: TinyMapInner<K, V, CUTOFF>, } #[derive(Clone, Debug)] enum TinyMapInner<K, V, const CUTOFF: usize> { Vec(Vec<(K, V)>), HashMap(HashMap<K, V>), } enum OrIterator<Left, Right> { Left(Left), Right(Right), } impl<Left, Right> Iterator for OrIterator<Left, Right> where Left: Iterator, Right: Iterator<Item = Left::Item>, { type Item = Left::Item; fn next(&mut self) -> Option<Self::Item> { match self { Self::Left(left) => left.next(), Self::Right(right) => right.next(), } } } pub struct IntoIter<K, V> { inner: OrIterator<std::vec::IntoIter<(K, V)>, std::collections::hash_map::IntoIter<K, V>>, } impl<K, V> Iterator for IntoIter<K, V> { type Item = (K, V); fn next(&mut self) -> Option<Self::Item> { self.inner.next() } } impl<K, V, const CUTOFF: usize> IntoIterator for TinyMap<K, V, CUTOFF> { type Item = (K, V); type IntoIter = IntoIter<K, V>; fn into_iter(self) -> Self::IntoIter { let inner = match self.inner { TinyMapInner::Vec(vec) => OrIterator::Left(vec.into_iter()), TinyMapInner::HashMap(hash_map) => OrIterator::Right(hash_map.into_iter()), }; IntoIter { inner } } } impl<K, V, const CUTOFF: usize> FromIterator<(K, V)> for TinyMap<K, V, CUTOFF> where K: Hash + Eq, { fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self { let mut vec = Vec::with_capacity(CUTOFF); let mut iter = iter.into_iter().enumerate(); while let Some((index, pair)) = iter.next() { if index == CUTOFF { let inner = TinyMapInner::HashMap(vec.into_iter().chain(iter.map(|(_, pair)| pair)).collect()); return TinyMap { inner }; } vec.push(pair); } TinyMap { inner: TinyMapInner::Vec(vec), } } } impl<K, V, const CUTOFF: usize> TinyMap<K, V, CUTOFF> where K: Eq + Hash, { pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V> where K: Borrow<Q>, Q: Hash + 
Eq, { match &self.inner { TinyMapInner::Vec(vec) => vec .iter() .find(|(key_inner, _)| key_inner.borrow() == key) .map(|(_, value)| value), TinyMapInner::HashMap(hash_map) => hash_map.get(key), } } } #[cfg(test)] mod tests { use super::*; const CUTOFF: usize = 5; const SMALL_VALUES: [(&'static str, usize); 3] = [("a", 0), ("b", 1), ("c", 2)]; const MEDIUM_VALUES: [(&'static str, usize); 5] = [("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4)]; const LARGE_VALUES: [(&'static str, usize); 10] = [ ("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4), ("f", 5), ("g", 6), ("h", 7), ("i", 8), ("j", 9), ]; #[test] fn collect_small() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_medium() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_large() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::HashMap(_))) } #[test] fn get_small_success() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("a"), Some(&0)) } #[test] fn get_medium_success() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("d"), Some(&3)) } #[test] fn get_large_success() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("h"), Some(&7)) } #[test] fn get_small_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("x"), None) } #[test] fn get_medium_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("y"), None) } #[test] fn get_large_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("z"), None) } }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ use std::{borrow::Borrow, collections::HashMap, hash::Hash}; #[derive(Clone, Debug)] pub struct TinyMap<K, V, const CUTOFF: usize> { inner: TinyMapInner<K, V, CUTOFF>, } #[derive(Clone, Debug)] enum TinyMapInner<K, V, const CUTOFF: usize> { Vec(Vec<(K, V)>), HashMap(HashMap<K, V>), } enum OrIterator<Left, Right> { Left(Left), Right(Right), } impl<Left, Right> Iterator for OrIterator<Left, Right> where Left: Iterator, Right: Iterator<Item = Left::Item>, { type Item = Left::Item; fn next(&mut self) -> Option<Self::Item> { match self { Self::Left(left) => left.next(), Self::Right(right) => right.next(), } } } pub struct IntoIter<K, V> { inner: OrIterator<std::vec::IntoIter<(K, V)>, std::collections::hash_map::IntoIter<K, V>>, } impl<K, V> Iterator for IntoIter<K, V> { type Item = (K, V); fn next(&mut self) -> Option<Self::Item> { self.inner.next() } } impl<K, V, const CUTOFF: usize> IntoIterator for
("c", 2), ("d", 3), ("e", 4)]; const LARGE_VALUES: [(&'static str, usize); 10] = [ ("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4), ("f", 5), ("g", 6), ("h", 7), ("i", 8), ("j", 9), ]; #[test] fn collect_small() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_medium() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_large() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::HashMap(_))) } #[test] fn get_small_success() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("a"), Some(&0)) } #[test] fn get_medium_success() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("d"), Some(&3)) } #[test] fn get_large_success() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("h"), Some(&7)) } #[test] fn get_small_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("x"), None) } #[test] fn get_medium_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("y"), None) } #[test] fn get_large_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("z"), None) } }
TinyMap<K, V, CUTOFF> { type Item = (K, V); type IntoIter = IntoIter<K, V>; fn into_iter(self) -> Self::IntoIter { let inner = match self.inner { TinyMapInner::Vec(vec) => OrIterator::Left(vec.into_iter()), TinyMapInner::HashMap(hash_map) => OrIterator::Right(hash_map.into_iter()), }; IntoIter { inner } } } impl<K, V, const CUTOFF: usize> FromIterator<(K, V)> for TinyMap<K, V, CUTOFF> where K: Hash + Eq, { fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self { let mut vec = Vec::with_capacity(CUTOFF); let mut iter = iter.into_iter().enumerate(); while let Some((index, pair)) = iter.next() { if index == CUTOFF { let inner = TinyMapInner::HashMap(vec.into_iter().chain(iter.map(|(_, pair)| pair)).collect()); return TinyMap { inner }; } vec.push(pair); } TinyMap { inner: TinyMapInner::Vec(vec), } } } impl<K, V, const CUTOFF: usize> TinyMap<K, V, CUTOFF> where K: Eq + Hash, { pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V> where K: Borrow<Q>, Q: Hash + Eq, { match &self.inner { TinyMapInner::Vec(vec) => vec .iter() .find(|(key_inner, _)| key_inner.borrow() == key) .map(|(_, value)| value), TinyMapInner::HashMap(hash_map) => hash_map.get(key), } } } #[cfg(test)] mod tests { use super::*; const CUTOFF: usize = 5; const SMALL_VALUES: [(&'static str, usize); 3] = [("a", 0), ("b", 1), ("c", 2)]; const MEDIUM_VALUES: [(&'static str, usize); 5] = [("a", 0), ("b", 1),
random
[ { "content": "#[track_caller]\n\npub fn assert_uris_match(left: &Uri, right: &Uri) {\n\n if left == right {\n\n return;\n\n }\n\n assert_eq!(left.authority(), right.authority());\n\n assert_eq!(left.scheme(), right.scheme());\n\n assert_eq!(left.path(), right.path());\n\n assert_eq!(\n\...
Rust
src/util/test_helpers.rs
aimerib/nightrunner-lib
55d8a326a35e1f397fa9e8741d02bfe05359c2c8
use crate::config::{ determiners::AllowedDeterminers, directions::{AllowedDirections, Directions}, movements::AllowedMovements, prepositions::AllowedPrepositions, rooms::{Exits, Item, Room, Storage}, Config, Event, Narrative, State, Subject, Verb, VerbFunction, }; pub fn mock_config() -> Config { Config { allowed_prepositions: AllowedPrepositions::init(), allowed_determiners: AllowedDeterminers::init(), allowed_directions: AllowedDirections::init(), allowed_movements: AllowedMovements::init(), intro: "The introduction text to be displayed at the begining of the game.".to_string(), allowed_verbs: vec![ Verb { id: 1, names: vec![String::from("quit"), String::from(":q"), String::from("q")], verb_function: VerbFunction::Quit, }, Verb { id: 2, names: vec![String::from("help")], verb_function: VerbFunction::Help, }, Verb { id: 3, names: vec![String::from("look"), String::from("stare")], verb_function: VerbFunction::Look, }, Verb { id: 4, names: vec![String::from("inventory"), String::from("i")], verb_function: VerbFunction::Inventory, }, Verb { id: 5, names: vec![ String::from("pick"), String::from("take"), String::from("grab"), String::from("pi"), String::from("tk"), String::from("gr"), String::from("get"), String::from("g"), ], verb_function: VerbFunction::Take, }, Verb { id: 6, names: vec![String::from("drop"), String::from("place")], verb_function: VerbFunction::Drop, }, Verb { id: 7, names: vec![String::from("give"), String::from("hand")], verb_function: VerbFunction::Normal, }, Verb { id: 8, names: vec![String::from("talk"), String::from("chat")], verb_function: VerbFunction::Talk, }, Verb { id: 9, names: vec![String::from("hug")], verb_function: VerbFunction::Normal, }, ], items: vec![ Item { id: 1, name: String::from("item1"), description: String::from("item 1 description"), can_pick: false, }, Item { id: 2, name: String::from("item2"), description: String::from("item 2 description"), can_pick: true, }, ], narratives: vec![ Narrative { id: 1, text: 
String::from("text"), description: String::from("text"), }, Narrative { id: 2, text: String::from( "this is a templated which exists in the game {item1}.\n\nthis is a templated subject that exists in the game {subject1}.", ), description: String::from("text"), }, Narrative { id: 3, text: String::from("this narrative should replace the old one."), description: String::from("a replaced narrative"), }, Narrative { id: 4, text: String::from("this narrative should be returned along with the text of room 1."), description: String::from("a narrative that is added to the room narrative"), }, Narrative { id: 5, text: "this narrative should be returned along with the text of room 1 when completing event 6.".to_string(), description: "a narrative that is added to the room narrative".to_string() }, ], rooms: vec![ Room { id: 1, name: String::from("room 1"), description: String::from("first room"), exits: vec![Exits { room_id: 2, direction: Directions::South, }], stash: Storage { items: Vec::new(), item_ids: vec![1, 2], }, room_events: vec![1, 4, 2, 6], narrative: 1, subjects: vec![1], }, Room { id: 2, name: String::from("room 2"), description: String::from("second room"), exits: vec![Exits { room_id: 1, direction: Directions::North, }], stash: Storage { items: Vec::new(), item_ids: Vec::new(), }, room_events: vec![5], narrative: 2, subjects: vec![2], }, ], events: vec![ Event { id: 1, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(1), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 2, name: "event 2".to_string(), description: "hug subject 2 - requires event 4".to_string(), location: 1, destination: None, narrative: Some(3), required_verb: Some(9), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: 
None, required_events: vec![4], }, Event { id: 3, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(2), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![2], }, Event { id: 4, name: String::from("event 4"), description: String::from("talk to subject 1"), location: 1, destination: None, narrative: Some(1), required_verb: Some(8), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![], }, Event { id: 5, name: "event 5".to_string(), description: "gives item 2 to player when talking to subject2".to_string(), location: 2, destination: Some(1), narrative: Some(4), required_verb: Some(8), required_subject: Some(2), required_item: None, completed: false, add_item: Some(2), remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 6, name: "event 6".to_string(), description: "gives item 2 to subject1 when talking to subject1 after event 5".to_string(), location: 1, destination: None, narrative: Some(4), required_verb: Some(7), required_subject: Some(1), required_item: Some(2), completed: false, add_item: None, remove_old_narrative: false, remove_item: Some(2), required_events: vec![5], } ], subjects: vec![ Subject { id: 1, name: String::from("subject1"), description: String::from("a subject description"), default_text: String::from("default text"), }, Subject { id: 2, name: String::from("subject2"), description: String::from("subject2 description"), default_text: String::from("default text"), } ], } } pub fn mock_json_data() -> String { let data = mock_config(); serde_json::to_string(&data).unwrap() } pub fn mock_state() -> State { State::init(mock_config()).borrow().clone() }
use crate::config::{ determiners::AllowedDeterminers, directions::{AllowedDirections,
String::from("pick"), String::from("take"), String::from("grab"), String::from("pi"), String::from("tk"), String::from("gr"), String::from("get"), String::from("g"), ], verb_function: VerbFunction::Take, }, Verb { id: 6, names: vec![String::from("drop"), String::from("place")], verb_function: VerbFunction::Drop, }, Verb { id: 7, names: vec![String::from("give"), String::from("hand")], verb_function: VerbFunction::Normal, }, Verb { id: 8, names: vec![String::from("talk"), String::from("chat")], verb_function: VerbFunction::Talk, }, Verb { id: 9, names: vec![String::from("hug")], verb_function: VerbFunction::Normal, }, ], items: vec![ Item { id: 1, name: String::from("item1"), description: String::from("item 1 description"), can_pick: false, }, Item { id: 2, name: String::from("item2"), description: String::from("item 2 description"), can_pick: true, }, ], narratives: vec![ Narrative { id: 1, text: String::from("text"), description: String::from("text"), }, Narrative { id: 2, text: String::from( "this is a templated which exists in the game {item1}.\n\nthis is a templated subject that exists in the game {subject1}.", ), description: String::from("text"), }, Narrative { id: 3, text: String::from("this narrative should replace the old one."), description: String::from("a replaced narrative"), }, Narrative { id: 4, text: String::from("this narrative should be returned along with the text of room 1."), description: String::from("a narrative that is added to the room narrative"), }, Narrative { id: 5, text: "this narrative should be returned along with the text of room 1 when completing event 6.".to_string(), description: "a narrative that is added to the room narrative".to_string() }, ], rooms: vec![ Room { id: 1, name: String::from("room 1"), description: String::from("first room"), exits: vec![Exits { room_id: 2, direction: Directions::South, }], stash: Storage { items: Vec::new(), item_ids: vec![1, 2], }, room_events: vec![1, 4, 2, 6], narrative: 1, subjects: vec![1], 
}, Room { id: 2, name: String::from("room 2"), description: String::from("second room"), exits: vec![Exits { room_id: 1, direction: Directions::North, }], stash: Storage { items: Vec::new(), item_ids: Vec::new(), }, room_events: vec![5], narrative: 2, subjects: vec![2], }, ], events: vec![ Event { id: 1, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(1), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 2, name: "event 2".to_string(), description: "hug subject 2 - requires event 4".to_string(), location: 1, destination: None, narrative: Some(3), required_verb: Some(9), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![4], }, Event { id: 3, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(2), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![2], }, Event { id: 4, name: String::from("event 4"), description: String::from("talk to subject 1"), location: 1, destination: None, narrative: Some(1), required_verb: Some(8), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![], }, Event { id: 5, name: "event 5".to_string(), description: "gives item 2 to player when talking to subject2".to_string(), location: 2, destination: Some(1), narrative: Some(4), required_verb: Some(8), required_subject: Some(2), required_item: None, completed: false, add_item: Some(2), remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 6, name: "event 6".to_string(), description: 
"gives item 2 to subject1 when talking to subject1 after event 5".to_string(), location: 1, destination: None, narrative: Some(4), required_verb: Some(7), required_subject: Some(1), required_item: Some(2), completed: false, add_item: None, remove_old_narrative: false, remove_item: Some(2), required_events: vec![5], } ], subjects: vec![ Subject { id: 1, name: String::from("subject1"), description: String::from("a subject description"), default_text: String::from("default text"), }, Subject { id: 2, name: String::from("subject2"), description: String::from("subject2 description"), default_text: String::from("default text"), } ], } } pub fn mock_json_data() -> String { let data = mock_config(); serde_json::to_string(&data).unwrap() } pub fn mock_state() -> State { State::init(mock_config()).borrow().clone() }
Directions}, movements::AllowedMovements, prepositions::AllowedPrepositions, rooms::{Exits, Item, Room, Storage}, Config, Event, Narrative, State, Subject, Verb, VerbFunction, }; pub fn mock_config() -> Config { Config { allowed_prepositions: AllowedPrepositions::init(), allowed_determiners: AllowedDeterminers::init(), allowed_directions: AllowedDirections::init(), allowed_movements: AllowedMovements::init(), intro: "The introduction text to be displayed at the begining of the game.".to_string(), allowed_verbs: vec![ Verb { id: 1, names: vec![String::from("quit"), String::from(":q"), String::from("q")], verb_function: VerbFunction::Quit, }, Verb { id: 2, names: vec![String::from("help")], verb_function: VerbFunction::Help, }, Verb { id: 3, names: vec![String::from("look"), String::from("stare")], verb_function: VerbFunction::Look, }, Verb { id: 4, names: vec![String::from("inventory"), String::from("i")], verb_function: VerbFunction::Inventory, }, Verb { id: 5, names: vec![
random
[ { "content": "use std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display};\n\nuse std::iter::FromIterator;\n\n\n\nuse regex::Regex;\n\nuse serde::{Deserialize, Serialize};\n\n/// Module containing a few utility functions to\n\n/// make testing a little easier.\n\npub mod test_help...
Rust
src/lib.rs
w4/scoped-vec.rs
e8913a9c571a94d44e8c7f5a364ccca1fe034441
use std::sync::{Arc, RwLock, RwLockReadGuard}; use owning_ref::OwningHandle; #[derive(Clone)] pub struct ScopedVec<T: Clone> { inner: Arc<RwLock<Vec<T>>>, children: Arc<RwLock<Vec<ScopedVec<T>>>>, } impl<T: Clone> ScopedVec<T> { pub fn new() -> Self { Self { inner: Arc::new(RwLock::default()), children: Arc::new(RwLock::default()) } } pub fn scope(&mut self) -> ScopedVec<T> { let new = ScopedVec::new(); self.children.write().unwrap().push(new.clone()); new } pub fn push(&mut self, val: T) { self.inner.write().unwrap().push(val); } pub fn iter(&self) -> ScopedVecIterator<T> { ScopedVecIterator::new(self) } } impl<T: Clone + PartialEq> ScopedVec<T> { pub fn contains(&self, val: &T) -> bool { self.iter().any(|f| *f == *val) } } pub struct ScopedVecGuardHolder<'a, T: Clone> { inner: RwLockReadGuard<'a, Vec<T>>, children: RwLockReadGuard<'a, Vec<ScopedVec<T>>>, } pub struct ScopedVecIterator<'a, T: Clone> { iterator: OwningHandle<Box<ScopedVecGuardHolder<'a, T>>, Box<dyn Iterator<Item = &'a T> + 'a>>, } impl<'a, T: Clone> ScopedVecIterator<'a, T> { fn new(vec: &'a ScopedVec<T>) -> Self { Self { iterator: OwningHandle::new_with_fn( Box::new(ScopedVecGuardHolder { inner: vec.inner.read().unwrap(), children: vec.children.read().unwrap() }), |g| { let guards = unsafe { &*g }; Box::new(guards.inner.iter() .chain( guards.children.iter() .map(ScopedVec::iter) .flatten() )) as Box<dyn Iterator<Item = &'a T>> } ) } } } impl<'a, T: Clone> Iterator for ScopedVecIterator<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.iterator.next() } } #[cfg(test)] mod tests { use crate::ScopedVec; #[test] fn unscoped_standard() { let mut root = ScopedVec::new(); root.push(3); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), None); } #[test] fn scoped_cant_read_root() { let mut root = ScopedVec::new(); root.push(3); let scoped = root.scope(); let mut iter = scoped.iter(); assert_eq!(iter.next(), None); } #[test] fn 
root_can_read_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); } #[test] fn root_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn scoped_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn nested_scoped_cant_read_backwards() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn can_drop_scopes() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); drop(root); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); { let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } drop(scoped); { let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } } #[test] fn diverged_scopes_can_be_read() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped1 = scoped.scope(); nested_scoped1.push(5); let mut nested_scoped2 = scoped.scope(); nested_scoped2.push(6); let mut iter 
= root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), Some(&6)); assert_eq!(iter.next(), None); } #[test] fn diverged_adjacent_scopes_cant_interact() { let mut root = ScopedVec::new(); root.push(3); let mut scoped1 = root.scope(); scoped1.push(4); let mut scoped2 = root.scope(); scoped2.push(5); let mut iter = scoped1.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); let mut iter = scoped2.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } }
use std::sync::{Arc, RwLock, RwLockReadGuard}; use owning_ref::OwningHandle; #[derive(Clone)] pub struct ScopedVec<T: Clone> { inner: Arc<RwLock<Vec<T>>>, children: Arc<RwLock<Vec<ScopedVec<T>>>>, } impl<T: Clone> ScopedVec<T> { pub fn new() -> Self { Self { inner: Arc::new(RwLock::default()), children: Arc::new(RwLock::default()) } } pub fn scope(&mut self) -> ScopedVec<T> { let new = ScopedVec::new(); self.children.write().unwrap().push(new.clone()); new } pub fn push(&mut self, val: T) { self.inner.write().unwrap().push(val); } pub fn iter(&self) -> ScopedVecIterator<T> { ScopedVecIterator::new(self) } } impl<T: Clone + PartialEq> ScopedVec<T> { pub fn contains(&self, val: &T) -> bool { self.iter().any(|f| *f == *val) } } pub struct ScopedVecGuardHolder<'a, T: Clone> { inner: RwLockReadGuard<'a, Vec<T>>, children: RwLockReadGuard<'a, Vec<ScopedVec<T>>>, } pub struct ScopedVecIterator<'a, T: Clone> { iterator: OwningHandle<Box<ScopedVecGuardHolder<'a, T>>, Box<dyn Iterator<Item = &'a T> + 'a>>, } impl<'a, T: Clone> ScopedVecIterator<'a, T> { fn new(vec: &'a ScopedVec<T>) -> Self { Self { iterator: OwningHandle::new_with_fn( Box::new(ScopedVecGuardHolder { inner: vec.inner.read().unwrap(), children: vec.children.read().unwrap() }), |g| { let guards = unsafe { &*g }; Box::new(guards.inner.iter() .chain( guards.children.iter() .map(ScopedVec::iter) .flatten() )) as Box<dyn Iterator<Item = &'a T>> } ) } } } impl<'a, T: Clone> Iterator for ScopedVecIterator<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.iterator.next() } } #[cfg(test)] mod tests { use crate::ScopedVec; #[test] fn unscoped_standard() { let mut root = ScopedVec::new(); root.push(3); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), None); } #[test] fn scoped_cant_read_root() { let mut root = ScopedVec::new(); root.push(3); let scoped = root.scope(); let mut iter = scoped.iter(); assert_eq!(iter.next(), None); } #[test] fn 
root_can_read_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); } #[test] fn root_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn scoped_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn nested_scoped_cant_read_backwards() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn can_drop_scopes() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); drop(root); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); { let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } drop(scoped); { let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } } #[test] fn diverged_scopes_can_be_read() {
#[test] fn diverged_adjacent_scopes_cant_interact() { let mut root = ScopedVec::new(); root.push(3); let mut scoped1 = root.scope(); scoped1.push(4); let mut scoped2 = root.scope(); scoped2.push(5); let mut iter = scoped1.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); let mut iter = scoped2.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } }
let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped1 = scoped.scope(); nested_scoped1.push(5); let mut nested_scoped2 = scoped.scope(); nested_scoped2.push(6); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), Some(&6)); assert_eq!(iter.next(), None); }
function_block-function_prefix_line
[ { "content": "# scoped-vec.rs\n\n\n\n[![License: WTFPL](https://img.shields.io/badge/License-WTFPL-brightgreen.svg?style=flat-square&logo=appveyor)](http://www.wtfpl.net/about/) ![https://docs.rs/scoped-vec/](https://docs.rs/scoped-vec/badge.svg) [![Downloads](https://img.shields.io/crates/d/scoped-vec.svg?styl...
Rust
libranoc/src/syntax/parse/statement/expression/operator.rs
rano-lang/rano
fe047a40dd17e4b35457a375d7749ca881dac4dc
use crate::{ core::ast::*, syntax::{parse::*, Span, TokenKind}, }; pub struct OperatorBindingPowerPrefix { pub constructor: Box<dyn FnOnce(Box<Expression>) -> PrefixOperator>, pub right_binding_power: u8, } pub struct OperatorBindingPowerInfix { pub operator: Token, pub constructor: Box<dyn FnOnce(Box<Expression>, Span, Box<Expression>) -> InfixOperator>, pub left_binding_power: u8, pub right_binding_power: u8, } pub struct OperatorBindingPowerPostfix { pub constructor: Box<dyn FnOnce(Box<Expression>, Vec<Expression>) -> PostfixOperator>, pub left_binding_power: u8, pub tails: Box<dyn FnOnce(ParseInput) -> ParseResult<Vec<Expression>>>, pub close: Box<dyn FnOnce(ParseInput) -> ParseResult<()>>, } pub fn parse_prefix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPrefix> { alt(( map(tag(TokenKind::PunctuationExclamationMark), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::Not(Not(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationPlusSign), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryPlus(UnaryPlus(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationHyphenMinus), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryMinus(UnaryMinus(expr))), right_binding_power: 13, } }), ))(i) } pub fn parse_infix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerInfix> { alt(( map(tag(TokenKind::PunctuationsLogicalOr), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalOr(lhs, span, rhs)), left_binding_power: 1, right_binding_power: 2, } }), map(tag(TokenKind::PunctuationsLogicalAnd), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalAnd(lhs, span, rhs)), left_binding_power: 3, right_binding_power: 4, } }), map(tag(TokenKind::PunctuationsEqualTo), |operator| { OperatorBindingPowerInfix { operator, 
constructor: Box::new(|lhs, span, rhs| InfixOperator::EqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationsNotEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::NotEqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationGreaterThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GreaterThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationLessThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LessThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map( tag(TokenKind::PunctuationsGreaterThanOrEqualTo), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GreaterThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, }, ), map(tag(TokenKind::PunctuationsLessThanOrEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::LessThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationPlusSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Add(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationHyphenMinus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Subtract(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationAsterisk), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Multiply(lhs, span, rhs)), left_binding_power: 11, 
right_binding_power: 12, } }), map(tag(TokenKind::PunctuationSolidus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Divide(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationPercentSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, operator, rhs| { InfixOperator::Remainder(lhs, operator, rhs) }), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationFullStop), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GetField(GetField(lhs, rhs))), left_binding_power: 17, right_binding_power: 16, } }), map(tag(TokenKind::PunctuationsGetFieldNullable), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GetFieldNullable(GetFieldNullable(lhs, rhs)) }), left_binding_power: 17, right_binding_power: 16, } }), map( tag(TokenKind::PunctuationsRangeRightExclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightExclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), map( tag(TokenKind::PunctuationsRangeRightInclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightInclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), ))(i) } pub fn parse_postfix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPostfix> { map(tag(TokenKind::PunctuationLeftSquareBracket), |_| { OperatorBindingPowerPostfix { constructor: Box::new(|expr, tails| PostfixOperator::Index(Index(expr, tails))), left_binding_power: 14, tails: Box::new(map(parse_expression, |expr| vec![expr])), close: Box::new(map(tag(TokenKind::PunctuationRightSquareBracket), |_| ())), } })(i) }
use crate::{ core::ast::*, syntax::{parse::*, Span, TokenKind}, }; pub struct OperatorBindingPowerPrefix { pub constructor: Box<dyn FnOnce(Box<Expression>) -> PrefixOperator>, pub right_binding_power: u8, } pub struct OperatorBindingPowerInfix { pub operator: Token, pub constructor: Box<dyn FnOnce(Box<Expression>, Span, Box<Expression>) -> InfixOperator>, pub left_binding_power: u8, pub right_binding_power: u8, } pub struct OperatorBindingPowerPostfix { pub constructor: Box<dyn FnOnce(Box<Expression>, Vec<Expression>) -> PostfixOperator>, pub left_binding_power: u8, pub tails: Box<dyn FnOnce(ParseInput) -> ParseResult<Vec<Expression>>>, pub close: Box<dyn FnOnce(ParseInput) -> ParseResult<()>>, } pub fn parse_prefix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPrefix> { alt(( map(tag(TokenKind::PunctuationExclamationMark), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::Not(Not(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationPlusSign),
mainder(lhs, operator, rhs) }), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationFullStop), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GetField(GetField(lhs, rhs))), left_binding_power: 17, right_binding_power: 16, } }), map(tag(TokenKind::PunctuationsGetFieldNullable), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GetFieldNullable(GetFieldNullable(lhs, rhs)) }), left_binding_power: 17, right_binding_power: 16, } }), map( tag(TokenKind::PunctuationsRangeRightExclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightExclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), map( tag(TokenKind::PunctuationsRangeRightInclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightInclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), ))(i) } pub fn parse_postfix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPostfix> { map(tag(TokenKind::PunctuationLeftSquareBracket), |_| { OperatorBindingPowerPostfix { constructor: Box::new(|expr, tails| PostfixOperator::Index(Index(expr, tails))), left_binding_power: 14, tails: Box::new(map(parse_expression, |expr| vec![expr])), close: Box::new(map(tag(TokenKind::PunctuationRightSquareBracket), |_| ())), } })(i) }
|_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryPlus(UnaryPlus(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationHyphenMinus), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryMinus(UnaryMinus(expr))), right_binding_power: 13, } }), ))(i) } pub fn parse_infix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerInfix> { alt(( map(tag(TokenKind::PunctuationsLogicalOr), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalOr(lhs, span, rhs)), left_binding_power: 1, right_binding_power: 2, } }), map(tag(TokenKind::PunctuationsLogicalAnd), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalAnd(lhs, span, rhs)), left_binding_power: 3, right_binding_power: 4, } }), map(tag(TokenKind::PunctuationsEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::EqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationsNotEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::NotEqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationGreaterThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GreaterThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationLessThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LessThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map( tag(TokenKind::PunctuationsGreaterThanOrEqualTo), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GreaterThanOrEqualTo(lhs, 
span, rhs) }), left_binding_power: 7, right_binding_power: 8, }, ), map(tag(TokenKind::PunctuationsLessThanOrEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::LessThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationPlusSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Add(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationHyphenMinus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Subtract(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationAsterisk), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Multiply(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationSolidus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Divide(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationPercentSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, operator, rhs| { InfixOperator::Re
random
[ { "content": "pub fn parse(tokens: Vec<Token>) -> crate::core::Result<Module> {\n\n let i = ParseInput::new(tokens);\n\n let (_, nodes) = all_consuming(many0(parse_statement_node))(i)?;\n\n Ok(Module { nodes })\n\n}\n", "file_path": "libranoc/src/syntax/parse/mod.rs", "rank": 0, "score": 15...
Rust
coresimd/mod.rs
peterhj/stdsimd-nvptx
ee6e4c833e4cc76ab437e0f5d0c4899ad7138478
#[macro_use] mod macros; mod simd; #[stable(feature = "simd_arch", since = "1.27.0")] pub mod arch { #[cfg(any(target_arch = "x86", dox))] #[doc(cfg(target_arch = "x86"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; } #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86_64 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86_64::*; } #[cfg(any(target_arch = "arm", dox))] #[doc(cfg(target_arch = "arm"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod arm { pub use coresimd::arm::*; } #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod aarch64 { pub use coresimd::aarch64::*; pub use coresimd::arm::*; } #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] #[stable(feature = "simd_wasm32", since = "1.33.0")] pub mod wasm32 { #[stable(feature = "simd_wasm32", since = "1.33.0")] pub use coresimd::wasm32::*; } #[cfg(any(target_arch = "mips", dox))] #[doc(cfg(target_arch = "mips"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips { pub use coresimd::mips::*; } #[cfg(any(target_arch = "mips64", dox))] #[doc(cfg(target_arch = "mips64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips64 { pub use coresimd::mips::*; } #[cfg(any(target_arch = "powerpc", dox))] #[doc(cfg(target_arch = "powerpc"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc { pub use coresimd::powerpc::*; } #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc64 { pub use coresimd::powerpc64::*; } #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] 
#[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod nvptx { pub use coresimd::nvptx::*; } } mod simd_llvm; #[cfg(any(target_arch = "x86", target_arch = "x86_64", dox))] #[doc(cfg(any(target_arch = "x86", target_arch = "x86_64")))] mod x86; #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] mod x86_64; #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] mod aarch64; #[cfg(any(target_arch = "arm", target_arch = "aarch64", dox))] #[doc(cfg(any(target_arch = "arm", target_arch = "aarch64")))] mod arm; #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] mod wasm32; #[cfg(any(target_arch = "mips", target_arch = "mips64", dox))] #[doc(cfg(any(target_arch = "mips", target_arch = "mips64")))] mod mips; #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64", dox))] #[doc(cfg(any(target_arch = "powerpc", target_arch = "powerpc64")))] mod powerpc; #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] mod powerpc64; #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] mod nvptx;
#[macro_use] mod macros; mod simd; #[stable(feature = "simd_arch", since = "1.27.0")] pub mod arch { #[cfg(any(target_arch = "x86", dox))] #[doc(cfg(target_arch = "x86"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; }
t_arch = "nvptx64")))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod nvptx { pub use coresimd::nvptx::*; } } mod simd_llvm; #[cfg(any(target_arch = "x86", target_arch = "x86_64", dox))] #[doc(cfg(any(target_arch = "x86", target_arch = "x86_64")))] mod x86; #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] mod x86_64; #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] mod aarch64; #[cfg(any(target_arch = "arm", target_arch = "aarch64", dox))] #[doc(cfg(any(target_arch = "arm", target_arch = "aarch64")))] mod arm; #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] mod wasm32; #[cfg(any(target_arch = "mips", target_arch = "mips64", dox))] #[doc(cfg(any(target_arch = "mips", target_arch = "mips64")))] mod mips; #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64", dox))] #[doc(cfg(any(target_arch = "powerpc", target_arch = "powerpc64")))] mod powerpc; #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] mod powerpc64; #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] mod nvptx;
#[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86_64 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86_64::*; } #[cfg(any(target_arch = "arm", dox))] #[doc(cfg(target_arch = "arm"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod arm { pub use coresimd::arm::*; } #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod aarch64 { pub use coresimd::aarch64::*; pub use coresimd::arm::*; } #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] #[stable(feature = "simd_wasm32", since = "1.33.0")] pub mod wasm32 { #[stable(feature = "simd_wasm32", since = "1.33.0")] pub use coresimd::wasm32::*; } #[cfg(any(target_arch = "mips", dox))] #[doc(cfg(target_arch = "mips"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips { pub use coresimd::mips::*; } #[cfg(any(target_arch = "mips64", dox))] #[doc(cfg(target_arch = "mips64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips64 { pub use coresimd::mips::*; } #[cfg(any(target_arch = "powerpc", dox))] #[doc(cfg(target_arch = "powerpc"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc { pub use coresimd::powerpc::*; } #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc64 { pub use coresimd::powerpc64::*; } #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", targe
random
[ { "content": "#[proc_macro_attribute]\n\npub fn simd_test(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let tokens = TokenStream::from(attr).into_iter().collect::<Vec<_>>();\n\n if tokens.len() != 3 {\n\n panic!(\"expected #[simd_...
Rust
nalgebra-sparse/src/ops/impl_std_ops.rs
ThatGeoGuy/nalgebra
10deb03b71793c5d98de04267a99d7af69a715d5
use super::serial::{scalar::*, spadd::*, spmm::*, spsub::*}; use crate::cs::{ CompressedColumnStorage, CompressedRowStorage, Compression, CsMatrix, CscMatrix, CsrMatrix, }; use nalgebra::{Dim, Matrix, RawStorage, RawStorageMut, Scalar}; use num_traits::Zero; use std::{ borrow::Borrow, ops::{Add, AddAssign, Div, Mul, Neg, Sub}, }; impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spadd_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T2 as Add<T1>>::Output>, T2: Scalar + Into<<T2 as Add<T1>>::Output> + Add<T1>, <T2 as Add<T1>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T2 as Add<T1>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spadd_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: 
CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spadd_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spadd_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedColumnStorage> where T2: Scalar + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spadd_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spadd_csr_dense(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as 
Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spsub_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spsub_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spsub_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spsub_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>> for 
CsMatrix<T1, MO, MI, D, CompressedColumnStorage> where T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spsub_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spsub_csr_dense(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spmm_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spmm_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, 
CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spmm_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + AddAssign + Zero, T1: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spmm_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar, R: Dim, C: Dim, S: RawStorage<T2, R, C>, T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = CscMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spmm_csr_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedRowStorage>> for Matrix<T1, R, C, S> where T1: Scalar, R: Dim, C: Dim, S: RawStorage<T1, R, C>, T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T2]>, { type Output = CscMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedRowStorage>) -> Self::Output { spmm_dense_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>> for CsMatrix<T1, 
MO, MI, D, CompressedColumnStorage> where T2: Scalar, R: Dim, C: Dim, S: RawStorage<T2, R, C>, T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spmm_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedColumnStorage>> for Matrix<T1, R, C, S> where T1: Scalar, R: Dim, C: Dim, S: RawStorage<T1, R, C>, T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T2]>, { type Output = CsrMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedColumnStorage>) -> Self::Output { spmm_dense_csc(self, rhs).unwrap() } } macro_rules! impl_sparse_scalar_product_and_div { ($($t:ty)*) => ($( impl<T1, MO, MI, D, C> Mul<$t> for CsMatrix<T1, MO, MI, D, C> where T1: Scalar + Mul<$t>, <T1 as Mul<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>; fn mul(self, rhs: $t) -> Self::Output { sp_cs_scalar_prod(self, rhs) } } impl<T1, MO, MI, D, C> Mul<CsMatrix<T1, MO, MI, D, C>> for $t where T1: Scalar + Mul<$t>, <T1 as Mul<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>; fn mul(self, rhs: CsMatrix<T1, MO, MI, D, C>) -> Self::Output { sp_cs_scalar_prod(rhs, self) } } impl<T1, MO, MI, D, C> Div<$t> for CsMatrix<T1, MO, MI, D, C> where T1: Scalar + Div<$t>, <T1 as Div<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Div<$t>>::Output, MO, MI, Vec<<T1 as Div<$t>>::Output>, C>; fn div(self, rhs: $t) -> Self::Output { 
sp_cs_scalar_div(self, rhs) } } )*) } impl_sparse_scalar_product_and_div!(isize usize u8 i8 u16 i16 u32 i32 u64 i64 f32 f64);
use super::serial::{scalar::*, spadd::*, spmm::*, spsub::*}; use crate::cs::{ CompressedColumnStorage, CompressedRowStorage, Compression, CsMatrix, CscMatrix, CsrMatrix, }; use nalgebra::{Dim, Matrix, RawStorage, RawStorageMut, Scalar}; use num_traits::Zero; use std::{ borrow::Borrow, ops::{Add, AddAssign, Div, Mul, Neg, Sub}, }; impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spadd_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T2 as Add<T1>>::Output>, T2: Scalar + Into<<T2 as Add<T1>>::Output> + Add<T1>, <T2 as Add<T1>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T2 as Add<T1>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spadd_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: 
CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spadd_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>, T2: Scalar + Into<<T1 as Add<T2>>::Output>, <T1 as Add<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Add<T2>>::Output>; fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spadd_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedColumnStorage> where T2: Scalar + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spadd_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spadd_csr_dense(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as 
Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spsub_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spsub_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spsub_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero, T2: Scalar + Into<<T1 as Sub<T2>>::Output>, <T1 as Sub<T2>>::Output: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Sub<T2>>::Output>; fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spsub_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>> for 
CsMatrix<T1, MO, MI, D, CompressedColumnStorage> where T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spsub_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>, R: Dim, C: Dim, S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>, T1: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = Matrix<T2, R, C, S>; fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spsub_csr_dense(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spmm_csr_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2
type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output { spmm_csc_csc(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage> where T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + AddAssign + Zero, T1: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CscMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spmm_csr_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedRowStorage> where T2: Scalar, R: Dim, C: Dim, S: RawStorage<T2, R, C>, T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = CscMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: Matrix<T2, R, C, S>) -> Self::Output { spmm_csr_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedRowStorage>> for Matrix<T1, R, C, S> where T1: Scalar, R: Dim, C: Dim, S: RawStorage<T1, R, C>, T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T2]>, { type Output = CscMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedRowStorage>) -> Self::Output { spmm_dense_csr(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>> for CsMatrix<T1, MO, MI, D, CompressedColumnStorage> where T2: Scalar, R: Dim, C: Dim, S: RawStorage<T2, R, C>, T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: 
Matrix<T2, R, C, S>) -> Self::Output { spmm_csc_dense(self, rhs).unwrap() } } impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedColumnStorage>> for Matrix<T1, R, C, S> where T1: Scalar, R: Dim, C: Dim, S: RawStorage<T1, R, C>, T2: Scalar + Mul<T1>, <T2 as Mul<T1>>::Output: Scalar + Add + Zero, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T2]>, { type Output = CsrMatrix<<T2 as Mul<T1>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedColumnStorage>) -> Self::Output { spmm_dense_csc(self, rhs).unwrap() } } macro_rules! impl_sparse_scalar_product_and_div { ($($t:ty)*) => ($( impl<T1, MO, MI, D, C> Mul<$t> for CsMatrix<T1, MO, MI, D, C> where T1: Scalar + Mul<$t>, <T1 as Mul<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>; fn mul(self, rhs: $t) -> Self::Output { sp_cs_scalar_prod(self, rhs) } } impl<T1, MO, MI, D, C> Mul<CsMatrix<T1, MO, MI, D, C>> for $t where T1: Scalar + Mul<$t>, <T1 as Mul<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>; fn mul(self, rhs: CsMatrix<T1, MO, MI, D, C>) -> Self::Output { sp_cs_scalar_prod(rhs, self) } } impl<T1, MO, MI, D, C> Div<$t> for CsMatrix<T1, MO, MI, D, C> where T1: Scalar + Div<$t>, <T1 as Div<$t>>::Output: Scalar, MO: Borrow<[usize]>, MI: Borrow<[usize]>, D: Borrow<[T1]>, C: Compression, { type Output = CsMatrix<<T1 as Div<$t>>::Output, MO, MI, Vec<<T1 as Div<$t>>::Output>, C>; fn div(self, rhs: $t) -> Self::Output { sp_cs_scalar_div(self, rhs) } } )*) } impl_sparse_scalar_product_and_div!(isize usize u8 i8 u16 i16 u32 i32 u64 i64 f32 f64);
, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, { type Output = CsrMatrix<<T1 as Mul<T2>>::Output>; fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output { spmm_csc_csr(self, rhs).unwrap() } } impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>> for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage> where T1: Scalar + Mul<T2>, <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero, T2: Scalar, MO1: Borrow<[usize]>, MO2: Borrow<[usize]>, MI1: Borrow<[usize]>, MI2: Borrow<[usize]>, D1: Borrow<[T1]>, D2: Borrow<[T2]>, {
random
[ { "content": "/// Sparse-sparse matrix subtraction.\n\n///\n\n/// This function takes two arguments, a CSC matrix and a CSR matrix, and performs sparse-matrix\n\n/// subtraction between the two.\n\n///\n\n/// # Errors\n\n///\n\n/// This function fails and produces an [`OperationError`] with kind\n\n/// [`Operat...
Rust
smart_contracts/interpreter/iflow/lib.rs
Liqum/rust-onchain-execution-engine
9db88f17283a320ac896b4431a7579384ae22f8f
#![feature(proc_macro_hygiene)] #![cfg_attr(not(feature = "std"), no_std)] use ink_lang as ink; #[ink::contract(version = "0.1.0")] mod iflow { use ink_core::storage::{self, Flush}; use ink_prelude::vec::Vec; #[ink(storage)] struct Iflow { start_event: storage::Value<u128>, factory: storage::Value<AccountId>, interpreter: storage::Value<AccountId>, cond_table: storage::HashMap<u128, [u128; 3]>, next_elem: storage::HashMap<u128, Vec<u128>>, subprocesses: storage::Value<Vec<u128>>, events: storage::Value<Vec<u128>>, attached_to: storage::HashMap<u128, u128>, event_code: storage::HashMap<u128, [u8; 32]>, parent_references: storage::HashMap<u128, AccountId>, instance_count: storage::HashMap<u128, u128>, } impl Iflow { #[ink(constructor)] fn new(&mut self) { self.start_event.set(0); self.factory.set(AccountId::default()); self.interpreter.set(AccountId::default()); self.events.set(Vec::new()); } #[ink(message)] fn get_pre_condition(&self, element_index: u128) -> u128 { self.cond_table .get(&element_index) .map_or(0, |cond| cond[0]) } #[ink(message)] fn get_post_condition(&self, element_index: u128) -> u128 { self.cond_table .get(&element_index) .map_or(0, |cond| cond[1]) } #[ink(message)] fn get_type_info(&self, element_index: u128) -> u128 { self.cond_table .get(&element_index) .map_or(0, |cond| cond[2]) } #[ink(message)] fn get_first_element(&self) -> u128 { *self.start_event } #[ink(message)] fn get_element_info(&self, element_index: u128) -> ([u128; 3], Vec<u128>) { ( *self.cond_table.get(&element_index).unwrap_or(&[0; 3]), self.next_elem .get(&element_index) .unwrap_or(&Vec::default()) .clone(), ) } #[ink(message)] fn get_ady_elements(&self, element_index: u128) -> Vec<u128> { self.next_elem .get(&element_index) .unwrap_or(&Vec::default()) .clone() } #[ink(message)] fn get_subprocess_list(&self) -> Vec<u128> { self.subprocesses.clone() } #[ink(message)] fn get_instance_count(&self, element_index: u128) -> u128 { 
*self.instance_count.get(&element_index).unwrap_or(&0) } #[ink(message)] fn get_event_code(&self, element_index: u128) -> [u8; 32] { *self.event_code.get(&element_index).unwrap_or(&[0; 32]) } #[ink(message)] fn get_event_list(&self) -> Vec<u128> { self.events.clone() } #[ink(message)] fn get_attached_to(&self, element_index: u128) -> u128 { *self.attached_to.get(&element_index).unwrap_or(&0) } #[ink(message)] fn get_subprocess_instance(&self, element_index: u128) -> AccountId { *self .parent_references .get(&element_index) .unwrap_or(&AccountId::default()) } #[ink(message)] fn get_factory_instance(&self) -> AccountId { *self.factory } #[ink(message)] fn set_factory_instance(&mut self, _factory: AccountId) { self.factory.set(_factory) } #[ink(message)] fn get_interpreter_instance(&self) -> AccountId { *self.interpreter } #[ink(message)] fn set_interpreter_instance(&mut self, _inerpreter: AccountId) { self.interpreter.set(_inerpreter) } #[ink(message)] fn set_element( &mut self, element_index: u128, pre_condition: u128, post_condition: u128, type_info: u128, event_code: [u8; 32], _next_elem: Vec<u128>, ) { let _type_info = self.get_type_info(element_index); match _type_info { 0 => { if type_info & 4 == 4 { self.events.push(element_index); if type_info & 36 == 36 { self.start_event.set(element_index); } self.event_code.insert(element_index, event_code); } else if type_info & 33 == 33 { self.subprocesses.push(element_index); } } _ => { if type_info != _type_info { return; } } } self.cond_table .insert(element_index, [pre_condition, post_condition, type_info]); self.next_elem.insert(element_index, _next_elem); } #[ink(message)] fn link_sub_process( &mut self, parent_index: u128, child_flow_inst: AccountId, attached_events: Vec<u128>, count_instances: u128, ) { if self.get_type_info(parent_index) & 33 != 33 { return; } self.parent_references.insert(parent_index, child_flow_inst); for attached_event in attached_events.iter() { if self.get_type_info(parent_index) & 4 == 4 
{ self.attached_to.insert(*attached_event, parent_index); } } self.instance_count.insert(parent_index, count_instances); } } }
#![feature(proc_macro_hygiene)] #![cfg_attr(not(feature = "std"), no_std)] use ink_lang as ink; #[ink::contract(version = "0.1.0")] mod iflow { use ink_core::storage::{self, Flush}; use ink_prelude::vec::Vec; #[ink(storage)] struct Iflow { start_event: storage::Value<u128>, factory: storage::Value<AccountId>, interpreter: storage::Value<AccountId>, cond_table: storage::HashMap<u128, [u128; 3]>, next_elem: storage::HashMap<u128, Vec<u128>>, subprocesses: storage::Value<Vec<u128>>, events: storage::Value<Vec<u128>>, attached_to: storage::HashMap<u128, u128>, event_code: storage::HashMap<u128, [u8; 32]>, parent_references: storage::HashMap<u128, AccountId>, instance_count: storage::HashMap<u128, u128>, } impl Iflow { #[ink(constructor)] fn new(&mut self) { self.start_event.set(0); self.factory.set(AccountId::default()); self.interpreter.set(AccountId::default()); self.events.set(Vec::new()); } #[ink(message)] fn get_pre_condition(&self, element_index: u128) -> u128 { self.cond_ta
33 { self.subprocesses.push(element_index); } } _ => { if type_info != _type_info { return; } } } self.cond_table .insert(element_index, [pre_condition, post_condition, type_info]); self.next_elem.insert(element_index, _next_elem); } #[ink(message)] fn link_sub_process( &mut self, parent_index: u128, child_flow_inst: AccountId, attached_events: Vec<u128>, count_instances: u128, ) { if self.get_type_info(parent_index) & 33 != 33 { return; } self.parent_references.insert(parent_index, child_flow_inst); for attached_event in attached_events.iter() { if self.get_type_info(parent_index) & 4 == 4 { self.attached_to.insert(*attached_event, parent_index); } } self.instance_count.insert(parent_index, count_instances); } } }
ble .get(&element_index) .map_or(0, |cond| cond[0]) } #[ink(message)] fn get_post_condition(&self, element_index: u128) -> u128 { self.cond_table .get(&element_index) .map_or(0, |cond| cond[1]) } #[ink(message)] fn get_type_info(&self, element_index: u128) -> u128 { self.cond_table .get(&element_index) .map_or(0, |cond| cond[2]) } #[ink(message)] fn get_first_element(&self) -> u128 { *self.start_event } #[ink(message)] fn get_element_info(&self, element_index: u128) -> ([u128; 3], Vec<u128>) { ( *self.cond_table.get(&element_index).unwrap_or(&[0; 3]), self.next_elem .get(&element_index) .unwrap_or(&Vec::default()) .clone(), ) } #[ink(message)] fn get_ady_elements(&self, element_index: u128) -> Vec<u128> { self.next_elem .get(&element_index) .unwrap_or(&Vec::default()) .clone() } #[ink(message)] fn get_subprocess_list(&self) -> Vec<u128> { self.subprocesses.clone() } #[ink(message)] fn get_instance_count(&self, element_index: u128) -> u128 { *self.instance_count.get(&element_index).unwrap_or(&0) } #[ink(message)] fn get_event_code(&self, element_index: u128) -> [u8; 32] { *self.event_code.get(&element_index).unwrap_or(&[0; 32]) } #[ink(message)] fn get_event_list(&self) -> Vec<u128> { self.events.clone() } #[ink(message)] fn get_attached_to(&self, element_index: u128) -> u128 { *self.attached_to.get(&element_index).unwrap_or(&0) } #[ink(message)] fn get_subprocess_instance(&self, element_index: u128) -> AccountId { *self .parent_references .get(&element_index) .unwrap_or(&AccountId::default()) } #[ink(message)] fn get_factory_instance(&self) -> AccountId { *self.factory } #[ink(message)] fn set_factory_instance(&mut self, _factory: AccountId) { self.factory.set(_factory) } #[ink(message)] fn get_interpreter_instance(&self) -> AccountId { *self.interpreter } #[ink(message)] fn set_interpreter_instance(&mut self, _inerpreter: AccountId) { self.interpreter.set(_inerpreter) } #[ink(message)] fn set_element( &mut self, element_index: u128, pre_condition: u128, 
post_condition: u128, type_info: u128, event_code: [u8; 32], _next_elem: Vec<u128>, ) { let _type_info = self.get_type_info(element_index); match _type_info { 0 => { if type_info & 4 == 4 { self.events.push(element_index); if type_info & 36 == 36 { self.start_event.set(element_index); } self.event_code.insert(element_index, event_code); } else if type_info & 33 ==
random
[ { "content": "/// Construct a `BpmnInterpreter::continue_execution` call\n\npub fn continue_execution(instance_id: u64, element_index: u128) -> Call {\n\n BpmnInterpreter::continue_execution(instance_id, element_index).into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{calls, NodeRuntimeTypes}...
Rust
src/main.rs
jameslahm/convertio
a11f96b9af7e3ae9e6dad7d0b41b8cce2a4c07d1
use base64::{decode, encode}; use clap::{load_yaml, App}; use futures::future::try_join_all; use serde::{Deserialize, Deserializer}; use std::{ collections::HashMap, fs::File, io::{Read, Write}, path::PathBuf, time::Duration, u64, }; const API_BASE_URL: &str = "http://api.convertio.co/convert"; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; #[derive(Deserialize)] struct NewConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize)] struct StatusConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize, Clone)] struct ConvertioData { id: String, step: Option<String>, #[serde(default)] #[serde(deserialize_with = "deserialize_u64_or_empty_string")] step_percent: Option<u64>, } #[derive(Deserialize)] struct FileData { content: String, } #[derive(Deserialize)] struct FileDownloadResp { code: i32, data: FileData, } fn deserialize_u64_or_empty_string<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error> where D: Deserializer<'de>, { let s: Option<u64> = Option::deserialize(deserializer).unwrap_or(Some(0)); Ok(s) } struct ConversionTask { conversion_id: String, done: bool, input_file_name: String, output_format: String, progress: u64, } async fn start_conversion( input_file_name: &str, output_format: &str, api_key: &str, ) -> Result<ConversionTask, Box<dyn std::error::Error>> { let mut map = HashMap::new(); map.insert("apikey", api_key); map.insert("input", "base64"); let mut file = File::open(input_file_name).expect("file open failed"); let mut buf = vec![]; file.read_to_end(&mut buf).expect("file read failed"); let encode_buf = encode(&buf); map.insert("file", encode_buf.as_str()); map.insert("filename", input_file_name); map.insert("outputformat", output_format); let client = reqwest::Client::new(); let resp = client .post(API_BASE_URL) .json(&map) .send() .await? 
.json::<NewConversionResp>() .await?; if resp.code != 200 { return Err(resp.error.unwrap().into()); } let conversion_id = resp.data.unwrap().id; Ok(ConversionTask { conversion_id, done: false, input_file_name: input_file_name.to_owned(), output_format: output_format.to_owned(), progress: 0, }) } async fn wait_for_status(task: &mut ConversionTask) -> Result<(), Box<dyn std::error::Error>> { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/status", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<StatusConversionResp>() .await?; if resp.code == 200 { if resp.data.clone().unwrap().step.as_deref().unwrap() == "finish" { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/dl/base64", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<FileDownloadResp>() .await?; if resp.code == 200 { let mut output_path = PathBuf::from(task.input_file_name.as_str()); output_path.set_extension(task.output_format.as_str()); let mut file = File::create(output_path).expect("create file failed"); let decode_buf = decode(&resp.data.content).unwrap(); file.write_all(&decode_buf).expect("write file failed"); } task.done = true; task.progress = 100; } else { task.progress = *resp.data.clone().unwrap().step_percent.as_ref().unwrap(); } } if resp.code != 200 { task.done = true; println!("{}", resp.error.unwrap()) } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { let yaml = load_yaml!("cli.yml"); let matches = App::from(yaml).get_matches(); let output_format = matches.value_of("format").unwrap(); let input_file_names = matches.values_of("input").unwrap(); let api_key = match std::env::var("CONVERTIO_API_KEY") { Ok(s) => s, Err(_) => "0750521d6eee3603ac222d0422891eea".to_owned(), }; let mut conversions: Vec<ConversionTask> = try_join_all(input_file_names.map(|input_file_name| { start_conversion(input_file_name, output_format, api_key.as_str()) })) .await .unwrap(); let sty = 
ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .progress_chars("##-"); let mut progress_bars = vec![]; let m = MultiProgress::new(); conversions.iter().for_each(|conversion| { let pb = m.add(ProgressBar::new(100)); pb.set_style(sty.clone()); pb.set_position(0); pb.set_message(&conversion.input_file_name); progress_bars.push(pb); }); tokio::spawn(async move { m.join().unwrap(); }); loop { if conversions.is_empty() { break; } try_join_all( conversions .iter_mut() .map(|conversion| wait_for_status(conversion)), ) .await .unwrap(); for (index, e) in conversions.iter().enumerate() { progress_bars[index].set_position(e.progress); progress_bars[index].set_message(&e.input_file_name); if e.progress == 100 { progress_bars[index].finish_and_clear(); } } conversions.retain(|conversion| !conversion.done); progress_bars.retain(|progress_bar| !progress_bar.position() != 100); tokio::time::sleep(Duration::from_secs(2)).await; } Ok(()) }
use base64::{decode, encode}; use clap::{load_yaml, App}; use futures::future::try_join_all; use serde::{Deserialize, Deserializer}; use std::{ collections::HashMap, fs::File, io::{Read, Write}, path::PathBuf, time::Duration, u64, }; const API_BASE_URL: &str = "http://api.convertio.co/convert"; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; #[derive(Deserialize)] struct NewConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize)] struct StatusConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize, Clone)] struct ConvertioData { id: String, step: Option<String>, #[serde(default)] #[serde(deserialize_with = "deserialize_u64_or_empty_string")] step_percent: Option<u64>, } #[derive(Deserialize)] struct FileData { content: String, } #[derive(Deserialize)] struct FileDownloadResp { code: i32, data: FileData, } fn deserialize_u64_or_empty_string<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error> where D: Deserializer<'de>, { let s: Option<u64> = Option::deserialize(deserializer).unwrap_or(Some(0)); Ok(s) } struct ConversionTask { conversion_id: String, done: bool, input_file_name: String, output_format: String, progress: u64, }
async fn wait_for_status(task: &mut ConversionTask) -> Result<(), Box<dyn std::error::Error>> { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/status", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<StatusConversionResp>() .await?; if resp.code == 200 { if resp.data.clone().unwrap().step.as_deref().unwrap() == "finish" { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/dl/base64", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<FileDownloadResp>() .await?; if resp.code == 200 { let mut output_path = PathBuf::from(task.input_file_name.as_str()); output_path.set_extension(task.output_format.as_str()); let mut file = File::create(output_path).expect("create file failed"); let decode_buf = decode(&resp.data.content).unwrap(); file.write_all(&decode_buf).expect("write file failed"); } task.done = true; task.progress = 100; } else { task.progress = *resp.data.clone().unwrap().step_percent.as_ref().unwrap(); } } if resp.code != 200 { task.done = true; println!("{}", resp.error.unwrap()) } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { let yaml = load_yaml!("cli.yml"); let matches = App::from(yaml).get_matches(); let output_format = matches.value_of("format").unwrap(); let input_file_names = matches.values_of("input").unwrap(); let api_key = match std::env::var("CONVERTIO_API_KEY") { Ok(s) => s, Err(_) => "0750521d6eee3603ac222d0422891eea".to_owned(), }; let mut conversions: Vec<ConversionTask> = try_join_all(input_file_names.map(|input_file_name| { start_conversion(input_file_name, output_format, api_key.as_str()) })) .await .unwrap(); let sty = ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .progress_chars("##-"); let mut progress_bars = vec![]; let m = MultiProgress::new(); conversions.iter().for_each(|conversion| { let pb = m.add(ProgressBar::new(100)); pb.set_style(sty.clone()); 
pb.set_position(0); pb.set_message(&conversion.input_file_name); progress_bars.push(pb); }); tokio::spawn(async move { m.join().unwrap(); }); loop { if conversions.is_empty() { break; } try_join_all( conversions .iter_mut() .map(|conversion| wait_for_status(conversion)), ) .await .unwrap(); for (index, e) in conversions.iter().enumerate() { progress_bars[index].set_position(e.progress); progress_bars[index].set_message(&e.input_file_name); if e.progress == 100 { progress_bars[index].finish_and_clear(); } } conversions.retain(|conversion| !conversion.done); progress_bars.retain(|progress_bar| !progress_bar.position() != 100); tokio::time::sleep(Duration::from_secs(2)).await; } Ok(()) }
async fn start_conversion( input_file_name: &str, output_format: &str, api_key: &str, ) -> Result<ConversionTask, Box<dyn std::error::Error>> { let mut map = HashMap::new(); map.insert("apikey", api_key); map.insert("input", "base64"); let mut file = File::open(input_file_name).expect("file open failed"); let mut buf = vec![]; file.read_to_end(&mut buf).expect("file read failed"); let encode_buf = encode(&buf); map.insert("file", encode_buf.as_str()); map.insert("filename", input_file_name); map.insert("outputformat", output_format); let client = reqwest::Client::new(); let resp = client .post(API_BASE_URL) .json(&map) .send() .await? .json::<NewConversionResp>() .await?; if resp.code != 200 { return Err(resp.error.unwrap().into()); } let conversion_id = resp.data.unwrap().id; Ok(ConversionTask { conversion_id, done: false, input_file_name: input_file_name.to_owned(), output_format: output_format.to_owned(), progress: 0, }) }
function_block-full_function
[ { "content": "## Convertio\n\nConvert anything from one format to another using [convertio.co](https://convertio.co/)\n\n\n\n### example\n\n```bash\n\ncargo run -- -f pdf \"Chp 2.pptx\" \"Chp 1.ppt\"\n\n```\n", "file_path": "README.md", "rank": 14, "score": 1.2168407813751934 } ]
Rust
ltn/src/select_boundary.rs
aclk/abstreet
611ab9fce155ae2ed37a224cbbd417fcebaa0536
use std::collections::BTreeSet; use anyhow::Result; use geom::Distance; use map_model::Block; use widgetry::mapspace::ToggleZoomed; use widgetry::mapspace::{World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::partition::BlockID; use crate::{App, NeighborhoodID, Partitioning, Transition}; pub struct SelectBoundary { panel: Panel, id: NeighborhoodID, world: World<BlockID>, draw_outline: ToggleZoomed, frontier: BTreeSet<BlockID>, orig_partitioning: Partitioning, last_failed_change: Option<(BlockID, bool)>, } impl SelectBoundary { pub fn new_state(ctx: &mut EventCtx, app: &App, id: NeighborhoodID) -> Box<dyn State<App>> { let mut state = SelectBoundary { panel: make_panel(ctx, app), id, world: World::bounded(app.map.get_bounds()), draw_outline: ToggleZoomed::empty(ctx), frontier: BTreeSet::new(), orig_partitioning: app.session.partitioning.clone(), last_failed_change: None, }; let initial_boundary = app.session.partitioning.neighborhood_block(id); state.frontier = app .session .partitioning .calculate_frontier(&initial_boundary.perimeter); for id in app.session.partitioning.all_block_ids() { state.add_block(ctx, app, id); } state.redraw_outline(ctx, initial_boundary); state.world.initialize_hover(ctx); Box::new(state) } fn add_block(&mut self, ctx: &mut EventCtx, app: &App, id: BlockID) { let neighborhood = app.session.partitioning.block_to_neighborhood(id); let color = app.session.partitioning.neighborhood_color(neighborhood); if self.frontier.contains(&id) { let have_block = self.currently_have_block(app, id); let mut obj = self .world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(0.5)) .hover_alpha(0.8) .clickable(); if have_block { obj = obj .hotkey(Key::Space, "remove") .hotkey(Key::LeftShift, "remove") } else { obj = obj .hotkey(Key::Space, "add") .hotkey(Key::LeftControl, "add") } 
obj.build(ctx); } else { let alpha = if self.id == neighborhood { 0.5 } else { 0.1 }; self.world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(alpha)) .build(ctx); } } fn redraw_outline(&mut self, ctx: &mut EventCtx, block: &Block) { let mut batch = ToggleZoomed::builder(); if let Ok(outline) = block.polygon.to_outline(Distance::meters(10.0)) { batch.unzoomed.push(Color::RED, outline); } if let Ok(outline) = block.polygon.to_outline(Distance::meters(5.0)) { batch.zoomed.push(Color::RED.alpha(0.5), outline); } self.draw_outline = batch.build(ctx); } fn toggle_block(&mut self, ctx: &mut EventCtx, app: &mut App, id: BlockID) -> Transition { if self.last_failed_change == Some((id, self.currently_have_block(app, id))) { return Transition::Keep; } self.last_failed_change = None; match self.try_toggle_block(app, id) { Ok(Some(new_neighborhood)) => { app.session.partitioning.recalculate_coloring(); return Transition::Replace(SelectBoundary::new_state(ctx, app, new_neighborhood)); } Ok(None) => { let old_frontier = std::mem::take(&mut self.frontier); self.frontier = app.session.partitioning.calculate_frontier( &app.session .partitioning .neighborhood_block(self.id) .perimeter, ); let mut changed_blocks: Vec<BlockID> = old_frontier .symmetric_difference(&self.frontier) .cloned() .collect(); changed_blocks.push(id); if app.session.partitioning.recalculate_coloring() { changed_blocks.clear(); changed_blocks.extend(app.session.partitioning.all_block_ids()); } for changed in changed_blocks { self.world.delete_before_replacement(changed); self.add_block(ctx, app, changed); } self.redraw_outline(ctx, app.session.partitioning.neighborhood_block(self.id)); self.panel = make_panel(ctx, app); } Err(err) => { self.last_failed_change = Some((id, self.currently_have_block(app, id))); let label = err.to_string().text_widget(ctx); self.panel.replace(ctx, "warning", label); } } Transition::Keep } fn try_toggle_block(&mut self, app: &mut 
App, id: BlockID) -> Result<Option<NeighborhoodID>> { if self.currently_have_block(app, id) { app.session .partitioning .remove_block_from_neighborhood(&app.map, id, self.id) } else { let old_owner = app.session.partitioning.block_to_neighborhood(id); app.session .partitioning .transfer_block(&app.map, id, old_owner, self.id)?; Ok(None) } } fn currently_have_block(&self, app: &App, id: BlockID) -> bool { app.session.partitioning.block_to_neighborhood(id) == self.id } } impl State<App> for SelectBoundary { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if let Outcome::Clicked(x) = self.panel.event(ctx) { match x.as_ref() { "Cancel" => { app.session.partitioning = self.orig_partitioning.clone(); return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } "Confirm" => { return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } x => { return crate::handle_app_header_click(ctx, app, x).unwrap(); } } } match self.world.event(ctx) { WorldOutcome::Keypress("add" | "remove", id) | WorldOutcome::ClickedObject(id) => { return self.toggle_block(ctx, app, id); } _ => {} } if ctx.redo_mouseover() { if let Some(id) = self.world.get_hovering() { if ctx.is_key_down(Key::LeftControl) { if !self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } else if ctx.is_key_down(Key::LeftShift) { if self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } } } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.world.draw(g); self.draw_outline.draw(g); self.panel.draw(g); } } fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel { Panel::new_builder(Widget::col(vec![ crate::app_header(ctx, app), "Draw a custom boundary for a neighborhood" .text_widget(ctx) .centered_vert(), Text::from_all(vec![ Line("Click").fg(ctx.style().text_hotkey_color), Line(" to add/remove a block"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), 
Line(Key::LeftControl.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to add"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftShift.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to remove"), ]) .into_widget(ctx), Widget::row(vec![ ctx.style() .btn_solid_primary .text("Confirm") .hotkey(Key::Enter) .build_def(ctx), ctx.style() .btn_solid_destructive .text("Cancel") .hotkey(Key::Escape) .build_def(ctx), ]), Text::new().into_widget(ctx).named("warning"), ])) .aligned(HorizontalAlignment::Left, VerticalAlignment::Top) .build(ctx) }
use std::collections::BTreeSet; use anyhow::Result; use geom::Distance; use map_model::Block; use widgetry::mapspace::ToggleZoomed; use widgetry::mapspace::{World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::partition::BlockID; use crate::{App, NeighborhoodID, Partitioning, Transition}; pub struct SelectBoundary { panel: Panel, id: NeighborhoodID, world: World<BlockID>, draw_outline: ToggleZoomed, frontier: BTreeSet<BlockID>, orig_partitioning: Partitioning, last_failed_change: Option<(BlockID, bool)>, } impl SelectBoundary { pub fn new_state(ctx: &mut EventCtx, app: &App, id: NeighborhoodID) -> Box<dyn State<App>> { let mut state = SelectBoundary { panel: make_panel(ctx, app), id, world: World::bounded(app.map.get_bounds()), draw_outline: ToggleZoomed::empty(ctx), frontier: BTreeSet::new(), orig_partitioning: app.session.partitioning.clone(), last_failed_change: None, }; let initial_boundary = app.session.partitioning.neighborhood_block(id); state.frontier = app .session .partitioning .calculate_frontier(&initial_boundary.perimeter); for id in app.session.partitioning.all_block_ids() { state.add_block(ctx, app, id); } state.redraw_outline(ctx, initial_boundary); state.world.initialize_hover(ctx); Box::new(state) } fn add_block(&mut self, ctx: &mut EventCtx, app: &App, id: BlockID) { let neighborhood = app.session.partitioning.block_to_neighborhood(id); let color = app.session.partitioning.neighborhood_color(neighborhood); if self.frontier.contains(&id) { let have_block = self.currently_have_block(app, id); let mut obj = self .world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(0.5)) .hover_alpha(0.8) .clickable(); if have_block { obj = obj .hotkey(Key::Space, "remove") .hotkey(Key::LeftShift, "remove") } else { obj = obj .hotkey(Key::Space, "add") .hotkey(Key::LeftControl, "add") } 
obj.build(ctx); } else { let alpha = if self.id == neighborhood { 0.5 } else { 0.1 }; self.world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(alpha)) .build(ctx); } } fn redraw_outline(&mut self, ctx: &mut EventCtx, block: &Block) { let mut batch = ToggleZoomed::builder(); if let Ok(outline) = block.polygon.to_outline(Distance::meters(10.0)) { batch.unzoomed.push(Color::RED, outline); } if let Ok(outline) = block.polygon.to_outline(Distance::meters(5.0)) { batch.zoomed.push(Color::RED.alpha(0.5), outline); } self.draw_outline = batch.build(ctx); } fn toggle_block(&mut self, ctx: &mut EventCtx, app: &mut App, id: BlockID) -> Transition { if self.last_failed_change == Some((id, self.currently_have_block(app, id))) { return Transition::Keep; } self.last_failed_change = None; match self.try_toggle_block(app, id) { Ok(Some(new_neighborhood)) => { app.session.partitioning.recalculate_coloring(); return Transition::Replace(SelectBoundary::new_state(ctx, app, new_neighborhood)); } Ok(None) => { let old_frontier = std::mem::take(&mut self.frontier); self.frontier = app.session.partitioning.calculate_frontier( &app.session .partitioning .neighborhood_block(self.id) .perimeter, ); let mut changed_blocks: Vec<BlockID> = old_frontier .symmetric_difference(&self.frontier) .cloned() .collect(); changed_blocks.push(id); if app.session.partitioning.recalculate_coloring() { changed_blocks.clear(); changed_blocks.extend(app.session.partitioning.all_block_ids()); } for changed in changed_blocks { self.world.delete_before_replacement(changed); self.add_block(ctx, app, changed); } self.redraw_outline(ctx, app.session.partitioning.neighborhood_block(self.id)); self.panel = make_panel(ctx, app); } Err(err) => { self.last_failed_change = Some((id, self.currently_have_block(app, id))); let label = err.to_string().text_widget(ctx); self.panel.replace(ctx, "warning", label); } } Transition::Keep } fn try_toggle_block(&mut self, app: &mut 
App, id: BlockID) -> Result<Option<NeighborhoodID>> { if self.currently_have_block(app, id) { app.session .partitioning .remove_block_from_neighborhood(&app.map, id, self.id) } else { let ol
fn currently_have_block(&self, app: &App, id: BlockID) -> bool { app.session.partitioning.block_to_neighborhood(id) == self.id } } impl State<App> for SelectBoundary { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if let Outcome::Clicked(x) = self.panel.event(ctx) { match x.as_ref() { "Cancel" => { app.session.partitioning = self.orig_partitioning.clone(); return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } "Confirm" => { return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } x => { return crate::handle_app_header_click(ctx, app, x).unwrap(); } } } match self.world.event(ctx) { WorldOutcome::Keypress("add" | "remove", id) | WorldOutcome::ClickedObject(id) => { return self.toggle_block(ctx, app, id); } _ => {} } if ctx.redo_mouseover() { if let Some(id) = self.world.get_hovering() { if ctx.is_key_down(Key::LeftControl) { if !self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } else if ctx.is_key_down(Key::LeftShift) { if self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } } } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.world.draw(g); self.draw_outline.draw(g); self.panel.draw(g); } } fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel { Panel::new_builder(Widget::col(vec![ crate::app_header(ctx, app), "Draw a custom boundary for a neighborhood" .text_widget(ctx) .centered_vert(), Text::from_all(vec![ Line("Click").fg(ctx.style().text_hotkey_color), Line(" to add/remove a block"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftControl.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to add"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftShift.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to remove"), ]) .into_widget(ctx), Widget::row(vec![ ctx.style() .btn_solid_primary 
.text("Confirm") .hotkey(Key::Enter) .build_def(ctx), ctx.style() .btn_solid_destructive .text("Cancel") .hotkey(Key::Escape) .build_def(ctx), ]), Text::new().into_widget(ctx).named("warning"), ])) .aligned(HorizontalAlignment::Left, VerticalAlignment::Top) .build(ctx) }
d_owner = app.session.partitioning.block_to_neighborhood(id); app.session .partitioning .transfer_block(&app.map, id, old_owner, self.id)?; Ok(None) } }
function_block-function_prefixed
[ { "content": "pub fn stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitStopID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, stop_body(ctx, a...
Rust
src/lib/worker.rs
seb-odessa/workers-pool
dd093585771d86c6295123edc64c0b84a497d1f3
use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use super::types::{Message, Processor, HasName, HasTarget}; pub struct WorkerHandler<T: HasName + HasTarget, W: HasName + Processor> { gate: Sender<Message<T>>, input: Receiver<Message<T>>, output: Sender<Message<T>>, received: usize, processed: usize, worker: W, } impl<T: HasName + HasTarget, W: HasName + Processor> Drop for WorkerHandler<T, W> { fn drop(&mut self) { trace!("{} dropped. Received {} tasks, processed {} tasks.", self.worker.name(), self.received, self.processed); } } impl<T: HasName + HasTarget, W: HasName + Processor> WorkerHandler<T, W> { pub fn new(worker: W, output: Sender<Message<T>>) -> Self { trace!("WorkerHandler::new({}, ...)", &worker.name()); let (tx, rx) = mpsc::channel(); WorkerHandler { gate: tx, input: rx, output: output, received: 0, processed: 0, worker: worker, } } pub fn gate(&self) -> Sender<Message<T>> { self.gate.clone() } fn say(&self, msg: Message<T>) -> bool { return self.output.send(msg).is_ok(); } pub fn run(&mut self) { while let Ok(msg) = self.input.recv() { self.received += 1; match msg { Message::Quit => { trace!("{} <= Message::Quit", self.worker.name()); break; } Message::Event(request) => { let name = request.name(); trace!("{} <= Message::Request({})", self.worker.name(), name); let succ = self.say(Message::Busy(name.clone())) && self.say(Message::Event(self.worker.process(request))) && self.say(Message::Free(name.clone())); if !succ { break; } self.processed += 1; trace!("{} <= Message::Request({}); processed: {}", self.worker.name(), name, self.processed); } _ => { warn!("{} <= Unexpected message!!!", self.worker.name()); } } } trace!("{} Has finished run()", self.worker.name()); } } #[cfg(test)] mod tests { use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use std::thread; use types::{Message, Processor, HasName, HasTarget}; use super::WorkerHandler; #[derive(Debug, PartialEq)] struct EventFake; impl HasName for EventFake { fn name(&self) 
-> String { String::from("EventFake") } } impl HasTarget for EventFake { fn target(&self) -> String { String::from("EventFakeTarget") } } #[derive(Debug, PartialEq)] struct TaskFake; impl HasName for TaskFake { fn name(&self) -> String { String::from("EventFake") } } impl Processor for TaskFake { fn process<T>(&self, event: T) -> T { event } } #[test] fn message_quit() { let task = TaskFake; let (pipe, _): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } #[test] fn message_event() { let task = TaskFake; let taskname = task.name(); let (pipe, results): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Event(EventFake)).unwrap(); assert!(results.recv().unwrap() == Message::Busy(taskname.clone())); assert!(results.recv().unwrap() == Message::Event(EventFake)); assert!(results.recv().unwrap() == Message::Free(taskname.clone())); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } }
use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use super::types::{Message, Processor, HasName, HasTarget}; pub struct WorkerHandler<T: HasName + HasTarget, W: HasName + Processor> { gate: Sender<Message<T>>, input: Receiver<Message<T>>, output: Sender<Message<T>>, received: usize, processed: usize, worker: W, } impl<T: HasName + HasTarget, W: HasName + Processor> Drop for WorkerHandler<T, W> { fn drop(&mut self) { trace!("{} dropped. Received {} tasks, processed {} tasks.", self.worker.name(), self.received, self.processed); } }
trace!("{} Has finished run()", self.worker.name()); } } #[cfg(test)] mod tests { use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use std::thread; use types::{Message, Processor, HasName, HasTarget}; use super::WorkerHandler; #[derive(Debug, PartialEq)] struct EventFake; impl HasName for EventFake { fn name(&self) -> String { String::from("EventFake") } } impl HasTarget for EventFake { fn target(&self) -> String { String::from("EventFakeTarget") } } #[derive(Debug, PartialEq)] struct TaskFake; impl HasName for TaskFake { fn name(&self) -> String { String::from("EventFake") } } impl Processor for TaskFake { fn process<T>(&self, event: T) -> T { event } } #[test] fn message_quit() { let task = TaskFake; let (pipe, _): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } #[test] fn message_event() { let task = TaskFake; let taskname = task.name(); let (pipe, results): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Event(EventFake)).unwrap(); assert!(results.recv().unwrap() == Message::Busy(taskname.clone())); assert!(results.recv().unwrap() == Message::Event(EventFake)); assert!(results.recv().unwrap() == Message::Free(taskname.clone())); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } }
impl<T: HasName + HasTarget, W: HasName + Processor> WorkerHandler<T, W> { pub fn new(worker: W, output: Sender<Message<T>>) -> Self { trace!("WorkerHandler::new({}, ...)", &worker.name()); let (tx, rx) = mpsc::channel(); WorkerHandler { gate: tx, input: rx, output: output, received: 0, processed: 0, worker: worker, } } pub fn gate(&self) -> Sender<Message<T>> { self.gate.clone() } fn say(&self, msg: Message<T>) -> bool { return self.output.send(msg).is_ok(); } pub fn run(&mut self) { while let Ok(msg) = self.input.recv() { self.received += 1; match msg { Message::Quit => { trace!("{} <= Message::Quit", self.worker.name()); break; } Message::Event(request) => { let name = request.name(); trace!("{} <= Message::Request({})", self.worker.name(), name); let succ = self.say(Message::Busy(name.clone())) && self.say(Message::Event(self.worker.process(request))) && self.say(Message::Free(name.clone())); if !succ { break; } self.processed += 1; trace!("{} <= Message::Request({}); processed: {}", self.worker.name(), name, self.processed); } _ => { warn!("{} <= Unexpected message!!!", self.worker.name()); } } }
random
[ { "content": "/// This trait guaranty that it's implementer has a process<T>() function\n\npub trait Processor {\n\n /// Takes arg of type <T>\n\n /// Returns result of the same type\n\n fn process<T>(&self, arg: T) -> T;\n\n}\n\n\n\n/// Message type used as container for all Supevisors/Workers communi...
Rust
dnscat/src/encryption/standard.rs
avitex/rust-dnscat2
151f889780ac4ebf65ecabe3a738f84a22965bb1
use std::borrow::Borrow; use bytes::BufMut; use constant_time_eq::constant_time_eq; use generic_array::typenum::{U32, U65}; use generic_array::{sequence::Lengthen, GenericArray}; use ring::agreement::{self, agree_ephemeral}; use ring::rand; use salsa20::cipher::{NewStreamCipher, StreamCipher}; use salsa20::Salsa20; use secstr::SecStr; use sha3::{Digest, Sha3_256}; use super::{Authenticator, Encryption, EncryptionAcceptor, EncryptionError, PublicKey}; use crate::packet::SessionHeader; use crate::util::Encode; const PUBLIC_KEY_OCTET_TAG: u8 = 0x04; const STANDARD_ARGS_SIZE: usize = 6 + 2; type EncryptionKey = GenericArray<u8, <Salsa20 as NewStreamCipher>::KeySize>; type EncryptionNonce = GenericArray<u8, <Salsa20 as NewStreamCipher>::NonceSize>; type EncryptionMac = GenericArray<u8, U32>; type PublicKeyWithTag = GenericArray<u8, U65>; #[derive(Debug)] pub struct StandardEncryption { is_client: bool, nonce: u16, preshared_key: Option<SecStr>, self_pub_key: agreement::PublicKey, self_authenticator: Option<Authenticator>, peer_authenticator: Option<Authenticator>, self_priv_key: Option<agreement::EphemeralPrivateKey>, peer_pub_key: Option<agreement::UnparsedPublicKey<PublicKeyWithTag>>, stream_keys: Option<StreamKeys>, } impl StandardEncryption { pub fn new_with_ephemeral( is_client: bool, preshared_key: Option<SecStr>, ) -> Result<Self, EncryptionError> { let rand = rand::SystemRandom::new(); let (self_pub_key, self_priv_key) = agreement::EphemeralPrivateKey::generate(&agreement::ECDH_P256, &rand) .and_then(|priv_key| { priv_key .compute_public_key() .map(|pub_key| (pub_key, priv_key)) }) .or(Err(EncryptionError::Keygen))?; Ok(Self { nonce: 0, is_client, preshared_key, self_pub_key, peer_pub_key: None, self_authenticator: None, peer_authenticator: None, self_priv_key: Some(self_priv_key), stream_keys: None, }) } fn next_nouce(&mut self) -> Result<u16, EncryptionError> { if self.nonce == u16::max_value() { Err(EncryptionError::Renegotiate) } else { let current = 
self.nonce; self.nonce += 1; Ok(current) } } fn stream_keys(&self) -> &StreamKeys { self.stream_keys.as_ref().expect("stream keys not set") } fn raw_public_key(&self) -> &[u8] { &self.self_pub_key.as_ref()[1..] } } impl Encryption for StandardEncryption { fn args_size(&self) -> u8 { STANDARD_ARGS_SIZE as u8 } fn public_key(&self) -> PublicKey { GenericArray::clone_from_slice(self.raw_public_key()) } fn handshake(&mut self, peer: PublicKey) -> Result<(), EncryptionError> { let peer_with_tag = peer.prepend(PUBLIC_KEY_OCTET_TAG); let peer_pub_key = agreement::UnparsedPublicKey::new(&agreement::ECDH_P256, peer_with_tag); let (self_auth, peer_auth, stream_keys) = agree_ephemeral( self.self_priv_key.take().expect("no private key"), &peer_pub_key, EncryptionError::Handshake, |shared_key| { let self_auth = calc_authenticator( self.is_client, self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let peer_auth = calc_authenticator( self.is_client, !self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let stream_keys = StreamKeys::from_shared(shared_key); Ok((self_auth, peer_auth, stream_keys)) }, )?; self.self_authenticator = Some(self_auth); self.peer_authenticator = Some(peer_auth); self.peer_pub_key = Some(peer_pub_key); self.stream_keys = Some(stream_keys); Ok(()) } fn authenticator(&self) -> Authenticator { self.self_authenticator .expect("authenticator not initialised") } fn authenticate(&mut self, peer: Authenticator) -> Result<(), EncryptionError> { let valid = self .peer_authenticator .expect("peer authenticator consumed or not init"); if constant_time_eq(&valid[..], &peer[..]) { Ok(()) } else { Err(EncryptionError::Authentication) } } fn encrypt( &mut self, head: &SessionHeader, mut args: &mut [u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_write_keys(self.is_client); let nonce 
= self.next_nouce()?.to_be_bytes(); let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.encrypt(data); let sig = calc_signature(head, &nonce[..], &mac_key[..], data); args.put_slice(&sig[..]); args.put_slice(&nonce[..]); Ok(()) } fn decrypt( &mut self, head: &SessionHeader, args: &[u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_read_keys(self.is_client); let sig = [args[0], args[1], args[2], args[3], args[4], args[5]]; let nonce = [args[6], args[7]]; if calc_signature(head, &nonce[..], &mac_key[..], data) != sig { return Err(EncryptionError::Signature); } let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.decrypt(data); Ok(()) } } #[derive(Debug)] pub struct StandardEncryptionAcceptor { preshared_key: Option<SecStr>, } impl StandardEncryptionAcceptor { pub fn new(preshared_key: Option<SecStr>) -> Self { Self { preshared_key } } } impl EncryptionAcceptor for StandardEncryptionAcceptor { type Encryption = StandardEncryption; fn accept(&mut self, client: PublicKey) -> Result<Self::Encryption, EncryptionError> { let psk = self.preshared_key.clone(); StandardEncryption::new_with_ephemeral(false, psk).and_then(|mut encryption| { encryption.handshake(client)?; Ok(encryption) }) } } #[derive(Debug)] struct StreamKeys { client_mac: EncryptionMac, server_mac: EncryptionMac, client_write: EncryptionKey, server_write: EncryptionKey, } impl StreamKeys { fn get_write_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { if is_client { (self.client_write, self.client_mac) } else { (self.server_write, self.server_mac) } } fn get_read_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { self.get_write_keys(!is_client) } fn from_shared(key: &[u8]) -> Self { let mut hash = Sha3_256::new(); hash.update(key); hash.update("client_write_key"); let client_write = hash.finalize_reset(); hash.update(key); hash.update("client_mac_key"); let client_mac = 
hash.finalize_reset(); hash.update(key); hash.update("server_write_key"); let server_write = hash.finalize_reset(); hash.update(key); hash.update("server_mac_key"); let server_mac = hash.finalize(); Self { client_write, server_write, client_mac, server_mac, } } } fn calc_nonce(nonce: [u8; 2]) -> EncryptionNonce { let mut nonce_array = [0u8; 8]; nonce_array[6] = nonce[0]; nonce_array[7] = nonce[1]; nonce_array.into() } fn calc_authenticator( is_client: bool, for_client: bool, pubkey_self: &[u8], pubkey_peer: &[u8], shared_key: &[u8], preshared_key: Option<&[u8]>, ) -> Authenticator { let mut hash = Sha3_256::new(); if for_client { hash.update("client"); } else { hash.update("server"); } hash.update(shared_key); if is_client { hash.update(pubkey_self); hash.update(pubkey_peer); } else { hash.update(pubkey_peer); hash.update(pubkey_self); } if let Some(preshared_key) = preshared_key { hash.update(preshared_key); } hash.finalize() } fn calc_signature( head: &SessionHeader, nonce: &[u8], mac_key: &[u8], ciphertext: &[u8], ) -> [u8; 6] { let mut head_bytes = [0u8; SessionHeader::len()]; head.encode(&mut &mut head_bytes[..]); let mut hash = Sha3_256::new(); hash.update(mac_key); hash.update(&head_bytes[..]); hash.update(nonce); hash.update(ciphertext); let res = hash.finalize(); [res[0], res[1], res[2], res[3], res[4], res[5]] } #[cfg(test)] mod tests { use super::*; use crate::packet::PacketKind; #[test] fn test_basic() { let mut client = StandardEncryption::new_with_ephemeral(true, None).expect("client enc"); let mut server = StandardEncryption::new_with_ephemeral(false, None).expect("server enc"); server .handshake(client.public_key()) .expect("client to server handshake"); client .handshake(server.public_key()) .expect("server to client handshake"); server .authenticate(client.authenticator()) .expect("client to server auth"); client .authenticate(server.authenticator()) .expect("server to client auth"); let header = SessionHeader::new(1, PacketKind::SYN, 2); let mut 
args = [0u8; 8]; let mut data = [1, 2, 3, 5]; client .encrypt(&header, &mut args[..], &mut data[..]) .expect("encrypt"); assert_ne!(data, [1, 2, 3, 5]); server .decrypt(&header, &mut args[..], &mut data[..]) .expect("decrypt"); assert_eq!(data, [1, 2, 3, 5]); } }
use std::borrow::Borrow; use bytes::BufMut; use constant_time_eq::constant_time_eq; use generic_array::typenum::{U32, U65}; use generic_array::{sequence::Lengthen, GenericArray}; use ring::agreement::{self, agree_ephemeral}; use ring::rand; use salsa20::cipher::{NewStreamCipher, StreamCipher}; use salsa20::Salsa20; use secstr::SecStr; use sha3::{Digest, Sha3_256}; use super::{Authenticator, Encryption, EncryptionAcceptor, EncryptionError, PublicKey}; use crate::packet::SessionHeader; use crate::util::Encode; const PUBLIC_KEY_OCTET_TAG: u8 = 0x04; const STANDARD_ARGS_SIZE: usize = 6 + 2; type EncryptionKey = GenericArray<u8, <Salsa20 as NewStreamCipher>::KeySize>; type EncryptionNonce = GenericArray<u8, <Salsa20 as NewStreamCipher>::NonceSize>; type EncryptionMac = GenericArray<u8, U32>; type PublicKeyWithTag = GenericArray<u8, U65>; #[derive(Debug)] pub struct StandardEncryption { is_client: bool, nonce: u16, preshared_key: Option<SecStr>, self_pub_key: agreement::PublicKey, self_authenticator: Option<Authenticator>, peer_authenticator: Option<Authenticator>, self_priv_key: Option<agreement::EphemeralPrivateKey>, peer_pub_key: Option<agreement::UnparsedPublicKey<PublicKeyWithTag>>, stream_keys: Option<StreamKeys>, } impl StandardEncryption { pub fn new_with_ephemeral( is_client: bool, preshared_key: Option<SecStr>, ) -> Result<Self, EncryptionError> { let rand = rand::SystemRandom::new(); let (self_pub_key, self_priv_key) = agreement::EphemeralPrivateKey::generate(&agreement::ECDH_P256, &rand) .and_then(|priv_key| { priv_key .compute_public_key() .map(|pub_key| (pub_key, priv_key)) }) .or(Err(EncryptionError::Keygen))?; Ok(Self { nonce: 0, is_client, preshared_key, self_pub_key, peer_pub_key: None, self_authenticator: None, peer_authenticator: None, self_priv_key: Some(self_priv_key), stream_keys: None, }) } fn next_nouce(&mut self) -> Result<u16, EncryptionError> { if self.nonce == u16::max_value() { Err(EncryptionError::Renegotiate) } else { let current = 
self.nonce; self.nonce += 1; Ok(current) } } fn stream_keys(&self) -> &StreamKeys { self.stream_keys.as_ref().expect("stream keys not set") } fn raw_public_key(&self) -> &[u8] { &self.self_pub_key.as_ref()[1..] } } impl Encryption for StandardEncryption { fn args_size(&self) -> u8 { STANDARD_ARGS_SIZE as u8 } fn public_key(&self) -> PublicKey { GenericArray::clone_from_slice(self.raw_public_key()) } fn handshake(&mut self, peer: PublicKey) -> Result<(), EncryptionError> { let peer_with_tag = peer.prepend(PUBLIC_KEY_OCTET_TAG); let peer_pub_key = agreement::UnparsedPublicKey::new(&agreement::ECDH_P256, peer_with_tag); let (self_auth, peer_auth, stream_keys) = agree_ephemeral( self.self_priv_key.take().expect("no private key"), &peer_pub_key, EncryptionError::Handshake, |shared_key| { let self_auth = calc_authenticator( self.is_client, self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let peer_auth = calc_authenticator( self.is_client, !self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let stream_keys = StreamKeys::from_shared(shared_key); Ok((self_auth, peer_auth, stream_keys)) }, )?; self.self_authenticator = Some(self_auth); self.peer_authenticator = Some(peer_auth); self.peer_pub_key = Some(peer_pub_key); self.stream_keys = Some(stream_keys); Ok(()) } fn authenticator(&self) -> Authenticator { self.self_authenticator .expect("authenticator not initialised") } fn authenticate(&mut self, peer: Authenticator) -> Result<(), EncryptionError> { let valid
lt<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_write_keys(self.is_client); let nonce = self.next_nouce()?.to_be_bytes(); let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.encrypt(data); let sig = calc_signature(head, &nonce[..], &mac_key[..], data); args.put_slice(&sig[..]); args.put_slice(&nonce[..]); Ok(()) } fn decrypt( &mut self, head: &SessionHeader, args: &[u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_read_keys(self.is_client); let sig = [args[0], args[1], args[2], args[3], args[4], args[5]]; let nonce = [args[6], args[7]]; if calc_signature(head, &nonce[..], &mac_key[..], data) != sig { return Err(EncryptionError::Signature); } let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.decrypt(data); Ok(()) } } #[derive(Debug)] pub struct StandardEncryptionAcceptor { preshared_key: Option<SecStr>, } impl StandardEncryptionAcceptor { pub fn new(preshared_key: Option<SecStr>) -> Self { Self { preshared_key } } } impl EncryptionAcceptor for StandardEncryptionAcceptor { type Encryption = StandardEncryption; fn accept(&mut self, client: PublicKey) -> Result<Self::Encryption, EncryptionError> { let psk = self.preshared_key.clone(); StandardEncryption::new_with_ephemeral(false, psk).and_then(|mut encryption| { encryption.handshake(client)?; Ok(encryption) }) } } #[derive(Debug)] struct StreamKeys { client_mac: EncryptionMac, server_mac: EncryptionMac, client_write: EncryptionKey, server_write: EncryptionKey, } impl StreamKeys { fn get_write_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { if is_client { (self.client_write, self.client_mac) } else { (self.server_write, self.server_mac) } } fn get_read_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { self.get_write_keys(!is_client) } fn from_shared(key: &[u8]) -> Self { let mut hash = Sha3_256::new(); hash.update(key); hash.update("client_write_key"); let 
client_write = hash.finalize_reset(); hash.update(key); hash.update("client_mac_key"); let client_mac = hash.finalize_reset(); hash.update(key); hash.update("server_write_key"); let server_write = hash.finalize_reset(); hash.update(key); hash.update("server_mac_key"); let server_mac = hash.finalize(); Self { client_write, server_write, client_mac, server_mac, } } } fn calc_nonce(nonce: [u8; 2]) -> EncryptionNonce { let mut nonce_array = [0u8; 8]; nonce_array[6] = nonce[0]; nonce_array[7] = nonce[1]; nonce_array.into() } fn calc_authenticator( is_client: bool, for_client: bool, pubkey_self: &[u8], pubkey_peer: &[u8], shared_key: &[u8], preshared_key: Option<&[u8]>, ) -> Authenticator { let mut hash = Sha3_256::new(); if for_client { hash.update("client"); } else { hash.update("server"); } hash.update(shared_key); if is_client { hash.update(pubkey_self); hash.update(pubkey_peer); } else { hash.update(pubkey_peer); hash.update(pubkey_self); } if let Some(preshared_key) = preshared_key { hash.update(preshared_key); } hash.finalize() } fn calc_signature( head: &SessionHeader, nonce: &[u8], mac_key: &[u8], ciphertext: &[u8], ) -> [u8; 6] { let mut head_bytes = [0u8; SessionHeader::len()]; head.encode(&mut &mut head_bytes[..]); let mut hash = Sha3_256::new(); hash.update(mac_key); hash.update(&head_bytes[..]); hash.update(nonce); hash.update(ciphertext); let res = hash.finalize(); [res[0], res[1], res[2], res[3], res[4], res[5]] } #[cfg(test)] mod tests { use super::*; use crate::packet::PacketKind; #[test] fn test_basic() { let mut client = StandardEncryption::new_with_ephemeral(true, None).expect("client enc"); let mut server = StandardEncryption::new_with_ephemeral(false, None).expect("server enc"); server .handshake(client.public_key()) .expect("client to server handshake"); client .handshake(server.public_key()) .expect("server to client handshake"); server .authenticate(client.authenticator()) .expect("client to server auth"); client 
.authenticate(server.authenticator()) .expect("server to client auth"); let header = SessionHeader::new(1, PacketKind::SYN, 2); let mut args = [0u8; 8]; let mut data = [1, 2, 3, 5]; client .encrypt(&header, &mut args[..], &mut data[..]) .expect("encrypt"); assert_ne!(data, [1, 2, 3, 5]); server .decrypt(&header, &mut args[..], &mut data[..]) .expect("decrypt"); assert_eq!(data, [1, 2, 3, 5]); } }
= self .peer_authenticator .expect("peer authenticator consumed or not init"); if constant_time_eq(&valid[..], &peer[..]) { Ok(()) } else { Err(EncryptionError::Authentication) } } fn encrypt( &mut self, head: &SessionHeader, mut args: &mut [u8], data: &mut [u8], ) -> Resu
random
[ { "content": "pub fn decode_iter<I>(iter: I, skip_ignored: bool) -> impl Iterator<Item = Result<u8, DecodeError>>\n\nwhere\n\n I: Iterator<Item = u8>,\n\n{\n\n let mut iter = iter\n\n .fuse()\n\n .map(decode_nibble)\n\n .filter_map(move |res| match res {\n\n NibbleResult::I...
Rust
src/dnsimple/domains_signer_records.rs
dnsimple/dnsimple-rust
4f972d5e89b1cf76baea5b4df604359a39acfe5f
use crate::dnsimple::domains::Domains; use crate::dnsimple::{DNSimpleEmptyResponse, DNSimpleResponse, Endpoint, RequestOptions}; use serde::{Deserialize, Serialize}; #[derive(Debug, Deserialize, Serialize)] pub struct DelegationSignerRecord { pub id: u64, pub domain_id: u64, pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, pub created_at: String, pub updated_at: String, } struct ListSignerRecordsEndpoint; impl Endpoint for ListSignerRecordsEndpoint { type Output = Vec<DelegationSignerRecord>; } #[derive(Debug, Serialize)] pub struct DelegationSignerRecordPayload { pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, } struct SignerRecordEndpoint; impl Endpoint for SignerRecordEndpoint { type Output = DelegationSignerRecord; } impl Domains<'_> { pub fn list_delegation_signer_records( &self, account_id: u64, domain: &str, options: Option<RequestOptions>, ) -> Result<DNSimpleResponse<Vec<DelegationSignerRecord>>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .get::<ListSignerRecordsEndpoint>(&*path, options) } pub fn create_delegation_signer_record( &self, account_id: u64, domain: &str, payload: DelegationSignerRecordPayload, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .post::<SignerRecordEndpoint>(&*path, serde_json::to_value(payload).unwrap()) } pub fn get_delegation_signer_record( &self, account_id: u64, domain: &str, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client.get::<SignerRecordEndpoint>(&*path, None) } pub fn delete_delegation_signer_record( &self, account_id: u64, domain: &str, delegation_signer_record_id: i32, ) -> DNSimpleEmptyResponse { let path = format!( 
"/{}/domains/{}/ds_records/{}", account_id, domain, delegation_signer_record_id ); self.client.delete(&*path) } }
use crate::dnsimple::domains::Domains; use crate::dnsimple::{DNSimpleEmptyResponse, DNSimpleResponse, Endpoint, RequestOptions}; use serde::{Deserialize, Serialize}; #[derive(Debug, Deserialize, Serialize)] pub struct DelegationSignerRecord { pub id: u64, pub domain_id: u64, pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, pub created_at: String, pub updated_at: String, } struct ListSignerRecordsEndpoint; impl Endpoint for ListSignerRecordsEndpoint { type Output = Vec<DelegationSignerRecord>; } #[derive(Debug, Serialize)] pub struct DelegationSignerRecordPayload { pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, } struct SignerRecordEndpoint; impl Endpoint for SignerRecordEndpoint { type Output = DelegationSignerRecord; } impl Domains<'_> { pub fn list_delegation_signer_records( &self, account_id: u64, domain: &str, options: Option<RequestOptions>, ) -> Result<DNSimpleResponse<Vec<DelegationSignerRecord>>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .get::<ListSignerRecordsEndpoint>(&*path, options) } pub fn create_delegation_signer_record( &sel
pub fn get_delegation_signer_record( &self, account_id: u64, domain: &str, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client.get::<SignerRecordEndpoint>(&*path, None) } pub fn delete_delegation_signer_record( &self, account_id: u64, domain: &str, delegation_signer_record_id: i32, ) -> DNSimpleEmptyResponse { let path = format!( "/{}/domains/{}/ds_records/{}", account_id, domain, delegation_signer_record_id ); self.client.delete(&*path) } }
f, account_id: u64, domain: &str, payload: DelegationSignerRecordPayload, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .post::<SignerRecordEndpoint>(&*path, serde_json::to_value(payload).unwrap()) }
function_block-function_prefixed
[ { "content": "/// Creates a mockserver and a client (changing the url of the client\n\n/// to that of the mockserver to capture the requests).\n\n///\n\n/// It builds a response struct for the mock server using the fixture.\n\n///\n\n/// # Arguments\n\n///\n\n/// `fixture`: the path to the fixture inside the `a...
Rust
src/lib.rs
dnaeon/rust-libzmq
6925d29555743e714864378c08eaeffaeed5e8f1
extern crate libc; const ZMQ_HAUSNUMERO: usize = 156384712; pub const EFSM: usize = ZMQ_HAUSNUMERO + 51; pub const ENOCOMPATPROTO: usize = ZMQ_HAUSNUMERO + 52; pub const ETERM: usize = ZMQ_HAUSNUMERO + 53; pub const EMTHREAD: usize = ZMQ_HAUSNUMERO + 54; pub const ZMQ_IO_THREADS: usize = 1; pub const ZMQ_MAX_SOCKETS: usize = 2; pub const ZMQ_SOCKET_LIMIT: usize = 3; pub const ZMQ_THREAD_PRIORITY: usize = 3; pub const ZMQ_THREAD_SCHED_POLICY: usize = 4; pub const ZMQ_IO_THREADS_DFLT: usize = 1; pub const ZMQ_MAX_SOCKETS_DFLT: usize = 1023; pub const ZMQ_THREAD_PRIORITY_DFLT: isize = -1; pub const ZMQ_THREAD_SCHED_POLICY_DFLT: isize = -1; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_msg_t { pub _m: [::libc::c_uchar; 64usize], } impl Clone for Struct_zmq_msg_t { fn clone(&self) -> Self { *self } } #[allow(non_camel_case_types)] pub type zmq_msg_t = Struct_zmq_msg_t; #[allow(non_camel_case_types)] pub type zmq_free_fn = extern "C" fn(data: *mut ::libc::c_void, hint: *mut ::libc::c_void); pub const ZMQ_PAIR: usize = 0; pub const ZMQ_PUB: usize = 1; pub const ZMQ_SUB: usize = 2; pub const ZMQ_REQ: usize = 3; pub const ZMQ_REP: usize = 4; pub const ZMQ_DEALER: usize = 5; pub const ZMQ_ROUTER: usize = 6; pub const ZMQ_PULL: usize = 7; pub const ZMQ_PUSH: usize = 8; pub const ZMQ_XPUB: usize = 9; pub const ZMQ_XSUB: usize = 10; pub const ZMQ_STREAM: usize = 11; pub const ZMQ_SERVER: usize = 12; pub const ZMQ_CLIENT: usize = 13; pub const ZMQ_XREQ: usize = ZMQ_DEALER; pub const ZMQ_XREP: usize = ZMQ_ROUTER; pub const ZMQ_AFFINITY: usize = 4; pub const ZMQ_IDENTITY: usize = 5; pub const ZMQ_SUBSCRIBE: usize = 6; pub const ZMQ_UNSUBSCRIBE: usize = 7; pub const ZMQ_RATE: usize = 8; pub const ZMQ_RECOVERY_IVL: usize = 9; pub const ZMQ_SNDBUF: usize = 11; pub const ZMQ_RCVBUF: usize = 12; pub const ZMQ_RCVMORE: usize = 13; pub const ZMQ_FD: usize = 14; pub const ZMQ_EVENTS: usize = 15; pub const ZMQ_TYPE: usize = 16; pub const ZMQ_LINGER: usize = 17; pub const 
ZMQ_RECONNECT_IVL: usize = 18; pub const ZMQ_BACKLOG: usize = 19; pub const ZMQ_RECONNECT_IVL_MAX: usize = 21; pub const ZMQ_MAXMSGSIZE: usize = 22; pub const ZMQ_SNDHWM: usize = 23; pub const ZMQ_RCVHWM: usize = 24; pub const ZMQ_MULTICAST_HOPS: usize = 25; pub const ZMQ_RCVTIMEO: usize = 27; pub const ZMQ_SNDTIMEO: usize = 28; pub const ZMQ_LAST_ENDPOINT: usize = 32; pub const ZMQ_ROUTER_MANDATORY: usize = 33; pub const ZMQ_TCP_KEEPALIVE: usize = 34; pub const ZMQ_TCP_KEEPALIVE_CNT: usize = 35; pub const ZMQ_TCP_KEEPALIVE_IDLE: usize = 36; pub const ZMQ_TCP_KEEPALIVE_INTVL: usize = 37; pub const ZMQ_IMMEDIATE: usize = 39; pub const ZMQ_XPUB_VERBOSE: usize = 40; pub const ZMQ_ROUTER_RAW: usize = 41; pub const ZMQ_IPV6: usize = 42; pub const ZMQ_MECHANISM: usize = 43; pub const ZMQ_PLAIN_SERVER: usize = 44; pub const ZMQ_PLAIN_USERNAME: usize = 45; pub const ZMQ_PLAIN_PASSWORD: usize = 46; pub const ZMQ_CURVE_SERVER: usize = 47; pub const ZMQ_CURVE_PUBLICKEY: usize = 48; pub const ZMQ_CURVE_SECRETKEY: usize = 49; pub const ZMQ_CURVE_SERVERKEY: usize = 50; pub const ZMQ_PROBE_ROUTER: usize = 51; pub const ZMQ_REQ_CORRELATE: usize = 52; pub const ZMQ_REQ_RELAXED: usize = 53; pub const ZMQ_CONFLATE: usize = 54; pub const ZMQ_ZAP_DOMAIN: usize = 55; pub const ZMQ_ROUTER_HANDOVER: usize = 56; pub const ZMQ_TOS: usize = 57; pub const ZMQ_CONNECT_RID: usize = 61; pub const ZMQ_GSSAPI_SERVER: usize = 62; pub const ZMQ_GSSAPI_PRINCIPAL: usize = 63; pub const ZMQ_GSSAPI_SERVICE_PRINCIPAL: usize = 64; pub const ZMQ_GSSAPI_PLAINTEXT: usize = 65; pub const ZMQ_HANDSHAKE_IVL: usize = 66; pub const ZMQ_SOCKS_PROXY: usize = 68; pub const ZMQ_XPUB_NODROP: usize = 69; pub const ZMQ_BLOCKY: usize = 70; pub const ZMQ_XPUB_MANUAL: usize = 71; pub const ZMQ_XPUB_WELCOME_MSG: usize = 72; pub const ZMQ_STREAM_NOTIFY: usize = 73; pub const ZMQ_INVERT_MATCHING: usize = 74; pub const ZMQ_HEARTBEAT_IVL: usize = 75; pub const ZMQ_HEARTBEAT_TTL: usize = 76; pub const ZMQ_HEARTBEAT_TIMEOUT: 
usize = 77; pub const ZMQ_MORE: usize = 1; pub const ZMQ_SRCFD: usize = 2; pub const ZMQ_SHARED: usize = 3; pub const ZMQ_DONTWAIT: usize = 1; pub const ZMQ_SNDMORE: usize = 2; pub const ZMQ_NULL: usize = 0; pub const ZMQ_PLAIN: usize = 1; pub const ZMQ_CURVE: usize = 2; pub const ZMQ_GSSAPI: usize = 3; pub const ZMQ_TCP_ACCEPT_FILTER: usize = 38; pub const ZMQ_IPC_FILTER_PID: usize = 58; pub const ZMQ_IPC_FILTER_UID: usize = 59; pub const ZMQ_IPC_FILTER_GID: usize = 60; pub const ZMQ_IPV4ONLY: usize = 31; pub const ZMQ_DELAY_ATTACH_ON_CONNECT: usize = ZMQ_IMMEDIATE; pub const ZMQ_NOBLOCK: usize = ZMQ_DONTWAIT; pub const ZMQ_FAIL_UNROUTABLE: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_ROUTER_BEHAVIOR: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_EVENT_CONNECTED: usize = 1; pub const ZMQ_EVENT_CONNECT_DELAYED: usize = 2; pub const ZMQ_EVENT_CONNECT_RETRIED: usize = 4; pub const ZMQ_EVENT_LISTENING: usize = 8; pub const ZMQ_EVENT_BIND_FAILED: usize = 16; pub const ZMQ_EVENT_ACCEPTED: usize = 32; pub const ZMQ_EVENT_ACCEPT_FAILED: usize = 64; pub const ZMQ_EVENT_CLOSED: usize = 128; pub const ZMQ_EVENT_CLOSE_FAILED: usize = 256; pub const ZMQ_EVENT_DISCONNECTED: usize = 512; pub const ZMQ_EVENT_MONITOR_STOPPED: usize = 1024; pub const ZMQ_EVENT_ALL: usize = 65536; pub const ZMQ_POLLIN: usize = 1; pub const ZMQ_POLLOUT: usize = 2; pub const ZMQ_POLLERR: usize = 4; pub const ZMQ_POLLPRI: usize = 8; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_pollitem_t { socket: *mut libc::c_void, fd: libc::c_int, events: libc::c_short, revents: libc::c_short, } impl Clone for Struct_zmq_pollitem_t { fn clone(&self) -> Self { *self } } impl Default for Struct_zmq_pollitem_t { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[allow(non_camel_case_types)] pub type zmq_pollitem_t = Struct_zmq_pollitem_t; pub const ZMQ_POLLITEMS_DFLT: usize = 16; pub const ZMQ_HAS_CAPABILITIES: usize = 1; pub const ZMQ_STREAMER: usize = 1; pub const ZMQ_FORWARDER: usize = 2; pub const 
ZMQ_QUEUE: usize = 3; #[repr(C)] #[allow(non_camel_case_types)] pub struct iovec; #[allow(non_camel_case_types)] pub type zmq_thread_fn = extern "C" fn(arg: *mut libc::c_void); #[link(name = "zmq")] extern { pub fn zmq_errno(); pub fn zmq_strerror(errnum: &i32) -> *const libc::c_char; pub fn zmq_version(major: &mut i32, minor: &mut i32, patch: &mut i32); pub fn zmq_ctx_new() -> *mut libc::c_void; pub fn zmq_ctx_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_shutdown(ctx_: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_set(context: *mut libc::c_void, option: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_ctx_get(context: *mut libc::c_void, option: libc::c_int) -> libc::c_int; pub fn zmq_init(io_threads: libc::c_int) -> *mut libc::c_void; pub fn zmq_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_destroy(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_init(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_init_size(msg: *mut zmq_msg_t, size: libc::size_t) -> libc::c_int; pub fn zmq_msg_init_data(msg: *mut zmq_msg_t, data: *mut libc::c_void, size: libc::size_t, ffn: *mut zmq_free_fn, hint: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_send(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_recv(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_close(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_move(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_copy(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_data(msg: *mut zmq_msg_t) -> *mut libc::c_void; pub fn zmq_msg_size(msg: *mut zmq_msg_t) -> libc::size_t; pub fn zmq_msg_more(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_get(msg: *mut zmq_msg_t, property: libc::c_int) -> libc::c_int; pub fn zmq_msg_set(msg: *mut zmq_msg_t, property: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_msg_gets(msg: 
*mut zmq_msg_t, property: *const libc::c_char) -> *const libc::c_char; pub fn zmq_msg_set_routing_id(msg: *mut zmq_msg_t, routing_id: libc::uint32_t) -> libc::c_int; pub fn zmq_msg_get_routing_id(msg: *mut zmq_msg_t) -> libc::uint32_t; pub fn zmq_socket(s: *mut libc::c_void, t: libc::c_int) -> *mut libc::c_void; pub fn zmq_close(s: *mut libc::c_void) -> libc::c_int; pub fn zmq_setsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *const libc::c_void, optvallen: libc::size_t) -> libc::c_int; pub fn zmq_getsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *mut libc::c_void, optvallen: *mut libc::size_t) -> libc::c_int; pub fn zmq_bind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_connect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_unbind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_disconnect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_send(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_send_const(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recv(s: *mut libc::c_void, buf: *mut libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_socket_monitor(s: *mut libc::c_void, addr: *const libc::c_char, events: libc::c_int) -> libc::c_int; pub fn zmq_poll(items: *mut zmq_pollitem_t, nitems: libc::c_int, timeout: libc::c_long) -> libc::c_int; pub fn zmq_proxy(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void) -> libc::c_int; pub fn zmq_proxy_steerable(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void, control: *mut libc::c_void) -> libc::c_int; pub fn zmq_has(capabitility: *const libc::c_char) -> libc::c_int; pub fn zmq_device(t: libc::c_int, frontend: *mut libc::c_void, backend: *mut libc::c_void) -> 
libc::c_int; pub fn zmq_sendmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recvmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_z85_encode(dest: *mut libc::c_char, data: *const libc::uint8_t, size: libc::size_t) -> *mut libc::c_char; pub fn zmq_z85_decode(dest: *mut libc::uint8_t, string: *const libc::c_char) -> libc::uint8_t; pub fn zmq_curve_keypair(z85_public_key: *mut libc::c_char, z85_secret_key: *mut libc::c_char) -> libc::c_int; pub fn zmq_atomic_counter_new() -> *mut libc::c_void; pub fn zmq_atomic_counter_set(counter: *mut libc::c_void, value: libc::c_int); pub fn zmq_atomic_counter_inc(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_dec(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_value(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_destroy(counter: *mut *mut libc::c_void); pub fn zmq_stopwatch_start() -> *mut libc::c_void; pub fn zmq_stopwatch_stop(watch_: *mut libc::c_void) -> libc::c_ulong; pub fn zmq_sleep(seconds_: libc::c_int); pub fn zmq_threadstart(func: *mut zmq_thread_fn, arg: *mut libc::c_void); pub fn zmq_threadclose(thread: *mut libc::c_void); pub fn zmq_sendiov(s: *mut libc::c_void, iov: *mut iovec, count: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recviov(s: *mut libc::c_void, iov: *mut iovec, count: *mut libc::size_t, flags: libc::c_int) -> libc::c_int; }
extern crate libc; const ZMQ_HAUSNUMERO: usize = 156384712; pub const EFSM: usize = ZMQ_HAUSNUMERO + 51; pub const ENOCOMPATPROTO: usize = ZMQ_HAUSNUMERO + 52; pub const ETERM: usize = ZMQ_HAUSNUMERO + 53; pub const EMTHREAD: usize = ZMQ_HAUSNUMERO + 54; pub const ZMQ_IO_THREADS: usize = 1; pub const ZMQ_MAX_SOCKETS: usize = 2; pub const ZMQ_SOCKET_LIMIT: usize = 3; pub const ZMQ_THREAD_PRIORITY: usize = 3; pub const ZMQ_THREAD_SCHED_POLICY: usize = 4; pub const ZMQ_IO_THREADS_DFLT: usize = 1; pub const ZMQ_MAX_SOCKETS_DFLT: usize = 1023; pub const ZMQ_THREAD_PRIORITY_DFLT: isize = -1; pub const ZMQ_THREAD_SCHED_POLICY_DFLT: isize = -1; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_msg_t { pub _m: [::libc::c_uchar; 64usize], } impl Clone for Struct_zmq_msg_t { fn clone(&self) -> Self { *self } } #[allow(non_camel_case_types)] pub type zmq_msg_t = Struct_zmq_msg_t; #[allow(non_camel_case_types)] pub type zmq_free_fn = extern "C" fn(data: *mut ::libc::c_void, hint: *mut ::libc::c_void); pub const ZMQ_PAIR: usize = 0; pub const ZMQ_PUB: usize = 1; pub const ZMQ_SUB: usize = 2; pub const ZMQ_REQ: usize = 3; pub const ZMQ_REP: usize = 4; pub const ZMQ_DEALER: usize = 5; pub const ZMQ_ROUTER: usize = 6; pub const ZMQ_PULL: usize = 7; pub const ZMQ_PUSH: usize = 8; pub const ZMQ_XPUB: usize = 9; pub const ZMQ_XSUB: usize = 10; pub const ZMQ_STREAM: usize = 11; pub const ZMQ_SERVER: usize = 12; pub const ZMQ_CLIENT: usize = 13; pub const ZMQ_XREQ: usize = ZMQ_DEALER; pub const ZMQ_XREP: usize = ZMQ_ROUTER; pub const ZMQ_AFFINITY: usize = 4; pub const ZMQ_IDENTITY: usize = 5; pub const ZMQ_SUBSCRIBE: usize = 6; pub const ZMQ_UNSUBSCRIBE: usize = 7; pub const ZMQ_RATE: usize = 8; pub const ZMQ_RECOVERY_IVL: usize = 9; pub const ZMQ_SNDBUF: usize = 11; pub const ZMQ_RCVBUF: usize = 12; pub const ZMQ_RCVMORE: usize = 13; pub const ZMQ_FD: usize = 14; pub const ZMQ_EVENTS: usize = 15; pub const ZMQ_TYPE: usize = 16; pub const ZMQ_LINGER: usize = 17; pub const 
ZMQ_RECONNECT_IVL: usize = 18; pub const ZMQ_BACKLOG: usize = 19; pub const ZMQ_RECONNECT_IVL_MAX: usize = 21; pub const ZMQ_MAXMSGSIZE: usize = 22; pub const ZMQ_SNDHWM: usize = 23; pub const ZMQ_RCVHWM: usize = 24; pub const ZMQ_MULTICAST_HOPS: usize = 25; pub const ZMQ_RCVTIMEO: usize = 27; pub const ZMQ_SNDTIMEO: usize = 28; pub const ZMQ_LAST_ENDPOINT: usize = 32; pub const ZMQ_ROUTER_MANDATORY: usize = 33; pub const ZMQ_TCP_KEEPALIVE: usize = 34; pub const ZMQ_TCP_KEEPALIVE_CNT: usize = 35; pub const ZMQ_TCP_KEEPALIVE_IDLE: usize = 36; pub const ZMQ_TCP_KEEPALIVE_INTVL: usize = 37; pub const ZMQ_IMMEDIATE: usize = 39; pub const ZMQ_XPUB_VERBOSE: usize = 40; pub const ZMQ_ROUTER_RAW: usize = 41; pub const ZMQ_IPV6: usize = 42; pub const ZMQ_MECHANISM: usize = 43; pub const ZMQ_PLAIN_SERVER: usize = 44; pub const ZMQ_PLAIN_USERNAME: usize = 45; pub const ZMQ_PLAIN_PASSWORD: usize = 46; pub const ZMQ_CURVE_SERVER: usize = 47; pub const ZMQ_CURVE_PUBLICKEY: usize = 48; pub const ZMQ_CURVE_SECRETKEY: usize = 49; pub const ZMQ_CURVE_SERVERKEY: usize = 50; pub const ZMQ_PROBE_ROUTER: usize = 51; pub const ZMQ_REQ_CORRELATE: usize = 52; pub const ZMQ_REQ_RELAXED: usize = 53; pub const ZMQ_CONFLATE: usize = 54; pub const ZMQ_ZAP_DOMAIN: usize = 55; pub const ZMQ_ROUTER_HANDOVER: usize = 56; pub const ZMQ_TOS: usize = 57; pub const ZMQ_CONNECT_RID: usize = 61; pub const ZMQ_GSSAPI_SERVER: usize = 62; pub const ZMQ_GSSAPI_PRINCIPAL: usize = 63; pub const ZMQ_GSSAPI_SERVICE_PRINCIPAL: usize = 64; pub const ZMQ_GSSAPI_PLAINTEXT: usize = 65; pub const ZMQ_HANDSHAKE_IVL: usize = 66; pub const ZMQ_SOCKS_PROXY: usize = 68; pub const ZMQ_XPUB_NODROP: usize = 69; pub const ZMQ_BLOCKY: usize = 70; pub const ZMQ_XPUB_MANUAL: usize = 71; pub const ZMQ_XPUB_WELCOME_MSG: usize = 72; pub const ZMQ_STREAM_NOTIFY: usize = 73; pub const ZMQ_INVERT_MATCHING: usize = 74; pub const ZMQ_HEARTBEAT_IVL: usize = 75; pub const ZMQ_HEARTBEAT_TTL: usize = 76; pub const ZMQ_HEARTBEAT_TIMEOUT: 
usize = 77; pub const ZMQ_MORE: usize = 1; pub const ZMQ_SRCFD: usize = 2; pub const ZMQ_SHARED: usize = 3; pub const ZMQ_DONTWAIT: usize = 1; pub const ZMQ_SNDMORE: usize = 2; pub const ZMQ_NULL: usize = 0; pub const ZMQ_PLAIN: usize = 1; pub const ZMQ_CURVE: usize = 2; pub const ZMQ_GSSAPI: usize = 3; pub const ZMQ_TCP_ACCEPT_FILTER: usize = 38; pub const ZMQ_IPC_FILTER_PID: usize = 58; pub const ZMQ_IPC_FILTER_UID: usize = 59; pub const ZMQ_IPC_FILTER_GID: usize = 60; pub const ZMQ_IPV4ONLY: usize = 31; pub const ZMQ_DELAY_ATTACH_ON_CONNECT: usize = ZMQ_IMMEDIATE; pub const ZMQ_NOBLOCK: usize = ZMQ_DONTWAIT; pub const ZMQ_FAIL_UNROUTABLE: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_ROUTER_BEHAVIOR: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_EVENT_CONNECTED: usize = 1; pub const ZMQ_EVENT_CONNECT_DELAYED: usize = 2; pub const ZMQ_EVENT_CONNECT_RETRIED: usize = 4; pub const ZMQ_EVENT_LISTENING: usize = 8; pub const ZMQ_EVENT_BIND_FAILED: usize = 16; pub const ZMQ_EVENT_ACCEPTED: usize = 32; pub const ZMQ_EVENT_ACCEPT_FAILED: usize = 64; pub const ZMQ_EVENT_CLOSED: usize = 128; pub const ZMQ_EVENT_CLOSE_FAILED: usize = 256; pub const ZMQ_EVENT_DISCONNECTED: usize = 512; pub const ZMQ_EVENT_MONITOR_STOPPED: usize = 1024; pub const ZMQ_EVENT_ALL: usize = 65536; pub const ZMQ_POLLIN: usize = 1; pub const ZMQ_POLLOUT: usize = 2; pub const ZMQ_POLLERR: usize = 4; pub const ZMQ_POLLPRI: usize = 8; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_pollitem_t { socket: *mut libc::c_void, fd: libc::c_int, events: libc::c_short, revents: libc::c_short, } impl Clone for Struct_zmq_pollitem_t { fn clone(&self) -> Self { *self } } impl Default for Struct_zmq_pollitem_t { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[allow(non_camel_case_types)] pub type zmq_pollitem_t = Struct_zmq_pollitem_t; pub const ZMQ_POLLITEMS_DFLT: usize = 16; pub const ZMQ_HAS_CAPABILITIES: usize = 1; pub const ZMQ_STREAMER: usize = 1; pub const ZMQ_FORWARDER: usize = 2; pub const 
ZMQ_QUEUE: usize = 3; #[repr(C)] #[allow(non_camel_case_types)] pub struct iovec; #[allow(non_camel_case_types)] pub type zmq_thread_fn = extern "C" fn(arg: *mut libc::c_void); #[link(name = "zmq")] extern {
ibc::c_void, t: libc::c_int) -> *mut libc::c_void; pub fn zmq_close(s: *mut libc::c_void) -> libc::c_int; pub fn zmq_setsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *const libc::c_void, optvallen: libc::size_t) -> libc::c_int; pub fn zmq_getsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *mut libc::c_void, optvallen: *mut libc::size_t) -> libc::c_int; pub fn zmq_bind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_connect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_unbind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_disconnect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_send(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_send_const(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recv(s: *mut libc::c_void, buf: *mut libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_socket_monitor(s: *mut libc::c_void, addr: *const libc::c_char, events: libc::c_int) -> libc::c_int; pub fn zmq_poll(items: *mut zmq_pollitem_t, nitems: libc::c_int, timeout: libc::c_long) -> libc::c_int; pub fn zmq_proxy(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void) -> libc::c_int; pub fn zmq_proxy_steerable(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void, control: *mut libc::c_void) -> libc::c_int; pub fn zmq_has(capabitility: *const libc::c_char) -> libc::c_int; pub fn zmq_device(t: libc::c_int, frontend: *mut libc::c_void, backend: *mut libc::c_void) -> libc::c_int; pub fn zmq_sendmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recvmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_z85_encode(dest: *mut libc::c_char, data: *const 
libc::uint8_t, size: libc::size_t) -> *mut libc::c_char; pub fn zmq_z85_decode(dest: *mut libc::uint8_t, string: *const libc::c_char) -> libc::uint8_t; pub fn zmq_curve_keypair(z85_public_key: *mut libc::c_char, z85_secret_key: *mut libc::c_char) -> libc::c_int; pub fn zmq_atomic_counter_new() -> *mut libc::c_void; pub fn zmq_atomic_counter_set(counter: *mut libc::c_void, value: libc::c_int); pub fn zmq_atomic_counter_inc(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_dec(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_value(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_destroy(counter: *mut *mut libc::c_void); pub fn zmq_stopwatch_start() -> *mut libc::c_void; pub fn zmq_stopwatch_stop(watch_: *mut libc::c_void) -> libc::c_ulong; pub fn zmq_sleep(seconds_: libc::c_int); pub fn zmq_threadstart(func: *mut zmq_thread_fn, arg: *mut libc::c_void); pub fn zmq_threadclose(thread: *mut libc::c_void); pub fn zmq_sendiov(s: *mut libc::c_void, iov: *mut iovec, count: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recviov(s: *mut libc::c_void, iov: *mut iovec, count: *mut libc::size_t, flags: libc::c_int) -> libc::c_int; }
pub fn zmq_errno(); pub fn zmq_strerror(errnum: &i32) -> *const libc::c_char; pub fn zmq_version(major: &mut i32, minor: &mut i32, patch: &mut i32); pub fn zmq_ctx_new() -> *mut libc::c_void; pub fn zmq_ctx_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_shutdown(ctx_: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_set(context: *mut libc::c_void, option: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_ctx_get(context: *mut libc::c_void, option: libc::c_int) -> libc::c_int; pub fn zmq_init(io_threads: libc::c_int) -> *mut libc::c_void; pub fn zmq_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_destroy(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_init(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_init_size(msg: *mut zmq_msg_t, size: libc::size_t) -> libc::c_int; pub fn zmq_msg_init_data(msg: *mut zmq_msg_t, data: *mut libc::c_void, size: libc::size_t, ffn: *mut zmq_free_fn, hint: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_send(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_recv(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_close(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_move(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_copy(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_data(msg: *mut zmq_msg_t) -> *mut libc::c_void; pub fn zmq_msg_size(msg: *mut zmq_msg_t) -> libc::size_t; pub fn zmq_msg_more(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_get(msg: *mut zmq_msg_t, property: libc::c_int) -> libc::c_int; pub fn zmq_msg_set(msg: *mut zmq_msg_t, property: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_msg_gets(msg: *mut zmq_msg_t, property: *const libc::c_char) -> *const libc::c_char; pub fn zmq_msg_set_routing_id(msg: *mut zmq_msg_t, routing_id: libc::uint32_t) -> libc::c_int; pub fn zmq_msg_get_routing_id(msg: *mut 
zmq_msg_t) -> libc::uint32_t; pub fn zmq_socket(s: *mut l
random
[ { "content": "## rust-libzmq\n\n\n\nRust low-level bindings to [libzmq](https://github.com/zeromq/libzmq).\n\n\n\nCurrent version of `rust-libzmq` is built against libzmq version 4.2.0.\n\n\n\nIf you are looking for the high-level Rust ZeroMQ bindings, please\n\ncheck [rust-zmq](https://github.com/erickt/rust-z...
Rust
research/query_service/ir/runtime/src/process/operator/sort/sort.rs
lnfjpt/GraphScope
917146f86d8387302a2e1de6963115e7568bf3ee
use std::cmp::Ordering; use std::convert::{TryFrom, TryInto}; use ir_common::error::ParsePbError; use ir_common::generated::algebra as algebra_pb; use ir_common::generated::algebra::order_by::ordering_pair::Order; use crate::error::FnGenResult; use crate::process::functions::CompareFunction; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::TagKey; use crate::process::record::Record; #[derive(Debug)] struct RecordCompare { tag_key_order: Vec<(TagKey, Order)>, } impl CompareFunction<Record> for RecordCompare { fn compare(&self, left: &Record, right: &Record) -> Ordering { let mut result = Ordering::Equal; for (tag_key, order) in self.tag_key_order.iter() { let left_obj = tag_key.get_arc_entry(left).ok(); let right_obj = tag_key.get_arc_entry(right).ok(); let ordering = left_obj.partial_cmp(&right_obj); if let Some(ordering) = ordering { if Ordering::Equal != ordering { result = { match order { Order::Desc => ordering.reverse(), _ => ordering, } }; break; } } } result } } impl CompareFunctionGen for algebra_pb::OrderBy { fn gen_cmp(self) -> FnGenResult<Box<dyn CompareFunction<Record>>> { let record_compare = RecordCompare::try_from(self)?; debug!("Runtime order operator cmp: {:?}", record_compare); Ok(Box::new(record_compare)) } } impl TryFrom<algebra_pb::OrderBy> for RecordCompare { type Error = ParsePbError; fn try_from(order_pb: algebra_pb::OrderBy) -> Result<Self, Self::Error> { let mut tag_key_order = Vec::with_capacity(order_pb.pairs.len()); for order_pair in order_pb.pairs { let key = order_pair .key .ok_or(ParsePbError::EmptyFieldError("key is empty in order".to_string()))? 
.try_into()?; let order: Order = unsafe { ::std::mem::transmute(order_pair.order) }; tag_key_order.push((key, order)); } Ok(RecordCompare { tag_key_order }) } } #[cfg(test)] mod tests { use std::collections::HashMap; use dyn_type::Object; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use ir_common::NameOrId; use pegasus::api::{Sink, SortBy}; use pegasus::result::ResultStream; use pegasus::JobConf; use crate::graph::element::{Element, GraphElement, Vertex}; use crate::graph::property::{DefaultDetails, Details, DynDetails}; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::tests::{init_source, init_source_with_tag, to_var_pb, TAG_A}; use crate::process::record::Record; fn sort_test(source: Vec<Record>, sort_opr: pb::OrderBy) -> ResultStream<Record> { let conf = JobConf::new("sort_test"); let result = pegasus::run(conf, || { let source = source.clone(); let sort_opr = sort_opr.clone(); |input, output| { let mut stream = input.input_from(source.into_iter())?; let sort_func = sort_opr.gen_cmp().unwrap(); stream = stream.sort_by(move |a, b| sort_func.compare(a, b))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn sort_simple_ascending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 1, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_simple_descending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut 
result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_name = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_name.push( element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), ); } } let expected_names = vec![object!("vadas"), object!("marko")]; assert_eq!(result_name, expected_names); } #[test] fn sort_by_multi_property_test() { let map3: HashMap<NameOrId, Object> = vec![("id".into(), object!(3)), ("age".into(), object!(20)), ("name".into(), object!("marko"))] .into_iter() .collect(); let v3 = Vertex::new(1, Some("person".into()), DynDetails::new(DefaultDetails::new(map3))); let mut source = init_source(); source.push(Record::new(v3, None)); let sort_opr = pb::OrderBy { pairs: vec![ pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 1, }, pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.age".to_string())), order: 2, }, ], limit: None, }; let mut result = sort_test(source, sort_opr); let mut result_name_ages = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { let details = element.details().unwrap(); result_name_ages.push(( details .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), details .get_property(&"age".into()) .unwrap() .try_to_owned() .unwrap(), )); } } let expected_name_ages = vec![ (object!("marko"), object!(29)), (object!("marko"), 
object!(20)), (object!("vadas"), object!(27)), ]; assert_eq!(result_name_ages, expected_name_ages); } #[test] fn sort_by_tag_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), None)), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_tag_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), Some("age".into()))), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } }
use std::cmp::Ordering; use std::convert::{TryFrom, TryInto}; use ir_common::error::ParsePbError; use ir_common::generated::algebra as algebra_pb; use ir_common::generated::algebra::order_by::ordering_pair::Order; use crate::error::FnGenResult; use crate::process::functions::CompareFunction; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::TagKey; use crate::process::record::Record; #[derive(Debug)] struct RecordCompare { tag_key_order: Vec<(TagKey, Order)>, } impl CompareFunction<Record> for RecordCompare { fn compare(&self, left: &Record, right: &Record) -> Ordering { let mut result = Ordering::Equal; for (tag_key, order) in self.tag_key_order.iter() { let left_obj = tag_key.get_arc_entry(left).ok(); let right_obj = tag_key.get_arc_entry(right).ok(); let ordering = left_obj.partial_cmp(&right_obj); if let Some(ordering) = ordering { if Ordering::Equal != ordering { result = { match order { Order::Desc => ordering.reverse(), _ => ordering, } }; break; } } } result } } impl CompareFunctionGen for algebra_pb::OrderBy { fn gen_cmp(self) -> FnGenResult<Box<dyn CompareFunction<Record>>> { let record_compare = RecordCompare::try_from(self)?; debug!("Runtime order operator cmp: {:?}", record_compare); Ok(Box::new(record_compare)) } } impl TryFrom<algebra_pb::OrderBy> for RecordCompare { type Error = ParsePbError; fn try_from(order_pb: algebra_pb::OrderBy) -> Result<Self, Self::Error> { let mut tag_key_order = Vec::with_capacity(order_pb.pairs.len()); for order_pair in order_pb.pairs { let key = order_pair .key .ok_or(ParsePbError::EmptyFieldError("key is empty in order".to_string()))? 
.try_into()?; let order: Order = unsafe { ::std::mem::transmute(order_pair.order) }; tag_key_order.push((key, order)); } Ok(RecordCompare { tag_key_order }) } } #[cfg(test)] mod tests { use std::collections::HashMap; use dyn_type::Object; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use ir_common::NameOrId; use pegasus::api::{Sink, SortBy}; use pegasus::result::ResultStream; use pegasus::JobConf; use crate::graph::element::{Element, GraphElement, Vertex}; use crate::graph::property::{DefaultDetails, Details, DynDetails}; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::tests::{init_source, init_source_with_tag, to_var_pb, TAG_A}; use crate::process::record::Record; fn sort_test(source: Vec<Record>, sort_opr: pb::OrderBy) -> ResultStream<Record> { let conf = JobConf::new("sort_test"); let result = pegasus::run(conf, || { let source = source.clone(); let sort_opr = sort_opr.clone(); |input, output| { let mut stream = input.input_from(source.into_iter())?; let sort_func = sort_opr.gen_cmp().unwrap(); stream = stream.sort_by(move |a, b| sort_func.compare(a, b))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn sort_simple_ascending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 1, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_simple_descending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut 
result_ids = vec![]; while let Some(Ok(record)) = result.next() {
#[test] fn sort_by_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_name = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_name.push( element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), ); } } let expected_names = vec![object!("vadas"), object!("marko")]; assert_eq!(result_name, expected_names); } #[test] fn sort_by_multi_property_test() { let map3: HashMap<NameOrId, Object> = vec![("id".into(), object!(3)), ("age".into(), object!(20)), ("name".into(), object!("marko"))] .into_iter() .collect(); let v3 = Vertex::new(1, Some("person".into()), DynDetails::new(DefaultDetails::new(map3))); let mut source = init_source(); source.push(Record::new(v3, None)); let sort_opr = pb::OrderBy { pairs: vec![ pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 1, }, pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.age".to_string())), order: 2, }, ], limit: None, }; let mut result = sort_test(source, sort_opr); let mut result_name_ages = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { let details = element.details().unwrap(); result_name_ages.push(( details .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), details .get_property(&"age".into()) .unwrap() .try_to_owned() .unwrap(), )); } } let expected_name_ages = vec![ (object!("marko"), object!(29)), (object!("marko"), object!(20)), (object!("vadas"), object!(27)), ]; assert_eq!(result_name_ages, expected_name_ages); } #[test] fn sort_by_tag_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), None)), 
order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_tag_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), Some("age".into()))), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } }
if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); }
function_block-function_prefix_line
[ { "content": "fn create_src(id: u32, source: &mut Source<i32>) -> Result<(Stream<i32>, Stream<i32>), BuildJobError> {\n\n let src1 = if id == 0 { source.input_from(1..5)? } else { source.input_from(8..10)? };\n\n let (src1, src2) = src1.copied()?;\n\n let src2 = src2.map(|x| Ok(x + 1))?;\n\n Ok((src...
Rust
src/parser/lexer/mod.rs
mitsuhiko/lol-html
2772fd0e9feb4a820bfc0cb460e9441b0d0c205d
#[macro_use] mod actions; mod conditions; mod lexeme; use crate::base::{Align, Range}; use crate::html::{LocalNameHash, Namespace, TextType}; use crate::parser::state_machine::{ ActionError, ActionResult, FeedbackDirective, StateMachine, StateResult, }; use crate::parser::{ ParserDirective, ParsingAmbiguityError, TreeBuilderFeedback, TreeBuilderSimulator, }; use crate::rewriter::RewritingError; use std::cell::RefCell; use std::rc::Rc; pub use self::lexeme::*; const DEFAULT_ATTR_BUFFER_CAPACITY: usize = 256; pub trait LexemeSink { fn handle_tag(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError>; fn handle_non_tag_content( &mut self, lexeme: &NonTagContentLexeme, ) -> Result<(), RewritingError>; } pub type State<S> = fn(&mut Lexer<S>, &[u8]) -> StateResult; pub type SharedAttributeBuffer = Rc<RefCell<Vec<AttributeOutline>>>; pub struct Lexer<S: LexemeSink> { next_pos: usize, is_last_input: bool, lexeme_start: usize, token_part_start: usize, is_state_enter: bool, cdata_allowed: bool, lexeme_sink: S, state: State<S>, current_tag_token: Option<TagTokenOutline>, current_non_tag_content_token: Option<NonTagContentTokenOutline>, current_attr: Option<AttributeOutline>, last_start_tag_name_hash: LocalNameHash, closing_quote: u8, attr_buffer: SharedAttributeBuffer, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>, last_text_type: TextType, feedback_directive: FeedbackDirective, } impl<S: LexemeSink> Lexer<S> { pub fn new(lexeme_sink: S, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>) -> Self { Lexer { next_pos: 0, is_last_input: false, lexeme_start: 0, token_part_start: 0, is_state_enter: true, cdata_allowed: false, lexeme_sink, state: Lexer::data_state, current_tag_token: None, current_non_tag_content_token: None, current_attr: None, last_start_tag_name_hash: LocalNameHash::default(), closing_quote: b'"', attr_buffer: Rc::new(RefCell::new(Vec::with_capacity( DEFAULT_ATTR_BUFFER_CAPACITY, ))), tree_builder_simulator, last_text_type: 
TextType::Data, feedback_directive: FeedbackDirective::None, } } fn try_get_tree_builder_feedback( &mut self, token: &TagTokenOutline, ) -> Result<Option<TreeBuilderFeedback>, ParsingAmbiguityError> { Ok(match self.feedback_directive.take() { FeedbackDirective::ApplyUnhandledFeedback(feedback) => Some(feedback), FeedbackDirective::Skip => None, FeedbackDirective::None => Some({ let mut simulator = self.tree_builder_simulator.borrow_mut(); match *token { TagTokenOutline::StartTag { name_hash, .. } => { simulator.get_feedback_for_start_tag(name_hash)? } TagTokenOutline::EndTag { name_hash, .. } => { simulator.get_feedback_for_end_tag(name_hash) } } }), }) } fn handle_tree_builder_feedback(&mut self, feedback: TreeBuilderFeedback, lexeme: &TagLexeme) { match feedback { TreeBuilderFeedback::SwitchTextType(text_type) => self.set_last_text_type(text_type), TreeBuilderFeedback::SetAllowCdata(cdata_allowed) => self.cdata_allowed = cdata_allowed, TreeBuilderFeedback::RequestLexeme(mut callback) => { let feedback = callback(&mut self.tree_builder_simulator.borrow_mut(), lexeme); self.handle_tree_builder_feedback(feedback, lexeme); } TreeBuilderFeedback::None => (), } } #[inline] fn emit_lexeme(&mut self, lexeme: &NonTagContentLexeme) -> ActionResult { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink .handle_non_tag_content(lexeme) .map_err(ActionError::RewritingError) } #[inline] fn emit_tag_lexeme(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError> { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink.handle_tag(lexeme) } #[inline] fn create_lexeme_with_raw<'i, T>( &mut self, input: &'i [u8], token: T, raw_end: usize, ) -> Lexeme<'i, T> { Lexeme::new( input.into(), token, Range { start: self.lexeme_start, end: raw_end, }, ) } #[inline] fn create_lexeme_with_raw_inclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos() + 1; 
self.create_lexeme_with_raw(input, token, raw_end) } #[inline] fn create_lexeme_with_raw_exclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos(); self.create_lexeme_with_raw(input, token, raw_end) } } impl<S: LexemeSink> StateMachine for Lexer<S> { impl_common_sm_accessors!(); impl_common_input_cursor_methods!(); #[inline] fn set_state(&mut self, state: State<S>) { self.state = state; } #[inline] fn state(&self) -> State<S> { self.state } #[inline] fn get_consumed_byte_count(&self, _input: &[u8]) -> usize { self.lexeme_start } fn adjust_for_next_input(&mut self) { self.token_part_start.align(self.lexeme_start); self.current_tag_token.align(self.lexeme_start); self.current_non_tag_content_token.align(self.lexeme_start); self.current_attr.align(self.lexeme_start); self.lexeme_start = 0; } #[inline] fn adjust_to_bookmark(&mut self, pos: usize, feedback_directive: FeedbackDirective) { self.lexeme_start = pos; self.feedback_directive = feedback_directive; } #[inline] fn enter_ch_sequence_matching(&mut self) { trace!(@noop); } #[inline] fn leave_ch_sequence_matching(&mut self) { trace!(@noop); } }
#[macro_use] mod actions; mod conditions; mod lexeme; use crate::base::{Align, Range}; use crate::html::{LocalNameHash, Namespace, TextType}; use crate::parser::state_machine::{ ActionError, ActionResult, FeedbackDirective, StateMachine, StateResult, }; use crate::parser::{ ParserDirective, ParsingAmbiguityError, TreeBuilderFeedback, TreeBuilderSimulator, }; use crate::rewriter::RewritingError; use std::cell::RefCell; use std::rc::Rc; pub use self::lexeme::*; const DEFAULT_ATTR_BUFFER_CAPACITY: usize = 256; pub trait LexemeSink { fn handle_tag(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError>; fn handle_non_tag_content( &mut self, lexeme: &NonTagContentLexeme, ) -> Result<(), RewritingError>; } pub type State<S> = fn(&mut Lexer<S>, &[u8]) -> StateResult; pub type SharedAttributeBuffer = Rc<RefCell<Vec<AttributeOutline>>>; pub struct Lexer<S: LexemeSink> { next_pos: usize, is_last_input: bool, lexeme_start: usize, token_part_start: usize, is_state_enter: bool, cdata_allowed: bool, lexeme_sink: S, state: State<S>, current_tag_token: Option<TagTokenOutline>, current_non_tag_content_token: Option<NonTagContentTokenOutline>, current_attr: Option<AttributeOutline>, last_start_tag_name_hash: LocalNameHash, closing_quote: u8, attr_buffer: SharedAttributeBuffer, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>, last_text_type: TextType, feedback_directive: FeedbackDirective, } impl<S: LexemeSink> Lexer<S> { pub fn new(lexeme_sink: S, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>) -> Self { Lexer { next_pos: 0, is_last_input: false, lexeme_start: 0, token_part_start: 0, is_state_enter: true, cdata_allowed: false, lexeme_sink, state: Lexer::data_state, current_tag_token: None, current_non_tag_content_token: None, current_attr: None, last_start_tag_name_hash: LocalNameHash::default(), closing_quote: b'"', attr_buffer: Rc::new(RefCell::new(Vec::with_capacity( DEFAULT_ATTR_BUFFER_CAPACITY, ))), tree_builder_simulator, last_text_type: 
TextType::Data, feedback_directive: FeedbackDirective::None, } } fn try_get_tree_builder_feedback( &mut self, token: &TagTokenOutline, ) -> Result<Option<TreeBuilderFeedback>, ParsingAmbiguityError> { Ok(match self.feedback_directive.take() { FeedbackDirective::ApplyUnhandledFeedback(feedback) => Some(feedback), FeedbackDirective::Skip => None, FeedbackDirective::None => Some({ let mut simulator = self.tree_builder_simulator.borrow_mut(); match *token { TagTokenOutline::StartTag { name_hash, .. } => { simulator.get_feedback_for_start_tag(name_hash)? } TagTokenOutline::EndTag { name_hash, .. } => { simulator.get_feedback_for_end_tag(name_hash) } } }), }) } fn handle_tree_builder_feedback(&mut self, feedback: TreeBuilderFeedback, lexeme: &TagLexeme) { match feedback { TreeBuilderFeedback::SwitchTextType(text_type) => self.set_last_text_type(text_type), TreeBuilderFeedback::SetAllowCdata(cdata_allowed) => self.cdata_allowed = cdata_allowed, TreeBuilderFeedback::RequestLexeme(mut callback) => { let feedback = callback(&mut self.tree_builder_simulator.borrow_mut(), lexeme); self.handle_tree_builder_feedback(feedback, lexeme); } TreeBuilderFeedback::None => (), } } #[inline] fn emit_lexeme(&mut self, lexeme: &NonTagContentLexeme) -> ActionResult { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink .handle_non_tag_content(lexeme) .map_err(ActionError::RewritingError) } #[inline] fn emit_tag_lexeme(&mut self, lexeme: &TagLexem
#[inline] fn create_lexeme_with_raw<'i, T>( &mut self, input: &'i [u8], token: T, raw_end: usize, ) -> Lexeme<'i, T> { Lexeme::new( input.into(), token, Range { start: self.lexeme_start, end: raw_end, }, ) } #[inline] fn create_lexeme_with_raw_inclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos() + 1; self.create_lexeme_with_raw(input, token, raw_end) } #[inline] fn create_lexeme_with_raw_exclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos(); self.create_lexeme_with_raw(input, token, raw_end) } } impl<S: LexemeSink> StateMachine for Lexer<S> { impl_common_sm_accessors!(); impl_common_input_cursor_methods!(); #[inline] fn set_state(&mut self, state: State<S>) { self.state = state; } #[inline] fn state(&self) -> State<S> { self.state } #[inline] fn get_consumed_byte_count(&self, _input: &[u8]) -> usize { self.lexeme_start } fn adjust_for_next_input(&mut self) { self.token_part_start.align(self.lexeme_start); self.current_tag_token.align(self.lexeme_start); self.current_non_tag_content_token.align(self.lexeme_start); self.current_attr.align(self.lexeme_start); self.lexeme_start = 0; } #[inline] fn adjust_to_bookmark(&mut self, pos: usize, feedback_directive: FeedbackDirective) { self.lexeme_start = pos; self.feedback_directive = feedback_directive; } #[inline] fn enter_ch_sequence_matching(&mut self) { trace!(@noop); } #[inline] fn leave_ch_sequence_matching(&mut self) { trace!(@noop); } }
e) -> Result<ParserDirective, RewritingError> { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink.handle_tag(lexeme) }
function_block-function_prefixed
[ { "content": "type CapturerEventHandler<'h> = &'h mut dyn FnMut(TokenCapturerEvent) -> Result<(), RewritingError>;\n\n\n\npub struct TokenCapturer {\n\n encoding: &'static Encoding,\n\n text_decoder: TextDecoder,\n\n capture_flags: TokenCaptureFlags,\n\n}\n\n\n\nimpl TokenCapturer {\n\n pub fn new(c...
Rust
src/main.rs
matthias-t/workspace
2ff2562d55d2d92b007379fe5a283e3bb0d81582
#[macro_use] mod macros; mod app; mod exit; mod shell; mod tilde; mod workspace; use clap::ArgMatches; use colored::Colorize; use failure::Fail; use std::env; use std::fs; use std::io::Write; use std::path; use std::process; use crate::exit::Exit; use crate::tilde::Tilde; use crate::workspace::Workspace; pub static mut VERBOSE: bool = false; fn main() { let matches = app::cli().get_matches(); unsafe { VERBOSE = matches.is_present("verbose"); } if !matches.is_present("shell-wrapper") && matches.subcommand_matches("shell").is_none() { warn!("You are using the workspace binary, which is the backend for the `ws` function."); indent_warn!( "To set `ws` up in your shell, see the README.md or run `workspace shell --help`" ) } match matches.subcommand() { ("open", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); let ws = Workspace::get(name) .unwrap_or_exit(&format!("A workspace called '{}' does not exist", name)) .unwrap_or_else(|error| { let path = Workspace::file_path(name); error!("{} from {}", error, path.tilde_format()); if let Some(cause) = error.cause() { indent_error!("{}", cause); } if let Some(backtrace) = error.backtrace() { log!("{}", backtrace); } process::exit(1) }); if !ws.path.exists() { error!("The location of this workspace does not exist anymore"); indent_error!("the path '{}' was moved or deleted", ws.path.tilde_format()); process::exit(1); } let dir_only = matches.is_present("directory"); ws.open(dir_only); } ("add", Some(matches)) => { let name = matches.value_of("NAME").unwrap().to_string(); if Workspace::exists(&name) { error!("A workspace called '{}' already exists", name); process::exit(1); } let path = env::current_dir().unwrap_or_exit("Could not read current directory"); let sames: Vec<_> = Workspace::all() .into_iter() .filter_map(|(name, result)| { if let (Some(name), Ok(workspace)) = (name, result) { if workspace.path == path { return Some(name); } } None }) .collect(); if !sames.is_empty() { warn!( "Found {} pointing 
to this directory: {}", if sames.len() == 1 { "another workspace" } else { "other workspaces" }, sames.join(", ") ); confirm!("Create a new workspace here anyway"); } let ws = Workspace { path, commands: workspace::Commands::default(), tabs: Vec::default(), }; ws.write(&name); Workspace::edit(&name); println!("Created workspace '{}' in {}", name, ws.path.tilde_format()); } ("edit", Some(matches)) => { let name = matches.value_of("NAME").unwrap(); if !Workspace::exists(&name) { error!("A workspace called '{}' does not exist", name); process::exit(1); } Workspace::edit(name); } ("rename", Some(matches)) => { let old_name = matches.value_of("OLD_NAME").unwrap(); let new_name = matches.value_of("NEW_NAME").unwrap(); if !Workspace::exists(&old_name) { error!("A workspace called '{}' does not exist", old_name); process::exit(1); } if Workspace::exists(&new_name) { error!( "Cannot rename to '{}' because a workspace with that name already exists", new_name ); process::exit(1) } std::fs::rename( Workspace::file_path(old_name), Workspace::file_path(new_name), ) .unwrap_or_exit("Could not rename config file"); } ("delete", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); if !Workspace::file_path(name).exists() { error!("A workspace called '{}' does not exist", name); process::exit(1); } if !matches.is_present("yes") { confirm!("Delete the workspace '{}'", name); } Workspace::delete(name); println!("Deleted workspace '{}'", name); } ("list", Some(_)) => { let all = Workspace::all(); if all.is_empty() { eprintln!("No workspaces found.\nRun `ws add <NAME>` to create one."); return; } use term_grid::{Direction, Filling, Grid, GridOptions}; let mut grid = Grid::new(GridOptions { filling: Filling::Spaces(2), direction: Direction::LeftToRight, }); for (name, result) in all { let path: String; let mut moved = String::new(); match result { Ok(ws) => { path = ws.path.tilde_format().bright_black().to_string(); if !ws.path.exists() { moved = format!("{} path has 
moved", "warning:".bold().yellow()); } } Err(error) => { path = format!("{} {}", "warning:".bold().yellow(), error); } } let name = name.unwrap_or_else(|| format!("{} invalid UTF-8", "warning:".bold().yellow())); grid.add(name.into()); grid.add(path.into()); grid.add(moved.into()); } print!("{}", grid.fit_into_columns(3)); } ("shell", Some(matches)) => { if matches.subcommand_matches("bash").is_some() { println!("{}", shell::BASH); } else if matches.subcommand_matches("fish").is_some() { println!("{}", shell::FISH); } else if matches.subcommand_matches("powershell").is_some() { println!("{}", shell::POWERSHELL) } else if let Some(matches) = matches.subcommand_matches("cmd") { let path: path::PathBuf = path_to_binary_or_arg(&matches); let mut file: fs::File = fs::OpenOptions::new() .read(false) .write(true) .create(true) .append(false) .truncate(true) .open(&path) .unwrap_or_exit(&format!( "Could not create batch file at {}", path.tilde_format() )); file.write_fmt(format_args!("{}", shell::CMD)) .unwrap_or_exit("Could not write to batch file"); println!("Wrote {}", path.tilde_format()); } } _ => {} } } fn path_to_binary_or_arg(matches: &ArgMatches) -> path::PathBuf { if let Some(path) = matches.value_of("PATH") { return path::Path::new(path) .with_file_name("ws") .with_extension("bat") .to_path_buf(); } else { let mut path = env::current_exe().unwrap_or_exit("Could not determine path to binary"); path.set_file_name("ws"); path.set_extension("bat"); return path; } }
#[macro_use] mod macros; mod app; mod exit; mod shell; mod tilde; mod workspace; use clap::ArgMatches; use colored::Colorize; use failure::Fail; use std::env; use std::fs; use std::io::Write; use std::path; use std::process; use crate::exit::Exit; use crate::tilde::Tilde; use crate::workspace::Workspace; pub static mut VERBOSE: bool = false;
fn path_to_binary_or_arg(matches: &ArgMatches) -> path::PathBuf { if let Some(path) = matches.value_of("PATH") { return path::Path::new(path) .with_file_name("ws") .with_extension("bat") .to_path_buf(); } else { let mut path = env::current_exe().unwrap_or_exit("Could not determine path to binary"); path.set_file_name("ws"); path.set_extension("bat"); return path; } }
fn main() { let matches = app::cli().get_matches(); unsafe { VERBOSE = matches.is_present("verbose"); } if !matches.is_present("shell-wrapper") && matches.subcommand_matches("shell").is_none() { warn!("You are using the workspace binary, which is the backend for the `ws` function."); indent_warn!( "To set `ws` up in your shell, see the README.md or run `workspace shell --help`" ) } match matches.subcommand() { ("open", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); let ws = Workspace::get(name) .unwrap_or_exit(&format!("A workspace called '{}' does not exist", name)) .unwrap_or_else(|error| { let path = Workspace::file_path(name); error!("{} from {}", error, path.tilde_format()); if let Some(cause) = error.cause() { indent_error!("{}", cause); } if let Some(backtrace) = error.backtrace() { log!("{}", backtrace); } process::exit(1) }); if !ws.path.exists() { error!("The location of this workspace does not exist anymore"); indent_error!("the path '{}' was moved or deleted", ws.path.tilde_format()); process::exit(1); } let dir_only = matches.is_present("directory"); ws.open(dir_only); } ("add", Some(matches)) => { let name = matches.value_of("NAME").unwrap().to_string(); if Workspace::exists(&name) { error!("A workspace called '{}' already exists", name); process::exit(1); } let path = env::current_dir().unwrap_or_exit("Could not read current directory"); let sames: Vec<_> = Workspace::all() .into_iter() .filter_map(|(name, result)| { if let (Some(name), Ok(workspace)) = (name, result) { if workspace.path == path { return Some(name); } } None }) .collect(); if !sames.is_empty() { warn!( "Found {} pointing to this directory: {}", if sames.len() == 1 { "another workspace" } else { "other workspaces" }, sames.join(", ") ); confirm!("Create a new workspace here anyway"); } let ws = Workspace { path, commands: workspace::Commands::default(), tabs: Vec::default(), }; ws.write(&name); Workspace::edit(&name); println!("Created workspace '{}' in {}", 
name, ws.path.tilde_format()); } ("edit", Some(matches)) => { let name = matches.value_of("NAME").unwrap(); if !Workspace::exists(&name) { error!("A workspace called '{}' does not exist", name); process::exit(1); } Workspace::edit(name); } ("rename", Some(matches)) => { let old_name = matches.value_of("OLD_NAME").unwrap(); let new_name = matches.value_of("NEW_NAME").unwrap(); if !Workspace::exists(&old_name) { error!("A workspace called '{}' does not exist", old_name); process::exit(1); } if Workspace::exists(&new_name) { error!( "Cannot rename to '{}' because a workspace with that name already exists", new_name ); process::exit(1) } std::fs::rename( Workspace::file_path(old_name), Workspace::file_path(new_name), ) .unwrap_or_exit("Could not rename config file"); } ("delete", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); if !Workspace::file_path(name).exists() { error!("A workspace called '{}' does not exist", name); process::exit(1); } if !matches.is_present("yes") { confirm!("Delete the workspace '{}'", name); } Workspace::delete(name); println!("Deleted workspace '{}'", name); } ("list", Some(_)) => { let all = Workspace::all(); if all.is_empty() { eprintln!("No workspaces found.\nRun `ws add <NAME>` to create one."); return; } use term_grid::{Direction, Filling, Grid, GridOptions}; let mut grid = Grid::new(GridOptions { filling: Filling::Spaces(2), direction: Direction::LeftToRight, }); for (name, result) in all { let path: String; let mut moved = String::new(); match result { Ok(ws) => { path = ws.path.tilde_format().bright_black().to_string(); if !ws.path.exists() { moved = format!("{} path has moved", "warning:".bold().yellow()); } } Err(error) => { path = format!("{} {}", "warning:".bold().yellow(), error); } } let name = name.unwrap_or_else(|| format!("{} invalid UTF-8", "warning:".bold().yellow())); grid.add(name.into()); grid.add(path.into()); grid.add(moved.into()); } print!("{}", grid.fit_into_columns(3)); } ("shell", 
Some(matches)) => { if matches.subcommand_matches("bash").is_some() { println!("{}", shell::BASH); } else if matches.subcommand_matches("fish").is_some() { println!("{}", shell::FISH); } else if matches.subcommand_matches("powershell").is_some() { println!("{}", shell::POWERSHELL) } else if let Some(matches) = matches.subcommand_matches("cmd") { let path: path::PathBuf = path_to_binary_or_arg(&matches); let mut file: fs::File = fs::OpenOptions::new() .read(false) .write(true) .create(true) .append(false) .truncate(true) .open(&path) .unwrap_or_exit(&format!( "Could not create batch file at {}", path.tilde_format() )); file.write_fmt(format_args!("{}", shell::CMD)) .unwrap_or_exit("Could not write to batch file"); println!("Wrote {}", path.tilde_format()); } } _ => {} } }
function_block-full_function
[ { "content": "pub fn cli() -> App<'static, 'static> {\n\n App::new(\"workspace\")\n\n .version(crate_version!())\n\n .about(\"A command-line project manager\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .global_setting(AppSettings::ColoredHelp)\n\n .global_se...
Rust
cli/src/forge.rs
Genysys/foundry
0527eb95ce9c17101b434a0626ba3861836c0a9e
use ethers::{ providers::Provider, solc::{remappings::Remapping, ArtifactOutput, Project}, }; use evm_adapters::{ sputnik::{vicinity, ForkMemoryBackend, PRECOMPILES_MAP}, FAUCET_ACCOUNT, }; use regex::Regex; use sputnik::backend::Backend; use structopt::StructOpt; use forge::MultiContractRunnerBuilder; use ansi_term::Colour; use ethers::types::U256; mod forge_opts; use forge_opts::{EvmType, Opts, Subcommands}; use crate::forge_opts::{Dependency, FullContractInfo}; use std::{collections::HashMap, convert::TryFrom, process::Command, str::FromStr, sync::Arc}; mod cmd; mod utils; #[tracing::instrument(err)] fn main() -> eyre::Result<()> { utils::subscriber(); let opts = Opts::from_args(); match opts.sub { Subcommands::Test { opts, env, json, pattern, evm_type, fork_url, fork_block_number, initial_balance, sender, ffi, verbosity, allow_failure, } => { let cfg = proptest::test_runner::Config { failure_persistence: None, ..Default::default() }; let fuzzer = proptest::test_runner::TestRunner::new(cfg); let project = Project::try_from(&opts)?; let builder = MultiContractRunnerBuilder::default() .fuzzer(fuzzer) .initial_balance(initial_balance) .sender(sender); match evm_type { #[cfg(feature = "sputnik-evm")] EvmType::Sputnik => { use evm_adapters::sputnik::Executor; use sputnik::backend::MemoryBackend; let mut cfg = opts.evm_version.sputnik_cfg(); cfg.create_contract_limit = None; let vicinity = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(vicinity(&provider, fork_block_number))? 
} else { env.sputnik_state() }; let mut backend = MemoryBackend::new(&vicinity, Default::default()); let faucet = backend.state_mut().entry(*FAUCET_ACCOUNT).or_insert_with(Default::default); faucet.balance = U256::MAX; let backend: Box<dyn Backend> = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let init_state = backend.state().clone(); let backend = ForkMemoryBackend::new( provider, backend, fork_block_number, init_state, ); Box::new(backend) } else { Box::new(backend) }; let backend = Arc::new(backend); let precompiles = PRECOMPILES_MAP.clone(); let evm = Executor::new_with_cheatcodes( backend, env.gas_limit, &cfg, &precompiles, ffi, ); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } #[cfg(feature = "evmodin-evm")] EvmType::EvmOdin => { use evm_adapters::evmodin::EvmOdin; use evmodin::tracing::NoopTracer; let revision = opts.evm_version.evmodin_cfg(); let host = env.evmodin_state(); let evm = EvmOdin::new(host, env.gas_limit, revision, NoopTracer); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } } } Subcommands::Build { opts } => { let project = Project::try_from(&opts)?; let output = project.compile()?; if output.has_compiler_errors() { eyre::bail!(output.to_string()) } else if output.is_unchanged() { println!("no files changed, compilation skippped."); } else { println!("success."); } } Subcommands::VerifyContract { contract, address, constructor_args } => { let FullContractInfo { path, name } = contract; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(cmd::verify::run(path, name, address, constructor_args))?; } Subcommands::Create { contract: _, verify: _ } => { unimplemented!("Not yet implemented") } Subcommands::Update { lib } => { let repo = git2::Repository::open(".")?; if let Some(lib) = lib { println!("Updating submodule {:?}", lib); repo.find_submodule( &lib.into_os_string().into_string().expect("invalid submodule path"), )? 
.update(true, None)?; } else { Command::new("git") .args(&["submodule", "update", "--init", "--recursive"]) .spawn()? .wait()?; } } Subcommands::Install { dependencies } => { install(std::env::current_dir()?, dependencies)?; } Subcommands::Remappings { lib_paths, root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); let root = std::fs::canonicalize(root)?; let lib_paths = if lib_paths.is_empty() { vec![root.join("lib")] } else { lib_paths }; let remappings: Vec<_> = lib_paths .iter() .map(|path| Remapping::find_many(&path).unwrap()) .flatten() .collect(); remappings.iter().for_each(|x| println!("{}", x)); } Subcommands::Init { root, template } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); if !root.exists() { std::fs::create_dir_all(&root)?; } let root = std::fs::canonicalize(root)?; if let Some(ref template) = template { println!("Initializing {} from {}...", root.display(), template); Command::new("git") .args(&["clone", template, &root.display().to_string()]) .spawn()? .wait()?; } else { println!("Initializing {}...", root.display()); let src = root.join("src"); let test = src.join("test"); std::fs::create_dir_all(&test)?; let lib = root.join("lib"); std::fs::create_dir(&lib)?; let contract_path = src.join("Contract.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.sol"))?; let contract_path = test.join("Contract.t.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.t.sol"))?; Command::new("git").arg("init").current_dir(&root).spawn()?.wait()?; Command::new("git").args(&["add", "."]).current_dir(&root).spawn()?.wait()?; Command::new("git") .args(&["commit", "-m", "chore: forge init"]) .current_dir(&root) .spawn()? 
.wait()?; Dependency::from_str("https://github.com/dapphub/ds-test") .and_then(|dependency| install(root, vec![dependency]))?; } println!("Done."); } Subcommands::Completions { shell } => { Subcommands::clap().gen_completions_to("forge", shell, &mut std::io::stdout()) } Subcommands::Clean { root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); utils::cleanup(root)?; } } Ok(()) } fn test<A: ArtifactOutput + 'static, S: Clone, E: evm_adapters::Evm<S>>( builder: MultiContractRunnerBuilder, project: Project<A>, evm: E, pattern: Regex, json: bool, verbosity: u8, allow_failure: bool, ) -> eyre::Result<HashMap<String, HashMap<String, forge::TestResult>>> { let mut runner = builder.build(project, evm)?; let mut exit_code = 0; let results = runner.test(pattern)?; if json { let res = serde_json::to_string(&results)?; println!("{}", res); } else { for (i, (contract_name, tests)) in results.iter().enumerate() { if i > 0 { println!() } if !tests.is_empty() { let term = if tests.len() > 1 { "tests" } else { "test" }; println!("Running {} {} for {}", tests.len(), term, contract_name); } for (name, result) in tests { let status = if result.success { Colour::Green.paint("[PASS]") } else { exit_code = -1; let txt = match (&result.reason, &result.counterexample) { (Some(ref reason), Some(ref counterexample)) => { format!( "[FAIL. Reason: {}. Counterexample: {}]", reason, counterexample ) } (None, Some(ref counterexample)) => { format!("[FAIL. Counterexample: {}]", counterexample) } (Some(ref reason), None) => { format!("[FAIL. 
Reason: {}]", reason) } (None, None) => "[FAIL]".to_string(), }; Colour::Red.paint(txt) }; println!( "{} {} (gas: {})", status, name, result .gas_used .map(|x| x.to_string()) .unwrap_or_else(|| "[fuzztest]".to_string()) ); } if verbosity > 1 { println!(); for (name, result) in tests { let status = if result.success { "Success" } else { "Failure" }; println!("{}: {}", status, name); println!(); for log in &result.logs { println!(" {}", log); } println!(); } } } } if allow_failure { exit_code = 0; } std::process::exit(exit_code); } fn install(root: impl AsRef<std::path::Path>, dependencies: Vec<Dependency>) -> eyre::Result<()> { let libs = std::path::Path::new("lib"); dependencies.iter().try_for_each(|dep| -> eyre::Result<_> { let path = libs.join(&dep.name); println!("Installing {} in {:?}, (url: {}, tag: {:?})", dep.name, path, dep.url, dep.tag); Command::new("git") .args(&["submodule", "add", &dep.url, &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; Command::new("git") .args(&["submodule", "update", "--init", "--recursive", &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; let message = if let Some(ref tag) = dep.tag { Command::new("git") .args(&["checkout", "--recurse-submodules", tag]) .current_dir(&path) .spawn()? .wait()?; Command::new("git").args(&["add", &path.display().to_string()]).spawn()?.wait()?; format!("forge install: {}\n\n{}", dep.name, tag) } else { format!("forge install: {}", dep.name) }; Command::new("git").args(&["commit", "-m", &message]).current_dir(&root).spawn()?.wait()?; Ok(()) }) }
use ethers::{ providers::Provider, solc::{remappings::Remapping, ArtifactOutput, Project}, }; use evm_adapters::{ sputnik::{vicinity, ForkMemoryBackend, PRECOMPILES_MAP}, FAUCET_ACCOUNT, }; use regex::Regex; use sputnik::backend::Backend; use structopt::StructOpt; use forge::MultiContractRunnerBuilder; use ansi_term::Colour; use ethers::types::U256; mod forge_opts; use forge_opts::{EvmType, Opts, Subcommands}; use crate::forge_opts::{Dependency, FullContractInfo}; use std::{collections::HashMap, convert::TryFrom, process::Command, str::FromStr, sync::Arc}; mod cmd; mod utils; #[tracing::instrument(err)] fn main() -> eyre::Result<()> { utils::subscriber(); let opts = Opts::from_args(); match opts.sub { Subcommands::Test { opts, env, json, pattern, evm_type, fork_url, fork_block_number, initial_balance, sender, ffi, verbosity, allow_failure, } => { let cfg = proptest::test_runner::Config { failure_persistence: None, ..Default::default() }; let fuzzer = proptest::test_runner::TestRunner::new(cfg); let project = Project::try_from(&opts)?; let builder = MultiContractRunnerBuilder::default() .fuzzer(fuzzer) .initial_balance(initial_balance) .sender(sender); match evm_type { #[cfg(feature = "sputnik-evm")] EvmType::Sputnik => { use evm_adapters::sputnik::Executor; use sputnik::backend::MemoryBackend; let mut cfg = opts.evm_version.sputnik_cfg(); cfg.create_contract_limit = None; let vicinity = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(vicinity(&provider, fork_block_number))? 
} else { env.sputnik_state() }; let mut backend = MemoryBackend::new(&vicinity, Default::default()); let faucet = backend.state_mut().entry(*FAUCET_ACCOUNT).or_insert_with(Default::default); faucet.balance = U256::MAX; let backend: Box<dyn Backend> = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let init_state = backend.state().clone(); let backend = ForkMemoryBackend::new( provider, backend, fork_block_number, init_state, ); Box::new(backend) } else { Box::new(backend) }; let backend = Arc::new(backend); let precompiles = PRECOMPILES_MAP.clone(); let evm = Executor::new_with_cheatcodes( backend, env.gas_limit, &cfg, &precompiles, ffi, ); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } #[cfg(feature = "evmodin-evm")] EvmType::EvmOdin => { use evm_adapters::evmodin::EvmOdin; use evmodin::tracing::NoopTracer; let revision = opts.evm_version.evmodin_cfg(); let host = env.evmodin_state(); let evm = EvmOdin::new(host, env.gas_limit, revision, NoopTracer); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } } } Subcommands::Build { opts } => { let project = Project::try_from(&opts)?; let output = project.compile()?; if output.has_compiler_errors() { eyre::bail!(output.to_string()) } else if output.is_unchanged() { println!("no files changed, compilation skippped."); } else { println!("success."); } } Subcommands::VerifyContract { contract, address, constructor_args } => { let FullContractInfo { path, name } = contract; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(cmd::verify::run(path, name, address, constructor_args))?; } Subcommands::Create { contract: _, verify: _ } => { unimplemented!("Not yet implemented") } Subcommands::Update { lib } => { let repo = git2::Repository::open(".")?; if let Some(lib) = lib { println!("Updating submodule {:?}", lib); repo.find_submodule( &lib.into_os_string().into_string().expect("invalid submodule path"), )? 
.update(true, None)?; } else { Command::new("git") .args(&["submodule", "update", "--init", "--recursive"]) .spawn()? .wait()?; } } Subcommands::Install { dependencies } => { install(std::env::current_dir()?, dependencies)?; } Subcommands::Remappings { lib_paths, root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); let root = std::fs::canonicalize(root)?; let lib_paths = if lib_paths.is_empty() { vec![root.join("lib")] } else { lib_paths }; let remappings: Vec<_> = lib_paths .iter() .map(|path| Remapping::find_many(&path).unwrap()) .flatten() .collect(); remappings.iter().for_each(|x| println!("{}", x)); } Subcommands::Init { root, template } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); if !root.exists() { std::fs::create_dir_all(&root)?; } let root = std::fs::canonicalize(root)?; if let Some(ref template) = template { println!("Initializing {} from {}...", root.display(), template); Command::new("git") .args(&["clone", template, &root.display().to_string()]) .spawn()? .wait()?; } else { println!("Initializing {}...", root.display()); let src = root.join("src"); let test = src.join("test"); std::fs::create_dir_all(&test)?; let lib = root.join("lib"); std::fs::create_dir(&lib)?; let contract_path = src.join("Contract.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.sol"))?; let contract_path = test.join("Contract.t.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.t.sol"))?; Command::new("git").arg("init").current_dir(&root).spawn()?.wait()?; Command::new("git").args(&["add", "."]).current_dir(&root).spawn()?.wait()?; Command::new("git") .args(&["commit", "-m", "chore: forge init"]) .current_dir(&root) .spawn()? 
.wait()?; Dependency::from_str("https://github.com/dapphub/ds-test") .and_then(|dependency| install(root, vec![dependency]))?; } println!("Done."); } Subcommands::Completions { shell } => { Subcommands::clap().gen_completions_to("forge", shell, &mut std::io::stdout()) } Subcommands::Clean { root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); utils::cleanup(root)?; } } Ok(()) } fn test<A: ArtifactOutput + 'static, S: Clone, E: evm_adapters::Evm<S>>( builder: MultiContractRunnerBuilder, project: Project<A>, evm: E, pattern: Regex, json: bool, verbosity: u8, allow_failure: bool, ) -> eyre::Result<HashMap<String, HashMap<String, forge::TestResult>>> { let mut runner = builder.build(project, evm)?; let mut exit_code = 0; let results = runner.test(pattern)?; if json { let res = serde_json::to_string(&results)?; println!("{}", res); } else { for (i, (contract_name, tests)) in results.iter().enumerate() { if i > 0 { println!() } if !tests.is_empty() { let term = if tests.len() > 1 { "tests" } else { "test" }; println!("Running {} {} for {}", tests.len(), term, contract_name); } for (name, result) in tests { let status = if result.success { Colour::Green.paint("[PASS]") } else { exit_code = -1; let txt = match (&result.reason, &result.counterexample) { (Some(ref reason), Some(ref counterexample)) => { format!( "[FAIL. Reason: {}. Counterexample: {}]", reason, counte
fn install(root: impl AsRef<std::path::Path>, dependencies: Vec<Dependency>) -> eyre::Result<()> { let libs = std::path::Path::new("lib"); dependencies.iter().try_for_each(|dep| -> eyre::Result<_> { let path = libs.join(&dep.name); println!("Installing {} in {:?}, (url: {}, tag: {:?})", dep.name, path, dep.url, dep.tag); Command::new("git") .args(&["submodule", "add", &dep.url, &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; Command::new("git") .args(&["submodule", "update", "--init", "--recursive", &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; let message = if let Some(ref tag) = dep.tag { Command::new("git") .args(&["checkout", "--recurse-submodules", tag]) .current_dir(&path) .spawn()? .wait()?; Command::new("git").args(&["add", &path.display().to_string()]).spawn()?.wait()?; format!("forge install: {}\n\n{}", dep.name, tag) } else { format!("forge install: {}", dep.name) }; Command::new("git").args(&["commit", "-m", &message]).current_dir(&root).spawn()?.wait()?; Ok(()) }) }
rexample ) } (None, Some(ref counterexample)) => { format!("[FAIL. Counterexample: {}]", counterexample) } (Some(ref reason), None) => { format!("[FAIL. Reason: {}]", reason) } (None, None) => "[FAIL]".to_string(), }; Colour::Red.paint(txt) }; println!( "{} {} (gas: {})", status, name, result .gas_used .map(|x| x.to_string()) .unwrap_or_else(|| "[fuzztest]".to_string()) ); } if verbosity > 1 { println!(); for (name, result) in tests { let status = if result.success { "Success" } else { "Failure" }; println!("{}: {}", status, name); println!(); for log in &result.logs { println!(" {}", log); } println!(); } } } } if allow_failure { exit_code = 0; } std::process::exit(exit_code); }
function_block-function_prefixed
[ { "content": "/// Tries to extract the `Contract` in the `DAPP_JSON` file\n\npub fn find_dapp_json_contract(path: &str, name: &str) -> eyre::Result<Contract> {\n\n let dapp_json = dapp_json_path();\n\n let mut value: serde_json::Value = serde_json::from_reader(std::fs::File::open(&dapp_json)?)\n\n ...
Rust
graphannis/src/annis/db/token_helper.rs
corpus-tools/graphANNIS
6b1bf752a33f851f3fffe8e49e1e72cfc29b60f5
use crate::{ annis::{ db::{ aql::model::{AnnotationComponentType, TOKEN_KEY}, AnnotationStorage, }, errors::GraphAnnisError, }, errors::Result, graph::GraphStorage, AnnotationGraph, }; use graphannis_core::{ graph::ANNIS_NS, types::{Component, NodeID}, }; use std::collections::HashSet; use std::sync::Arc; #[derive(Clone)] pub struct TokenHelper<'a> { node_annos: &'a dyn AnnotationStorage<NodeID>, left_edges: Arc<dyn GraphStorage>, right_edges: Arc<dyn GraphStorage>, cov_edges: Vec<Arc<dyn GraphStorage>>, } lazy_static! { static ref COMPONENT_LEFT: Component<AnnotationComponentType> = { Component::new( AnnotationComponentType::LeftToken, ANNIS_NS.into(), "".into(), ) }; static ref COMPONENT_RIGHT: Component<AnnotationComponentType> = { Component::new( AnnotationComponentType::RightToken, ANNIS_NS.into(), "".into(), ) }; } pub fn necessary_components(db: &AnnotationGraph) -> HashSet<Component<AnnotationComponentType>> { let mut result = HashSet::default(); result.insert(COMPONENT_LEFT.clone()); result.insert(COMPONENT_RIGHT.clone()); result.extend( db.get_all_components(Some(AnnotationComponentType::Coverage), None) .into_iter(), ); result } impl<'a> TokenHelper<'a> { pub fn new(graph: &'a AnnotationGraph) -> Result<TokenHelper<'a>> { let cov_edges: Vec<Arc<dyn GraphStorage>> = graph .get_all_components(Some(AnnotationComponentType::Coverage), None) .into_iter() .filter_map(|c| graph.get_graphstorage(&c)) .filter(|gs| { if let Some(stats) = gs.get_statistics() { stats.nodes > 0 } else { true } }) .collect(); let left_edges = graph.get_graphstorage(&COMPONENT_LEFT).ok_or_else(|| { GraphAnnisError::ImpossibleSearch( "LeftToken component is missing (needed for all text coverage related operators)" .to_string(), ) })?; let right_edges = graph.get_graphstorage(&COMPONENT_RIGHT).ok_or_else(|| { GraphAnnisError::ImpossibleSearch( "RightToken component is missing (needed for all text coverage related operators)" .to_string(), ) })?; Ok(TokenHelper { node_annos: 
graph.get_node_annos(), left_edges, right_edges, cov_edges, }) } pub fn get_gs_coverage(&self) -> &Vec<Arc<dyn GraphStorage>> { &self.cov_edges } pub fn get_gs_left_token(&self) -> &dyn GraphStorage { self.left_edges.as_ref() } pub fn get_gs_right_token_(&self) -> &dyn GraphStorage { self.right_edges.as_ref() } pub fn is_token(&self, id: NodeID) -> Result<bool> { if self.node_annos.has_value_for_item(&id, &TOKEN_KEY)? { let has_outgoing = self.has_outgoing_coverage_edges(id)?; Ok(!has_outgoing) } else { Ok(false) } } pub fn has_outgoing_coverage_edges(&self, id: NodeID) -> Result<bool> { for c in self.cov_edges.iter() { if c.has_outgoing_edges(id)? { return Ok(true); } } Ok(false) } pub fn right_token_for(&self, n: NodeID) -> Result<Option<NodeID>> { if self.is_token(n)? { Ok(Some(n)) } else { let mut out = self.right_edges.get_outgoing_edges(n); match out.next() { Some(out) => Ok(Some(out?)), None => Ok(None), } } } pub fn left_token_for(&self, n: NodeID) -> Result<Option<NodeID>> { if self.is_token(n)? { Ok(Some(n)) } else { let mut out = self.left_edges.get_outgoing_edges(n); match out.next() { Some(out) => Ok(Some(out?)), None => Ok(None), } } } pub fn left_right_token_for(&self, n: NodeID) -> Result<(Option<NodeID>, Option<NodeID>)> { if self.is_token(n)? { Ok((Some(n), Some(n))) } else { let out_left = match self.left_edges.get_outgoing_edges(n).next() { Some(out) => Some(out?), None => None, }; let out_right = match self.right_edges.get_outgoing_edges(n).next() { Some(out) => Some(out?), None => None, }; Ok((out_left, out_right)) } } }
use crate::{ annis::{ db::{ aql::model::{AnnotationComponentType, TOKEN_KEY}, AnnotationStorage, }, errors::GraphAnnisError, }, errors::Result, graph::GraphStorage, AnnotationGraph, }; use graphannis_core::{ graph::ANNIS_NS, types::{Component, NodeID}, }; use std::collections::HashSet; use std::sync::Arc; #[derive(Clone)] pub struct TokenHelper<'a> { node_annos: &'a dyn AnnotationStorage<NodeID>, left_edges: Arc<dyn GraphStorage>, right_edges: Arc<dyn GraphStorage>, cov_edges: Vec<Arc<dyn GraphStorage>>, } lazy_static! { static ref COMPONENT_LEFT: Component<AnnotationComponentType> = { Component::new( AnnotationComponentType::LeftToken, ANNIS_NS.into(), "".into(), ) }; static ref COMPONENT_RIGHT: Component<AnnotationComponentType> = { Component::new( AnnotationComponentType::RightToken, ANNIS_NS.into(), "".into(), ) }; } pub fn necessary_components(db: &AnnotationGraph) -> HashSet<Component<AnnotationComponentType>> { let mut result = HashSet::default(); result.insert(COMPONENT_LEFT.clone()); result.insert(COMPONENT_RIGHT.clone()); result.extend( db.get_all_components(Some(AnnotationComponentType::Coverage), None) .into_iter(), ); result } impl<'a> TokenHelper<'a> { pub fn new(graph: &'a AnnotationGraph) -> Result<TokenHelper<'a>> { let cov_edges: Vec<Arc<dyn GraphStorage>> = graph .get_all_components(Some(AnnotationComponentType::Coverage), None) .into_iter() .filter_map(|c| graph.get_graphstorage(&c)) .filter(|gs| { if let Some(stats) = gs.get_statistics() { stats.nodes > 0 } else { true } }) .collect(); let left_edges = graph.get_graphstorage(&COMPONENT_LEFT).ok_or_else(|| { GraphAnnisError::ImpossibleSearch( "LeftToken component is missing (needed for all text coverage related operators)" .to_string(), ) })?; let right_edges = graph.get_graphstorage(&COMPONENT_RIGHT).ok_or_else(|| { GraphAnnisError::ImpossibleSearch( "RightToken component is missing (needed for all text coverage related operators)" .to_string(), ) })?; Ok(TokenHelper { node_annos: 
graph.get_node_annos(), left_edges, right_edges, cov_edges, }) } pub fn get_gs_coverage(&self) -> &Vec<Arc<dyn GraphStorage>> { &self.cov_edges } pub fn get_gs_left_token(&self) -> &dyn GraphStorage { self.left_edges.as_ref() } pub fn get_gs_right_token_(&self) -> &dyn GraphStorage { self.right_edges.as_ref() } pub fn is_token(&self, id: NodeID) -> Result<bool> { if self.node_annos.has_value_for_item(&id, &TOKEN_KEY)? { let has_outgoing = self.has_outgoing_coverage_edges(id)?; Ok(!has_outgoing) } else { Ok(false) } } pub fn has_outgoing_coverage_edges(&self, id: NodeID) -> Result<bool> { for c in self.cov_edges.iter() { if c.has_outgoing_edges(id)? { return Ok(true); } } Ok(false) } pub fn right_token_for(&self, n: NodeID) -> Result<Option<NodeID>> { if self.is_token(n)? { Ok(Some(n)) } else { let mut out = self.right_edges.get_outgoing_edges(n); match out.next() { Some(out) => Ok(Some(out?)), None => Ok(None), } } } pub fn left_token_for(&self, n: NodeID) -> Result<Option<NodeID>> { if self.is_token(n)? { Ok(Some(n)) } else { let mut out = self.left_edges.get_outgoing_edges(n); match out.next() { Some(out) => Ok(Some(out?)), None => Ok(None), } } } pub fn left_right_token_for(&self, n: NodeID) -> Result<(Option<NodeID>, Option<NodeID>)> { if self.is_token(n)? { Ok((Some(n), Some(n))) } else { let out_left = match self.left_edges.get_outgoing_edges(n).next() { Some(out) => Some(out?), None => None, }; let out_right =
; Ok((out_left, out_right)) } } }
match self.right_edges.get_outgoing_edges(n).next() { Some(out) => Some(out?), None => None, }
if_condition
[ { "content": "pub fn compare_match_by_text_pos(\n\n m1: &Match,\n\n m2: &Match,\n\n node_annos: &dyn AnnotationStorage<NodeID>,\n\n token_helper: Option<&TokenHelper>,\n\n gs_order: Option<&dyn GraphStorage>,\n\n collation: CollationType,\n\n quirks_mode: bool,\n\n) -> Result<Ordering> {\n\...
Rust
packages/std/src/init_handle.rs
puneet2019/cosmwasm
8cf9c302e408ce175852ad6a2ab153d426b43bdd
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::fmt; use crate::coins::Coin; use crate::encoding::Binary; use crate::errors::StdResult; use crate::types::{HumanAddr, Never}; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CosmosMsg<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { Bank(BankMsg), Custom(T), Staking(StakingMsg), Wasm(WasmMsg), } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum BankMsg { Send { from_address: HumanAddr, to_address: HumanAddr, amount: Vec<Coin>, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StakingMsg { Delegate { validator: HumanAddr, amount: Coin, }, Undelegate { validator: HumanAddr, amount: Coin, }, Withdraw { validator: HumanAddr, recipient: Option<HumanAddr>, }, Redelegate { src_validator: HumanAddr, dst_validator: HumanAddr, amount: Coin, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum WasmMsg { Execute { contract_addr: HumanAddr, msg: Binary, send: Vec<Coin>, }, Instantiate { code_id: u64, msg: Binary, send: Vec<Coin>, label: Option<String>, }, } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<BankMsg> for CosmosMsg<T> { fn from(msg: BankMsg) -> Self { CosmosMsg::Bank(msg) } } #[cfg(feature = "staking")] impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<StakingMsg> for CosmosMsg<T> { fn from(msg: StakingMsg) -> Self { CosmosMsg::Staking(msg) } } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<WasmMsg> for CosmosMsg<T> { fn from(msg: WasmMsg) -> Self { CosmosMsg::Wasm(msg) } } #[derive(Serialize, Deserialize, Clone, Default, Debug, PartialEq, JsonSchema)] pub struct LogAttribute { pub key: String, pub value: String, } pub fn log<K: ToString, V: ToString>(key: K, value: V) -> 
LogAttribute { LogAttribute { key: key.to_string(), value: value.to_string(), } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InitResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type InitResult<U = Never> = StdResult<InitResponse<U>>; impl<T> Default for InitResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { InitResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct HandleResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type HandleResult<U = Never> = StdResult<HandleResponse<U>>; impl<T> Default for HandleResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { HandleResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct MigrateResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type MigrateResult<U = Never> = StdResult<MigrateResponse<U>>; impl<T> Default for MigrateResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { MigrateResponse { messages: vec![], log: vec![], data: None, } } } #[cfg(test)] mod test { use super::*; use crate::errors::StdError; use crate::{coins, from_slice, to_vec, Uint128}; #[test] fn log_works_for_different_types() { let expeceted = LogAttribute { key: "foo".to_string(), value: "42".to_string(), }; assert_eq!(log("foo", "42"), expeceted); assert_eq!(log("foo".to_string(), "42"), expeceted); assert_eq!(log("foo", "42".to_string()), expeceted); assert_eq!(log("foo", 
HumanAddr::from("42")), expeceted); assert_eq!(log("foo", Uint128(42)), expeceted); assert_eq!(log("foo", 42), expeceted); } #[test] fn can_deser_error_result() { let fail = InitResult::Err(StdError::Unauthorized { backtrace: None }); let bin = to_vec(&fail).expect("encode contract result"); println!("error: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(fail, back); } #[test] fn can_deser_ok_result() { let send = InitResult::Ok(InitResponse { messages: vec![BankMsg::Send { from_address: HumanAddr("me".to_string()), to_address: HumanAddr("you".to_string()), amount: coins(1015, "earth"), } .into()], log: vec![LogAttribute { key: "action".to_string(), value: "release".to_string(), }], data: None, }); let bin = to_vec(&send).expect("encode contract result"); println!("ok: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(send, back); } #[test] fn msg_from_works() { let from_address = HumanAddr("me".to_string()); let to_address = HumanAddr("you".to_string()); let amount = coins(1015, "earth"); let bank = BankMsg::Send { from_address, to_address, amount, }; let msg: CosmosMsg = bank.clone().into(); match msg { CosmosMsg::Bank(msg) => assert_eq!(bank, msg), _ => panic!("must encode in Bank variant"), } } }
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::fmt; use crate::coins::Coin; use crate::encoding::Binary; use crate::errors::StdResult; use crate::types::{HumanAddr, Never}; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CosmosMsg<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { Bank(BankMsg), Custom(T), Staking(StakingMsg), Wasm(WasmMsg), } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum BankMsg { Send { from_address: HumanAddr, to_address: HumanAddr, amount: Vec<Coin>, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StakingMsg { Delegate { validator: HumanAddr, amount: Coin, }, Undelegate { validator: HumanAddr, amount: Coin, }, Withdraw { validator: HumanAddr, recipient: Option<HumanAddr>, }, Redelegate { src_validator: HumanAddr, dst_validator: HumanAddr, amount: Coin, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum WasmMsg { Execute { contract_addr: HumanAddr, msg: Binary, send: Vec<Coin>, }, Instantiate { code_id: u64, msg: Binary, send: Vec<Coin>, label: Option<String>, }, } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<BankMsg> for CosmosMsg<T> { fn from(msg: BankMsg) -> Self { CosmosMsg::Bank(msg) } } #[cfg(feature = "staking")] impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<StakingMsg> for CosmosMsg<T> { fn from(msg: StakingMsg) -> Self { CosmosMsg::Staking(msg) } } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<WasmMsg> for CosmosMsg<T> { fn from(msg: WasmMsg) -> Self { CosmosMsg::Wasm(msg) } } #[derive(Serialize, Deserialize, Clone, Default, Debug, PartialEq, JsonSchema)] pub struct LogAttribute { pub key: String, pub value: String, } pub fn log<K: ToString, V: ToString>(key: K, value: V) -> 
LogAttribute { LogAttribute { key: key.to_string(), value: value.to_string(), } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InitResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type InitResult<U = Never> = StdResult<InitResponse<U>>; impl<T> Default for InitResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { InitResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct HandleResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type HandleResult<U = Never> = StdResult<HandleResponse<U>>; impl<T> Default for HandleResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { HandleResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct MigrateResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type MigrateResult<U = Never> = StdResult<MigrateResponse<U>>; impl<T> Default for MigrateResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { MigrateResponse { messages: vec![], log: vec![],
=> assert_eq!(bank, msg), _ => panic!("must encode in Bank variant"), } } }
data: None, } } } #[cfg(test)] mod test { use super::*; use crate::errors::StdError; use crate::{coins, from_slice, to_vec, Uint128}; #[test] fn log_works_for_different_types() { let expeceted = LogAttribute { key: "foo".to_string(), value: "42".to_string(), }; assert_eq!(log("foo", "42"), expeceted); assert_eq!(log("foo".to_string(), "42"), expeceted); assert_eq!(log("foo", "42".to_string()), expeceted); assert_eq!(log("foo", HumanAddr::from("42")), expeceted); assert_eq!(log("foo", Uint128(42)), expeceted); assert_eq!(log("foo", 42), expeceted); } #[test] fn can_deser_error_result() { let fail = InitResult::Err(StdError::Unauthorized { backtrace: None }); let bin = to_vec(&fail).expect("encode contract result"); println!("error: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(fail, back); } #[test] fn can_deser_ok_result() { let send = InitResult::Ok(InitResponse { messages: vec![BankMsg::Send { from_address: HumanAddr("me".to_string()), to_address: HumanAddr("you".to_string()), amount: coins(1015, "earth"), } .into()], log: vec![LogAttribute { key: "action".to_string(), value: "release".to_string(), }], data: None, }); let bin = to_vec(&send).expect("encode contract result"); println!("ok: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(send, back); } #[test] fn msg_from_works() { let from_address = HumanAddr("me".to_string()); let to_address = HumanAddr("you".to_string()); let amount = coins(1015, "earth"); let bank = BankMsg::Send { from_address, to_address, amount, }; let msg: CosmosMsg = bank.clone().into(); match msg { CosmosMsg::Bank(msg)
random
[ { "content": "// coins is a shortcut constructor for a set of one denomination of coins\n\npub fn coins(amount: u128, denom: &str) -> Vec<Coin> {\n\n vec![coin(amount, denom)]\n\n}\n\n\n", "file_path": "packages/std/src/coins.rs", "rank": 1, "score": 333316.4643425882 }, { "content": "// ...
Rust
libllama/src/dbgcore.rs
HIDE810/llama
380326dd946cee1769711999b9c12aa09dfb1e99
use std::sync; use cpu::{self, v5, v6}; pub use cpu::irq::{IrqType9, IrqClient}; use cpu::caches::Ops; use hwcore; use io; #[derive(Clone)] pub struct DbgCore { hw: sync::Arc<sync::Mutex<hwcore::HwCore>> } impl DbgCore { pub fn bind(hw: hwcore::HwCore) -> DbgCore { DbgCore { hw: sync::Arc::new(sync::Mutex::new(hw)), } } pub fn ctx<'a>(&'a mut self, which: ActiveCpu) -> DbgContext<'a> { DbgContext { active_cpu: which, hwcore: self.hw.lock().unwrap() } } } pub struct DbgContext<'a> { active_cpu: ActiveCpu, hwcore: sync::MutexGuard<'a, hwcore::HwCore> } impl<'a> DbgContext<'a> { pub fn pause(&mut self) { self.hwcore.stop(); } pub fn resume(&mut self) { self.hwcore.start(); } pub fn running(&mut self) -> bool { self.hwcore.running() } pub fn hwcore(&self) -> &hwcore::HwCore { &*self.hwcore } pub fn hwcore_mut(&mut self) -> &mut hwcore::HwCore { &mut *self.hwcore } pub fn hw9<'b>(&'b mut self) -> DbgHw9Context<'b> { use std::sync::PoisonError; use hwcore::Hardware9; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware9>>| { let hw9 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw9.arm9.regs, hw9.arm9.cpsr.val, hw9.arm9.last_instructions); panic!("{}", s); }; DbgHw9Context { hw: self.hwcore.hardware9.lock().unwrap_or_else(print_regs) } } pub fn hw11<'b>(&'b mut self) -> DbgHw11Context<'b> { use std::sync::PoisonError; use hwcore::Hardware11; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware11>>| { let hw11 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw11.arm11.regs, hw11.arm11.cpsr.val, hw11.arm11.last_instructions); panic!("{}", s); }; DbgHw11Context { hw: self.hwcore.hardware11.lock().unwrap_or_else(print_regs) } } pub fn hw<'b>(&'b mut self) -> Box<dyn HwCtx + 'b> { match self.active_cpu { ActiveCpu::Arm9 => Box::new(self.hw9()), 
ActiveCpu::Arm11 => Box::new(self.hw11()) } } pub fn trigger_irq(&mut self, irq: IrqType9) { self.hwcore_mut().irq_tx.assert(irq); } } #[derive(Copy, Clone, Eq, PartialEq)] pub enum ActiveCpu { Arm9, Arm11 } #[allow(non_camel_case_types)] pub enum CpuRef<'a> { v5(&'a cpu::Cpu<v5>), v6(&'a cpu::Cpu<v6>), } #[allow(non_camel_case_types)] pub enum CpuMut<'a> { v5(&'a mut cpu::Cpu<v5>), v6(&'a mut cpu::Cpu<v6>), } macro_rules! any_cpu { ($self:expr, mut $ident:ident; $code:block) => { match $self.cpu_mut() { CpuMut::v5($ident) => $code, CpuMut::v6($ident) => $code } }; ($self:expr, ref $ident:ident; $code:block) => { match $self.cpu_ref() { CpuRef::v5($ident) => $code, CpuRef::v6($ident) => $code } }; } pub trait HwCtx { fn cpu_ref(&self) -> CpuRef; fn cpu_mut(&mut self) -> CpuMut; fn read_mem(&mut self, address: u32, bytes: &mut [u8]) -> Result<(), String> { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().debug_read_buf(address, bytes) }) } fn write_mem(&mut self, address: u32, bytes: &[u8]) { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().write_buf(address, bytes); }) } fn read_reg(&self, reg: usize) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[reg] }) } fn write_reg(&mut self, reg: usize, value: u32) { any_cpu!(self, mut cpu; { cpu.regs[reg] = value; }) } fn read_cpsr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.cpsr.val }) } fn write_cpsr(&mut self, value: u32) { any_cpu!(self, mut cpu; { cpu.cpsr.val = value; let mode_num = cpu.cpsr.mode.get(); cpu.regs.swap(cpu::Mode::from_num(mode_num)); }) } fn pause_addr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[15] - cpu.get_pc_offset() }) } fn branch_to(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.branch(addr); }) } fn is_thumb(&self) -> bool { any_cpu!(self, ref cpu; { cpu.cpsr.thumb_bit.get() == 1 }) } fn step(&mut self) { any_cpu!(self, mut cpu; { cpu.run(1); }) } fn set_breakpoint(&mut 
self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.insert(addr); }) } fn has_breakpoint(&mut self, addr: u32) -> bool { any_cpu!(self, ref cpu; { cpu.breakpoints.contains(&addr) }) } fn del_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.remove(&addr); }) } } pub struct DbgHw9Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware9> } impl<'a> DbgHw9Context<'a> { pub fn io9_devices(&self) -> &io::IoRegsArm9 { self.hw.io9() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw9Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v5(&self.hw.arm9) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v5(&mut self.hw.arm9) } } pub struct DbgHw11Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware11> } impl<'a> DbgHw11Context<'a> { pub fn io11_devices(&self) -> &io::IoRegsArm11 { self.hw.io11() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw11Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v6(&self.hw.arm11) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v6(&mut self.hw.arm11) } }
use std::sync; use cpu::{self, v5, v6}; pub use cpu::irq::{IrqType9, IrqClient}; use cpu::caches::Ops; use hwcore; use io; #[derive(Clone)] pub struct DbgCore { hw: sync::Arc<sync::Mutex<hwcore::HwCore>> } impl DbgCore { pub fn bind(hw: hwcore::HwCore) -> DbgCore { DbgCore { hw: sync::Arc::new(sync::Mutex::new(hw)), } } pub fn ctx<'a>(&'a mut self, which: ActiveCpu) -> DbgContext<'a> { DbgContext { active_cpu: which, hwcore: self.hw.lock().unwrap() } } } pub struct DbgContext<'a> { active_cpu: ActiveCpu, hwcore: sync::MutexGuard<'a, hwcore::HwCore> } impl<'a> DbgContext<'a> { pub fn pause(&mut self) { self.hwcore.stop(); } pub fn resume(&mut self) { self.hwcore.start(); } pub fn running(&mut self) -> bool { self.hwcore.running() } pub fn hwcore(&self) -> &hwcore::HwCore { &*self.hwcore } pub fn hwcore_mut(&mut self) -> &mut hwcore::HwCore { &mut *self.hwcore } pub fn hw9<'b>(&'b mut self) -> DbgHw9Context<'b> { use std::sync::PoisonError; use hwcore::Hardware9; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware9>>| { let hw9 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw9.arm9.regs, hw9.arm9.cpsr.val, hw9.arm9.last_instructions); panic!("{}", s); }; DbgHw9Context { hw: self.hwcore.hardware9.lock().unwrap_or_else(print_regs) } } pub fn hw11<'b>(&'b mut self) -> DbgHw11Context<'b> { use std::sync::PoisonError; use hwcore::Hardware11; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware11>>| { let hw11 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw11.arm11.regs, hw11.arm11.cpsr.val, hw11.arm11.last_instructions); panic!("{}", s); }; DbgHw11Context { hw: self.hwcore.hardw
de:block) => { match $self.cpu_ref() { CpuRef::v5($ident) => $code, CpuRef::v6($ident) => $code } }; } pub trait HwCtx { fn cpu_ref(&self) -> CpuRef; fn cpu_mut(&mut self) -> CpuMut; fn read_mem(&mut self, address: u32, bytes: &mut [u8]) -> Result<(), String> { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().debug_read_buf(address, bytes) }) } fn write_mem(&mut self, address: u32, bytes: &[u8]) { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().write_buf(address, bytes); }) } fn read_reg(&self, reg: usize) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[reg] }) } fn write_reg(&mut self, reg: usize, value: u32) { any_cpu!(self, mut cpu; { cpu.regs[reg] = value; }) } fn read_cpsr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.cpsr.val }) } fn write_cpsr(&mut self, value: u32) { any_cpu!(self, mut cpu; { cpu.cpsr.val = value; let mode_num = cpu.cpsr.mode.get(); cpu.regs.swap(cpu::Mode::from_num(mode_num)); }) } fn pause_addr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[15] - cpu.get_pc_offset() }) } fn branch_to(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.branch(addr); }) } fn is_thumb(&self) -> bool { any_cpu!(self, ref cpu; { cpu.cpsr.thumb_bit.get() == 1 }) } fn step(&mut self) { any_cpu!(self, mut cpu; { cpu.run(1); }) } fn set_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.insert(addr); }) } fn has_breakpoint(&mut self, addr: u32) -> bool { any_cpu!(self, ref cpu; { cpu.breakpoints.contains(&addr) }) } fn del_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.remove(&addr); }) } } pub struct DbgHw9Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware9> } impl<'a> DbgHw9Context<'a> { pub fn io9_devices(&self) -> &io::IoRegsArm9 { self.hw.io9() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw9Context<'a> { fn cpu_ref(&self) -> 
CpuRef { CpuRef::v5(&self.hw.arm9) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v5(&mut self.hw.arm9) } } pub struct DbgHw11Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware11> } impl<'a> DbgHw11Context<'a> { pub fn io11_devices(&self) -> &io::IoRegsArm11 { self.hw.io11() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw11Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v6(&self.hw.arm11) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v6(&mut self.hw.arm11) } }
are11.lock().unwrap_or_else(print_regs) } } pub fn hw<'b>(&'b mut self) -> Box<dyn HwCtx + 'b> { match self.active_cpu { ActiveCpu::Arm9 => Box::new(self.hw9()), ActiveCpu::Arm11 => Box::new(self.hw11()) } } pub fn trigger_irq(&mut self, irq: IrqType9) { self.hwcore_mut().irq_tx.assert(irq); } } #[derive(Copy, Clone, Eq, PartialEq)] pub enum ActiveCpu { Arm9, Arm11 } #[allow(non_camel_case_types)] pub enum CpuRef<'a> { v5(&'a cpu::Cpu<v5>), v6(&'a cpu::Cpu<v6>), } #[allow(non_camel_case_types)] pub enum CpuMut<'a> { v5(&'a mut cpu::Cpu<v5>), v6(&'a mut cpu::Cpu<v6>), } macro_rules! any_cpu { ($self:expr, mut $ident:ident; $code:block) => { match $self.cpu_mut() { CpuMut::v5($ident) => $code, CpuMut::v6($ident) => $code } }; ($self:expr, ref $ident:ident; $co
random
[ { "content": "/// Controls debugger behavior based on user-provided commands\n\n///\n\n/// `command`: Iterator over &str items\n\npub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n match command.next() {\n\n ...
Rust
rooms/tests/kdbush.rs
oniproject/tto
4336c525ac39b1706e7ded9b28e4c5d55929268f
/* mod data; fn sq_dist(a: [f32; 2], b: [f32; 2]) -> f32 { let dx = a[0] - b[0]; let dy = a[1] - b[1]; dx * dx + dy * dy } static IDS: &[u32] = &[ 97,74,95,30,77,38,76,27,80,55,72,90,88,48,43,46, 65,39,62,93, 9,96,47, 8, 3,12,15,14,21,41,36,40, 69,56,85,78,17,71,44,19,18,13,99,24,67,33,37,49, 54,57,98,45,23,31,66,68, 0,32, 5,51,75,73,84,35, 81,22,61,89, 1,11,86,52,94,16, 2, 6,25,92,42,20, 60,58,83,79,64,10,59,53,26,87, 4,63,50, 7,28,82, 70,29,34,91, ]; static COORDS: &[(f32, f32)] = &[ (10.0,20.0),( 6.0,22.0),(10.0,10.0),( 6.0,27.0),(20.0,42.0),(18.0,28.0), (11.0,23.0),(13.0,25.0),( 9.0,40.0),(26.0, 4.0),(29.0,50.0),(30.0,38.0), (41.0,11.0),(43.0,12.0),(43.0, 3.0),(46.0,12.0),(32.0,14.0),(35.0,15.0), (40.0,31.0),(33.0,18.0),(43.0,15.0),(40.0,34.0),(32.0,38.0),(33.0,34.0), (33.0,54.0),( 1.0,61.0),(24.0,56.0),(11.0,91.0),( 4.0,98.0),(20.0,81.0), (22.0,93.0),(19.0,81.0),(21.0,67.0),( 6.0,76.0),(21.0,72.0),(21.0,73.0), (25.0,57.0),(44.0,64.0),(47.0,66.0),(29.0,69.0),(46.0,61.0),(38.0,74.0), (46.0,78.0),(38.0,84.0),(32.0,88.0),(27.0,91.0),(45.0,94.0),(39.0,94.0), (41.0,92.0),(47.0,21.0),(47.0,29.0),(48.0,34.0),(60.0,25.0),(58.0,22.0), (55.0, 6.0),(62.0,32.0),(54.0, 1.0),(53.0,28.0),(54.0, 3.0),(66.0,14.0), (68.0, 3.0),(70.0, 5.0),(83.0, 6.0),(93.0,14.0),(99.0, 2.0),(71.0,15.0), (96.0,18.0),(95.0,20.0),(97.0,21.0),(81.0,23.0),(78.0,30.0),(84.0,30.0), (87.0,28.0),(90.0,31.0),(65.0,35.0),(53.0,54.0),(52.0,38.0),(65.0,48.0), (67.0,53.0),(49.0,60.0),(50.0,68.0),(57.0,70.0),(56.0,77.0),(63.0,86.0), (71.0,90.0),(52.0,83.0),(71.0,82.0),(72.0,81.0),(94.0,51.0),(75.0,53.0), (95.0,39.0),(78.0,53.0),(88.0,62.0),(84.0,72.0),(77.0,73.0),(99.0,76.0), (73.0,81.0),(88.0,87.0),(96.0,98.0),(96.0,82.0), ]; /* #[test] fn create_index() { let index = kdbush(points, 10); assert!(index.ids, ids, "ids are kd-sorted"); assert!(index.coords, coords, "coords are kd-sorted"); } #[test] fn range_search() { let index = kdbush(points, 10); let result = index.range(20, 30, 50, 70); assert_eq!(result, 
&RANGE, "returns ids"); for idx in &result { let p = points[idx]; let is = p[0] < 20 || p[0] > 50 || p[1] < 30 || p[1] > 70; assert!(!is, "result point in range"); } for idx in &IDS { let p = points[idx]; let is = result.indexOf(idx) < 0 && p[0] >= 20 && p[0] <= 50 && p[1] >= 30 && p[1] <= 70; assert!(!is, "outside point not in range"); } } #[test] fn within_search() { let index = KDBush::new(points, 10); let qp = [50, 50]; let r = 20; let r2 = 20 * 20; let result = index.within(qp[0], qp[1], r); assert_eq!(result, &WITHIN, "returns ids"); for idx in &result { let p = points[idx]; let is = sq_dist(p, qp) > r2; assert!(!is, "result point in range"); } for idx in &IDS { let p = points[idx]; let is = result.index_of(idx) < 0 && sq_dist(p, qp) <= r2; assert!(!is, "outside point not in range"); } } */ */ mod data; use crate::data::*; use rooms::index::{KDBush, SpatialIndex}; #[test] fn range() { let mut index: KDBush<f32> = KDBush::new(10); index.fill(POINTS.iter().cloned().enumerate() .map(|(i, p)| (i as u32, p))); let mut result = Vec::new(); index.range(RANGE_MIN, RANGE_MAX, |idx| { result.push(idx); let p = POINTS[idx as usize]; assert!(test_range(p), "result point {:?} not in range {:?} {:?}", p, RANGE_MIN, RANGE_MAX); }); let mut brute: Vec<_> = brute_range().collect(); result.sort(); brute.sort(); assert_eq!(&result[..], &brute[..]); } #[test] fn within() { let mut index: KDBush<f32> = KDBush::new(10); index.fill(POINTS.iter().cloned().enumerate() .map(|(i, p)| (i as u32, p))); let mut result = Vec::new(); index.within(WITHIN_CENTER, WITHIN_RADIUS, |idx| { result.push(idx); let p = POINTS[idx as usize]; assert!(test_within(p), "result point {:?} not in range {:?} {:?}", p, WITHIN_CENTER, WITHIN_RADIUS); }); let mut brute: Vec<_> = brute_within().collect(); result.sort(); brute.sort(); assert_eq!(&result[..], &brute[..]); }
/* mod data; fn sq_dist(a: [f32; 2], b: [f32; 2]) -> f32 { let dx = a[0] - b[0]; let dy = a[1] - b[1]; dx * dx + dy * dy } static IDS: &[u32] = &[ 97,74,95,30,77,38,76,27,80,55,72,90,88,48,43,46, 65,39,62,93, 9,96,47, 8, 3,12,15,14,21,41,36,40, 69,56,85,78,17,71,44,19,18,13,99,24,67,33,37,49, 54,57,98,45,23,31,66,68, 0,32, 5,51,75,73,84,35, 81,22,61,89, 1,11,86,52,94,16, 2, 6,25,92,42,20, 60,58,83,79,64,10,59,53,26,87, 4,63,50, 7,28,82, 70,29,34,91, ]; static COORDS: &[(f32, f32)] = &[ (10.0,20.0),( 6.0,22.0),(10.0,10.0),( 6.0,27.0),(20.0,42.0),(18.0,28.0), (11.0,23.0),(13.0,25.0),( 9.0,40.0),(26.0, 4.0),(29.0,50.0),(30.0,38.0), (41.0,11.0),(43.0,12.0),(43.0, 3.0),(46.0,12.0),(32.0,14.0),(35.0,15.0), (40.0,31.0),(33.0,18.0),(43.0,15.0),(40.0,34.0),(32.0,38.0),(33.0,34.0), (33.0,54.0),( 1.0,61.0),(24.0,56.0),(11.0,91.0),( 4.0,98.0),(20.0,81.0), (22.0,93.0),(19.0,81.0),(21.0,67.0),( 6.0,76.0),(21.0,72.0),(21.0,73.0), (25.0,57.0),(44.0,64.0),(47.0,66.0),(29.0,69.0),(46.0,61.0),(38.0,74.0), (46.0,78.0),(38.0,84.0),(32.0,88.0),(27.0,91.0),(45.0,94.0),(39.0,94.0), (41.0,92.0),(47.0,21.0),(47.0,29.0),(48.0,34.0),(60.0,25.0),(58.0,22.0), (55.0, 6.0),(62.0,32.0),(54.0, 1.0),(53.0,28.0),(54.0, 3.0),(66.0,14.0), (68.0, 3.0),(70.0, 5.0),(83.0, 6.0),(93.0,14.0),(99.0, 2.0),(71.0,15.0), (96.0,18.0),(95.0,20.0),(97.0,21.0),(81.0,23.0),(78.0,30.0),(84.0,30.0), (87.0,28.0),(90.0,31.0),(65.0,35.0),(53.0,54.0),(52.0,38.0),(65.0,48.0), (67.0,53.0),(49.0,60.0),(50.0,68.0),(57.0,70.0),(56.0,77.0),(63.0,86.0), (71.0,90.0),(52.0,83.0),(71.0,82.0),(72.0,81.0),(94.0,51.0),(75.0,53.0), (95.0,39.0),(78.0,53.0),(88.0,62.0),(84.0,72.0),(77.0,73.0),(99.0,76.0), (73.0,81.0),(88.0,87.0),(96.0,98.0),(96.0,82.0), ]; /* #[test] fn create_index() { let index = kdbush(points, 10); assert!(index.ids, ids, "ids are kd-sorted"); assert!(index.coords, coords, "coords are kd-sorted"); } #[test] fn range_search() { let index = kdbush(points, 10); let result = index.range(20, 30, 50, 70); assert_eq!(result, 
&RANGE, "returns ids"); for idx in &result { let p = points[idx]; let is = p[0] < 20 || p[0] > 50 || p[1] < 30 || p[1] > 70; assert!(!is, "result point in range"); } for idx in &IDS { let p = points[idx]; let is = result.indexOf(idx) < 0 && p[0] >= 20 && p[0] <= 50 && p[1] >= 30 && p[1] <= 70; assert!(!is, "outside point not in range"); } } #[test] fn within_search() { let index = KDBush::new(points, 10); let qp = [50, 50]; let r = 20; let r2 = 20 * 20; let result = index.within(qp[0], qp[1], r); assert_eq!(result, &WITHIN, "returns ids"); for idx in &result { let p = points[idx]; let is = sq_dist(p, qp) > r2; assert!(!is, "result point in range"); } for idx in &IDS { let p = points[idx]; let is = result.index_of(idx) < 0 && sq_dist(p, qp) <= r2; assert!(!is, "outside point not in range"); } } */ */ mod data; use crate::data::*; use rooms::index::{KDBush, SpatialIndex}; #[test] fn range() { let mut index: KDBush<f32> = KDBush::new(10); index.fill(POINTS.iter().cloned().enumerate() .map(|(i, p)| (i as u32, p))); let mut result = Vec::new(); index.range(RANGE_MIN, RANGE_MAX, |idx| { result.push(idx); let p = POINTS[idx as usize]; assert!(test_range(p), "result point {:?} not in range {:?} {:?}", p, RANGE_MIN, RANGE_MAX); }); let mut brute: Vec<_> = brute_range().collect(); result.sort(); brute.sort(); assert_eq!(&result[..], &brute[..]); } #[test]
fn within() { let mut index: KDBush<f32> = KDBush::new(10); index.fill(POINTS.iter().cloned().enumerate() .map(|(i, p)| (i as u32, p))); let mut result = Vec::new(); index.within(WITHIN_CENTER, WITHIN_RADIUS, |idx| { result.push(idx); let p = POINTS[idx as usize]; assert!(test_within(p), "result point {:?} not in range {:?} {:?}", p, WITHIN_CENTER, WITHIN_RADIUS); }); let mut brute: Vec<_> = brute_within().collect(); result.sort(); brute.sort(); assert_eq!(&result[..], &brute[..]); }
function_block-full_function
[ { "content": "pub fn test_within(p: [f32; 2]) -> bool {\n\n let dx = p[0] - WITHIN_CENTER[0];\n\n let dy = p[1] - WITHIN_CENTER[1];\n\n dx * dx + dy * dy <= WITHIN_RADIUS * WITHIN_RADIUS\n\n}\n\n\n", "file_path": "rooms/tests/data.rs", "rank": 0, "score": 279974.47262647696 }, { "co...
Rust
src/main.rs
arlicle/panda-api-install
4adefe942fe58770f617e8331475eebc38217cea
use std::fs::{self, DirEntry, File, OpenOptions}; use std::io::{self, BufReader, Read, Write, Error}; use std::path::Path; use std::process::Command; use fs_extra::dir::{self, copy}; use fs_extra::{copy_items, remove_items}; #[cfg(windows)] use winapi; #[cfg(windows)] use winreg::enums::*; #[cfg(windows)] use winreg::{self, RegKey}; fn main() { pretty_env_logger::init(); let current_exe = &std::env::current_exe().unwrap(); let current_exe = Path::new(current_exe); let path = current_exe.parent().unwrap(); let current_dir = path.to_str().unwrap(); let split_s = if cfg!(target_os = "windows") { r"\" } else { "/" }; let home_dir = dirs::home_dir().unwrap(); let home_dir = home_dir.to_str().unwrap().trim_end_matches(split_s); let mut panda_dir_string = format!("{1}{0}.panda_api{0}", split_s, home_dir); let panda_dir = Path::new(&panda_dir_string); let panda_dir_string = panda_dir.to_str().unwrap(); if panda_dir.exists() { let mut from_paths = vec![&panda_dir_string]; let _r = remove_items(&from_paths); } match std::fs::create_dir_all(&panda_dir_string) { Ok(_) => (), Err(e) => { println!("create folder failed {} {:?}", &panda_dir_string, e); } } let options = dir::CopyOptions::new(); let install_files = if cfg!(target_os = "windows") { ["panda.exe", "theme"] } else { ["panda", "theme"] }; let mut from_paths: Vec<String> = Vec::new(); for file in &install_files { from_paths.push(format!("{1}{0}Contents{0}{2}", split_s, current_dir, file)); } match copy_items(&from_paths, &panda_dir_string, &options) { Ok(r) => { println!("Copy files done."); } Err(e) => { println!("Copy files failed, install failed"); log::error!("Copy files failed, install failed"); return; } } let success_msg = "Congratulations!\nPanda api install done!\nYou can run pana command in your api docs folder now."; if cfg!(target_os = "windows") { #[cfg(windows)] { let hklm = RegKey::predef(HKEY_CURRENT_USER); let cur_ver = hklm.open_subkey("Environment").unwrap_or_else(|e| match e.kind() { 
io::ErrorKind::NotFound => panic!("Key doesn't exist"), io::ErrorKind::PermissionDenied => panic!("Access denied"), _ => panic!("{:?}", e), }); let (reg_key, disp) = hklm.create_subkey("Environment").unwrap(); let user_envs: String = if let Ok(p) = cur_ver.get_value("Path") { p } else { "".to_string() }; let mut user_envs = user_envs.trim().trim_end_matches(";"); let panda_dir_string = panda_dir_string.trim_end_matches(split_s); if user_envs.contains(panda_dir_string) { } else { let s = format!("{};{};", user_envs, panda_dir_string); match reg_key.set_value("Path", &s) { Ok(r) => { println!("reg ok"); } Err(e) => { println!("reg failed"); } } } } } else { let output = Command::new("sh") .arg("-c") .arg("echo $SHELL") .output() .expect("failed to execute process"); let shell_name = String::from_utf8(output.stdout).unwrap(); let shell_name = shell_name .trim() .trim_start_matches("/") .trim_start_matches("/"); let mut profile_name = "".to_string(); let shell_name_info: Vec<&str> = shell_name.split("/").collect(); if let Some(shell_name) = shell_name_info.last() { profile_name = format!(".{}rc", shell_name); } let profile_filepath_string = format!("{}/{}", home_dir, profile_name); let profile_filepath = Path::new(&profile_filepath_string); let profile_content = r#"export PATH="$HOME/.panda_api:$PATH""#; let mut has_profile_content = false; if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { has_profile_content = true; } } if !has_profile_content { let mut file_options = OpenOptions::new() .read(true) .write(true) .create(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { panic!("{:?}", e); } } } 
let profile_list = [".zshrc", ".bashrc", ".cshrc"]; for profile_file in &profile_list { let profile_filepath_string = format!("{}/{}", home_dir, profile_file); let profile_filepath = Path::new(&profile_filepath_string); if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { continue; } else { let mut file_options = OpenOptions::new() .read(true) .write(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { println!("不存在 {} {:?}", profile_filepath_string, e); continue; } } } } } } println!("{}", success_msg); } fn fix_filepath(filepath: String) -> String { filepath .replace("(", r"\(") .replace(")", r"\)") .replace(" ", r"\ ") }
use std::fs::{self, DirEntry, File, OpenOptions}; use std::io::{self, BufReader, Read, Write, Error}; use std::path::Path; use std::process::Command; use fs_extra::dir::
; if panda_dir.exists() { let mut from_paths = vec![&panda_dir_string]; let _r = remove_items(&from_paths); } match std::fs::create_dir_all(&panda_dir_string) { Ok(_) => (), Err(e) => { println!("create folder failed {} {:?}", &panda_dir_string, e); } } let options = dir::CopyOptions::new(); let install_files = if cfg!(target_os = "windows") { ["panda.exe", "theme"] } else { ["panda", "theme"] }; let mut from_paths: Vec<String> = Vec::new(); for file in &install_files { from_paths.push(format!("{1}{0}Contents{0}{2}", split_s, current_dir, file)); } match copy_items(&from_paths, &panda_dir_string, &options) { Ok(r) => { println!("Copy files done."); } Err(e) => { println!("Copy files failed, install failed"); log::error!("Copy files failed, install failed"); return; } } let success_msg = "Congratulations!\nPanda api install done!\nYou can run pana command in your api docs folder now."; if cfg!(target_os = "windows") { #[cfg(windows)] { let hklm = RegKey::predef(HKEY_CURRENT_USER); let cur_ver = hklm.open_subkey("Environment").unwrap_or_else(|e| match e.kind() { io::ErrorKind::NotFound => panic!("Key doesn't exist"), io::ErrorKind::PermissionDenied => panic!("Access denied"), _ => panic!("{:?}", e), }); let (reg_key, disp) = hklm.create_subkey("Environment").unwrap(); let user_envs: String = if let Ok(p) = cur_ver.get_value("Path") { p } else { "".to_string() }; let mut user_envs = user_envs.trim().trim_end_matches(";"); let panda_dir_string = panda_dir_string.trim_end_matches(split_s); if user_envs.contains(panda_dir_string) { } else { let s = format!("{};{};", user_envs, panda_dir_string); match reg_key.set_value("Path", &s) { Ok(r) => { println!("reg ok"); } Err(e) => { println!("reg failed"); } } } } } else { let output = Command::new("sh") .arg("-c") .arg("echo $SHELL") .output() .expect("failed to execute process"); let shell_name = String::from_utf8(output.stdout).unwrap(); let shell_name = shell_name .trim() .trim_start_matches("/") .trim_start_matches("/"); 
let mut profile_name = "".to_string(); let shell_name_info: Vec<&str> = shell_name.split("/").collect(); if let Some(shell_name) = shell_name_info.last() { profile_name = format!(".{}rc", shell_name); } let profile_filepath_string = format!("{}/{}", home_dir, profile_name); let profile_filepath = Path::new(&profile_filepath_string); let profile_content = r#"export PATH="$HOME/.panda_api:$PATH""#; let mut has_profile_content = false; if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { has_profile_content = true; } } if !has_profile_content { let mut file_options = OpenOptions::new() .read(true) .write(true) .create(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { panic!("{:?}", e); } } } let profile_list = [".zshrc", ".bashrc", ".cshrc"]; for profile_file in &profile_list { let profile_filepath_string = format!("{}/{}", home_dir, profile_file); let profile_filepath = Path::new(&profile_filepath_string); if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { continue; } else { let mut file_options = OpenOptions::new() .read(true) .write(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { println!("不存在 {} {:?}", profile_filepath_string, e); continue; } } } } } } println!("{}", success_msg); } fn fix_filepath(filepath: String) 
-> String { filepath .replace("(", r"\(") .replace(")", r"\)") .replace(" ", r"\ ") }
{self, copy}; use fs_extra::{copy_items, remove_items}; #[cfg(windows)] use winapi; #[cfg(windows)] use winreg::enums::*; #[cfg(windows)] use winreg::{self, RegKey}; fn main() { pretty_env_logger::init(); let current_exe = &std::env::current_exe().unwrap(); let current_exe = Path::new(current_exe); let path = current_exe.parent().unwrap(); let current_dir = path.to_str().unwrap(); let split_s = if cfg!(target_os = "windows") { r"\" } else { "/" }; let home_dir = dirs::home_dir().unwrap(); let home_dir = home_dir.to_str().unwrap().trim_end_matches(split_s); let mut panda_dir_string = format!("{1}{0}.panda_api{0}", split_s, home_dir); let panda_dir = Path::new(&panda_dir_string); let panda_dir_string = panda_dir.to_str().unwrap()
random
[]
Rust
rust/src/bin/frontend.rs
emwalker/digraffe
0ea46c938155204739cae2ddc4c668b7d7d85acd
use actix_web::{guard, post, web, App, HttpRequest, HttpResponse, HttpServer}; use async_graphql::extensions; use async_graphql::http::{playground_source, GraphQLPlaygroundConfig}; use async_graphql::EmptySubscription; use async_graphql_actix_web::{GraphQLRequest, GraphQLResponse}; use std::env; use digraph::config::Config; use digraph::db; use digraph::prelude::*; use digraph::schema::{MutationRoot, QueryRoot, Schema, State}; struct AuthHeader(String); impl AuthHeader { fn decode(&self) -> Result<(String, String)> { let encoded = self.0.split(' ').last().unwrap_or_default(); let decoded = base64::decode(&encoded)?; let decoded = String::from_utf8_lossy(&decoded); let parts = decoded .split(':') .map(str::to_string) .collect::<Vec<String>>(); if parts.len() != 2 { return Err(Error::Auth(format!("unexpected message: {}", self.0))); } Ok((parts[0].clone(), parts[1].clone())) } } fn user_id_from_header(req: HttpRequest) -> Option<(String, String)> { match req.headers().get("authorization") { Some(value) => match value.to_str() { Ok(value) => match AuthHeader(value.into()).decode() { Ok((user_id, session_id)) => { log::info!("user and session id found in auth header: {}", user_id); Some((user_id, session_id)) } Err(err) => { log::info!("failed to decode auth header, proceeding as guest: {}", err); None } }, Err(err) => { log::warn!("problem fetching authorization header value: {}", err); None } }, None => { log::warn!("no authorization header, proceeding as guest"); None } } } #[post("/graphql")] async fn index( state: web::Data<State>, req: GraphQLRequest, http_req: HttpRequest, ) -> GraphQLResponse { let user_info = user_id_from_header(http_req); let viewer = state.authenticate(user_info).await; let repo = state.create_repo(viewer); state .schema .execute(req.into_inner().data(repo)) .await .into() } async fn index_playground() -> Result<HttpResponse> { Ok(HttpResponse::Ok() .content_type("text/html; charset=utf-8") .body(playground_source( 
GraphQLPlaygroundConfig::new("/graphql").subscription_endpoint("/graphql"), ))) } #[actix_web::main] async fn main() -> async_graphql::Result<()> { let config = Config::load()?; env_logger::init(); let pool = db::db_connection(&config).await?; sqlx::migrate!("db/migrations").run(&pool).await?; let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription) .extension(extensions::Logger) .finish(); let state = State::new(pool, schema, config.digraph_server_secret); let socket = env::var("LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_owned()); println!("Playground: http://localhost:8080"); HttpServer::new(move || { App::new() .app_data(web::Data::new(state.clone())) .service(index) .service( web::resource("/graphql") .guard(guard::Get()) .to(index_playground), ) .service(web::resource("/").guard(guard::Get()).to(index_playground)) }) .bind(socket)? .run() .await?; Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_auth_header_parsing() { let auth = AuthHeader("Bearer NDYxYzg3YzgtZmI4Zi0xMWU4LTljYmMtYWZkZTZjNTRkODgxOmFiM2Q1MTYwYWFlNjMyYTUxNzNjMDVmOGNiMGVmMDg2ODY2ZGFkMTAzNTE3ZGQwMTRmMzhhNWIxY2E2OWI5YWE=".into()); let (user_id, session_id) = auth.decode().unwrap(); assert_eq!(user_id, "461c87c8-fb8f-11e8-9cbc-afde6c54d881"); assert_eq!( session_id, "ab3d5160aae632a5173c05f8cb0ef086866dad103517dd014f38a5b1ca69b9aa" ); } }
use actix_web::{guard, post, web, App, HttpRequest, HttpResponse, HttpServer}; use async_graphql::extensions; use async_graphql::http::{playground_source, GraphQLPlaygroundConfig}; use async_graphql::EmptySubscription; use async_graphql_actix_web::{GraphQLRequest, GraphQLResponse}; use std::env; use digraph::config::Config; use digraph::db; use digraph::prelude::*; use digraph::schema::{MutationRoot, QueryRoot, Schema, State}; struct AuthHeader(String); impl AuthHeader { fn decode(&self) -> Result<(String, String)> { let encoded = self.0.split(' ').last().unwrap_or_default(); let decoded = base64::decode(&encoded)?; let decoded = String::from_utf8_lossy(&decoded); let parts = decoded .split(':') .map(str::to_string) .collect::<Vec<String>>(); if parts.len() != 2 { return Err(Error::Auth(format!("unexpected message: {}", self.0))); } Ok((parts[0].clone(), parts[1].clone())) } } fn user_id_from_header(req: HttpRequest) -> Option<(String, String)> { match req.headers().get("authorization") { Some(value) => match value.to_str() { Ok(value) => match AuthHeader(value.into()).decode() { Ok((user_id, session_id)) => { log::info!("user and session id found in auth header: {}", user_id); Some((user_id, session_id)) } Err(err) => { log::info!("failed to decode auth header, proceeding as guest: {}", err); None } }, Err(err) => { log::warn!("problem fetching authorization header value: {}", err); None } }, None => { log::warn!("no authorization header, proceeding as guest"); None } } } #[post("/graphql")] async fn index( state: web::Data<State>, req: GraphQLRequest, http_req: HttpRequest, ) -> GraphQLResponse { let user_info = user_id_from_header(http_req); let viewer = state.authenticate(user_info).await; let repo = state.create_repo(viewer); state .schema .execute(req.into_inner().data(repo)) .await .into() } async fn index_playground() -> Result<HttpResponse> { Ok(HttpResponse::Ok() .content_type("text/html; charset=utf-8") .body(playground_source( 
GraphQLPlaygroundConfig::new("/graphql").subscription_endpoint("/graphql"), ))) } #[actix_web::main] async fn main() -> async_graphql::Result<()> { let config = Config::load()?; env_logger::init(); let pool = db::db_connection(&config).await?; sqlx::migrate!("db/migrations").run(&pool).await?; let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription) .exte
80"); HttpServer::new(move || { App::new() .app_data(web::Data::new(state.clone())) .service(index) .service( web::resource("/graphql") .guard(guard::Get()) .to(index_playground), ) .service(web::resource("/").guard(guard::Get()).to(index_playground)) }) .bind(socket)? .run() .await?; Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_auth_header_parsing() { let auth = AuthHeader("Bearer NDYxYzg3YzgtZmI4Zi0xMWU4LTljYmMtYWZkZTZjNTRkODgxOmFiM2Q1MTYwYWFlNjMyYTUxNzNjMDVmOGNiMGVmMDg2ODY2ZGFkMTAzNTE3ZGQwMTRmMzhhNWIxY2E2OWI5YWE=".into()); let (user_id, session_id) = auth.decode().unwrap(); assert_eq!(user_id, "461c87c8-fb8f-11e8-9cbc-afde6c54d881"); assert_eq!( session_id, "ab3d5160aae632a5173c05f8cb0ef086866dad103517dd014f38a5b1ca69b9aa" ); } }
nsion(extensions::Logger) .finish(); let state = State::new(pool, schema, config.digraph_server_secret); let socket = env::var("LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_owned()); println!("Playground: http://localhost:80
function_block-random_span
[ { "content": "fn sha1_digest(normalized: &[u8]) -> String {\n\n let hash = Sha1::digest(normalized);\n\n format!(\"{:x}\", hash)\n\n}\n\n\n", "file_path": "rust/src/http/repo_url.rs", "rank": 0, "score": 157224.1471751658 }, { "content": "pub fn warning(text: String) -> Alert {\n\n ...
Rust
src/wayland/gamma_control.rs
EdwardBetts/way-cooler
9d0e5d8137dbe2b4e6c7c3c1b899ab3e87cb6da0
use wayland::gamma_control::generated ::server::gamma_control::GammaControl; use wayland::gamma_control::generated ::server::gamma_control_manager::GammaControlManager; use rustwlc::wayland; use rustwlc::handle::{wlc_handle_from_wl_output_resource, WlcOutput}; use rustwlc::render::{wlc_output_set_gamma, wlc_output_get_gamma_size}; use wayland_server::Resource; use wayland_sys::common::{wl_array}; use wayland_sys::server::{WAYLAND_SERVER_HANDLE, wl_client, wl_resource}; use std::os::raw::c_void; use nix::libc::{c_int, c_uint, uint32_t, uint16_t}; static SET_GAMMA_ERROR: &'static str = "The gamma ramps don't have the same size!"; static INVALID_GAMMA_CODE: u32 = 0; static mut GAMMA_CONTROL_MANAGER: GammaControlManagerInterface = GammaControlManagerInterface { destroy: destroy, get_gamma_control: get_gamma_control }; static mut GAMMA_CONTROL: GammaControlInterface = GammaControlInterface { destroy: destroy, set_gamma: set_gamma, reset_gamma: reset_gamma }; mod generated { #![allow(dead_code,non_camel_case_types,unused_unsafe,unused_variables)] #![allow(non_upper_case_globals,non_snake_case,unused_imports)] pub mod interfaces { #[doc(hidden)] pub use wayland_server::protocol_interfaces::{wl_output_interface}; include!(concat!(env!("OUT_DIR"), "/gamma-control_interface.rs")); } pub mod server { #[doc(hidden)] pub use wayland_server::{Resource, Client, Liveness, Implementable, EventLoopHandle, EventResult}; #[doc(hidden)] pub use wayland_server::protocol::{wl_output}; #[doc(hidden)] pub use super::interfaces; include!(concat!(env!("OUT_DIR"), "/gamma-control_api.rs")); } } #[repr(C)] struct GammaControlManagerInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), get_gamma_control: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, id: u32, output: *mut wl_resource) } #[repr(C)] struct GammaControlInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), set_gamma: 
unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array), reset_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource) } unsafe extern "C" fn set_gamma(_client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array) { info!("Setting gamma"); if (*red).size != (*green).size || (*red).size != (*blue).size { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_post_error, resource, INVALID_GAMMA_CODE, SET_GAMMA_ERROR.as_bytes().as_ptr() as *const i8); warn!("Color size error, can't continue"); return } let r = (*red).data as *mut u16; let g = (*green).data as *mut u16; let b = (*blue).data as *mut u16; let user_data = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_get_user_data, resource) as *const _; let output = WlcOutput(wlc_handle_from_wl_output_resource(user_data)); if output.is_null() { warn!("wl_resource didn't correspond to a wlc output"); return; } wlc_output_set_gamma(output.0, ((*red).size / 2) as u16, r, g, b) } unsafe extern "C" fn reset_gamma(_client: *mut wl_client, _resource: *mut wl_resource) { info!("Resetting gamma"); } unsafe extern "C" fn destroy(_client: *mut wl_client, resource: *mut wl_resource) { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_destroy, resource ); } unsafe extern "C" fn get_gamma_control(client: *mut wl_client, _resource: *mut wl_resource, id: uint32_t, output: *mut wl_resource) { info!("Request received for control of the gamma ramps"); let manager_resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControl::interface_ptr(), GammaControl::supported_version() as i32, id); let wlc_output = WlcOutput(wlc_handle_from_wl_output_resource(output as *const _)); if wlc_output.is_null() { warn!("This is triggering, dis bad?"); return; } info!("Client requested control of the gamma ramps for {:?}", wlc_output); let gamma_control_ptr = &mut 
GAMMA_CONTROL as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, manager_resource, gamma_control_ptr, output as *mut c_void, None ); info!("Request granted for gamma ramp control of {:?}", wlc_output); gamma_control_send_gamma_size(manager_resource, wlc_output_get_gamma_size(wlc_output.0)) } unsafe extern "C" fn bind(client: *mut wl_client, _data: *mut c_void, version: u32, id: u32) { info!("Binding Gamma Control resource"); let cur_version = GammaControlManager::supported_version(); if version > cur_version { warn!("Unsupported gamma control protocol version {}!", version); warn!("We only support version {}", cur_version); return } let resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControlManager::interface_ptr(), version as c_int, id ); if resource.is_null() { warn!("Out of memory, could not make a new wl_resource \ for gamma control"); ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_client_post_no_memory, client ); } let global_manager_ptr = &mut GAMMA_CONTROL_MANAGER as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, resource, global_manager_ptr, ::std::ptr::null_mut(), None ); } unsafe extern "C" fn gamma_control_send_gamma_size(resource: *mut wl_resource, size: uint16_t) { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_resource_post_event, resource, 0, size as c_uint); } pub fn init() { let w_display = wayland::get_display(); unsafe { info!("Initializing gamma control manager"); ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_global_create, w_display as *mut _, GammaControlManager::interface_ptr(), GammaControlManager::supported_version() as i32, ::std::ptr::null_mut(), bind ); } }
use wayland::gamma_control::generated ::server::gamma_control::GammaControl; use wayland::gamma_control::generated ::server::gamma_control_manager::GammaControlManager; use rustwlc::wayland; use rustwlc::handle::{wlc_handle_from_wl_output_resource, WlcOutput}; use rustwlc::render::{wlc_output_set_gamma, wlc_output_get_gamma_size}; use wayland_server::Resource; use wayland_sys::common::{wl_array}; use wayland_sys::server::{WAYLAND_SERVER_HANDLE, wl_client, wl_resource}; use std::os::raw::c_void; use nix::libc::{c_int, c_uint, uint32_t, uint16_t}; static SET_GAMMA_ERROR: &'static str = "The gamma ramps don't have the same size!"; static INVALID_GAMMA_CODE: u32 = 0; static mut GAMMA_CONTROL_MANAGER: GammaControlManagerInterface = GammaControlManagerInterface { destroy: destroy, get_gamma_control: get_gamma_control }; static mut GAMMA_CONTROL: GammaControlInterface = GammaControlInterface { destroy: destroy, set_gamma: set_gamma, reset_gamma: reset_gamma }; mod generated { #![allow(dead_code,non_camel_case_types,unused_unsafe,unused_variables)] #![allow(non_upper_case_globals,non_snake_case,unused_imports)] pub mod interfaces { #[doc(hidden)] pub use wayland_server::protocol_interfaces::{wl_output_interface}; include!(concat!(env!("OUT_DIR"), "/gamma-control_interface.rs")); } pub mod server { #[doc(hidden)] pub use wayland_server::{Resource, Client, Liveness, Implementable, EventLoopHandle, EventResult}; #[doc(hidden)] pub use wayland_server::protocol::{wl_output}; #[doc(hidden)] pub use super::interfaces; include!(concat!(env!("OUT_DIR"), "/gamma-control_api.rs")); } } #[repr(C)] struct GammaControlManagerInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), get_gamma_control: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, id: u32, output: *mut wl_resource) } #[repr(C)] struct GammaControlInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), set_gamma: 
unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array), reset_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource) } unsafe extern "C" fn set_gamma(_client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array) { info!("Setting gamma"); if (*red).size != (*green).size || (*red).size != (*blue).size { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_post_error, resource, INVALID_GAMMA_CODE, SET_GAMMA_ERROR.as_bytes().as_ptr() as *const i8); warn!("Color size error, can't continue"); return } let r = (*red).data as *mut u16; let g = (*green).data as *mut u16; let b = (*blue).data as *mut u16; let user_data = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_get_user_data, resource) as *const _; let output = WlcOutput(wlc_handle_from_wl_output_resource(user_data)); if output.is_null() { warn!("wl_resource didn't correspond to a wlc output"); return; } wlc_output_set_gamma(output.0, ((*red).size / 2) as u16, r, g, b) } unsafe extern "C" fn reset_gamma(_client: *mut wl_client, _resource: *mut wl_resource) { info!("Resetting gamma"); } unsafe extern "C" fn destroy(_client: *mut wl_client,
unsafe extern "C" fn get_gamma_control(client: *mut wl_client, _resource: *mut wl_resource, id: uint32_t, output: *mut wl_resource) { info!("Request received for control of the gamma ramps"); let manager_resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControl::interface_ptr(), GammaControl::supported_version() as i32, id); let wlc_output = WlcOutput(wlc_handle_from_wl_output_resource(output as *const _)); if wlc_output.is_null() { warn!("This is triggering, dis bad?"); return; } info!("Client requested control of the gamma ramps for {:?}", wlc_output); let gamma_control_ptr = &mut GAMMA_CONTROL as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, manager_resource, gamma_control_ptr, output as *mut c_void, None ); info!("Request granted for gamma ramp control of {:?}", wlc_output); gamma_control_send_gamma_size(manager_resource, wlc_output_get_gamma_size(wlc_output.0)) } unsafe extern "C" fn bind(client: *mut wl_client, _data: *mut c_void, version: u32, id: u32) { info!("Binding Gamma Control resource"); let cur_version = GammaControlManager::supported_version(); if version > cur_version { warn!("Unsupported gamma control protocol version {}!", version); warn!("We only support version {}", cur_version); return } let resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControlManager::interface_ptr(), version as c_int, id ); if resource.is_null() { warn!("Out of memory, could not make a new wl_resource \ for gamma control"); ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_client_post_no_memory, client ); } let global_manager_ptr = &mut GAMMA_CONTROL_MANAGER as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, resource, global_manager_ptr, ::std::ptr::null_mut(), None ); } unsafe extern "C" fn gamma_control_send_gamma_size(resource: *mut wl_resource, size: uint16_t) { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_resource_post_event, 
resource, 0, size as c_uint); } pub fn init() { let w_display = wayland::get_display(); unsafe { info!("Initializing gamma control manager"); ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_global_create, w_display as *mut _, GammaControlManager::interface_ptr(), GammaControlManager::supported_version() as i32, ::std::ptr::null_mut(), bind ); } }
resource: *mut wl_resource) { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_destroy, resource ); }
function_block-function_prefix_line
[ { "content": "pub fn lock_screen(client: *mut wl_client, output: WlcOutput) {\n\n let mut mode = write_current_mode();\n\n {\n\n match *mode {\n\n Modes::LockScreen(ref mut lock_mode) => {\n\n lock_mode.clients.push((client as _, output, None));\n\n return\n...
Rust
src/lib.rs
LaBatata101/grep_bin
89d41dfbcd4dc9b8d7e174bfe802971ab24eaebf
use clap::{values_t, App, AppSettings, Arg, ArgMatches}; use std::process; use std::{env, path::PathBuf}; use ansi_term::Colour; mod utils; use utils::{file, search}; use crate::utils::{print_hexdump_output, PatternType}; pub fn setup_args<'a>() -> ArgMatches<'a> { let integer_validator = |value: String| match value.parse::<usize>() { Ok(_) => Ok(()), Err(_) => Err(String::from("the value needs to be a valid integer")), }; App::new("grep_bin") .version(clap::crate_version!()) .author(clap::crate_authors!()) .long_about(clap::crate_description!()) .arg( Arg::with_name("FILE") .index(1) .required(true) .multiple(true) .empty_values(false) .help("The file path"), ) .arg( Arg::with_name("PATTERN") .index(2) .required(true) .empty_values(false) .long_help( "Can be a ascii string or a byte sequence. Ascii strings should be passed inside quotes like so '\"This is a string\"' Escaping quotes '\"This is a \\\"quote\\\"\"' All of these byte sequence are valid: f9b4ca, F9B4CA and f9B4Ca", ), ) .arg( Arg::with_name("filetype") .short("f") .multiple(true) .takes_value(true) .empty_values(false) .long_help( "Filter the search by the file extensions. 
Examples of input: jpg, mp3, exe", ), ) .arg( Arg::with_name("context_bytes_size") .short("c") .default_value("16") .validator(integer_validator) .long_help("Defines the number of bytes that will be printed in each line."), ) .arg( Arg::with_name("print_only") .short("p") .long("print-only") .help("Prints only the file name that contais the match."), ) .arg( Arg::with_name("skip_bytes") .short("s") .long("skip-bytes") .default_value("0") .takes_value(true) .validator(integer_validator) .help("Skip n bytes before searching."), ) .settings(&[AppSettings::ArgRequiredElseHelp, AppSettings::ColoredHelp]) .get_matches() } pub fn run(args: ArgMatches) { let filetypes: Vec<&str> = args.values_of("filetype").unwrap_or_default().collect(); let filepaths = values_t!(args, "FILE", PathBuf).unwrap(); let files: Vec<PathBuf> = if filetypes.is_empty() { file::get_all_files_from_paths(filepaths) } else { file::filter_filetypes(file::get_all_files_from_paths(filepaths), &filetypes) }; let pattern: Vec<u8> = match PatternType::from(args.value_of("PATTERN").unwrap()) { PatternType::Str(pattern) => pattern.into_bytes(), PatternType::HexStr(pattern) => hex::decode(pattern).unwrap_or_else(|error| { eprintln!("Error: {} in byte sequence!", error); process::exit(1); }), }; let context_bytes_size: usize = args .value_of("context_bytes_size") .unwrap() .parse() .unwrap(); let skip_bytes: u64 = args.value_of("skip_bytes").unwrap().parse().unwrap(); for filename in files { let mut searcher = search::Searcher::new(&pattern, context_bytes_size, skip_bytes); let filename = filename.to_str().unwrap(); searcher.search_in_file(filename).unwrap_or_else(|error| { eprintln!("{}: {}", filename, error); process::exit(1); }); let result = searcher.result(); if !result.is_empty() { println!("{}", Colour::Purple.paint(filename)); } if !args.is_present("print_only") { print_hexdump_output(result, searcher.context_bytes_size()); } } }
use clap::{values_t, App, AppSettings, Arg, ArgMatches}; use std::process; use std::{env, path::PathBuf}; use ansi_term::Colour; mod utils; use utils::{file, search}; use crate::utils::{print_hexdump_output, PatternType}; pub fn setup_args<'a>() -> ArgMatches<'a> { let integer_validator = |value: String| match value.parse::<usize>() { Ok(_) => Ok(()), Err(_) => Err(String::from("the value needs to be a valid integer")), }; App::new("grep_bin") .version(clap::crate_version!()) .author(clap::crate_authors!()) .long_about(clap::crate_description!()) .arg( Arg::with_name("FILE") .index(1) .required(true) .multiple(true)
ne."), ) .arg( Arg::with_name("print_only") .short("p") .long("print-only") .help("Prints only the file name that contais the match."), ) .arg( Arg::with_name("skip_bytes") .short("s") .long("skip-bytes") .default_value("0") .takes_value(true) .validator(integer_validator) .help("Skip n bytes before searching."), ) .settings(&[AppSettings::ArgRequiredElseHelp, AppSettings::ColoredHelp]) .get_matches() } pub fn run(args: ArgMatches) { let filetypes: Vec<&str> = args.values_of("filetype").unwrap_or_default().collect(); let filepaths = values_t!(args, "FILE", PathBuf).unwrap(); let files: Vec<PathBuf> = if filetypes.is_empty() { file::get_all_files_from_paths(filepaths) } else { file::filter_filetypes(file::get_all_files_from_paths(filepaths), &filetypes) }; let pattern: Vec<u8> = match PatternType::from(args.value_of("PATTERN").unwrap()) { PatternType::Str(pattern) => pattern.into_bytes(), PatternType::HexStr(pattern) => hex::decode(pattern).unwrap_or_else(|error| { eprintln!("Error: {} in byte sequence!", error); process::exit(1); }), }; let context_bytes_size: usize = args .value_of("context_bytes_size") .unwrap() .parse() .unwrap(); let skip_bytes: u64 = args.value_of("skip_bytes").unwrap().parse().unwrap(); for filename in files { let mut searcher = search::Searcher::new(&pattern, context_bytes_size, skip_bytes); let filename = filename.to_str().unwrap(); searcher.search_in_file(filename).unwrap_or_else(|error| { eprintln!("{}: {}", filename, error); process::exit(1); }); let result = searcher.result(); if !result.is_empty() { println!("{}", Colour::Purple.paint(filename)); } if !args.is_present("print_only") { print_hexdump_output(result, searcher.context_bytes_size()); } } }
.empty_values(false) .help("The file path"), ) .arg( Arg::with_name("PATTERN") .index(2) .required(true) .empty_values(false) .long_help( "Can be a ascii string or a byte sequence. Ascii strings should be passed inside quotes like so '\"This is a string\"' Escaping quotes '\"This is a \\\"quote\\\"\"' All of these byte sequence are valid: f9b4ca, F9B4CA and f9B4Ca", ), ) .arg( Arg::with_name("filetype") .short("f") .multiple(true) .takes_value(true) .empty_values(false) .long_help( "Filter the search by the file extensions. Examples of input: jpg, mp3, exe", ), ) .arg( Arg::with_name("context_bytes_size") .short("c") .default_value("16") .validator(integer_validator) .long_help("Defines the number of bytes that will be printed in each li
random
[ { "content": "pub fn print_hexdump_output(matches: &Matches, bytes_per_line: usize) {\n\n let mut ascii_repr = Vec::new();\n\n\n\n for range in matches.context_bytes_indexes() {\n\n let offset = range.range().start;\n\n print!(\n\n \"{}: \",\n\n Colour::Green.paint(for...
Rust
examples/demo/main.rs
cohaereo/egui_glfw_gl
c2d244eaecffc06d6010d6aa6193c24757e31a23
use egui_glfw_gl as egui_backend; use std::time::Instant; use egui_backend::egui::{vec2, Color32, Image, Pos2, Rect}; use egui_glfw_gl::glfw::{Context, Key}; use glfw::Action; const SCREEN_WIDTH: u32 = 800; const SCREEN_HEIGHT: u32 = 600; const PIC_WIDTH: i32 = 320; const PIC_HEIGHT: i32 = 192; mod triangle; fn main() { let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap(); glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2)); glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core)); glfw.window_hint(glfw::WindowHint::DoubleBuffer(true)); glfw.window_hint(glfw::WindowHint::Resizable(false)); let (mut window, events) = glfw.create_window(SCREEN_WIDTH, SCREEN_HEIGHT, "Egui in GLFW!", glfw::WindowMode::Windowed) .expect("Failed to create GLFW window."); window.set_char_polling(true); window.set_cursor_pos_polling(true); window.set_key_polling(true); window.set_mouse_button_polling(true); window.make_current(); glfw.set_swap_interval(glfw::SwapInterval::Sync(1)); gl::load_with(|symbol| window.get_proc_address(symbol) as *const _); let mut painter = egui_backend::Painter::new(&mut window, SCREEN_WIDTH, SCREEN_HEIGHT); let mut egui_ctx = egui::CtxRef::default(); let (width, height) = window.get_framebuffer_size(); let native_pixels_per_point = window.get_content_scale().0; let mut egui_input_state = egui_backend::EguiInputState::new(egui::RawInput { screen_rect: Some(Rect::from_min_size( Pos2::new(0f32, 0f32), vec2(width as f32, height as f32) / native_pixels_per_point, )), pixels_per_point: Some(native_pixels_per_point), ..Default::default() }); let start_time = Instant::now(); let mut srgba: Vec<Color32> = Vec::new(); for _ in 0..PIC_HEIGHT { for _ in 0..PIC_WIDTH { srgba.push(Color32::BLACK); } } let plot_tex_id = painter.new_user_texture((PIC_WIDTH as usize, PIC_HEIGHT as usize), &srgba, false); let mut sine_shift = 0f32; let mut amplitude: f32 = 50f32; let mut test_str: String = "A text box to write in. 
Cut, copy, paste commands are available.".to_owned(); let triangle = triangle::Triangle::new(); let mut quit = false; while !window.should_close() { egui_input_state.input.time = Some(start_time.elapsed().as_secs_f64()); egui_ctx.begin_frame(egui_input_state.input.take()); egui_input_state.input.pixels_per_point = Some(native_pixels_per_point); unsafe { gl::ClearColor(0.455, 0.302, 0.663, 1.0); gl::Clear(gl::COLOR_BUFFER_BIT); } triangle.draw(); let mut srgba: Vec<Color32> = Vec::new(); let mut angle = 0f32; for y in 0..PIC_HEIGHT { for x in 0..PIC_WIDTH { srgba.push(Color32::BLACK); if y == PIC_HEIGHT - 1 { let y = amplitude * (angle * 3.142f32 / 180f32 + sine_shift).sin(); let y = PIC_HEIGHT as f32 / 2f32 - y; srgba[(y as i32 * PIC_WIDTH + x) as usize] = Color32::YELLOW; angle += 360f32 / PIC_WIDTH as f32; } } } sine_shift += 0.1f32; painter.update_user_texture_data(plot_tex_id, &srgba); egui::Window::new("Egui with GLFW").show(&egui_ctx, |ui| { ui.add(Image::new(plot_tex_id, vec2(PIC_WIDTH as f32, PIC_HEIGHT as f32))); ui.separator(); ui.label("A simple sine wave plotted onto a GL texture then blitted to an egui managed Image."); ui.label(" "); ui.text_edit_multiline(&mut test_str); ui.label(" "); ui.add(egui::Slider::new(&mut amplitude, 0.0..=50.0).text("Amplitude")); ui.label(" "); if ui.button("Quit").clicked() { quit = true; } }); let (egui_output, paint_cmds) = egui_ctx.end_frame(); if !egui_output.copied_text.is_empty() { egui_backend::copy_to_clipboard(&mut egui_input_state, egui_output.copied_text); } let paint_jobs = egui_ctx.tessellate(paint_cmds); painter.paint_jobs( None, paint_jobs, &egui_ctx.texture(), native_pixels_per_point, ); for (_, event) in glfw::flush_messages(&events) { match event { glfw::WindowEvent::Close => window.set_should_close(true), _ => { egui_backend::handle_event(event, &mut egui_input_state); } } } window.swap_buffers(); glfw.poll_events(); if quit { break; } } }
use egui_glfw_gl as egui_backend; use std::time::Instant; use egui_backend::egui::{vec2, Color32, Image, Pos2, Rect}; use egui_glfw_gl::glfw::{Context, Key}; use glfw::Action; const SCREEN_WIDTH: u32 = 800; const SCREEN_HEIGHT: u32 = 600; const PIC_WIDTH: i32 = 320; const PIC_HEIGHT: i32 = 192; mod triangle; fn main() { let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap(); glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2)); glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core)); glfw.window_hint(glfw::WindowHint::DoubleBuffer(true)); glfw.window_hint(glfw::WindowHint::Resizable(false)); let (mut window, events) = glfw.create_window(SCREEN_WIDTH, SCREEN_HEIGHT, "Egui in GLFW!", glfw::WindowMode::Windowed) .expect("Failed to create GLFW window."); window.set_char_polling(true); window.set_cursor_pos_polling(true); window.set_key_polling(true); window.set_mouse_button_polling(true); window.make_current(); glfw.set_swap_interval(glfw::SwapInterval::Sync(1)); gl::load_with(|symbol| window.get_proc_address(symbol) as *const _); let mut painter = egui_backend::Painter::new(&mut window, SCREEN_WIDTH, SCREEN_HEIGHT); let mut egui_ctx = egui::CtxRef::default(); let (width, height) = window.get_framebuffer_size(); let native_pixels_per_point = window.get_content_scale().0; let mut egui_input_state = egui_backend::EguiInputState::new(egui::RawInput { screen_rect:
, pixels_per_point: Some(native_pixels_per_point), ..Default::default() }); let start_time = Instant::now(); let mut srgba: Vec<Color32> = Vec::new(); for _ in 0..PIC_HEIGHT { for _ in 0..PIC_WIDTH { srgba.push(Color32::BLACK); } } let plot_tex_id = painter.new_user_texture((PIC_WIDTH as usize, PIC_HEIGHT as usize), &srgba, false); let mut sine_shift = 0f32; let mut amplitude: f32 = 50f32; let mut test_str: String = "A text box to write in. Cut, copy, paste commands are available.".to_owned(); let triangle = triangle::Triangle::new(); let mut quit = false; while !window.should_close() { egui_input_state.input.time = Some(start_time.elapsed().as_secs_f64()); egui_ctx.begin_frame(egui_input_state.input.take()); egui_input_state.input.pixels_per_point = Some(native_pixels_per_point); unsafe { gl::ClearColor(0.455, 0.302, 0.663, 1.0); gl::Clear(gl::COLOR_BUFFER_BIT); } triangle.draw(); let mut srgba: Vec<Color32> = Vec::new(); let mut angle = 0f32; for y in 0..PIC_HEIGHT { for x in 0..PIC_WIDTH { srgba.push(Color32::BLACK); if y == PIC_HEIGHT - 1 { let y = amplitude * (angle * 3.142f32 / 180f32 + sine_shift).sin(); let y = PIC_HEIGHT as f32 / 2f32 - y; srgba[(y as i32 * PIC_WIDTH + x) as usize] = Color32::YELLOW; angle += 360f32 / PIC_WIDTH as f32; } } } sine_shift += 0.1f32; painter.update_user_texture_data(plot_tex_id, &srgba); egui::Window::new("Egui with GLFW").show(&egui_ctx, |ui| { ui.add(Image::new(plot_tex_id, vec2(PIC_WIDTH as f32, PIC_HEIGHT as f32))); ui.separator(); ui.label("A simple sine wave plotted onto a GL texture then blitted to an egui managed Image."); ui.label(" "); ui.text_edit_multiline(&mut test_str); ui.label(" "); ui.add(egui::Slider::new(&mut amplitude, 0.0..=50.0).text("Amplitude")); ui.label(" "); if ui.button("Quit").clicked() { quit = true; } }); let (egui_output, paint_cmds) = egui_ctx.end_frame(); if !egui_output.copied_text.is_empty() { egui_backend::copy_to_clipboard(&mut egui_input_state, egui_output.copied_text); } let paint_jobs = 
egui_ctx.tessellate(paint_cmds); painter.paint_jobs( None, paint_jobs, &egui_ctx.texture(), native_pixels_per_point, ); for (_, event) in glfw::flush_messages(&events) { match event { glfw::WindowEvent::Close => window.set_should_close(true), _ => { egui_backend::handle_event(event, &mut egui_input_state); } } } window.swap_buffers(); glfw.poll_events(); if quit { break; } } }
Some(Rect::from_min_size( Pos2::new(0f32, 0f32), vec2(width as f32, height as f32) / native_pixels_per_point, ))
call_expression
[ { "content": "pub fn handle_event(event: glfw::WindowEvent, state: &mut EguiInputState) {\n\n use glfw::WindowEvent::*;\n\n\n\n match event {\n\n FramebufferSize(width, height) => {\n\n state.input.screen_rect = Some(Rect::from_min_size(\n\n Pos2::new(0f32, 0f32),\n\n ...
Rust
day22/src/main.rs
obi1kenobi/advent-of-code-2021
f4df532edc67efcfaae7332e60483032afb3801a
#![feature(map_first_last)] use std::{ collections::{BTreeMap, BTreeSet}, env, fs, ops::RangeInclusive, fmt::Display, }; #[allow(unused_imports)] use itertools::Itertools; fn parse_range(range: &str) -> (i64, i64) { let (low, high) = range.split_once("..").unwrap(); (low.parse().unwrap(), high.parse().unwrap()) } fn main() { let args: Vec<String> = env::args().collect(); let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect(); reversed_args .pop() .expect("Expected the executable name to be the first argument, but was missing"); let part = reversed_args.pop().expect("part number"); let input_file = reversed_args.pop().expect("input file"); let content = fs::read_to_string(input_file).unwrap(); let input_data: Vec<RebootStep> = content .trim_end() .split('\n') .map(|x| { let (direction, all_coords) = x.split_once(' ').unwrap(); let switch_on = match direction { "on" => true, "off" => false, _ => unreachable!("{}", direction), }; let (x_range, y_range, z_range) = { let (x_comp, (y_comp, z_comp)) = all_coords .split_once(',') .map(|(left, right)| (left, right.split_once(',').unwrap())) .unwrap(); ( parse_range(x_comp.split_once('=').unwrap().1), parse_range(y_comp.split_once('=').unwrap().1), parse_range(z_comp.split_once('=').unwrap().1), ) }; RebootStep { switch_on, range: [x_range, y_range, z_range], } }) .collect(); match part { "1" => { let result = solve_part1(&input_data); println!("{}", result); } "2" => { let result = solve_part2(&input_data); println!("{}", result); } "minify" => { minify(&input_data); } _ => unreachable!("{}", part), } } #[derive(Clone, Debug)] struct RebootStep { switch_on: bool, range: [(i64, i64); 3], } impl Display for RebootStep { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let state = if self.switch_on { "on" } else { "off" }; write!( f, "{} x={}..{},y={}..{},z={}..{}", state, self.range[0].0, self.range[0].1, self.range[1].0, self.range[1].1, self.range[2].0, self.range[2].1, ) } } 
struct RebootStepPrinter<'a>(&'a [RebootStep]); impl<'a> Display for RebootStepPrinter<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for step in self.0.iter() { writeln!(f, "{}", step)?; } Ok(()) } } fn minify(data: &[RebootStep]) { let part1 = solve_part1(data); let part2 = solve_part2(data); assert_ne!(part1, part2); let mut minified = data; loop { let next_minified = &minified[1..]; if next_minified.is_empty() { break; } let part1 = solve_part1(next_minified); let part2 = solve_part2(next_minified); if part1 != part2 { minified = next_minified; println!("eliminated prefix: {}", minified.len()); } else { break; } } loop { let next_minified = &minified[..(minified.len() - 1)]; if next_minified.is_empty() { break; } let part1 = solve_part1(next_minified); let part2 = solve_part2(next_minified); if part1 != part2 { minified = next_minified; println!("eliminated suffix: {}", minified.len()); } else { break; } } println!("{}", RebootStepPrinter(minified)); } fn to_range(range: (i64, i64)) -> RangeInclusive<i64> { range.0..=range.1 } fn solve_part1(data: &[RebootStep]) -> usize { let target_range = -50i64..=50; target_range .clone() .cartesian_product(target_range.clone()) .cartesian_product(target_range) .filter(|((x, y), z)| { for step in data.iter().rev() { let (x_range, y_range, z_range) = step .range .iter() .copied() .map(to_range) .collect_tuple() .unwrap(); if x_range.contains(x) && y_range.contains(y) && z_range.contains(z) { return step.switch_on; } } false }) .count() } fn solve_part2(data: &[RebootStep]) -> usize { let notable_coordinates: Vec<BTreeSet<i64>> = (0..3).map(|idx| { data .iter() .flat_map(|step| [step.range[idx].0, step.range[idx].1 + 1]) .collect() }).collect_vec(); let splits = notable_coordinates.iter().map(|coords| coords.iter().copied().collect_vec()).collect_vec(); let offsets: Vec<BTreeMap<i64, usize>> = splits.iter().map(|axis_splits| axis_splits.iter().enumerate().map(|(idx, val)| (*val, idx)) 
.collect()).collect(); let mut cell_is_on = vec![ vec![vec![false; notable_coordinates[2].len()]; notable_coordinates[1].len()]; notable_coordinates[0].len() ]; let mut cell_volumes = vec![ vec![vec![0usize; notable_coordinates[2].len()]; notable_coordinates[1].len()]; notable_coordinates[0].len() ]; for (x_idx, (x_start, x_end)) in splits[0].iter().tuple_windows().enumerate() { for (y_idx, (y_start, y_end)) in splits[1].iter().tuple_windows().enumerate() { for (z_idx, (z_start, z_end)) in splits[2].iter().tuple_windows().enumerate() { let x_width = (*x_end - *x_start) as usize; let y_width = (*y_end - *y_start) as usize; let z_width = (*z_end - *z_start) as usize; cell_volumes[x_idx + 1][y_idx + 1][z_idx + 1] = x_width * y_width * z_width; } } } for step in data { let ( (x_start_cell, x_end_cell), (y_start_cell, y_end_cell), (z_start_cell, z_end_cell), ) = step.range.iter().zip(offsets.iter()).map(|((start, end_incl), offset)| { let end = end_incl + 1; (offset[start], offset[&end]) }).collect_tuple().unwrap(); #[allow(clippy::needless_range_loop)] for x in x_start_cell..x_end_cell { for y in y_start_cell..y_end_cell { for z in z_start_cell..z_end_cell { cell_is_on[x][y][z] = step.switch_on; } } } } let cell_volumes_ref = &cell_volumes; cell_is_on .iter() .enumerate() .flat_map(move |(x_idx, y)| { y.iter().enumerate().map::<usize, _>(move |(y_idx, z)| { z.iter() .enumerate() .filter_map(|(z_idx, is_on)| { if *is_on { Some(cell_volumes_ref[x_idx + 1][y_idx + 1][z_idx + 1]) } else { None } }) .sum() }) }) .sum() }
#![feature(map_first_last)] use std::{ collections::{BTreeMap, BTreeSet}, env, fs, ops::RangeInclusive, fmt::Display, }; #[allow(unused_imports)] use itertools::Itertools; fn parse_range(range: &str) -> (i64, i64) { let (low, high) = range.split_once("..").unwrap(); (low.parse().unwrap(), high.parse().unwrap()) } fn main() { let args: Vec<String> = env::args().collect(); let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect(); reversed_args .pop() .expect("Expected the executable name to be the first argument, but was missing"); let part = reversed_args.pop().expect("part number"); let input_file = reversed_args.pop().expect("input file"); let content = fs::read_to_string(input_file).unwrap(); let input_data: Vec<RebootStep> = content .trim_end() .split('\n') .map(|x| { let (direction, all_coords) = x.split_once(' ').unwrap(); let switch_on = match direction { "on" => true, "off" => false, _ => unreachable!("{}", direction), }; let (x_range, y_range, z_range) = { let (x_comp, (y_comp, z_comp)) = all_coords .split_once(',') .map(|(left, right)| (left, right.split_once(',').unwrap())) .unwrap(); ( parse_range(x_comp.split_once('=').unwrap().1), parse_range(y_comp.split_once('=').unwrap().1), parse_range(z_comp.split_once('=').unwrap().1), ) }; RebootStep { switch_on, range: [x_range, y_range, z_range], } }) .collect(); match part { "1" => { let result = solve_part1(&input_data); println!("{}", result); } "2" => { let result = solve_part2(&input_data); println!("{}", result); } "minify" => { minify(&input_data); } _ => unreachable!("{}", part), } } #[derive(Clone, Debug)] struct RebootStep { switch_on: bool, range: [(i64, i64); 3], } impl Display for RebootStep { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let state = if self.switch_on { "on" } else { "off" }; write!( f, "{} x={}..{},y={}..{},z={}..{}", state, self.range[0].0, self.range[0].1, self.range[1].0, self.range[1].1, self.range[2].0, self.range[2].1, ) } } 
struct RebootStepPrinter<'a>(&'a [RebootStep]); impl<'a> Display for RebootStepPrinter<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for step in self.0.iter() { writeln!(f, "{}", step)?; } Ok(()) } } fn minify(data: &[RebootStep]) { let part1 = solve_part1(data); let part2 = solve_part2(data); assert_ne!(part1, part2); let mut minified = data; loop { let next_minified = &minified[1..]; if next_minified.is_empty() { break; } let part1 = solve_part1(next_minified); let part2 = solve_part2(next_minified); if part1 != part2 { minified = next_minified; println!("eliminated prefix: {}", minified.len()); } else { break; } } loop { let next_minified = &minified[..(minified.len() - 1)]; if next_minified.is_empty() { break; } let part1 = solve_part1(next_minified); let part2 = solve_part2(next_minified); if part1 != part2 { minified = next_minified; println!("eliminated suffix: {}", minified.len()); } else { break; } } println!("{}", RebootStepPrinter(minified)); } fn to_range(range: (i64, i64)) -> RangeInclusive<i64> { range.0..=range.1 } fn solve_part1(data: &[RebootStep]) -> usize { let target_range = -50i64..=50; target_range .clone() .cartesian_product(target_range.clone()) .cartesian_product(target_range) .filter(|((x, y), z)| { for step in data.iter().rev() { let (x_range, y_range, z_range) = step .range .iter() .copied() .map(to_range) .collect_tuple() .unwrap(); if x_range.contains(x) && y_range.contains(y) && z_range.contains(z) { return step.switch_on; } } false }) .count() } fn solve_part2(data: &[RebootStep]) -> usize { let notable_coordinates: Vec<BTreeSet<i64>> = (0..3).map(|idx| { data .iter() .flat_map(|step| [step.range[idx].0, step.range[idx].1 + 1]) .collect() }).collect_vec(); let splits = notable_coordinates.iter().map(|coords| coords.iter().copied().collect_vec()).collect_vec(); let offsets: Vec<BTreeMap<i64, usize>> = splits.iter().map(|axis_splits| axis_splits.iter().enumerate().map(|(idx, val)| (*val, idx)) 
.collect()).collect(); let mut cell_is_on = vec![ vec![vec![false; notable_coordinates[2].len()]; notable_coordinates[1].len()]; notable_coordinates[0].len() ]; let mut cell_volumes = vec![ vec![vec![0usize; notable_coordinates[2].len()]; notable_coordinates[1].len()]; notable_coordinates[0].len() ]; for (x_idx, (x_start, x_end)) in splits[0].iter().tuple_windows().enumerate() { for (y_idx, (y_start, y_end)) in splits[1].iter().tuple_windows().enumerate() { for (z_idx, (z_start, z_end)) in splits[2].iter().tuple_windows().enumerate() { let x_width = (*x_end - *x_start) as usize; let y_width = (*y_end - *y_start) as usize; let z_width = (*z_end - *z_start) as usize; cell_volumes[x_idx + 1][y_idx + 1][z_idx + 1] = x_width * y_width * z_width; } } } for step in data { let ( (x_start_cell, x_end_cell), (y_start_cell, y_end_cell), (z_start_cell, z_end_cell), ) = step.range.iter().zip(offsets.iter()).map(|((start, end_incl), offset)| { let end = end_incl + 1; (offset[start], offset[&end]) }).collect_tuple().unwrap(); #[allow(clippy::needless_range_loop)] for x in x_start_cell..x_end_cell { for y in y_start_cell..y_end_cell { for z in z_start_cell..z_end_cell { cell_is_on[x][y][z] = step.switch_on; } } } } let cell_volumes_ref = &cell_volumes; cell_is_on .iter() .enumerate() .flat_map(move |(x_idx, y)| { y.iter().enumerate().map::<usize, _>(mov
e |(y_idx, z)| { z.iter() .enumerate() .filter_map(|(z_idx, is_on)| { if *is_on { Some(cell_volumes_ref[x_idx + 1][y_idx + 1][z_idx + 1]) } else { None } }) .sum() }) }) .sum() }
function_block-function_prefixed
[ { "content": "fn neighbors(data: &[Vec<i64>], x: i64, y: i64) -> impl Iterator<Item = (i64, i64)> + '_ {\n\n assert!(get_height(data, x, y).is_some());\n\n\n\n NEIGHBOR_OFFSETS\n\n .iter()\n\n .copied()\n\n .filter_map(move |(dx, dy)| {\n\n let new_x = x + dx;\n\n ...
Rust
src/upstream.rs
ZigzagAK/ws-platform
f00e25dd8bd08d5269cfbd689e18f707c9f8963f
/* * Copyright (C) 2020 Aleksei Konovkin (alkon2000@mail.ru) */ use std::net::SocketAddr; use std::sync::{ Arc, RwLock, atomic::{ AtomicUsize, Ordering } }; use std::collections::{ HashMap, hash_map::Iter }; use std::time::Duration; use std::cmp::min; use crate::connection_pool::*; use crate::error::CoreError; pub trait UpstreamBalance: Send + Sync { fn balance(&self, iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr>; } pub struct RoundRobin { index: AtomicUsize } impl RoundRobin { pub fn new() -> RoundRobin { RoundRobin { index: AtomicUsize::new(0) } } } impl UpstreamBalance for RoundRobin { fn balance(&self, mut iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr> { match iter.nth(self.index.fetch_add(1, Ordering::SeqCst) % iter.len()) { Some((addr, _)) => Some(*addr), None => unreachable!() } } } pub struct Upstream { name: String, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64>, active: Arc<usize>, servers: RwLock<[HashMap<SocketAddr, ConnectionPool>; 2]>, balancer: Box<dyn UpstreamBalance> } impl Upstream { pub fn new( balancer: Box<dyn UpstreamBalance>, name: &str, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64> ) -> Upstream { Upstream { max_keepalive: max_keepalive, max_active: max_active, timeout: timeout, keepalive_timeout: keepalive_timeout, keepalive_requests: keepalive_requests, name: name.to_string(), servers: RwLock::new([HashMap::new(), HashMap::new()]), active: Arc::new(0), balancer: balancer } } pub fn add_primary(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[0] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn add_backup(&mut self, addr: 
SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[1] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn connect(&self, timeout: Option<Duration>) -> Result<Peer, CoreError> { let userdata = Box::new(Arc::clone(&self.active)); if self.active() == self.max_active { return throw!("Bad gateway"); } let servers = self.servers.read().unwrap(); for i in 0..1 { for _ in 0..servers[i].len() { match self.balancer.balance(servers[i].iter()) { Some(addr) => { match servers[i].get(&addr) { Some(pool) => { if let Ok(mut peer) = pool.connect(&addr, timeout) { peer.attach_userdata(userdata); return Ok(peer); } }, None => { log_error!("error", "Can't find '{}' in upstream '{}'", addr, self.name); break; } } }, None => break } } } throw!("Bad gateway") } pub fn active(&self) -> usize { min(self.max_active, Arc::strong_count(&self.active) - 1) } pub fn idle(&self) -> usize { let servers = self.servers.read().unwrap(); let mut count = 0; for i in 0..1 { for server in servers[i].values() { count += server.idle() } } count } }
/* * Copyright (C) 2020 Aleksei Konovkin (alkon2000@mail.ru) */ use std::net::SocketAddr; use std::sync::{ Arc, RwLock, atomic::{ AtomicUsize, Ordering } }; use std::collections::{ HashMap, hash_map::Iter }; use std::time::Duration; use std::cmp::min; use crate::connection_pool::*; use crate::error::CoreError; pub trait UpstreamBalance: Send + Sync { fn balance(&self, iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr>; } pub struct RoundRobin { index: AtomicUsize } impl RoundRobin { pub fn new() -> RoundRobin { RoundRobin { index: AtomicUsize::new(0) } } } impl UpstreamBalance for RoundRobin { fn balance(&self, mut iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr> { match iter.nth(self.index.fetch_add(1, Ordering::SeqCst) % iter.len()) { Some((addr, _)) => Some(*addr), None => unreachable!() } } } pub struct Upstream { name: String, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64>, active: Arc<usize>, servers: RwLock<[HashMap<SocketAddr, ConnectionPool>; 2]>, balancer: Box<dyn UpstreamBalance> } impl Upstream { pub fn new( balancer: Box<dyn UpstreamBalance>, name: &str, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duratio
g(), servers: RwLock::new([HashMap::new(), HashMap::new()]), active: Arc::new(0), balancer: balancer } } pub fn add_primary(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[0] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn add_backup(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[1] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn connect(&self, timeout: Option<Duration>) -> Result<Peer, CoreError> { let userdata = Box::new(Arc::clone(&self.active)); if self.active() == self.max_active { return throw!("Bad gateway"); } let servers = self.servers.read().unwrap(); for i in 0..1 { for _ in 0..servers[i].len() { match self.balancer.balance(servers[i].iter()) { Some(addr) => { match servers[i].get(&addr) { Some(pool) => { if let Ok(mut peer) = pool.connect(&addr, timeout) { peer.attach_userdata(userdata); return Ok(peer); } }, None => { log_error!("error", "Can't find '{}' in upstream '{}'", addr, self.name); break; } } }, None => break } } } throw!("Bad gateway") } pub fn active(&self) -> usize { min(self.max_active, Arc::strong_count(&self.active) - 1) } pub fn idle(&self) -> usize { let servers = self.servers.read().unwrap(); let mut count = 0; for i in 0..1 { for server in servers[i].values() { count += server.idle() } } count } }
n>, keepalive_requests: Option<u64> ) -> Upstream { Upstream { max_keepalive: max_keepalive, max_active: max_active, timeout: timeout, keepalive_timeout: keepalive_timeout, keepalive_requests: keepalive_requests, name: name.to_strin
random
[]
Rust
sui_core/src/transaction_input_checker.rs
velvia/sui
976b6bbc2573b375776995a6d3239cba5b73c26e
use std::collections::HashSet; use prometheus_exporter::prometheus::IntCounter; use serde::{Deserialize, Serialize}; use sui_types::{ base_types::{ObjectID, ObjectRef, SequenceNumber, SuiAddress}, error::{SuiError, SuiResult}, fp_ensure, gas::{self, SuiGasStatus}, messages::{InputObjectKind, SingleTransactionKind, TransactionData, TransactionEnvelope}, object::{Object, Owner}, }; use tracing::{debug, instrument}; use crate::authority::SuiDataStore; #[instrument(level = "trace", skip_all)] pub async fn check_transaction_input<const A: bool, S, T>( store: &SuiDataStore<A, S>, transaction: &TransactionEnvelope<T>, shared_obj_metric: &IntCounter, ) -> Result<(SuiGasStatus<'static>, Vec<(InputObjectKind, Object)>), SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let (gas_object, mut gas_status) = check_gas( store, transaction.gas_payment_object_ref().0, transaction.data.gas_budget, ) .await?; let objects_by_kind = check_locks(store, &transaction.data, gas_object).await?; if transaction.contains_shared_object() { shared_obj_metric.inc(); gas_status.charge_consensus()?; } Ok((gas_status, objects_by_kind)) } #[instrument(level = "trace", skip_all)] async fn check_gas<const A: bool, S>( store: &SuiDataStore<A, S>, gas_payment_id: ObjectID, gas_budget: u64, ) -> SuiResult<(Object, SuiGasStatus<'static>)> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let gas_object = store.get_object(&gas_payment_id)?; let gas_object = gas_object.ok_or(SuiError::ObjectNotFound { object_id: gas_payment_id, })?; gas::check_gas_balance(&gas_object, gas_budget)?; let gas_status = gas::start_gas_metering(gas_budget, 1, 1)?; Ok((gas_object, gas_status)) } #[instrument(level = "trace", skip_all, fields(num_objects = input_objects.len()))] async fn fetch_objects<const A: bool, S>( store: &SuiDataStore<A, S>, input_objects: &[InputObjectKind], gas_object_opt: Option<Object>, ) -> Result<Vec<Option<Object>>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let 
ids: Vec<_> = input_objects.iter().map(|kind| kind.object_id()).collect(); if let Some(gas_object) = gas_object_opt { debug_assert_eq!(gas_object.id(), ids[ids.len() - 1]); let mut result = store.get_objects(&ids[..ids.len() - 1])?; result.push(Some(gas_object)); Ok(result) } else { store.get_objects(&ids[..]) } } #[instrument(level = "trace", skip_all)] async fn check_locks<const A: bool, S>( store: &SuiDataStore<A, S>, transaction: &TransactionData, gas_object: Object, ) -> Result<Vec<(InputObjectKind, Object)>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let input_objects = transaction.input_objects()?; let objects = fetch_objects(store, &input_objects, Some(gas_object)).await?; let mut owned_object_authenticators: HashSet<SuiAddress> = HashSet::new(); for object in objects.iter().flatten() { if !object.is_immutable() { fp_ensure!( owned_object_authenticators.insert(object.id().into()), SuiError::InvalidBatchTransaction { error: format!("Mutable object {} cannot appear in more than one single transactions in a batch", object.id()), } ); } } let mut all_objects = Vec::with_capacity(input_objects.len()); let mut errors = Vec::new(); let transfer_object_ids: HashSet<_> = transaction .kind .single_transactions() .filter_map(|s| { if let SingleTransactionKind::TransferCoin(t) = s { Some(t.object_ref.0) } else { None } }) .collect(); for (object_kind, object) in input_objects.into_iter().zip(objects) { let object = match object { Some(object) => object, None => { errors.push(object_kind.object_not_found_error()); continue; } }; if transfer_object_ids.contains(&object.id()) { object.is_transfer_eligible()?; } match check_one_lock( &transaction.signer(), object_kind, &object, &owned_object_authenticators, ) { Ok(()) => all_objects.push((object_kind, object)), Err(e) => { errors.push(e); } } } if !errors.is_empty() { return Err(SuiError::LockErrors { errors }); } fp_ensure!(!all_objects.is_empty(), SuiError::ObjectInputArityViolation); 
Ok(all_objects) } pub fn filter_owned_objects(all_objects: &[(InputObjectKind, Object)]) -> Vec<ObjectRef> { let owned_objects: Vec<_> = all_objects .iter() .filter_map(|(object_kind, object)| match object_kind { InputObjectKind::MovePackage(_) => None, InputObjectKind::ImmOrOwnedMoveObject(object_ref) => { if object.is_immutable() { None } else { Some(*object_ref) } } InputObjectKind::SharedMoveObject(_) => None, }) .collect(); debug!( num_mutable_objects = owned_objects.len(), "Checked locks and found mutable objects" ); owned_objects } fn check_one_lock( sender: &SuiAddress, object_kind: InputObjectKind, object: &Object, owned_object_authenticators: &HashSet<SuiAddress>, ) -> SuiResult { match object_kind { InputObjectKind::MovePackage(package_id) => { fp_ensure!( object.data.try_as_package().is_some(), SuiError::MoveObjectAsPackage { object_id: package_id } ); } InputObjectKind::ImmOrOwnedMoveObject((object_id, sequence_number, object_digest)) => { fp_ensure!( !object.is_package(), SuiError::MovePackageAsObject { object_id } ); fp_ensure!( sequence_number <= SequenceNumber::MAX, SuiError::InvalidSequenceNumber ); fp_ensure!( object.version() == sequence_number, SuiError::UnexpectedSequenceNumber { object_id, expected_sequence: object.version(), given_sequence: sequence_number, } ); let expected_digest = object.digest(); fp_ensure!( expected_digest == object_digest, SuiError::InvalidObjectDigest { object_id, expected_digest } ); match object.owner { Owner::Immutable => { } Owner::AddressOwner(owner) => { fp_ensure!( sender == &owner, SuiError::IncorrectSigner { error: format!("Object {:?} is owned by account address {:?}, but signer address is {:?}", object_id, owner, sender), } ); } Owner::ObjectOwner(owner) => { fp_ensure!( owned_object_authenticators.contains(&owner), SuiError::IncorrectSigner { error: format!( "Object {:?} is owned by object {:?}, which is not in the input", object.id(), owner ), } ); } Owner::Shared => { return 
Err(SuiError::NotSharedObjectError); } }; } InputObjectKind::SharedMoveObject(..) => { fp_ensure!(object.is_shared(), SuiError::NotSharedObjectError); } }; Ok(()) }
use std::collections::HashSet; use prometheus_exporter::prometheus::IntCounter; use serde::{Deserialize, Serialize}; use sui_types::{ base_types::{ObjectID, ObjectRef, SequenceNumber, SuiAddress}, error::{SuiError, SuiResult}, fp_ensure, gas::{self, SuiGasStatus}, messages::{InputObjectKind, SingleTransactionKind, TransactionData, TransactionEnvelope}, object::{Object, Owner}, }; use tracing::{debug, instrument}; use crate::authority::SuiDataStore; #[instrument(level = "trace", skip_all)] pub async fn check_transaction_input<const A: bool, S, T>( store: &SuiDataStore<A, S>, transaction: &TransactionEnvelope<T>, shared_obj_metric: &IntCounter, ) -> Result<(SuiGasStatus<'static>, Vec<(InputObjectKind, Object)>), SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let (gas_object, mut gas_status) = check_gas( store, transaction.gas_payment_object_ref().0, transaction.data.gas_budget, ) .await?; let objects_by_kind = check_locks(store, &transaction.data, gas_object).await?; if transaction.contains_shared_object() { shared_obj_metric.inc(); gas_status.charge_consensus()?; } Ok((gas_status, objects_by_kind)) } #[instrument(level = "trace", skip_all)] async fn check_gas<const A: bool, S>( store: &SuiDataStore<A, S>, gas_payment_id: ObjectID, gas_budget: u64, ) -> SuiResult<(Object, SuiGasStatus<'static>)> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let gas_object = store.get_object(&gas_payment_id)?; let gas_object = gas_object.ok_or(SuiError::ObjectNotFound { object_id: gas_payment_id, })?; gas::check_gas_balance(&gas_object, gas_budget)?; let gas_status = gas::start_gas_metering(gas_budget, 1, 1)?; Ok((gas_object, gas_status)) } #[instrument(level = "trace", skip_all, fields(num_objects = input_objects.len()))] async fn fetch_objects<const A: bool, S>( store: &SuiDataStore<A, S>, input_objects: &[InputObjectKind], gas_object_opt: Option<Object>, ) -> Result<Vec<Option<Object>>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let 
ids: Vec<_> = input_objects.iter().map(|kind| kind.object_id()).collect(); if let Some(gas_object) = gas_object_opt { debug_assert_eq!(gas_object.id(), ids[ids.len() - 1]); let mut result = store.get_objects(&ids[..ids.len() - 1])?; result.push(Some(gas_object)); Ok(result) } else { store.get_objects(&ids[..]) } } #[instrument(level = "trace", skip_all)] async fn check_locks<const A: bool, S>( store: &SuiDataStore<A, S>, transaction: &TransactionData, gas_object: Object, ) -> Result<Vec<(InputObjectKind, Object)>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let input_objects = transaction.input_objects()?; let objects = fetch_objects(store, &input_objects, Some(gas_object)).await?; let mut owned_object_authenticators: HashSet<SuiAddress> = HashSet::new(); for object in objects.iter().flatten() { if !object.is_immutable() { fp_ensure!( owned_object_authenticators.insert(object.id().into()), SuiError::InvalidBatchTransaction { error: format!("Mutable object {} cannot appear in more than one single transact
errors.push(object_kind.object_not_found_error()); continue; } }; if transfer_object_ids.contains(&object.id()) { object.is_transfer_eligible()?; } match check_one_lock( &transaction.signer(), object_kind, &object, &owned_object_authenticators, ) { Ok(()) => all_objects.push((object_kind, object)), Err(e) => { errors.push(e); } } } if !errors.is_empty() { return Err(SuiError::LockErrors { errors }); } fp_ensure!(!all_objects.is_empty(), SuiError::ObjectInputArityViolation); Ok(all_objects) } pub fn filter_owned_objects(all_objects: &[(InputObjectKind, Object)]) -> Vec<ObjectRef> { let owned_objects: Vec<_> = all_objects .iter() .filter_map(|(object_kind, object)| match object_kind { InputObjectKind::MovePackage(_) => None, InputObjectKind::ImmOrOwnedMoveObject(object_ref) => { if object.is_immutable() { None } else { Some(*object_ref) } } InputObjectKind::SharedMoveObject(_) => None, }) .collect(); debug!( num_mutable_objects = owned_objects.len(), "Checked locks and found mutable objects" ); owned_objects } fn check_one_lock( sender: &SuiAddress, object_kind: InputObjectKind, object: &Object, owned_object_authenticators: &HashSet<SuiAddress>, ) -> SuiResult { match object_kind { InputObjectKind::MovePackage(package_id) => { fp_ensure!( object.data.try_as_package().is_some(), SuiError::MoveObjectAsPackage { object_id: package_id } ); } InputObjectKind::ImmOrOwnedMoveObject((object_id, sequence_number, object_digest)) => { fp_ensure!( !object.is_package(), SuiError::MovePackageAsObject { object_id } ); fp_ensure!( sequence_number <= SequenceNumber::MAX, SuiError::InvalidSequenceNumber ); fp_ensure!( object.version() == sequence_number, SuiError::UnexpectedSequenceNumber { object_id, expected_sequence: object.version(), given_sequence: sequence_number, } ); let expected_digest = object.digest(); fp_ensure!( expected_digest == object_digest, SuiError::InvalidObjectDigest { object_id, expected_digest } ); match object.owner { Owner::Immutable => { } 
Owner::AddressOwner(owner) => { fp_ensure!( sender == &owner, SuiError::IncorrectSigner { error: format!("Object {:?} is owned by account address {:?}, but signer address is {:?}", object_id, owner, sender), } ); } Owner::ObjectOwner(owner) => { fp_ensure!( owned_object_authenticators.contains(&owner), SuiError::IncorrectSigner { error: format!( "Object {:?} is owned by object {:?}, which is not in the input", object.id(), owner ), } ); } Owner::Shared => { return Err(SuiError::NotSharedObjectError); } }; } InputObjectKind::SharedMoveObject(..) => { fp_ensure!(object.is_shared(), SuiError::NotSharedObjectError); } }; Ok(()) }
ions in a batch", object.id()), } ); } } let mut all_objects = Vec::with_capacity(input_objects.len()); let mut errors = Vec::new(); let transfer_object_ids: HashSet<_> = transaction .kind .single_transactions() .filter_map(|s| { if let SingleTransactionKind::TransferCoin(t) = s { Some(t.object_ref.0) } else { None } }) .collect(); for (object_kind, object) in input_objects.into_iter().zip(objects) { let object = match object { Some(object) => object, None => {
random
[ { "content": "/// Creates an object for use in the microbench\n\nfn create_gas_object(object_id: ObjectID, owner: SuiAddress) -> Object {\n\n Object::with_id_owner_gas_coin_object_for_testing(\n\n object_id,\n\n SequenceNumber::new(),\n\n owner,\n\n GAS_PER_TX,\n\n )\n\n}\n\n\n...
Rust
src/app/core/m3u8/m3u8.rs
PeterDing/aget-rs
ebf43a1a2ab9ae88efd66d2f298643c119021372
use std::{cell::Cell, path::PathBuf, rc::Rc, time::Duration}; use futures::{ channel::mpsc::{channel, Sender}, select, stream::StreamExt, SinkExt, }; use actix_rt::{ spawn, time::{delay_for, interval}, System, }; use crate::{ app::{ core::m3u8::common::{get_m3u8, M3u8Segment, SharedM3u8SegmentList}, receive::m3u8_receiver::M3u8Receiver, record::{bytearray_recorder::ByteArrayRecorder, common::RECORDER_FILE_SUFFIX}, }, common::{ bytes::bytes_type::{Buf, Bytes}, crypto::decrypt_aes128, errors::{Error, Result}, net::{ net::{build_http_client, request}, ConnectorConfig, HttpClient, Method, Uri, }, }, features::{args::Args, running::Runnable, stack::StackLike}, }; pub struct M3u8Handler { output: PathBuf, method: Method, uri: Uri, headers: Vec<(String, String)>, data: Option<Bytes>, connector_config: ConnectorConfig, concurrency: u64, proxy: Option<String>, client: HttpClient, } impl M3u8Handler { pub fn new(args: &impl Args) -> Result<M3u8Handler> { let headers = args.headers(); let timeout = args.timeout(); let dns_timeout = args.dns_timeout(); let keep_alive = args.keep_alive(); let lifetime = args.lifetime(); let connector_config = ConnectorConfig { timeout, dns_timeout, keep_alive, lifetime, disable_redirects: true, }; let hds: Vec<(&str, &str)> = headers .iter() .map(|(k, v)| (k.as_str(), v.as_str())) .collect(); let client = build_http_client( hds.as_ref(), timeout, dns_timeout, keep_alive, lifetime, true, ); debug!("M3u8Handler::new"); Ok(M3u8Handler { output: args.output(), method: args.method(), uri: args.uri(), headers, data: args.data().map(|ref mut d| d.to_bytes()), connector_config, concurrency: args.concurrency(), proxy: None, client, }) } async fn start(self) -> Result<()> { debug!("M3u8Handler::start"); debug!("M3u8Handler: check whether task is completed"); let mut bytearrayrecorder = ByteArrayRecorder::new(&*(self.output.to_string_lossy() + RECORDER_FILE_SUFFIX))?; if self.output.exists() && !bytearrayrecorder.exists() { return Ok(()); } 
debug!("M3u8Handler: get m3u8"); let mut ls = get_m3u8( &self.client, self.method.clone(), self.uri.clone(), self.data.clone(), ) .await?; ls.reverse(); if bytearrayrecorder.exists() { bytearrayrecorder.open()?; let total = bytearrayrecorder.index(0)?; if total != ls.len() as u64 { return Err(Error::PartsAreNotConsistent); } else { let index = bytearrayrecorder.index(1)?; ls.truncate((total - index) as usize); } } else { bytearrayrecorder.open()?; bytearrayrecorder.write(0, ls.len() as u64)?; } let index = ls.last().unwrap().index; let sharedindex = Rc::new(Cell::new(index)); let stack = SharedM3u8SegmentList::new(ls); debug!("M3u8Handler: segments", stack.len()); let (sender, receiver) = channel::<(u64, Bytes)>(self.concurrency as usize + 10); let concurrency = std::cmp::min(stack.len() as u64, self.concurrency); for i in 1..concurrency + 1 { let mut task = RequestTask::new( self.client.clone(), stack.clone(), sender.clone(), i, sharedindex.clone(), self.connector_config.timeout, ); spawn(async move { task.start().await; }); } drop(sender); debug!("M3u8Handler: create receiver"); let mut m3u8receiver = M3u8Receiver::new(&self.output)?; m3u8receiver.start(receiver).await?; bytearrayrecorder.remove().unwrap_or(()); Ok(()) } } impl Runnable for M3u8Handler { fn run(self) -> Result<()> { let mut sys = System::new("M3u8Handler"); sys.block_on(self.start()) } } struct RequestTask { client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, shared_index: Rc<Cell<u64>>, timeout: Duration, } impl RequestTask { fn new( client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, sharedindex: Rc<Cell<u64>>, timeout: Duration, ) -> RequestTask { RequestTask { client, stack, sender, id, shared_index: sharedindex, timeout, } } async fn start(&mut self) { debug!("Fire RequestTask", self.id); while let Some(segment) = self.stack.pop() { loop { match self.req(segment.clone()).await { Err(Error::InnerError(msg)) => { 
print_err!(format!("RequestTask {}: InnerError", self.id), msg); System::current().stop(); } Err(err @ Error::Timeout) => { debug!(err); } Err(err) => { debug!(format!("RequestTask {}: error", self.id), err); delay_for(Duration::from_secs(1)).await; } _ => break, } } } } async fn req(&mut self, segment: M3u8Segment) -> Result<()> { let resp = request( &self.client, segment.method.clone(), segment.uri.clone(), segment.data.clone(), None, ) .await?; let index = segment.index; let mut buf: Vec<u8> = vec![]; let mut resp = resp.fuse(); let mut tick = interval(self.timeout).fuse(); let mut fire = false; loop { select! { item = resp.next() => { if let Some(item) = item { match item { Ok(chunk) => { buf.extend(chunk); } Err(err) => return Err(err.into()), } } else { break; } } _ = tick.next() => { if fire { return Err(Error::Timeout); } else { fire = true; } } } } let de = if let (Some(key), Some(iv)) = (segment.key, segment.iv) { decrypt_aes128(&key[..], &iv[..], buf.as_ref())? } else { buf.to_vec() }; loop { if self.shared_index.get() == index { if let Err(err) = self.sender.send((index, Bytes::from(de))).await { return Err(Error::InnerError(format!( "Error at `http::RequestTask`: Sender error: {:?}", err ))); } self.shared_index.set(index + 1); return Ok(()); } else { delay_for(Duration::from_millis(500)).await; } } } }
use std::{cell::Cell, path::PathBuf, rc::Rc, time::Duration}; use futures::{ channel::mpsc::{channel, Sender}, select, stream::StreamExt, SinkExt, }; use actix_rt::{ spawn, time::{delay_for, interval}, System, }; use crate::{ app::{ core::m3u8::common::{get_m3u8, M3u8Segment, SharedM3u8SegmentList}, receive::m3u8_receiver::M3u8Receiver, record::{bytearray_recorder::ByteArrayRecorder, common::RECORDER_FILE_SUFFIX}, }, common::{ bytes::bytes_type::{Buf, Bytes}, crypto::decrypt_aes128, errors::{Error, Result}, net::{ net::{build_http_client, request}, ConnectorConfig, HttpClient, Method, Uri, }, }, features::{args::Args, running::Runnable, stack::StackLike}, }; pub struct M3u8Handler { output: PathBuf, method: Method, uri: Uri, headers: Vec<(String, String)>, data: Option<Bytes>, connector_config: ConnectorConfig, concurrency: u64, proxy: Option<String>, client: HttpClient, } impl M3u8Handler { pub fn new(args: &impl Args) -> Result<M3u8Handler> { let headers = args.headers(); let timeout = args.timeout(); let dns_timeout = args.dns_timeout(); let keep_alive = args.keep_alive(); let lifetime = args.lifetime(); let connector_config = ConnectorConfig { timeout, dns_timeout, keep_alive, lifetime, disable_redirects: true, }; let hds: Vec<(&str, &str)> = headers .iter() .map(|(k, v)| (k.as_str(), v.as_str())) .collect(); let client = build_http_client( hds.as_ref(), timeout, dns_timeout, keep_alive, lifetime, true, ); debug!("M3u8Handler::new"); Ok(M3u8Handler { output: args.output(), method: args.method(), uri: args.uri(), headers, data: args.data().map(|ref mut d| d.to_bytes()), connector_config, concurrency: args.concurrency(), proxy: None, client, }) } async fn start(self) -> Result<()> { debug!("M3u8Handler::start"); debug!("M3u8Handler: check whether task is completed"); let mut bytearrayrecorder = ByteArrayRecorder::new(&*(self.output.to_string_lossy() + RECORDER_FILE_SUFFIX))?; if self.output.exists() && !bytearrayrecorder.exists() { return Ok(()); } 
debug!("M3u8Handler: get m3u8"); let mut ls = get_m3u8( &self.client, self.method.clone(), self.uri.clone(), self.data.clone(), ) .await?; ls.reverse(); if bytearrayrecorder.exists() { bytearrayrecorder.open()?; let total = bytearrayrecorder.index(0)?; if total != ls.len() as u64 { return Err(Error::PartsAreNotConsistent); } else { let index = bytearrayrecorder.index(1)?; ls.truncate((total - index) as usize); } } else { bytearrayrecorder.open()?; bytearrayrecorder.write(0, ls.len() as u64)?; } let index = ls.last().unwrap().index; let sharedindex = Rc::new(Cell::new(index)); let stack = SharedM3u8SegmentList::new(ls); debug!("M3u8Handler: segments", stack.len()); let (sender, receiver) = channel::<(u64, Bytes)>(self.concurrency as usize + 10); let concurrency = std::cmp::min(stack.len() as u64, self.concurrency); for i in 1..concurrency + 1 { let mut task = RequestTask::new( self.client.clone(), stack.clone(), sender.clone(), i, sharedindex.clone(), self.connector_config.timeout, ); spawn(async move { task.start().await; }); } drop(sender); debug!("M3u8Handler: create receiver"); let mut m3u8receiver = M3u8Receiver::new(&self.output)?; m3u8receiver.start(receiver).await?; bytearrayrecorder.remove().unwrap_or(()); Ok(()) } } impl Runnable for M3u8Handler { fn run(self) -> Result<()> { let mut sys = System::new("M3u8Handler"); sys.block_on(self.start()) } } struct RequestTask { client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, shared_index: Rc<Cell<u64>>, timeout: Duration, } impl RequestTask { fn new( client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, sharedindex: Rc<Cell<u64>>, timeout: Duration, ) -> RequestTask { RequestTask { client, stack, sender, id, shared_index: sharedindex, timeout, } } async fn start(&mut self) { debug!("Fire RequestTask", self.id); while let Some(segment) = self.stack.pop() { loop { match self.req(segment.clone()).await { Err(Error::InnerError(msg)) => { 
print_err!(format!("RequestTask {}: InnerError", self.id), msg); System::current().stop(); } Err(err @ Error::Timeout) => { debug!(err); } Err(err) => { debug!(format!("RequestTask {}: error", self.id), err); delay_for(Duration::from_secs(1)).await; } _ => break, } } } } async fn req(&mut self, segment: M3u8Segment) -> Result<()> { let resp =
.await?; let index = segment.index; let mut buf: Vec<u8> = vec![]; let mut resp = resp.fuse(); let mut tick = interval(self.timeout).fuse(); let mut fire = false; loop { select! { item = resp.next() => { if let Some(item) = item { match item { Ok(chunk) => { buf.extend(chunk); } Err(err) => return Err(err.into()), } } else { break; } } _ = tick.next() => { if fire { return Err(Error::Timeout); } else { fire = true; } } } } let de = if let (Some(key), Some(iv)) = (segment.key, segment.iv) { decrypt_aes128(&key[..], &iv[..], buf.as_ref())? } else { buf.to_vec() }; loop { if self.shared_index.get() == index { if let Err(err) = self.sender.send((index, Bytes::from(de))).await { return Err(Error::InnerError(format!( "Error at `http::RequestTask`: Sender error: {:?}", err ))); } self.shared_index.set(index + 1); return Ok(()); } else { delay_for(Duration::from_millis(500)).await; } } } }
request( &self.client, segment.method.clone(), segment.uri.clone(), segment.data.clone(), None, )
call_expression
[ { "content": "pub fn parse_header(raw: &str) -> Result<(&str, &str), Error> {\n\n if let Some(index) = raw.find(\": \") {\n\n return Ok((&raw[..index], &raw[index + 2..]));\n\n }\n\n if let Some(index) = raw.find(\":\") {\n\n return Ok((&raw[..index], &raw[index + 1..]));\n\n }\n\n ...
Rust
src/input_byte_stream.rs
sunfishcode/nameless
f5986c3f999db099c05214d6fe61bd04bf81bf93
use crate::open_input::{open_input, Input}; use crate::{MediaType, Pseudonym}; use clap::TryFromOsArg; use io_streams::StreamReader; use layered_io::{Bufferable, LayeredReader, ReadLayered, Status}; use std::ffi::OsStr; use std::fmt::{self, Debug, Formatter}; use std::io::{self, IoSliceMut, Read}; use terminal_io::NeverTerminalReader; pub struct InputByteStream { name: String, reader: LayeredReader<NeverTerminalReader<StreamReader>>, media_type: MediaType, initial_size: Option<u64>, } impl InputByteStream { #[inline] pub fn media_type(&self) -> &MediaType { &self.media_type } #[inline] pub fn initial_size(&self) -> Option<u64> { self.initial_size } #[inline] pub fn pseudonym(&self) -> Pseudonym { Pseudonym::new(self.name.clone()) } fn from_input(input: Input) -> Self { let reader = NeverTerminalReader::new(input.reader); let reader = LayeredReader::new(reader); Self { name: input.name, reader, media_type: input.media_type, initial_size: input.initial_size, } } } #[doc(hidden)] impl TryFromOsArg for InputByteStream { type Error = anyhow::Error; #[inline] fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> { open_input(os).map(Self::from_input) } } impl ReadLayered for InputByteStream { #[inline] fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> { self.reader.read_with_status(buf) } #[inline] fn read_vectored_with_status( &mut self, bufs: &mut [IoSliceMut<'_>], ) -> io::Result<(usize, Status)> { self.reader.read_vectored_with_status(bufs) } } impl Read for InputByteStream { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.reader.read(buf) } #[inline] fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> { self.reader.read_vectored(bufs) } #[cfg(can_vector)] #[inline] fn is_read_vectored(&self) -> bool { self.reader.is_read_vectored() } #[inline] fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> { self.reader.read_to_end(buf) } #[inline] fn read_to_string(&mut self, buf: 
&mut String) -> io::Result<usize> { self.reader.read_to_string(buf) } #[inline] fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> { self.reader.read_exact(buf) } } impl Bufferable for InputByteStream { #[inline] fn abandon(&mut self) { self.reader.abandon() } } impl Debug for InputByteStream { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let mut b = f.debug_struct("InputByteStream"); b.field("media_type", &self.media_type); b.field("initial_size", &self.initial_size); b.finish() } } #[test] fn data_url_plain() { let mut s = String::new(); InputByteStream::try_from_os_str_arg("data:,Hello%2C%20World!".as_ref()) .unwrap() .read_to_string(&mut s) .unwrap(); assert_eq!(s, "Hello, World!"); } #[test] fn data_url_base64() { let mut s = String::new(); InputByteStream::try_from_os_str_arg("data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==".as_ref()) .unwrap() .read_to_string(&mut s) .unwrap(); assert_eq!(s, "Hello, World!"); }
use crate::open_input::{open_input, Input}; use crate::{MediaType, Pseudonym}; use clap::TryFromOsArg; use io_streams::StreamReader; use layered_io::{Bufferable, LayeredReader, ReadLayered, Status}; use std::ffi::OsStr; use std::fmt::{self, Debug, Formatter}; use std::io::{self, IoSliceMut, Read}; use terminal_io::NeverTerminalReader; pub struct InputByteStream { name: String, reader: LayeredReader<NeverTerminalReader<StreamReader>>, media_type: MediaType, initial_size: Option<u64>, } impl InputByteStream { #[inline] pub fn media_type(&self) -> &MediaType { &self.media_type } #[inline] pub fn initial_size(&self) -> Option<u64> { self.initial_size } #[inline] pub fn pseudonym(&self) -> Pseudonym { Pseudonym::new(self.name.clone()) } fn from_input(input: Input) -> Self { let reader = NeverTerminalReader::new(input.reader); let reader = LayeredReader::new(reader); Self { name: input.name, reader, media_type: input.media_type, initial_size: input.initial_size, } } } #[doc(hidden)] impl TryFromOsArg for InputByteStream { type Error = anyhow::Error; #[inline] fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> { open_input(os).map(Self::from_input) } } impl ReadLayered for InputByteStream { #[inline] fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> { self.reader.read_with_status(buf) } #[inline] fn read_vectored_with_status( &mut self, bufs: &mut [IoSliceMut<'_>], ) -> io::Result<(usize, Status)> { self.reader.read_vectored_with_status(bufs) } } impl Read for InputByteStream { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.reader.read(buf) } #[inline] fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> { self.reader.read_vectored(bufs) } #[cfg(can_vector)] #[inline] fn is_read_vectored(&self) -> bool { self.reader.is_read_vectored() } #[inline] fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> { self.reader.read_to_end(buf) } #[inline] fn read_to_string(&mut self, buf: 
&mut String) -> io::Result<usize> { self.reader.read_to_string(buf) } #[inline] fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> { self.reader.read_exact(buf) } } impl Bufferable for InputByteStream { #[inline] fn abandon(&mut self) { self.reader.abandon() } } impl Debug for InputByteStream { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let mut b = f.debug_struct("InputByteStream"); b.field("media_type", &self.media_type); b.field("initial_size", &self.initial_size); b.finish() } } #[test]
#[test] fn data_url_base64() { let mut s = String::new(); InputByteStream::try_from_os_str_arg("data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==".as_ref()) .unwrap() .read_to_string(&mut s) .unwrap(); assert_eq!(s, "Hello, World!"); }
fn data_url_plain() { let mut s = String::new(); InputByteStream::try_from_os_str_arg("data:,Hello%2C%20World!".as_ref()) .unwrap() .read_to_string(&mut s) .unwrap(); assert_eq!(s, "Hello, World!"); }
function_block-full_function
[ { "content": "#[kommand::main]\n\nfn main(mut input: InputByteStream, mut output: OutputByteStream) -> anyhow::Result<()> {\n\n copy(&mut input, &mut output)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/copy.rs", "rank": 0, "score": 139863.88478210891 }, { "content": "#[kommand::ma...
Rust
plumber_vdf/src/parsers.rs
lasa01/plumber_core
056b42a7f0b29793407153cf8b52c6a0ca2e6076
use nom::{ branch::alt, bytes::complete::{escaped, is_a, is_not, tag, take_till, take_until}, character::complete::{anychar, char, multispace1, none_of, one_of, space0, space1}, combinator::{all_consuming, cut, eof, not, opt, peek, recognize, value}, error::{ErrorKind, ParseError}, sequence::{delimited, preceded, terminated}, Err, IResult, Parser, }; fn unit<I, O, E, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (), E> where F: Parser<I, O, E>, { move |input: I| parser.parse(input).map(|(i, _)| (i, ())) } fn ignore_many0<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind(input, ErrorKind::Many0))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } fn ignore_many1<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { match f.parse(input.clone()) { Err(Err::Error(_)) => Err(Err::Error(E::from_error_kind(input, ErrorKind::Many1))), Err(e) => Err(e), Ok((parsed_input, _)) => { input = parsed_input; loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind( input, ErrorKind::Many1, ))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } } } fn multiline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(tag(b"/*"), take_until(b"*/".as_ref()), tag(b"*/"))(i) } fn singleline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(tag(b"//"), take_till(|c| c == b'\r' || c == b'\n'))(i) } fn comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { 
alt((singleline_comment, multiline_comment))(i) } fn multispace_comment0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { ignore_many0(alt((multispace1, comment)))(i) } fn trash<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Option<&'a [u8]>, E> { opt(is_not(b"\r\n{}".as_ref()))(i) } fn space_comment_trash0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { delimited(space0, unit(opt(comment)), trash)(i) } fn quoted_token<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(char('"'), take_till(|c| c == b'"'), char('"'))(i) } fn escaped_quoted_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { alt(( delimited( char('"'), escaped(is_not(b"\"\\".as_ref()), '\\', one_of(b"nt\\\"".as_ref())), char('"'), ), value(b"".as_ref(), tag(b"\"\"")), ))(i) } fn unquoted_char_nonspace<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { alt(( none_of(b"{}\"\r\n/ \t".as_ref()), terminated(char('/'), not(char('/'))), ))(i) } fn unquoted_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(unquoted_char_nonspace))(i) } fn unquoted_value<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(alt(( unit(unquoted_char_nonspace), unit(terminated(space1, unquoted_char_nonspace)), ))))(i) } fn specific_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( key: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded( multispace_comment0, alt(( preceded(char('"'), cut(terminated(tag(key), char('"')))), tag(key), )), ) } pub(crate) fn any_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_key)))(i) } pub(crate) fn any_escaped_key<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( 
multispace_comment0, alt((escaped_quoted_token, unquoted_key)), )(i) } pub(crate) fn empty_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, tag(b"\"\"".as_ref()))(i) } pub(crate) fn any_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_value)))(i) } pub(crate) fn any_escaped_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_value)), )(i) } pub(crate) fn block_start<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(char('{')))(i) } pub(crate) fn block_end<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded( preceded(space_comment_trash0, multispace_comment0), unit(alt((eof, tag(b"}")))), )(i) } pub(crate) fn block_end_early<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(alt((eof, tag(b"}")))))(i) } pub(crate) fn block_sep<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { unit(preceded(space_comment_trash0, is_a(b"\r\n".as_ref())))(i) } pub(crate) fn peeked_char<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { preceded(multispace_comment0, peek(anychar))(i) } pub(crate) fn comment_eof<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { all_consuming(multispace_comment0)(i) } pub(crate) fn peeked_block_end<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { peek(block_end)(i) } pub(crate) fn block_sep_and_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( token: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded(block_sep, specific_token(token)) } #[cfg(test)] mod tests { use super::*; use nom::error::VerboseError; #[test] fn quoted_key() 
{ assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t \"a quoted key\" value".as_ref()), IResult::Ok((b" value".as_ref(), b"a quoted key".as_ref())) ); } #[test] fn unquoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t $unquotedKey remaining".as_ref()), IResult::Ok((b" remaining".as_ref(), b"$unquotedKey".as_ref())) ); } #[test] fn quoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b" \"quoted value\"".as_ref()), IResult::Ok((b"".as_ref(), b"quoted value".as_ref())) ); } #[test] fn unquoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b"\tcsgo\\models\\stuff.mdl".as_ref()), IResult::Ok((b"".as_ref(), b"csgo\\models\\stuff.mdl".as_ref())) ); } #[test] fn unquoted_value_comment_terminated() { assert_eq!( any_value::<VerboseError<&[u8]>>( b" unquoted value with spaces/shit // and a comment too".as_ref() ), IResult::Ok(( b" // and a comment too".as_ref(), b"unquoted value with spaces/shit".as_ref() )) ); } #[test] fn comment_preceded_key() { assert_eq!( any_key::<VerboseError<&[u8]>>( b"\t//this is a comment\r\n\tNotComment A Value".as_ref() ), IResult::Ok((b" A Value".as_ref(), b"NotComment".as_ref())) ); } #[test] fn empty_comment() { assert_eq!( multispace_comment0::<VerboseError<&[u8]>>(b"\r\n\t//\r\n".as_ref()), IResult::Ok((b"".as_ref(), ())) ); } #[test] fn escaped() { assert_eq!( any_escaped_value::<VerboseError<&[u8]>>(b" \"escaped \\\" value\"".as_ref()), IResult::Ok((b"".as_ref(), b"escaped \\\" value".as_ref())) ); assert_eq!( any_escaped_key::<VerboseError<&[u8]>>(b"\"\"".as_ref()), IResult::Ok((b"".as_ref(), b"".as_ref())) ); } }
use nom::{ branch::alt, bytes::complete::{escaped, is_a, is_not, tag, take_till, take_until}, character::complete::{anychar, char, multispace1, none_of, one_of, space0, space1}, combinator::{all_consuming, cut, eof, not, opt, peek, recognize, value}, error::{ErrorKind, ParseError}, sequence::{delimited, preceded, terminated}, Err, IResult, Parser, }; fn unit<I, O, E, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (), E> where F: Parser<I, O, E>, { move |input: I| parser.parse(input).map(|(i, _)| (i, ())) } fn ignore_many0<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind(input, ErrorKind::Many0))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } fn ignore_many1<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { match f.parse(input.clone()) { Err(Err::Error(_)) => Err(Err::Error(E::from_error_kind(input, ErrorKind::Many1))), Err(e) => Err(e), Ok((parsed_input, _)) => { input = parsed_input; loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind( input, ErrorKind::Many1, ))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } } } fn multiline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(tag(b"/*"), take_until(b"*/".as_ref()), tag(b"*/"))(i) } fn singleline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(tag(b"//"), take_till(|c| c == b'\r' || c == b'\n'))(i) } fn comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { 
alt((singleline_comment, multiline_comment))(i) } fn multispace_comment0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { ignore_many0(alt((multispace1, comment)))(i) } fn trash<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Option<&'a [u8]>, E> { opt(is_not(b"\r\n{}".as_ref()))(i) } fn space_comment_trash0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { delimited(space0, unit(opt(comment)), trash)(i) } fn quoted_token<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(char('"'), take_till(|c| c == b'"'), char('"'))(i) } fn escaped_quoted_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { alt(( delimited( char('"'), escaped(is_not(b"\"\\".as_ref()), '\\', one_of(b"nt\\\"".as_ref())), char('"'), ), value(b"".as_ref(), tag(b"\"\"")), ))(i) } fn unquoted_char_nonspace<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { alt(( none_of(b"{}\"\r\n/ \t".as_ref()), terminated(char('/'), not(char('/'))), ))(i) } fn unquoted_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(unquoted_char_nonspace))(i) } fn unquoted_value<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(
)(i) } fn specific_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( key: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded( multispace_comment0, alt(( preceded(char('"'), cut(terminated(tag(key), char('"')))), tag(key), )), ) } pub(crate) fn any_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_key)))(i) } pub(crate) fn any_escaped_key<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_key)), )(i) } pub(crate) fn empty_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, tag(b"\"\"".as_ref()))(i) } pub(crate) fn any_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_value)))(i) } pub(crate) fn any_escaped_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_value)), )(i) } pub(crate) fn block_start<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(char('{')))(i) } pub(crate) fn block_end<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded( preceded(space_comment_trash0, multispace_comment0), unit(alt((eof, tag(b"}")))), )(i) } pub(crate) fn block_end_early<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(alt((eof, tag(b"}")))))(i) } pub(crate) fn block_sep<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { unit(preceded(space_comment_trash0, is_a(b"\r\n".as_ref())))(i) } pub(crate) fn peeked_char<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { preceded(multispace_comment0, peek(anychar))(i) } 
pub(crate) fn comment_eof<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { all_consuming(multispace_comment0)(i) } pub(crate) fn peeked_block_end<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { peek(block_end)(i) } pub(crate) fn block_sep_and_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( token: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded(block_sep, specific_token(token)) } #[cfg(test)] mod tests { use super::*; use nom::error::VerboseError; #[test] fn quoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t \"a quoted key\" value".as_ref()), IResult::Ok((b" value".as_ref(), b"a quoted key".as_ref())) ); } #[test] fn unquoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t $unquotedKey remaining".as_ref()), IResult::Ok((b" remaining".as_ref(), b"$unquotedKey".as_ref())) ); } #[test] fn quoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b" \"quoted value\"".as_ref()), IResult::Ok((b"".as_ref(), b"quoted value".as_ref())) ); } #[test] fn unquoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b"\tcsgo\\models\\stuff.mdl".as_ref()), IResult::Ok((b"".as_ref(), b"csgo\\models\\stuff.mdl".as_ref())) ); } #[test] fn unquoted_value_comment_terminated() { assert_eq!( any_value::<VerboseError<&[u8]>>( b" unquoted value with spaces/shit // and a comment too".as_ref() ), IResult::Ok(( b" // and a comment too".as_ref(), b"unquoted value with spaces/shit".as_ref() )) ); } #[test] fn comment_preceded_key() { assert_eq!( any_key::<VerboseError<&[u8]>>( b"\t//this is a comment\r\n\tNotComment A Value".as_ref() ), IResult::Ok((b" A Value".as_ref(), b"NotComment".as_ref())) ); } #[test] fn empty_comment() { assert_eq!( multispace_comment0::<VerboseError<&[u8]>>(b"\r\n\t//\r\n".as_ref()), IResult::Ok((b"".as_ref(), ())) ); } #[test] fn escaped() { assert_eq!( any_escaped_value::<VerboseError<&[u8]>>(b" \"escaped \\\" value\"".as_ref()), 
IResult::Ok((b"".as_ref(), b"escaped \\\" value".as_ref())) ); assert_eq!( any_escaped_key::<VerboseError<&[u8]>>(b"\"\"".as_ref()), IResult::Ok((b"".as_ref(), b"".as_ref())) ); } }
ignore_many1(alt(( unit(unquoted_char_nonspace), unit(terminated(space1, unquoted_char_nonspace)), )))
call_expression
[ { "content": "fn read_animation_value(bytes: &mut &[u8]) -> Result<AnimationValue> {\n\n let value_bytes = bytes\n\n .get(..2)\n\n .ok_or_else(|| corrupted(\"animation values out of bounds\"))?\n\n .try_into()\n\n .expect(\"slice must have correct length\");\n\n\n\n *bytes = &b...
Rust
src/bin/day22/swiss_box.rs
mbikovitsky/aoc2021
0d952a7184edefcb6ab1859608daf74ed3fd4a43
use std::ops::{Sub, SubAssign}; use itertools::Itertools; use num::{CheckedAdd, CheckedMul, CheckedSub, Integer}; use petgraph::{ graph::NodeIndex, stable_graph::StableDiGraph, visit::{depth_first_search, Control, Dfs, DfsEvent, Reversed}, EdgeDirection::{Incoming, Outgoing}, }; use crate::r#box::Box; #[derive(Debug, Clone)] pub struct SwissBox<T: Integer> { tree: StableDiGraph<Box<T>, ()>, } impl<T: Integer> SwissBox<T> { pub fn new(initial: Box<T>) -> Self { if initial.is_empty() { Default::default() } else { let mut tree = StableDiGraph::new(); tree.add_node(initial); Self { tree } } } pub fn is_empty(&self) -> bool { self.tree.node_count() == 0 } fn root(&self) -> Option<NodeIndex> { if self.is_empty() { return None; } Some( self.tree .externals(Incoming) .exactly_one() .map_err(|_| "More than one root in the tree") .unwrap(), ) } fn is_leaf(&self, node: NodeIndex) -> bool { self.tree .neighbors_directed(node, Outgoing) .next() .is_none() } } impl<T: Integer> Default for SwissBox<T> { fn default() -> Self { Self { tree: Default::default(), } } } impl<T: Integer> From<Box<T>> for SwissBox<T> { fn from(value: Box<T>) -> Self { Self::new(value) } } impl<T: Integer + Clone> Sub<Box<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: Box<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a Box<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a Box<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> Sub<SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: SwissBox<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a SwissBox<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> SubAssign<Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: Box<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a 
Box<T>) { if self.is_empty() { return; } let mut to_add = vec![]; let mut to_delete = vec![]; depth_first_search( &self.tree, Some(self.root().unwrap()), |event| -> Control<()> { if let DfsEvent::Discover(node, _) = event { let intersection = self.tree[node].intersect(rhs); if intersection.is_empty() { return Control::Prune; } if !self.is_leaf(node) { return Control::Continue; } if intersection == self.tree[node] { to_delete.push(node); return Control::Continue; } for slice in self.tree[node].subtract_split(rhs) { to_add.push((node, slice)); } } Control::Continue }, ); for (parent, slice) in to_add { let child = self.tree.add_node(slice); self.tree.add_edge(parent, child, ()); } for node in to_delete { let mut ancestors = vec![]; let reversed = Reversed(&self.tree); let mut dfs = Dfs::new(&reversed, node); while let Some(ancestor) = dfs.next(&reversed) { if self.tree.neighbors_directed(ancestor, Outgoing).count() <= 1 { ancestors.push(ancestor); } else { break; } } for ancestor in ancestors { self.tree.remove_node(ancestor); } } } } impl<T: Integer + Clone> SubAssign<SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: SwissBox<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a SwissBox<T>) { for node in rhs.tree.externals(Outgoing) { *self -= &rhs.tree[node]; } } } impl<T: Integer + CheckedAdd + CheckedSub + CheckedMul> SwissBox<T> { pub fn volume(&self) -> Option<T> { let mut volume: T = T::zero(); for node in self.tree.externals(Outgoing) { let r#box = &self.tree[node]; volume = volume.checked_add(&r#box.volume()?)?; } Some(volume) } } #[cfg(test)] mod tests { use super::{Box, SwissBox}; #[test] fn empty_swiss_box_has_no_volume() { assert_eq!(SwissBox::<i32>::default().volume().unwrap(), 0); assert_eq!(SwissBox::new(Box::<i32>::default()).volume().unwrap(), 0); } #[test] fn swiss_box_has_volume_of_initial_box() { assert_eq!( SwissBox::new(Box { x: (0..3).into(), y: 
(0..3).into(), z: (0..3).into(), }) .volume() .unwrap(), 27 ); } #[test] fn rubik_center() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let center = Box { x: (1..2).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= center; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_corner() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let corner = Box { x: (2..3).into(), y: (2..3).into(), z: (2..3).into(), }; cube -= corner; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_bar() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let bar = Box { x: (0..3).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= bar; assert_eq!(cube.volume().unwrap(), 27 - 3); } #[test] fn death_by_a_thousand_cuts() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); for x in 0..3 { for y in 0..3 { for z in 0..3 { cube -= Box { x: (x..x + 1).into(), y: (y..y + 1).into(), z: (z..z + 1).into(), }; } } } assert_eq!(cube.volume().unwrap(), 0); assert!(cube.is_empty()); } }
use std::ops::{Sub, SubAssign}; use itertools::Itertools; use num::{CheckedAdd, CheckedMul, CheckedSub, Integer}; use petgraph::{ graph::NodeIndex, stable_graph::StableDiGraph, visit::{depth_first_search, Control, Dfs, DfsEvent, Reversed}, EdgeDirection::{Incoming, Outgoing}, }; use crate::r#box::Box; #[derive(Debug, Clone)] pub struct SwissBox<T: Integer> { tree: StableDiGraph<Box<T>, ()>, } impl<T: Integer> SwissBox<T> { pub fn new(initial: Box<T>) -> Self { if initial.is_empty() { Default::default() } else { let mut tree = StableDiGraph::new(); tree.add_node(initial); Self { tree } } } pub fn is_empty(&self) -> bool { self.tree.node_count() == 0 } fn root(&self) -> Option<NodeIndex> { if self.is_empty() { return None; }
} fn is_leaf(&self, node: NodeIndex) -> bool { self.tree .neighbors_directed(node, Outgoing) .next() .is_none() } } impl<T: Integer> Default for SwissBox<T> { fn default() -> Self { Self { tree: Default::default(), } } } impl<T: Integer> From<Box<T>> for SwissBox<T> { fn from(value: Box<T>) -> Self { Self::new(value) } } impl<T: Integer + Clone> Sub<Box<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: Box<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a Box<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a Box<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> Sub<SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: SwissBox<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a SwissBox<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> SubAssign<Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: Box<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a Box<T>) { if self.is_empty() { return; } let mut to_add = vec![]; let mut to_delete = vec![]; depth_first_search( &self.tree, Some(self.root().unwrap()), |event| -> Control<()> { if let DfsEvent::Discover(node, _) = event { let intersection = self.tree[node].intersect(rhs); if intersection.is_empty() { return Control::Prune; } if !self.is_leaf(node) { return Control::Continue; } if intersection == self.tree[node] { to_delete.push(node); return Control::Continue; } for slice in self.tree[node].subtract_split(rhs) { to_add.push((node, slice)); } } Control::Continue }, ); for (parent, slice) in to_add { let child = self.tree.add_node(slice); self.tree.add_edge(parent, child, ()); } for node in to_delete { let mut ancestors = vec![]; let reversed = Reversed(&self.tree); let mut dfs = Dfs::new(&reversed, node); while let Some(ancestor) = 
dfs.next(&reversed) { if self.tree.neighbors_directed(ancestor, Outgoing).count() <= 1 { ancestors.push(ancestor); } else { break; } } for ancestor in ancestors { self.tree.remove_node(ancestor); } } } } impl<T: Integer + Clone> SubAssign<SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: SwissBox<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a SwissBox<T>) { for node in rhs.tree.externals(Outgoing) { *self -= &rhs.tree[node]; } } } impl<T: Integer + CheckedAdd + CheckedSub + CheckedMul> SwissBox<T> { pub fn volume(&self) -> Option<T> { let mut volume: T = T::zero(); for node in self.tree.externals(Outgoing) { let r#box = &self.tree[node]; volume = volume.checked_add(&r#box.volume()?)?; } Some(volume) } } #[cfg(test)] mod tests { use super::{Box, SwissBox}; #[test] fn empty_swiss_box_has_no_volume() { assert_eq!(SwissBox::<i32>::default().volume().unwrap(), 0); assert_eq!(SwissBox::new(Box::<i32>::default()).volume().unwrap(), 0); } #[test] fn swiss_box_has_volume_of_initial_box() { assert_eq!( SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }) .volume() .unwrap(), 27 ); } #[test] fn rubik_center() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let center = Box { x: (1..2).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= center; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_corner() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let corner = Box { x: (2..3).into(), y: (2..3).into(), z: (2..3).into(), }; cube -= corner; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_bar() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let bar = Box { x: (0..3).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= bar; assert_eq!(cube.volume().unwrap(), 27 - 3); } #[test] fn 
death_by_a_thousand_cuts() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); for x in 0..3 { for y in 0..3 { for z in 0..3 { cube -= Box { x: (x..x + 1).into(), y: (y..y + 1).into(), z: (z..z + 1).into(), }; } } } assert_eq!(cube.volume().unwrap(), 0); assert!(cube.is_empty()); } }
Some( self.tree .externals(Incoming) .exactly_one() .map_err(|_| "More than one root in the tree") .unwrap(), )
call_expression
[ { "content": "pub fn input_file() -> Result<File> {\n\n let input_filename = match env::args_os().nth(1) {\n\n Some(filename) => filename,\n\n None => {\n\n let mut path: PathBuf = [\n\n OsStr::new(INPUTS_DIRECTORY),\n\n env::current_exe()\n\n ...
Rust
src/fs.rs
autokrator-uog/actix-web
8590eca7f83cf835b458c7e57d6f6ccffc8a58f4
use std::io; use std::io::Read; use std::fmt::Write; use std::fs::{File, DirEntry}; use std::path::{Path, PathBuf}; use std::ops::{Deref, DerefMut}; use mime_guess::get_mime_type; use param::FromParam; use handler::{Handler, Responder}; use headers::ContentEncoding; use httprequest::HttpRequest; use httpresponse::HttpResponse; use httpcodes::{HTTPOk, HTTPFound}; #[derive(Debug)] pub struct NamedFile(PathBuf, File); impl NamedFile { pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> { let file = File::open(path.as_ref())?; Ok(NamedFile(path.as_ref().to_path_buf(), file)) } #[inline] pub fn file(&self) -> &File { &self.1 } #[inline] pub fn path(&self) -> &Path { self.0.as_path() } } impl Deref for NamedFile { type Target = File; fn deref(&self) -> &File { &self.1 } } impl DerefMut for NamedFile { fn deref_mut(&mut self) -> &mut File { &mut self.1 } } impl Responder for NamedFile { type Item = HttpResponse; type Error = io::Error; fn respond_to(mut self, _: HttpRequest) -> Result<HttpResponse, io::Error> { let mut resp = HTTPOk.build(); resp.content_encoding(ContentEncoding::Identity); if let Some(ext) = self.path().extension() { let mime = get_mime_type(&ext.to_string_lossy()); resp.content_type(format!("{}", mime).as_str()); } let mut data = Vec::new(); let _ = self.1.read_to_end(&mut data); Ok(resp.body(data).unwrap()) } } #[derive(Debug)] pub struct Directory{ base: PathBuf, path: PathBuf } impl Directory { pub fn new(base: PathBuf, path: PathBuf) -> Directory { Directory { base: base, path: path } } fn can_list(&self, entry: &io::Result<DirEntry>) -> bool { if let Ok(ref entry) = *entry { if let Some(name) = entry.file_name().to_str() { if name.starts_with('.') { return false } } if let Ok(ref md) = entry.metadata() { let ft = md.file_type(); return ft.is_dir() || ft.is_file() || ft.is_symlink() } } false } } impl Responder for Directory { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, 
io::Error> { let index_of = format!("Index of {}", req.path()); let mut body = String::new(); let base = Path::new(req.path()); for entry in self.path.read_dir()? { if self.can_list(&entry) { let entry = entry.unwrap(); let p = match entry.path().strip_prefix(&self.path) { Ok(p) => base.join(p), Err(_) => continue }; let file_url = format!("{}", p.to_string_lossy()); if let Ok(metadata) = entry.metadata() { if metadata.is_dir() { let _ = write!(body, "<li><a href=\"{}\">{}/</a></li>", file_url, entry.file_name().to_string_lossy()); } else { let _ = write!(body, "<li><a href=\"{}\">{}</a></li>", file_url, entry.file_name().to_string_lossy()); } } else { continue } } } let html = format!("<html>\ <head><title>{}</title></head>\ <body><h1>{}</h1>\ <ul>\ {}\ </ul></body>\n</html>", index_of, index_of, body); Ok(HTTPOk.build() .content_type("text/html; charset=utf-8") .body(html).unwrap()) } } pub enum FilesystemElement { File(NamedFile), Directory(Directory), Redirect(HttpResponse), } impl Responder for FilesystemElement { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { match self { FilesystemElement::File(file) => file.respond_to(req), FilesystemElement::Directory(dir) => dir.respond_to(req), FilesystemElement::Redirect(resp) => Ok(resp), } } } pub struct StaticFiles { directory: PathBuf, accessible: bool, index: Option<String>, show_index: bool, _chunk_size: usize, _follow_symlinks: bool, } impl StaticFiles { pub fn new<T: Into<PathBuf>>(dir: T, index: bool) -> StaticFiles { let dir = dir.into(); let (dir, access) = match dir.canonicalize() { Ok(dir) => { if dir.is_dir() { (dir, true) } else { warn!("Is not directory `{:?}`", dir); (dir, false) } }, Err(err) => { warn!("Static files directory `{:?}` error: {}", dir, err); (dir, false) } }; StaticFiles { directory: dir, accessible: access, index: None, show_index: index, _chunk_size: 0, _follow_symlinks: false, } } pub fn index_file<T: 
Into<String>>(mut self, index: T) -> StaticFiles { self.index = Some(index.into()); self } } impl<S> Handler<S> for StaticFiles { type Result = Result<FilesystemElement, io::Error>; fn handle(&mut self, req: HttpRequest<S>) -> Self::Result { if !self.accessible { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } else { let path = if let Some(path) = req.match_info().get("tail") { path } else { return Err(io::Error::new(io::ErrorKind::NotFound, "not found")) }; let relpath = PathBuf::from_param(path) .map_err(|_| io::Error::new(io::ErrorKind::NotFound, "not found"))?; let path = self.directory.join(&relpath).canonicalize()?; if path.is_dir() { if let Some(ref redir_index) = self.index { let mut base = Path::new(req.path()).join(relpath); base.push(redir_index); Ok(FilesystemElement::Redirect( HTTPFound .build() .header("LOCATION", base.to_string_lossy().as_ref()) .finish().unwrap())) } else if self.show_index { Ok(FilesystemElement::Directory(Directory::new(self.directory.clone(), path))) } else { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } } else { Ok(FilesystemElement::File(NamedFile::open(path)?)) } } } } #[cfg(test)] mod tests { use super::*; use http::{header, StatusCode}; #[test] fn test_named_file() { assert!(NamedFile::open("test--").is_err()); let mut file = NamedFile::open("Cargo.toml").unwrap(); { file.file(); let _f: &File = &file; } { let _f: &mut File = &mut file; } let resp = file.respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/x-toml") } #[test] fn test_static_files() { let mut st = StaticFiles::new(".", true); st.accessible = false; assert!(st.handle(HttpRequest::default()).is_err()); st.accessible = true; st.show_index = false; assert!(st.handle(HttpRequest::default()).is_err()); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", ""); st.show_index = true; let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); 
assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/html; charset=utf-8"); assert!(resp.body().is_binary()); assert!(format!("{:?}", resp.body()).contains("README.md")); } #[test] fn test_redirec_to_index() { let mut st = StaticFiles::new(".", false).index_file("index.html"); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", "guide"); let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.status(), StatusCode::FOUND); assert_eq!(resp.headers().get(header::LOCATION).unwrap(), "/guide/index.html"); } }
use std::io; use std::io::Read; use std::fmt::Write; use std::fs::{File, DirEntry}; use std::path::{Path, PathBuf}; use std::ops::{Deref, DerefMut}; use mime_guess::get_mime_type; use param::FromParam; use handler::{Handler, Responder}; use headers::ContentEncoding; use httprequest::HttpRequest; use httpresponse::HttpResponse; use httpcodes::{HTTPOk, HTTPFound}; #[derive(Debug)] pub struct NamedFile(PathBuf, File); impl NamedFile { pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> { let file = File::open(path.as_ref())?; Ok(NamedFile(path.as_ref().to_path_buf(), file)) } #[inline] pub fn file(&self) -> &File { &self.1 } #[inline] pub fn path(&self) -> &Path { self.0.as_path() } } impl Deref for NamedFile { type Target = File; fn deref(&self) -> &File { &self.1 } } impl DerefMut for NamedFile { fn deref_mut(&mut self) -> &mut File { &mut self.1 } } impl Responder for NamedFile { type Item = HttpResponse; type Error = io::Error; fn respond_to(mut self, _: HttpRequest) -> Result<HttpResponse, io::Error> { let mut resp = HTTPOk.build(); resp.content_encoding(ContentEncoding::Identity); if let Some(ext) = self.path().extension() { let mime = get_mime_type(&ext.to_string_lossy()); resp.content_type(format!("{}", mime).as_str()); } let mut data = Vec::new(); let _ = self.1.read_to_end(&mut data); Ok(resp.body(data).unwrap()) } } #[derive(Debug)] pub struct Directory{ base: PathBuf, path: PathBuf } impl Directory { pub fn new(base: PathBuf, path: PathBuf) -> Directory { Directory { base: base, path: path } } fn can_list(&self, entry: &io::Result<DirEntry>) -> bool { if let Ok(ref entry) = *entry { if let Some(name) = entry.file_name().to_str() { if name.starts_with('.') { return false } } if let Ok(ref md) = entry.metadata() { let ft = md.file_type(); return ft.is_dir() || ft.is_file() || ft.is_symlink() } } false } } impl Responder for Directory { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, 
io::Error> { let index_of = format!("Index of {}", req.path()); let mut body = String::new(); let base = Path::new(req.path()); for entry in self.path.read_dir()? { if self.can_list(&entry) { let entry = entry.unwrap(); let p = match entry.path().strip_prefix(&self.path) { Ok(p) => base.join(p), Err(_) => continue }; let file_url = format!("{}", p.to_string_lossy()); if let Ok(metadata) = entry.metadata() { if metadata.is_dir() { let _ = write!(body, "<li><a href=\"{}\">{}/</a></li>", file_url, entry.file_name().to_string_lossy()); } else { let _ = write!(body, "<li><a href=\"{}\">{}</a></li>", file_url, entry.file_name().to_string_lossy()); } } else { continue } } } let html = format!("<html>\ <head><title>{}</title></head>\ <body><h1>{}</h1>\ <ul>\ {}\ </ul></body>\n</html>", index_of, index_of, body); Ok(HTTPOk.build() .content_type("text/html; charset=utf-8") .body(html).unwrap()) } } pub enum FilesystemElement { File(NamedFile), Directory(Directory), Redirect(HttpResponse), } impl Responder for FilesystemElement { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { match self { FilesystemElement::File(file) => file.respond_to(req), FilesystemElement::Directory(dir) => dir.respond_to(req), FilesystemElement::Redirect(resp) => Ok(resp), } } } pub struct StaticFiles { directory: PathBuf, accessible: bool, index: Option<String>, show_index: bool, _chunk_size: usize, _follow_symlinks: bool, } impl StaticFiles { pub fn new<T: Into<PathBuf>>(dir: T, index: bool) -> StaticFiles { let dir = dir.into(); let (dir, access) = match dir.canonicalize() { Ok(dir) => { if dir.is_dir() { (dir, true) } else { warn!("Is not directory `{:?}`", dir); (dir, false) } }, Err(err) => { warn!("Static files directory `{:?}` error: {}", dir, err); (dir, false) } }; StaticFiles { directory: dir, accessible: access, index: None, show_index: index, _chunk_size: 0, _follow_symlinks: false, } } pub fn index_file<T: 
Into<String>>(mut self, index: T) -> StaticFiles { self.index = Some(index.into()); self } } impl<S> Handler<S> for StaticFiles { type Result = Result<FilesystemElement, io::Error>; fn handle(&mut self, req: HttpRequest<S>) -> Self::Result { if !self.accessible { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } else { let path = if let Some(path) = req.match_info().get("tail") { path } else { return Err(io::Error::new(io::ErrorKind::NotFound, "not found")) }; let relpath = PathBuf::from_param(path) .map_err(|_| io::Error::new(io::ErrorKind::NotFound, "not found"))?; let path = self.directory.join(&relpath).canonicalize()?; if path.is_dir() { if let Some(ref redir_index) = self.index { let mut base = Path::new(req.path()).join(relpath); base.push(redir_index); Ok(FilesystemElement::Redirect( HTTPFound .build() .header("LOCATION", base.to_string_lossy().as_ref()) .finish().unwrap())) } else if self.show_index { Ok(FilesystemElement::Directory(Directory::new(self.directory.clone(), path))) } else { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } } else { Ok(FilesystemElement::File(NamedFile::open(path)?)) } } } } #[cfg(test)] mod tests { use super::*; use http::{header, StatusCode}; #[test] fn test_named_file() { assert!(NamedFile::open("test--").is_err()); let mut file = NamedFile::open("Cargo.toml").unwrap(); { file.file(); let _f: &File = &file; } { let _f: &mut File = &mut file; } let resp = file.respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/x-toml") } #[test] fn test_static_files() { let mut st = StaticFiles::new(".", true); st.accessible = false; assert!(st.handle(HttpRequest::default()).is_err()); st.accessible = true; st.show_index = false; assert!(st.handle(HttpRequest::default()).is_err()); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", ""); st.show_index = true; let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); 
assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/html; charset=utf-8"); assert!(resp.body().is_binary()); assert!(format!("{:?}", resp.body()).contains("README.md")); } #[test] fn test_redirec_to_index() {
}
let mut st = StaticFiles::new(".", false).index_file("index.html"); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", "guide"); let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.status(), StatusCode::FOUND); assert_eq!(resp.headers().get(header::LOCATION).unwrap(), "/guide/index.html"); }
function_block-function_prefix_line
[ { "content": "fn index(mut req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>>\n\n{\n\n println!(\"{:?}\", req);\n\n\n\n req.multipart() // <- get multipart stream for current request\n\n .from_err() // <- convert multipart errors\n\n .and_then(|item| { ...
Rust
src/simple_control.rs
magiclen/gitlab-deploy
9c8e0d0a765d2db598a618c719dcef3b9c837c86
use std::error::Error; use std::fmt::Write as FmtWrite; use execute::Execute; use clap::{ArgMatches, Values}; use crate::constants::*; use crate::functions::*; use crate::parse::*; #[inline] fn handle_command(values: Option<Values>) -> Result<Vec<&str>, &'static str> { match values { Some(values) => Ok(values.collect()), None => Err("A command is needed."), } } pub(crate) fn simple_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> { check_ssh()?; let project_id = parse_parse_id(matches); let commit_sha = parse_commit_sha(matches); let project_name = parse_project_name(matches); let reference_name = parse_reference_name(matches); let phase = parse_phase(matches); let command = handle_command(matches.values_of("COMMAND"))?; let command_string: String = command.join(" "); let inject_project_directory = matches.is_present("INJECT_PROJECT_DIRECTORY"); let ssh_user_hosts = find_ssh_user_hosts(phase, project_id)?; if ssh_user_hosts.is_empty() { warn!("No hosts to control!"); return Ok(()); } for ssh_user_host in ssh_user_hosts.iter() { info!("Controlling to {} ({})", ssh_user_host, command_string); let ssh_root = { let mut ssh_home = get_ssh_home(ssh_user_host)?; ssh_home.write_fmt(format_args!( "/{PROJECT_DIRECTORY}", PROJECT_DIRECTORY = PROJECT_DIRECTORY, ))?; ssh_home }; let ssh_project = format!( "{SSH_ROOT}/{PROJECT_NAME}-{PROJECT_ID}/{REFERENCE_NAME}-{SHORT_SHA}", SSH_ROOT = ssh_root, PROJECT_NAME = project_name.as_ref(), PROJECT_ID = project_id, REFERENCE_NAME = reference_name.as_ref(), SHORT_SHA = commit_sha.get_short_sha(), ); { let command_in_ssh = if inject_project_directory { let mut command_in_ssh = String::with_capacity(command_string.len() + ssh_project.len() + 1); if command[0] == "sudo" { command_in_ssh.push_str("sudo "); if command.len() > 1 { command_in_ssh.push_str(command[1]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[2..].join(" ")); } } else { command_in_ssh.push_str(command[0]); 
command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[1..].join(" ")); } command_in_ssh } else { command_string.clone() }; let mut command = create_ssh_command(ssh_user_host, command_in_ssh); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } { let mut command = create_ssh_command(ssh_user_host, format!("cd {SSH_PROJECT:?} && echo \"{TIMESTAMP} {COMMAND:?} {REFERENCE_NAME}-{SHORT_SHA}\" >> {SSH_PROJECT:?}/../control.log", SSH_PROJECT = ssh_project, REFERENCE_NAME = reference_name.as_ref(), TIMESTAMP = current_timestamp(), SHORT_SHA = commit_sha.get_short_sha(), COMMAND = command_string, )); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } } info!("Successfully!"); Ok(()) }
use std::error::Error; use std::fmt::Write as FmtWrite; use execute::Execute; use clap::{ArgMatches, Values}; use crate::constants::*; use crate::functions::*; use crate::parse::*; #[inline] fn handle_command(values: Option<Values>) -> Result<Vec<&str>, &'static str> { match values { Some(values) => Ok(values.collect()), None => Err("A command is needed."), } } pub(crate) fn simple_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> { check_ssh()?; let project_id = parse_parse_id(matches); let commit_sha = parse_commit_sha(matches)
rol failed!".into()); } } { let mut command = create_ssh_command(ssh_user_host, format!("cd {SSH_PROJECT:?} && echo \"{TIMESTAMP} {COMMAND:?} {REFERENCE_NAME}-{SHORT_SHA}\" >> {SSH_PROJECT:?}/../control.log", SSH_PROJECT = ssh_project, REFERENCE_NAME = reference_name.as_ref(), TIMESTAMP = current_timestamp(), SHORT_SHA = commit_sha.get_short_sha(), COMMAND = command_string, )); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } } info!("Successfully!"); Ok(()) }
; let project_name = parse_project_name(matches); let reference_name = parse_reference_name(matches); let phase = parse_phase(matches); let command = handle_command(matches.values_of("COMMAND"))?; let command_string: String = command.join(" "); let inject_project_directory = matches.is_present("INJECT_PROJECT_DIRECTORY"); let ssh_user_hosts = find_ssh_user_hosts(phase, project_id)?; if ssh_user_hosts.is_empty() { warn!("No hosts to control!"); return Ok(()); } for ssh_user_host in ssh_user_hosts.iter() { info!("Controlling to {} ({})", ssh_user_host, command_string); let ssh_root = { let mut ssh_home = get_ssh_home(ssh_user_host)?; ssh_home.write_fmt(format_args!( "/{PROJECT_DIRECTORY}", PROJECT_DIRECTORY = PROJECT_DIRECTORY, ))?; ssh_home }; let ssh_project = format!( "{SSH_ROOT}/{PROJECT_NAME}-{PROJECT_ID}/{REFERENCE_NAME}-{SHORT_SHA}", SSH_ROOT = ssh_root, PROJECT_NAME = project_name.as_ref(), PROJECT_ID = project_id, REFERENCE_NAME = reference_name.as_ref(), SHORT_SHA = commit_sha.get_short_sha(), ); { let command_in_ssh = if inject_project_directory { let mut command_in_ssh = String::with_capacity(command_string.len() + ssh_project.len() + 1); if command[0] == "sudo" { command_in_ssh.push_str("sudo "); if command.len() > 1 { command_in_ssh.push_str(command[1]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[2..].join(" ")); } } else { command_in_ssh.push_str(command[0]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[1..].join(" ")); } command_in_ssh } else { command_string.clone() }; let mut command = create_ssh_command(ssh_user_host, command_in_ssh); let output = command.execute_output()?; if !output.status.success() { return Err("Cont
random
[ { "content": "fn get_matches() -> ArgMatches {\n\n let app = Command::new(APP_NAME)\n\n .term_width(terminal_size().map(|(width, _)| width.0 as usize).unwrap_or(0))\n\n .version(CARGO_PKG_VERSION)\n\n .author(CARGO_PKG_AUTHORS)\n\n .about(concat!(\"GitLab Deploy is used for deploy...
Rust
src/bin/bevy_client.rs
qkniep/moonshot
e5b37cf15d6f31cb422a1954f86fc443dfb52063
use bevy::{ input::{keyboard::KeyboardInput, ElementState, Input}, log::{Level, LogSettings}, prelude::*, render::{camera::Camera, pass::ClearColor}, ui::camera::UI_CAMERA, }; use moonshot::building::*; use moonshot::combat::*; use moonshot::components::*; use moonshot::cursor_world_coords::*; use moonshot::network::{NetworkPlugin, PlayerAction, Transport}; struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.add_resource(ClearColor(Color::hex("22265A").unwrap())) .add_resource(CursorInWorld::default()) .add_resource(PlayerResources { pink: 30, green: 0 }) .add_startup_system(game_setup) .add_system(cursor_world_coords) .add_system(camera_motion) .add_system(kepler_motion) .add_system(building) .add_system(planet_auras) .add_system(combat) .add_system(resource_mining); } } fn main() { App::build() .add_resource(WindowDescriptor { title: "Moonshot!".to_string(), width: 1920, height: 1080, ..Default::default() }) .add_resource(LogSettings { level: Level::DEBUG, ..Default::default() }) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .add_plugin(NetworkPlugin) .run(); } fn game_setup( commands: &mut Commands, asset_server: Res<AssetServer>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let texture_handle = asset_server.load("sprites/sprite_sheet.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(256.0, 256.0), 4, 4); let texture_atlas_handle = texture_atlases.set("SPRITE_SHEET", texture_atlas); commands .spawn(Camera2dBundle::default()) .spawn(UiCameraBundle::default()) .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexStart, ..Default::default() }, text: Text { value: "0, 0".to_string(), font: asset_server.load("fonts/Nunito-Regular.ttf"), style: TextStyle { font_size: 60.0, color: Color::WHITE, alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(ResourcesText) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(0), texture_atlas: 
texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(1.0)), ..Default::default() }) .with(Planet::default()) .with_children(|parent| { parent .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 300.0, speed: 1.0, building: None, }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 500.0, speed: 0.5, building: None, }); }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(0), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_translation(Vec3::splat(700.0)), ..Default::default() }) .with(Planet::default()) .with_children(|parent| { parent .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 300.0, speed: 1.0, building: None, }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 500.0, speed: 0.5, building: None, }); }); } fn camera_motion( time: Res<Time>, keyboard_input: Res<Input<KeyCode>>, mut query: Query<(&Camera, Mut<Transform>)>, ) { for (camera, mut trans) in query.iter_mut() { if camera.name == Some(UI_CAMERA.to_string()) { continue; } let mut direction = Vec3::splat(0.0); if keyboard_input.pressed(KeyCode::Up) { direction += Vec3::new(0.0, 1.0, 0.0) } if keyboard_input.pressed(KeyCode::Down) { direction += Vec3::new(0.0, -1.0, 0.0) } if keyboard_input.pressed(KeyCode::Left) { direction += Vec3::new(-1.0, 0.0, 0.0) } if keyboard_input.pressed(KeyCode::Right) { direction += 
Vec3::new(1.0, 0.0, 0.0) } let camera_speed = 500.0; let ds = camera_speed * time.delta_seconds; if direction.length() > 0.0 { trans.translation += direction.normalize() * ds; } } } fn kepler_motion(time: Res<Time>, mut query: Query<(&Moon, Mut<Transform>)>) { for (moon, mut trans) in query.iter_mut() { let ds = moon.speed * time.seconds_since_startup; let x = moon.orbit_radius * ds.cos() as f32; let y = moon.orbit_radius * ds.sin() as f32; trans.translation = Vec3::new(x, y, 0.0); } } struct ResourceMiningState { timer: Timer, } impl Default for ResourceMiningState { fn default() -> Self { Self { timer: Timer::from_seconds(1.0, true), } } } fn resource_mining( mut state: Local<ResourceMiningState>, time: Res<Time>, mut resources: ResMut<PlayerResources>, moon_query: Query<&Moon>, mut text_query: Query<(&mut Text, &ResourcesText)>, ) { if state.timer.tick(time.delta_seconds).just_finished() { for moon in moon_query.iter() { if let Some(BuildingType::Mining) = moon.building { resources.pink += 1; } } } for (mut text, _) in text_query.iter_mut() { text.value = format!("{}, {}", resources.pink, resources.green); } } #[derive(Default)] pub struct PlanetAuraState { keyboard_event_reader: EventReader<KeyboardInput>, current_planet: Option<Entity>, } pub fn planet_auras( mut state: Local<PlanetAuraState>, cursor_in_world: Res<CursorInWorld>, keyboard_inputs: Res<Events<KeyboardInput>>, mouse_input: Res<Input<MouseButton>>, mut resources: ResMut<PlayerResources>, mut transport: ResMut<Transport>, mut planet_query: Query<(Entity, Mut<Planet>, &GlobalTransform)>, ) { let world_coords = cursor_in_world.position; for event in state.keyboard_event_reader.iter(&keyboard_inputs) { if let Some(entity) = state.current_planet { if event.state == ElementState::Pressed { let (_, mut planet, _) = planet_query.get_mut(entity).unwrap(); planet.current_aura = match event.key_code { Some(KeyCode::P) => Some(Aura::ProductionSpeed), Some(KeyCode::R) => Some(Aura::RocketSpeed), 
Some(KeyCode::D) => Some(Aura::RocketDamage), Some(KeyCode::M) => Some(Aura::MoonSpeed), Some(KeyCode::S) => Some(Aura::Shield), _ => planet.current_aura, }; let aura_change = PlayerAction::ChangeAura { aura: planet.current_aura, planet: entity.id(), }; let serialized = bincode::serialize(&aura_change).unwrap(); transport.send(serialized); state.current_planet = None; } } } if mouse_input.pressed(MouseButton::Left) { for (entity, _, trans) in planet_query.iter_mut() { if trans.translation.x - 128.0 * trans.scale.x <= world_coords.x && trans.translation.x + 128.0 * trans.scale.x >= world_coords.x && trans.translation.y - 128.0 * trans.scale.y <= world_coords.y && trans.translation.y + 128.0 * trans.scale.y >= world_coords.y { state.current_planet = Some(entity); } } } }
use bevy::{ input::{keyboard::KeyboardInput, ElementState, Input}, log::{Level, LogSettings}, prelude::*, render::{camera::Camera, pass::ClearColor}, ui::camera::UI_CAMERA, }; use moonshot::building::*; use moonshot::combat::*; use moonshot::components::*; use moonshot::cursor_world_coords::*; use moonshot::network::{NetworkPlugin, PlayerAction, Transport}; struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.add_resource(ClearColor(Color::hex("22265A").unwrap())) .add_resource(CursorInWorld::default()) .add_resource(PlayerResources { pink: 30, green: 0 }) .add_startup_system(game_setup) .add_system(cursor_world_coords) .add_system(camera_motion) .add_system(kepler_motion) .add_system(building) .add_system(planet_auras) .add_system(combat) .add_system(resource_mining); } } fn main() { App::build() .add_resource(WindowDescriptor { title: "Moonshot!".to_string(), width: 1920, height: 1080, ..Default::default() }) .add_resource(LogSettings { level: Level::DEBUG, ..Default::default() }) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .add_plugin(NetworkPlugin) .run(); } fn game_setup( commands: &mut Commands, asset_server: Res<AssetServer>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let texture_handle = asset_server.load("sprites/sprite_sheet.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(256.0, 256.0), 4, 4); let texture_atlas_handle = texture_atlases.set("SPRITE_SHEET", texture_atlas); commands .spawn(Camera2dBundle::default()) .spawn(UiCameraBundle::default()) .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexStart, ..Default::default() }, text: Text { value: "0, 0".to_string(), font: asset_server.load("fonts/Nunito-Regular.ttf"), style: TextStyle { font_size: 60.0, color: Color::WHITE, alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(ResourcesText) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(0), texture_atlas: 
texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(1.0)), ..Default::default() }) .with(Planet::default()) .with_children(|parent| { parent .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 300.0, speed: 1.0, building: None, }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 500.0, speed: 0.5, building: None, }); }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(0), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_translation(Vec3::splat(700.0)), ..Default::default() }) .with(Planet::default()) .with_children(|parent| { parent .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 300.0, speed: 1.0, building: None, }) .spawn(SpriteSheetBundle { sprite: TextureAtlasSprite::new(1), texture_atlas: texture_atlas_handle.clone(), transform: Transform::from_scale(Vec3::splat(0.5)), ..Default::default() }) .with(Moon { orbit_radius: 500.0, speed: 0.5, building: None, }); }); } fn camera_motion( time: Res<Time>, keyboard_input: Res<Input<KeyCode>>, mut query: Query<(&Camera, Mut<Transform>)>, ) { for (camera, mut trans) in query.iter_mut() { if camera.name == Some(UI_CAMERA.to_string()) { continue; } let mut direction = Vec3::splat(0.0); if keyboard_input.pressed(KeyCode::Up) { direction += Vec3::new(0.0, 1.0, 0.0) } if keyboard_input.pressed(KeyCode::Down) { direction += Vec3::new(0.0, -1.0, 0.0) } if keyboard_input.pressed(KeyCode::Left) { direction += Vec3::new(-1.0, 0.0, 0.0) } if keyboard_input.pressed(KeyCode::Right) { direction += 
Vec3::new(1.0, 0.0, 0.0) } let camera_speed = 500.0; let ds = camera_speed * time.delta_seconds; if direction.length() > 0.0 { trans.translation += direction.normalize() * ds; } } } fn kepler_motion(time: Res<Time>, mut query: Query<(&Moon, Mut<Transform>)>) { for (moon, mut trans) in query.iter_mut() { let ds = moon.speed * time.seconds_since_startup; let x = moon.orbit_radius * ds.cos() as f32; let y = moon.orbit_radius * ds.sin() as f32; trans.translation = Vec3::new(x, y, 0.0); } } struct ResourceMiningState { timer: Timer, } impl Default for ResourceMiningState { fn default() -> Self { Self { timer: Timer::from_seconds(1.0, true), } } } fn resource_mining( mut state: Local<ResourceMiningState>, time: Res<Time>, mut resources: ResMut<PlayerResources>, moon_query: Query<&Moon>, mut text_query: Query<(&mut Text, &ResourcesText)>, ) { if state.timer.tick(time.delta_seconds).just_finished() { for moon in moon_query.iter() { if let Some(BuildingType::Mining) = moon.building { resources.pink += 1; } } } for (mut text, _) in text_query.iter_mut() { text.value = format!("{}, {}", resources.pink, resources.green); } } #[derive(Default)] pub struct PlanetAuraState { keyboard_event_reader: EventReader<KeyboardInput>, current_planet: Option<Entity>, } pub fn planet_auras( mut state: Local<PlanetAuraState>, cursor_in_world: Res<CursorInWorld>, keyboard_inputs: Res<Events<KeyboardInput>>, mouse_input: Res<Input<MouseButton>>, mut resources: ResMut<PlayerResources>, mut transport: ResMut<Transport>, mut planet_query: Query<(Entity, Mut<Planet>, &GlobalTransform)>, ) { let world_coords = cursor_in_world.position; for event in state.keyboard_event_reader.iter(&keyboard_inputs) { if let Some(entity) = state.current_planet { if event.state == ElementState::Pressed { let (_, mut planet, _) = planet_query.get_mut(entity).unwrap(); planet.current_aura = match event.key_code { Some(KeyCode::P) => Some(Aura::ProductionSpeed), Some(KeyCode::R) => Some(Aura::RocketSpeed), 
Some(KeyCode::D) => Some(Aura::RocketDamage), Some(KeyCode::M) => Some(Aura::MoonSpeed), Some(KeyCode::S) => Some(Aura::Shield), _ => planet.current_aura, }; let aura_change = PlayerAction::ChangeAura { aura: planet.current_aura, planet: entity.id(), }; let serialized = bincode::serialize(&aura_change).unwrap(); transport.send(serialized); state.current_planet = None; } } } if mouse_input.pressed(MouseButton::Left) { for (entity, _, trans) in planet_query.iter_mut() { if trans.translation.x - 128.0 * trans.scale.x <= world_coords.x && trans.translation.x + 128.0 * trans.scale.x >= world_coords.x && trans.translation.
y - 128.0 * trans.scale.y <= world_coords.y && trans.translation.y + 128.0 * trans.scale.y >= world_coords.y { state.current_planet = Some(entity); } } } }
function_block-function_prefixed
[ { "content": "pub fn update_simulation_time(mut sim_time: ResMut<NetworkSimulationTime>, time: Res<Time>) {\n\n sim_time.update_elapsed(time.delta_seconds);\n\n sim_time.reset_frame_lag();\n\n while sim_time.elapsed_duration() > sim_time.per_frame_duration() {\n\n sim_time.increment_frame_number...
Rust
src/day11.rs
jjcomer/advent2019
b6fa95f1ffb6ce56ed5ab47bacd3d0377322c69a
use crate::intcode; use crate::intcode::{run_program, IntCodeResult, Program}; use std::collections::HashMap; #[aoc_generator(day11)] pub fn input_generator(input: &str) -> Program { intcode::input_generator(input) } enum Direction { Up, Down, Left, Right, } fn change_direction(current_direction: &Direction, turn: i64) -> Direction { match turn { 0 => match current_direction { Direction::Up => Direction::Left, Direction::Left => Direction::Down, Direction::Down => Direction::Right, Direction::Right => Direction::Up, }, 1 => match current_direction { Direction::Up => Direction::Right, Direction::Right => Direction::Down, Direction::Down => Direction::Left, Direction::Left => Direction::Up, }, _ => panic!("Unknown direction {}", turn), } } enum Colour { Black, White, } type Coord = (i64, i64); type Map = HashMap<Coord, Colour>; fn move_bot(current_direction: &Direction, (x, y): &Coord) -> Coord { match current_direction { Direction::Up => (*x, y + 1), Direction::Left => (x - 1, *y), Direction::Down => (*x, y - 1), Direction::Right => (x + 1, *y), } } fn check_colour<'a>(map: &'a Map, current_position: &Coord) -> &'a Colour { map.get(current_position).unwrap_or(&Colour::Black) } fn gen_colour(input: i64) -> Colour { match input { 0 => Colour::Black, 1 => Colour::White, _ => panic!("Unexpected colour {}", input), } } #[aoc(day11, part1)] pub fn solve_part1(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; let mut current_position = (0, 0); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, 
new_colour); current_direction = change_direction(&current_direction, output[1]); current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } map.len() } #[aoc(day11, part2)] pub fn solve_part2(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; let mut current_position = (0, 5); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); map.insert(current_position, Colour::White); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, new_colour); current_direction = change_direction(&current_direction, output[1]); current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } let max_x = map.keys().map(|x| x.0).max().unwrap(); let max_y = map.keys().map(|x| x.1).max().unwrap(); println!("X {} .. Y {}", max_x, max_y); for y in 0..=max_y { for x in 0..=max_x { match check_colour(&map, &(x, y)) { Colour::Black => print!(" "), Colour::White => print!("X"), } } println!(); } map.len() }
use crate::intcode; use crate::intcode::{run_program, IntCodeResult, Program}; use std::collections::HashMap; #[aoc_generator(day11)] pub fn input_generator(input: &str) -> Program { intcode::input_generator(input) } enum Direction { Up, Down, Left, Right, } fn change_direction(current_direction: &Direction, turn: i64) -> Direction { match turn { 0 => match current_directi
current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } let max_x = map.keys().map(|x| x.0).max().unwrap(); let max_y = map.keys().map(|x| x.1).max().unwrap(); println!("X {} .. Y {}", max_x, max_y); for y in 0..=max_y { for x in 0..=max_x { match check_colour(&map, &(x, y)) { Colour::Black => print!(" "), Colour::White => print!("X"), } } println!(); } map.len() }
on { Direction::Up => Direction::Left, Direction::Left => Direction::Down, Direction::Down => Direction::Right, Direction::Right => Direction::Up, }, 1 => match current_direction { Direction::Up => Direction::Right, Direction::Right => Direction::Down, Direction::Down => Direction::Left, Direction::Left => Direction::Up, }, _ => panic!("Unknown direction {}", turn), } } enum Colour { Black, White, } type Coord = (i64, i64); type Map = HashMap<Coord, Colour>; fn move_bot(current_direction: &Direction, (x, y): &Coord) -> Coord { match current_direction { Direction::Up => (*x, y + 1), Direction::Left => (x - 1, *y), Direction::Down => (*x, y - 1), Direction::Right => (x + 1, *y), } } fn check_colour<'a>(map: &'a Map, current_position: &Coord) -> &'a Colour { map.get(current_position).unwrap_or(&Colour::Black) } fn gen_colour(input: i64) -> Colour { match input { 0 => Colour::Black, 1 => Colour::White, _ => panic!("Unexpected colour {}", input), } } #[aoc(day11, part1)] pub fn solve_part1(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; let mut current_position = (0, 0); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, new_colour); current_direction = change_direction(&current_direction, output[1]); current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } map.len() } #[aoc(day11, part2)] pub fn solve_part2(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; 
let mut current_position = (0, 5); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); map.insert(current_position, Colour::White); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, new_colour); current_direction = change_direction(&current_direction, output[1]);
random
[ { "content": "#[aoc_generator(day19)]\n\npub fn input_generator(input: &str) -> Program {\n\n intcode::input_generator(input)\n\n}\n\n\n\n#[cached(\n\n convert = r#\"{format!(\"{},{}\",x,y)}\"#,\n\n create = \"{UnboundCache::new()}\",\n\n type = \"UnboundCache<String,bool>\"\n\n)]\n", "file_path...
Rust
src/ecs/src/world.rs
lukebitts/Luck
aef25bc3442872789228e14989d203afe3cc8a52
use mopa::Any; use super::entity::Entities; use super::component::Components; use super::{Entity, System}; use std::any::TypeId; pub struct World { entities: Entities, components: Components, systems: Vec<Box<System>>, to_destroy: Vec<Entity>, } unsafe impl Send for World {} unsafe impl Sync for World {} pub struct WorldBuilder { systems: Vec<Box<System>>, } impl WorldBuilder { #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn new() -> Self { WorldBuilder { systems: Vec::new() } } pub fn with_system<T: System>(mut self, system: T) -> Self { self.systems.push(Box::new(system)); self } pub fn build(self) -> World { World { entities: Entities::new(), components: Components::new(), systems: self.systems, to_destroy: Vec::new(), } } pub fn build_with_capacity(self, capacity: usize) -> World { World { entities: Entities::with_capacity(capacity), components: Components::with_capacity(capacity), systems: self.systems, to_destroy: Vec::new(), } } } fn match_entity_signature(system: &System, components: &Box<[TypeId]>) -> bool { let signature = system.signature(); let mut count = 0; for s in &*signature { if components.contains(&s) { count = count + 1; } } count == signature.len() } impl World { pub fn create_entity(&mut self) -> Entity { self.entities.create_entity() } pub fn destroy_entity(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity) && !self.to_destroy.contains(&entity)); self.to_destroy.push(entity); } #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.is_valid(entity) } pub fn add_component<T: Any>(&mut self, entity: Entity, component: T) -> &mut T { assert!(self.entities.is_valid(entity)); self.components.add_component::<T>(entity.id() as usize, component) } pub fn get_component<T: Any>(&self, entity: Entity) -> Option<&T> { assert!(self.entities.is_valid(entity)); self.components.get_component::<T>(entity.id() as usize) } pub fn 
get_component_mut<T: Any>(&mut self, entity: Entity) -> Option<&mut T> { assert!(self.entities.is_valid(entity)); self.components.get_component_mut::<T>(entity.id() as usize) } pub fn remove_component<T: Any>(&mut self, entity: Entity) -> Option<T> { assert!(self.entities.is_valid(entity)); self.components.remove_component::<T>(entity.id() as usize) } pub fn remove_all_components(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); self.components.remove_all_components(entity.id() as usize) } pub fn get_system_mut<T: System>(&mut self) -> Option<&mut T> { self.systems.iter_mut().filter_map(|s| s.downcast_mut::<T>()).next() } pub fn get_system<T: System>(&self) -> Option<&T> { self.systems.iter().filter_map(|s| s.downcast_ref::<T>()).next() } pub fn apply(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); let World { ref mut systems, ref mut components, .. } = *self; for system in systems.iter_mut() { if match_entity_signature(&**system, &components.generate_signature(entity.id() as usize)) { if !system.has_entity(entity) { system.on_entity_added(entity); } } else if system.has_entity(entity) { system.on_entity_removed(entity); } } } pub fn process(&mut self) { use rayon::par_iter::*; let mut callbacks = Vec::with_capacity(self.systems.len()); self.systems .par_iter() .map(|s| s.process(self)) .collect_into(&mut callbacks); for callback in &mut callbacks { (*callback)(self); } self.destroy_scheduled_entities(); } fn destroy_scheduled_entities(&mut self) { let to_destroy = self.to_destroy.clone(); for entity in to_destroy { self.remove_all_components(entity); self.apply(entity); self.entities.destroy_entity(entity); } self.to_destroy.clear(); } } impl Drop for World { fn drop(&mut self) { for entity in &self.entities { self.to_destroy.push(entity); } self.destroy_scheduled_entities(); } } #[cfg(test)] mod test { use super::WorldBuilder; use super::super::{Signature, Entity, System, World}; use std::ops::FnMut; use std::any::TypeId; 
use std; #[derive(Default, PartialEq, Debug)] struct PositionComponent(f32, f32, f32); #[derive(Default)] struct VelocityComponent(f32, f32, f32); #[derive(Default)] struct SpatialSystem { entities: Vec<Entity>, marker: bool, } impl_system!(SpatialSystem, (PositionComponent), { Box::new(move |w: &mut World|{ if !w.get_system::<SpatialSystem>().unwrap().marker { assert_eq!(w.get_system::<VelocitySystem>().unwrap().marker, false); w.get_system_mut::<SpatialSystem>().unwrap().marker = true; } }) }); impl Drop for SpatialSystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[derive(Default)] struct VelocitySystem { entities: Vec<Entity>, marker: bool, } impl_system!(VelocitySystem, (PositionComponent, VelocityComponent), { let v1 = PositionComponent(0.0, 0.0, 0.0); Box::new(move |w: &mut World|{ if !w.get_system::<VelocitySystem>().unwrap().marker { assert_eq!(w.get_system::<SpatialSystem>().unwrap().marker, true); w.get_system_mut::<VelocitySystem>().unwrap().marker = true; assert_eq!(v1, v1); } }) }); impl Drop for VelocitySystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[test] fn creation() { let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_none()); assert_eq!(w.systems.len(), 1); let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_some()); assert_eq!(w.systems.len(), 2); } #[test] fn component_system_operations() { let mut w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); let e1 = w.create_entity(); w.add_component(e1, PositionComponent::default()); w.add_component(e1, VelocityComponent::default()); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); 
assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), true); w.remove_component::<VelocityComponent>(e1); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 0); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.destroy_entity(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.process(); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), false); w.process(); } }
use mopa::Any; use super::entity::Entities; use super::component::Components; use super::{Entity, System}; use std::any::TypeId; pub struct World { entities: Entities, components: Components, systems: Vec<Box<System>>, to_destroy: Vec<Entity>, } unsafe impl Send for World {} unsafe impl Sync for World {} pub struct WorldBuilder { systems: Vec<Box<System>>, } impl WorldBuilder { #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn new() -> Self { WorldBuilder { systems: Vec::new() } } pub fn with_system<T: System>(mut self, system: T) -> Self { self.systems.push(Box::new(system)); self } pub fn build(self) -> World { World { entities: Entities::new(), components: Components::new(), systems: self.systems, to_destroy: Vec::new(), } } pub fn build_with_capacity(self, capacity: usize) -> World { World { entities: Entities::with_capacity(capacity), components: Components::with_capacity(capacity), systems: self.systems, to_destroy: Vec::new(), } } } fn match_entity_signature(system: &System, components: &Box<[TypeId]>) -> bool { let signature = system.signature(); let mut count = 0; for s in &*signature { if components.contains(&s) { count = count + 1; } } count == signature.len() } impl World { pub fn create_entity(&mut self) -> Entity { self.entities.create_entity() } pub fn destroy_entity(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity) && !self.to_destroy.contains(&entity)); self.to_destroy.push(entity); } #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.is_valid(entity) } pub fn add_component<T: Any>(&mut self, entity: Entity, component: T) -> &mut T { assert!(self.entities.is_valid(entity)); self.components.add_component::<T>(entity.id() as usize, component) } pub fn get_component<T: Any>(&self, entity: Entity) -> Option<&T> { assert!(self.entities.is_valid(entity)); self.components.get_component::<T>(entity.id() as usize) } pub fn 
get_component_mut<T: Any>(&mut self, entity: Entity) -> Option<&mut T> { assert!(self.entities.is_valid(entity)); self.components.get_component_mut::<T>(entity.id() as usize) } pub fn remove_component<T: Any>(&mut self, entity: Entity) -> Option<T> { assert!(self.entities.is_valid(entity)); self.components.remove_component::<T>(entity.id() as usize) } pub fn remove_all_components(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); self.components.remove_all_components(entity.id() as usize) } pub fn get_system_mut<T: System>(&mut self) -> Option<&mut T> { self.systems.iter_mut().filter_map(|s| s.downcast_mut::<T>()).next() } pub fn get_system<T: System>(&self) -> Option<&T> { self.systems.iter().filter_map(|s| s.downcast_ref::<T>()).next() } pub fn apply(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); let World { ref mut systems, ref mut components, .. } = *self; for system in systems.iter_mut() { if match_entity_signature(&**system, &components.generate_signature(entity.id() as usize)) { if !system.has_entity(entity) { system.on_entity_added(entity); } } else if system.has_entity(entity) { system.on_entity_removed(entity); } } } pub fn process(&mut self) { use rayon::par_iter::*; let mut callbacks = Vec::with_capacity(self.systems.len());
fn destroy_scheduled_entities(&mut self) { let to_destroy = self.to_destroy.clone(); for entity in to_destroy { self.remove_all_components(entity); self.apply(entity); self.entities.destroy_entity(entity); } self.to_destroy.clear(); } } impl Drop for World { fn drop(&mut self) { for entity in &self.entities { self.to_destroy.push(entity); } self.destroy_scheduled_entities(); } } #[cfg(test)] mod test { use super::WorldBuilder; use super::super::{Signature, Entity, System, World}; use std::ops::FnMut; use std::any::TypeId; use std; #[derive(Default, PartialEq, Debug)] struct PositionComponent(f32, f32, f32); #[derive(Default)] struct VelocityComponent(f32, f32, f32); #[derive(Default)] struct SpatialSystem { entities: Vec<Entity>, marker: bool, } impl_system!(SpatialSystem, (PositionComponent), { Box::new(move |w: &mut World|{ if !w.get_system::<SpatialSystem>().unwrap().marker { assert_eq!(w.get_system::<VelocitySystem>().unwrap().marker, false); w.get_system_mut::<SpatialSystem>().unwrap().marker = true; } }) }); impl Drop for SpatialSystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[derive(Default)] struct VelocitySystem { entities: Vec<Entity>, marker: bool, } impl_system!(VelocitySystem, (PositionComponent, VelocityComponent), { let v1 = PositionComponent(0.0, 0.0, 0.0); Box::new(move |w: &mut World|{ if !w.get_system::<VelocitySystem>().unwrap().marker { assert_eq!(w.get_system::<SpatialSystem>().unwrap().marker, true); w.get_system_mut::<VelocitySystem>().unwrap().marker = true; assert_eq!(v1, v1); } }) }); impl Drop for VelocitySystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[test] fn creation() { let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_none()); assert_eq!(w.systems.len(), 1); let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) 
.build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_some()); assert_eq!(w.systems.len(), 2); } #[test] fn component_system_operations() { let mut w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); let e1 = w.create_entity(); w.add_component(e1, PositionComponent::default()); w.add_component(e1, VelocityComponent::default()); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), true); w.remove_component::<VelocityComponent>(e1); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 0); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.destroy_entity(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.process(); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), false); w.process(); } }
self.systems .par_iter() .map(|s| s.process(self)) .collect_into(&mut callbacks); for callback in &mut callbacks { (*callback)(self); } self.destroy_scheduled_entities(); }
function_block-function_prefix_line
[ { "content": "/// A trait that describes which components the system should process. It is split from the\n\n/// System trait to allow it to be implemented through the impl_signature macro.\n\npub trait Signature : mopa::Any + Send + Sync {\n\n /// Should return the components this system expects to process...
Rust
src/configuration/archive.rs
dandyvica/clf
0774f971a973d89688a72f7283e251c7a429e946
use std::{ fmt::Debug, path::{Path, PathBuf}, }; use serde::Deserialize; #[derive(Debug, Deserialize, Clone)] #[serde(deny_unknown_fields)] pub struct LogArchive { pub dir: Option<PathBuf>, pub extension: Option<String>, pub pattern: Option<String>, } impl LogArchive { pub fn default_path<P: AsRef<Path> + Clone>(path: P) -> PathBuf { let default_path = format!("{}.1", path.as_ref().to_string_lossy()); PathBuf::from(default_path) } pub fn archived_path<P: AsRef<Path> + std::fmt::Debug>(&self, path: P) -> PathBuf { let dir = match &self.dir { None => { let dir = path.as_ref().parent(); debug_assert!(dir.is_some()); dir.unwrap() } Some(dir) => &dir, }; debug_assert!(dir.is_dir()); println!("dir={:?}", dir); debug_assert!(path.as_ref().file_name().is_some()); let file_name = path.as_ref().file_name().unwrap().to_string_lossy(); #[cfg(target_family = "windows")] let default_path = if self.extension.is_none() { format!("{}\\{}.1", dir.to_string_lossy(), file_name) } else { format!( "{}\\{}.{}", dir.to_string_lossy(), file_name, self.extension.as_ref().unwrap() ) }; #[cfg(target_family = "unix")] let default_path = if self.extension.is_none() { format!("{}/{}.1", dir.to_string_lossy(), file_name) } else { format!( "{}/{}.{}", dir.to_string_lossy(), file_name, self.extension.as_ref().unwrap() ) }; println!( "self={:?}, dir={}, file={}, rotated={}", path, dir.display(), file_name, default_path ); PathBuf::from(default_path) } } #[cfg(test)] mod tests { use std::path::PathBuf; use super::*; #[test] #[cfg(target_family = "unix")] fn default_path() { let yaml = r#" dir: /var/log extension: xz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); println!("{:#?}", archive); let mut p = PathBuf::from("/var/log/kern.log"); assert_eq!( LogArchive::default_path(p), PathBuf::from("/var/log/kern.log.1") ); p = PathBuf::from("/var/log/syslog"); assert_eq!( LogArchive::default_path(p), PathBuf::from("/var/log/syslog.1") ); } #[test] #[cfg(target_family 
= "unix")] fn archived_path() { let p = PathBuf::from("/var/log/kern.log"); let archive = LogArchive { dir: None, extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from("/var/log/kern.log.1") ); let yaml = r#" dir: /tmp "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.1")); let yaml = r#" extension: gz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!( archive.archived_path(&p), PathBuf::from("/var/log/kern.log.gz") ); let yaml = r#" dir: /tmp extension: gz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.gz")); } #[test] #[cfg(target_family = "windows")] fn archived_path() { let p = PathBuf::from(r"C:\Windows\WindowsUpdate.log"); let archive = LogArchive { dir: None, extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.1") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\Temp\WindowsUpdate.log.1") ); let archive = LogArchive { dir: None, extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.gz") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"c:\Windows\Temp\WindowsUpdate.log.gz") ); } }
use std::{ fmt::Debug, path::{Path, PathBuf}, }; use serde::Deserialize; #[derive(Debug, Deserialize, Clone)] #[serde(deny_unknown_fields)] pub struct LogArchive { pub dir: Option<PathBuf>, pub extension: Option<String>, pub pattern: Option<String>, } impl LogArchive { pub fn default_path<P: AsRef<Path> + Clone>(path: P) -> PathBuf { let default_path = format!("{}.1", path.as_ref().to_string_lossy()); PathBuf::from(default_path) } pub fn archived_path<P: AsRef<Path> + std::fmt::Debug>(&self, path: P) -> PathBuf { let dir = match &self.dir { None => { let dir = path.as_ref().parent(); debug_assert!(dir.is_some()); dir.unwrap() } Some(dir) => &dir, }; debug_assert!(dir.is_dir()); println!("dir={:?}", dir); debug_assert!(path.as_ref().file_name().is_some()); let file_name = path.as_ref().file_name().unwrap().to_string_lossy(); #[cfg(target_family = "windows")] let default_path = if self.extension.is_none() { format!("{}\\{}.1", dir.to_string_lossy(), file_name) } else { format!( "{}\\{}.{}", dir.to_string_lossy(), file_name, self.extension.as_ref().unwrap() ) }; #[cfg(target_family = "unix")] let default_path = if self.extension.is_none() { format!("{}/{}.1", dir.to_string_lossy(), file_name) } else { format!( "{}/{}.{}", dir.to_string_lossy(), file_name, self.extension.as_ref().unwrap() ) }; println!( "self={:?}, dir={}, file={}, rotated={}", path, dir.display(), file_name, default_path ); PathBuf::from(default_path) } } #[cfg(test)] mod tests { use std::path::PathBuf; use super::*; #[test] #[cfg(target_family = "unix")] fn default_path() { let yaml = r#" dir: /var/log extension: xz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); println!("{:#?}", archive); let mut p = PathBuf::from("/var/log/kern.log"); assert_eq!( LogArchive::default_path(p), PathBuf::from("/var/log/kern.log.1") ); p = PathBuf::from("/var/log/syslog"); assert_eq!( LogArchive::default_path(p), PathBuf::from("/var/log/syslog.1") ); } #[test] #[cfg(target_family 
= "unix")] fn archived_path() { let p = PathBuf::from("/var/log/kern.log"); let archive = LogArchive { dir: None, extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from("/var/log/kern.log.1") ); let yaml = r#" dir: /tmp "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.1")); let yaml = r#" extension: gz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!( archive.archived_path(&p), PathBuf::from("/var/log/kern.log.gz") ); let yaml = r#" dir: /tmp extension: gz "#; let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML"); assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.gz")); } #[test] #[cfg(target_family = "windows")]
}
fn archived_path() { let p = PathBuf::from(r"C:\Windows\WindowsUpdate.log"); let archive = LogArchive { dir: None, extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.1") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\Temp\WindowsUpdate.log.1") ); let archive = LogArchive { dir: None, extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.gz") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"c:\Windows\Temp\WindowsUpdate.log.gz") ); }
function_block-full_function
[ { "content": "fn read_file<R: BufRead>(mut reader: R) {\n\n // our read buffer\n\n let mut buffer = Vec::with_capacity(1024);\n\n\n\n loop {\n\n let ret = reader.read_until(b'\\n', &mut buffer);\n\n if let Ok(bytes_read) = ret {\n\n if bytes_read == 0 {\n\n break...
Rust
src/output.rs
tjbell/tcount
535a66463229d97845cfed29031ea2528a4c2f72
use crate::count::Counts; use crate::language::Language; use crate::query::{Query, QueryKind}; use prettytable::{format, Cell, Row, Table}; use regex::Regex; use std::fmt::Display; use std::format; use std::str::FromStr; #[derive(Debug)] pub enum Format { Table, CSV, } impl FromStr for Format { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "table" => Ok(Format::Table), "csv" => Ok(Format::CSV), _ => Err(format!("\"{}\" is not supported. Use one of table|csv", s)), } } } pub fn format_builder() -> format::FormatBuilder { format::FormatBuilder::new() .separators( &[format::LinePosition::Top], format::LineSeparator::new('─', '─', '─', '─'), ) .separators( &[format::LinePosition::Title], format::LineSeparator::new('─', '─', '│', '│'), ) .separators( &[format::LinePosition::Bottom], format::LineSeparator::new('─', '─', '─', '─'), ) .padding(1, 1) } #[inline] fn title_cell(content: &str) -> Cell { Cell::new(content).style_spec("b") } #[inline] fn label_cell(label: &str) -> Cell { Cell::new(label).style_spec("li") } #[inline] fn count_cell(count: u64) -> Cell { Cell::new(&count.to_string()).style_spec("r") } #[inline] fn generic_cell(s: impl Display) -> Cell { Cell::new(&s.to_string()).style_spec("l") } pub fn print( format: &Format, counts: Vec<(String, Counts)>, totals: Option<Counts>, kinds: &Vec<String>, kind_patterns: &Vec<Regex>, queries: &Vec<Query>, ) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); titles.push(title_cell("Group")); titles.push(title_cell("Files")); titles.push(title_cell("Tokens")); kinds .iter() .for_each(|kind| titles.push(title_cell(&format!("Kind({})", kind)))); kind_patterns.iter().for_each(|kind_pat| { titles.push(title_cell(&format!("Pattern({})", kind_pat.to_string()))) }); queries.iter().for_each(|query| match &query.kind { QueryKind::Match => titles.push(title_cell(&format!("Query({})", 
query.name))), QueryKind::Captures(names) => names.iter().for_each(|name| { titles.push(title_cell(&format!("Query({}@{})", query.name, name))); }), }); table.set_titles(Row::new(titles)); counts .iter() .chain( { if let Some(totals) = totals { vec![(String::from("TOTALS"), totals)] } else { vec![] } } .iter(), ) .map(|(label, count)| { let mut cols = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); cols.push(label_cell(&label.to_string())); cols.push(count_cell(count.nfiles)); cols.push(count_cell(count.ntokens)); count.nkinds.iter().for_each(|n| cols.push(count_cell(*n))); count .nkind_patterns .iter() .for_each(|n| cols.push(count_cell(*n))); count .nqueries .iter() .for_each(|n| cols.push(count_cell(*n))); cols }) .for_each(|row| { table.add_row(Row::new(row)); }); match format { Format::Table => { table.printstd(); } Format::CSV => match table.to_csv(std::io::stdout()) { Ok(_) => {} Err(err) => eprintln!("{}", err), }, } } pub fn print_languages(langs: Vec<(&Language, Vec<String>, &Vec<String>)>) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3); titles.push(title_cell("Language")); titles.push(title_cell("Extensions")); titles.push(title_cell("Query Dir Name")); table.set_titles(Row::new(titles)); langs.into_iter().for_each(|(lang, exts, dirs)| { let mut cols = Vec::new(); cols.push(label_cell(&lang.to_string())); cols.push(generic_cell(exts.join(","))); cols.push(generic_cell(dirs.join(","))); table.add_row(Row::new(cols)); }); table.printstd(); }
use crate::count::Counts; use crate::language::Language; use crate::query::{Query, QueryKind}; use prettytable::{format, Cell, Row, Table}; use regex::Regex; use std::fmt::Display; use std::format; use std::str::FromStr; #[derive(Debug)] pub enum Format { Table, CSV, } impl FromStr for Format { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "table" => Ok(Format::Table), "csv" => Ok(Format::CSV), _ => Err(format!("\"{}\" is not supported. Use one of table|csv", s)), } } }
#[inline] fn title_cell(content: &str) -> Cell { Cell::new(content).style_spec("b") } #[inline] fn label_cell(label: &str) -> Cell { Cell::new(label).style_spec("li") } #[inline] fn count_cell(count: u64) -> Cell { Cell::new(&count.to_string()).style_spec("r") } #[inline] fn generic_cell(s: impl Display) -> Cell { Cell::new(&s.to_string()).style_spec("l") } pub fn print( format: &Format, counts: Vec<(String, Counts)>, totals: Option<Counts>, kinds: &Vec<String>, kind_patterns: &Vec<Regex>, queries: &Vec<Query>, ) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); titles.push(title_cell("Group")); titles.push(title_cell("Files")); titles.push(title_cell("Tokens")); kinds .iter() .for_each(|kind| titles.push(title_cell(&format!("Kind({})", kind)))); kind_patterns.iter().for_each(|kind_pat| { titles.push(title_cell(&format!("Pattern({})", kind_pat.to_string()))) }); queries.iter().for_each(|query| match &query.kind { QueryKind::Match => titles.push(title_cell(&format!("Query({})", query.name))), QueryKind::Captures(names) => names.iter().for_each(|name| { titles.push(title_cell(&format!("Query({}@{})", query.name, name))); }), }); table.set_titles(Row::new(titles)); counts .iter() .chain( { if let Some(totals) = totals { vec![(String::from("TOTALS"), totals)] } else { vec![] } } .iter(), ) .map(|(label, count)| { let mut cols = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); cols.push(label_cell(&label.to_string())); cols.push(count_cell(count.nfiles)); cols.push(count_cell(count.ntokens)); count.nkinds.iter().for_each(|n| cols.push(count_cell(*n))); count .nkind_patterns .iter() .for_each(|n| cols.push(count_cell(*n))); count .nqueries .iter() .for_each(|n| cols.push(count_cell(*n))); cols }) .for_each(|row| { table.add_row(Row::new(row)); }); match format { Format::Table => { table.printstd(); } Format::CSV => match 
table.to_csv(std::io::stdout()) { Ok(_) => {} Err(err) => eprintln!("{}", err), }, } } pub fn print_languages(langs: Vec<(&Language, Vec<String>, &Vec<String>)>) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3); titles.push(title_cell("Language")); titles.push(title_cell("Extensions")); titles.push(title_cell("Query Dir Name")); table.set_titles(Row::new(titles)); langs.into_iter().for_each(|(lang, exts, dirs)| { let mut cols = Vec::new(); cols.push(label_cell(&lang.to_string())); cols.push(generic_cell(exts.join(","))); cols.push(generic_cell(dirs.join(","))); table.add_row(Row::new(cols)); }); table.printstd(); }
pub fn format_builder() -> format::FormatBuilder { format::FormatBuilder::new() .separators( &[format::LinePosition::Top], format::LineSeparator::new('─', '─', '─', '─'), ) .separators( &[format::LinePosition::Title], format::LineSeparator::new('─', '─', '│', '│'), ) .separators( &[format::LinePosition::Bottom], format::LineSeparator::new('─', '─', '─', '─'), ) .padding(1, 1) }
function_block-full_function
[]
Rust
src/input/touch_controls.rs
khang06/doukutsu-rs
8e0fb80c8cdf59883c9e6ec1c6823b4f958949ed
use ggez::{Context, GameResult}; use winit::event::TouchPhase; use crate::common::Rect; use crate::engine_constants::EngineConstants; use crate::texture_set::TextureSet; #[derive(Copy, Clone, PartialEq, Eq)] pub enum TouchControlType { None, Dialog, Controls, } #[derive(Copy, Clone)] pub struct TouchPoint { id: u64, touch_id: u64, position: (f64, f64), last_position: (f64, f64), } pub struct TouchControls { pub control_type: TouchControlType, pub points: Vec<TouchPoint>, pub interact_icon: bool, touch_id_counter: u64, clicks: Vec<TouchPoint>, } impl TouchControls { pub fn new() -> TouchControls { TouchControls { control_type: TouchControlType::None, touch_id_counter: 0, interact_icon: false, points: Vec::with_capacity(8), clicks: Vec::with_capacity(8), } } pub fn process_winit_event(&mut self, scale: f32, touch: winit::event::Touch) { match touch.phase { TouchPhase::Started | TouchPhase::Moved => { if let Some(point) = self.points.iter_mut().find(|p| p.id == touch.id) { point.last_position = point.position; point.position = (touch.location.x / scale as f64, touch.location.y / scale as f64); } else { self.touch_id_counter = self.touch_id_counter.wrapping_add(1); let point = TouchPoint { id: touch.id, touch_id: self.touch_id_counter, position: (touch.location.x / scale as f64, touch.location.y / scale as f64), last_position: (0.0, 0.0), }; self.points.push(point); if touch.phase == TouchPhase::Started { self.clicks.push(point); } } } TouchPhase::Ended | TouchPhase::Cancelled => { self.points.retain(|p| p.id != touch.id); self.clicks.retain(|p| p.id != touch.id); } } } pub fn point_in(&self, bounds: Rect) -> Option<u64> { for point in self.points.iter() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { return Some(point.touch_id); } } None } pub fn consume_click_in(&mut self, bounds: Rect) -> bool { self.clicks.retain(|p| p.touch_id 
!= 0); for point in self.clicks.iter_mut() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { point.touch_id = 0; return true; } } false } pub fn draw(&self, canvas_size: (f32, f32), constants: &EngineConstants, texture_set: &mut TextureSet, ctx: &mut Context) -> GameResult { let batch = texture_set.get_or_load_batch(ctx, constants, "Caret")?; let rect = Rect::new_size(104, 120, 24, 24); for point in self.points.iter() { batch.add_rect(point.position.0 as f32 - 12.0, point.position.1 as f32 - 12.0, &rect); } batch.draw(ctx)?; if self.control_type == TouchControlType::Controls { let batch = texture_set.get_or_load_batch(ctx, constants, "builtin/touch")?; let color = (255, 255, 255, 160); for x in 0..3 { for y in 0..3 { let mut icon_x = x; let icon_y = y; if self.interact_icon && x == 1 && y == 2 { icon_x = 3; } batch.add_rect_tinted(4.0 + 48.0 * x as f32 + 8.0, (canvas_size.1 - 4.0 - 48.0 * 3.0) + 48.0 * y as f32 + 8.0, color, &Rect::new_size(icon_x * 32, icon_y * 32, 32, 32)); } } batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) + 8.0, color, &Rect::new_size(3 * 32, 32, 32, 32)); batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) * 2.0 + 8.0, color, &Rect::new_size(3 * 32, 0, 32, 32)); batch.draw(ctx)?; } Ok(()) } }
use ggez::{Context, GameResult}; use winit::event::TouchPhase; use crate::common::Rect; use crate::engine_constants::EngineConstants; use crate::texture_set::TextureSet; #[derive(Copy, Clone, PartialEq, Eq)] pub enum TouchControlType { None, Dialog, Controls, } #[derive(Copy, Clone)] pub struct TouchPoint { id: u64, touch_id: u64, position: (f64, f64), last_position: (f64, f64), } pub struct TouchControls { pub control_type: TouchControlType, pub points: Vec<TouchPoint>, pub interact_icon: bool, touch_id_counter: u64, clicks: Vec<TouchPoint>, } impl TouchControls { pub fn new() -> TouchControls { TouchControls { control_type: TouchControlType::None, touch_id_counter: 0, interact_icon: false, points: Vec::with_capacity(8), clicks: Vec::with_capacity(8), } } pub fn process_winit_event(&mut self, scale: f32, touch: winit::event::Touch) { match touch.phase { TouchPhase::Started | TouchPhase::Moved => {
} TouchPhase::Ended | TouchPhase::Cancelled => { self.points.retain(|p| p.id != touch.id); self.clicks.retain(|p| p.id != touch.id); } } } pub fn point_in(&self, bounds: Rect) -> Option<u64> { for point in self.points.iter() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { return Some(point.touch_id); } } None } pub fn consume_click_in(&mut self, bounds: Rect) -> bool { self.clicks.retain(|p| p.touch_id != 0); for point in self.clicks.iter_mut() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { point.touch_id = 0; return true; } } false } pub fn draw(&self, canvas_size: (f32, f32), constants: &EngineConstants, texture_set: &mut TextureSet, ctx: &mut Context) -> GameResult { let batch = texture_set.get_or_load_batch(ctx, constants, "Caret")?; let rect = Rect::new_size(104, 120, 24, 24); for point in self.points.iter() { batch.add_rect(point.position.0 as f32 - 12.0, point.position.1 as f32 - 12.0, &rect); } batch.draw(ctx)?; if self.control_type == TouchControlType::Controls { let batch = texture_set.get_or_load_batch(ctx, constants, "builtin/touch")?; let color = (255, 255, 255, 160); for x in 0..3 { for y in 0..3 { let mut icon_x = x; let icon_y = y; if self.interact_icon && x == 1 && y == 2 { icon_x = 3; } batch.add_rect_tinted(4.0 + 48.0 * x as f32 + 8.0, (canvas_size.1 - 4.0 - 48.0 * 3.0) + 48.0 * y as f32 + 8.0, color, &Rect::new_size(icon_x * 32, icon_y * 32, 32, 32)); } } batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) + 8.0, color, &Rect::new_size(3 * 32, 32, 32, 32)); batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) * 2.0 + 8.0, color, &Rect::new_size(3 * 32, 0, 32, 32)); batch.draw(ctx)?; } Ok(()) } }
if let Some(point) = self.points.iter_mut().find(|p| p.id == touch.id) { point.last_position = point.position; point.position = (touch.location.x / scale as f64, touch.location.y / scale as f64); } else { self.touch_id_counter = self.touch_id_counter.wrapping_add(1); let point = TouchPoint { id: touch.id, touch_id: self.touch_id_counter, position: (touch.location.x / scale as f64, touch.location.y / scale as f64), last_position: (0.0, 0.0), }; self.points.push(point); if touch.phase == TouchPhase::Started { self.clicks.push(point); } }
if_condition
[ { "content": "#[inline(always)]\n\npub fn fix9_scale(val: i32, scale: f32) -> f32 {\n\n (val as f64 * scale as f64 / 512.0).floor() as f32 / scale\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 0, "score": 267354.8191958265 }, { "content": "pub fn interpolate_fix9_scale(old_val: i32, ...
Rust
src/util/encrypt.rs
MarcoPolo/rust-sssmc39
a3e9d53d295b249c0212d6f7ab9dc434c8210df2
use crate::error::Error; #[cfg(feature = "ring_pbkdf2")] use ring::pbkdf2; #[cfg(feature = "ring_pbkdf2")] use std::num::NonZeroU32; #[cfg(feature = "rust_crypto_pbkdf2")] use pbkdf2::pbkdf2; #[cfg(feature = "rust_crypto_pbkdf2")] use sha2::Sha256; #[cfg(feature = "rust_crypto_pbkdf2")] use hmac::Hmac; #[derive(Debug, Clone, PartialEq, Eq)] pub struct MasterSecretEncConfig { pub min_iteration_count: u32, pub round_count: u8, pub customization_string: Vec<u8>, } impl Default for MasterSecretEncConfig { fn default() -> Self { let min_iteration_count = 10000; let round_count = 4; let customization_string = b"shamir".to_vec(); MasterSecretEncConfig { min_iteration_count, round_count, customization_string, } } } impl MasterSecretEncConfig { pub fn new() -> Self { MasterSecretEncConfig { ..Default::default() } } } pub struct MasterSecretEnc { pub config: MasterSecretEncConfig, } impl Default for MasterSecretEnc { fn default() -> Self { MasterSecretEnc { config: MasterSecretEncConfig::new(), } } } impl MasterSecretEnc { pub fn new() -> Result<MasterSecretEnc, Error> { Ok(MasterSecretEnc { config: MasterSecretEncConfig::new(), }) } pub fn encrypt( &self, master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in 0..self.config.round_count { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } pub fn decrypt( &self, enc_master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = enc_master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in (0..self.config.round_count).rev() { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } fn 
get_salt(&self, identifier: u16) -> Vec<u8> { let mut retval = self.config.customization_string.clone(); retval.append(&mut identifier.to_be_bytes().to_vec()); retval } fn round_function( &self, i: u8, passphrase: &str, e: u8, salt: &[u8], r: &[u8], ) -> Vec<u8> { let iterations = (self.config.min_iteration_count / u32::from(self.config.round_count)) << u32::from(e); let out_length = r.len(); let mut salt = salt.to_owned(); let mut r = r.to_owned(); salt.append(&mut r); let mut password = vec![i]; password.append(&mut passphrase.as_bytes().to_vec()); self.pbkdf2_derive(iterations, &salt, &password, out_length) } #[cfg(feature = "rust_crypto_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::<Hmac<Sha256>>( password, salt, iterations as usize, &mut out, ); out } #[cfg(feature = "ring_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::derive( ring::pbkdf2::PBKDF2_HMAC_SHA256, NonZeroU32::new(iterations as u32).unwrap(), salt, password, &mut out, ); out } fn xor(&self, a: &[u8], b: &[u8]) -> Vec<u8> { let mut retval = vec![0; b.len()]; for i in 0..b.len() { retval[i] = a[i] ^ b[i]; } retval } } #[cfg(test)] mod tests { use super::*; use rand::{thread_rng, Rng}; fn roundtrip_test(secret: Vec<u8>, passphrase: &str, identifier: u16, iteration_exponent: u8) { let enc = MasterSecretEnc::default(); println!("master_secret: {:?}", secret); let encrypted_secret = enc.encrypt(&secret, passphrase, iteration_exponent, identifier); println!("encrypted_secret: {:?}", encrypted_secret); let decrypted_secret = enc.decrypt(&encrypted_secret, passphrase, iteration_exponent, identifier); println!("decrypted_secret: {:?}", decrypted_secret); assert_eq!(secret, decrypted_secret); } #[test] fn roundtrip_test_vector() { for e in vec![0, 6] { let secret = 
b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, e); } } #[test] #[ignore] fn roundtrip_test_vector_slow() { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, 12); } #[test] fn roundtrip_16_bytes() { for _ in 0..20 { let s: [u8; 16] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes() { for _ in 0..20 { let s: [u8; 32] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_12_bytes() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes_password() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "pebkac", id, 0); } } }
use crate::error::Error; #[cfg(feature = "ring_pbkdf2")] use ring::pbkdf2; #[cfg(feature = "ring_pbkdf2")] use std::num::NonZeroU32; #[cfg(feature = "rust_crypto_pbkdf2")] use pbkdf2::pbkdf2; #[cfg(feature = "rust_crypto_pbkdf2")] use sha2::Sha256; #[cfg(feature = "rust_crypto_pbkdf2")] use hmac::Hmac; #[derive(Debug, Clone, PartialEq, Eq)] pub struct MasterSecretEncConfig { pub min_iteration_count: u32, pub round_count: u8, pub customization_string: Vec<u8>, } impl Default for MasterSecretEncConfig { fn default() -> Self { let min_iteration_count = 10000; let round_count = 4; let customization_string = b"shamir".to_vec(); MasterSecretEncConfig { min_iteration_count, round_count, customization_string, } } } impl MasterSecretEncConfig { pub fn new() -> Self { MasterSecretEncConfig { ..Default::default() } } } pub struct MasterSecretEnc { pub config: MasterSecretEncConfig, } impl Default for MasterSecretEnc { fn default() -> Self { MasterSecretEnc { config: MasterSecretEncConfig::new(), } } } impl MasterSecretEnc { pub fn new() -> Result<MasterSecretEnc, Error> { Ok(MasterSecretEnc { config: MasterSecretEncConfig::new(), }) } pub fn encrypt( &self, master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in 0..self.config.round_count { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } pub fn decrypt( &self, enc_master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = enc_master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in (0..self.config.round_count).rev() { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } fn 
get_salt(&self, identifier: u16) -> Vec<u8> { let mut retval = self.config.customization_string.clone(); retval.append(&mut identifier.to_be_bytes().to_vec()); retval } fn round_function( &self, i: u8, passphrase: &str, e: u8, salt: &[u8], r: &[u8], ) -> Vec<u8> { let iterations = (self.config.min_iteration_count / u32::from(self.config.round_count)) << u32::from(e); let out_length = r.len(); let mut salt = salt.to_owned(); let mut r = r.to_owned(); salt.append(&mut r); let mut password = vec![i]; password.append(&mut passphrase.as_bytes().to_vec()); self.pbkdf2_derive(iterations, &salt, &password, out_length) } #[cfg(feature = "rust_crypto_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::<Hmac<Sha256>>( password, salt, iterations as usize, &mut out, ); out } #[cfg(feature = "ring_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::derive( ring::pbk
fn xor(&self, a: &[u8], b: &[u8]) -> Vec<u8> { let mut retval = vec![0; b.len()]; for i in 0..b.len() { retval[i] = a[i] ^ b[i]; } retval } } #[cfg(test)] mod tests { use super::*; use rand::{thread_rng, Rng}; fn roundtrip_test(secret: Vec<u8>, passphrase: &str, identifier: u16, iteration_exponent: u8) { let enc = MasterSecretEnc::default(); println!("master_secret: {:?}", secret); let encrypted_secret = enc.encrypt(&secret, passphrase, iteration_exponent, identifier); println!("encrypted_secret: {:?}", encrypted_secret); let decrypted_secret = enc.decrypt(&encrypted_secret, passphrase, iteration_exponent, identifier); println!("decrypted_secret: {:?}", decrypted_secret); assert_eq!(secret, decrypted_secret); } #[test] fn roundtrip_test_vector() { for e in vec![0, 6] { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, e); } } #[test] #[ignore] fn roundtrip_test_vector_slow() { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, 12); } #[test] fn roundtrip_16_bytes() { for _ in 0..20 { let s: [u8; 16] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes() { for _ in 0..20 { let s: [u8; 32] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_12_bytes() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes_password() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "pebkac", id, 0); } } }
df2::PBKDF2_HMAC_SHA256, NonZeroU32::new(iterations as u32).unwrap(), salt, password, &mut out, ); out }
function_block-function_prefixed
[ { "content": "pub fn verify_checksum(custom_string: &[u8], data: &[u32]) -> Result<(), Error> {\n\n\tlet mut values: Vec<u32> = custom_string.iter().map(|d| u32::from(*d)).collect();\n\n\tfor e in data {\n\n\t\tvalues.push(e.to_owned());\n\n\t}\n\n\tif polymod(&values) != 1 {\n\n\t\treturn Err(ErrorKind::Config...
Rust
runner-integration-tests/src/testkind/optimization.rs
comprakt/comprakt
2315e85972e63ea327c4d115ffe623253b520440
use crate::*; use optimization::{self, Optimization}; use serde_derive::Deserialize; use std::{ fs::File, io::{self, Write}, path::PathBuf, process::Stdio, }; #[derive(Debug, Deserialize, Clone)] #[serde(rename_all = "kebab-case")] pub enum AsmComparisonOutcome { Unchanged, Change, IdenticalTo(ExpectedData), } #[derive(Debug, Deserialize, Clone)] pub struct OptimizationTestData { pub compiler_optimized_stderr: Option<ExpectedData>, pub compiler_optimized_stdout: Option<ExpectedData>, pub compiler_optimized_exitcode: Option<ExpectedData>, pub compiler_reference_stderr: Option<ExpectedData>, pub compiler_reference_stdout: Option<ExpectedData>, pub compiler_reference_exitcode: Option<ExpectedData>, pub stderr: Option<ExpectedData>, pub stdout: Option<ExpectedData>, pub exitcode: Option<ExpectedData>, pub stdin: Option<ExpectedData>, pub optimizations: Vec<optimization::Kind>, pub expect: AsmComparisonOutcome, pub backend_asm: Option<Vec<Backend>>, pub backend: Option<Vec<Backend>>, } impl FromReferencesPath<OptimizationTestData> for OptimizationTestData { fn from_reference_path(_base: &PathBuf) -> Self { Self { compiler_optimized_stderr: None, compiler_optimized_stdout: None, compiler_optimized_exitcode: None, compiler_reference_stderr: None, compiler_reference_stdout: None, compiler_reference_exitcode: None, stderr: None, stdout: None, exitcode: None, stdin: None, optimizations: vec![], expect: AsmComparisonOutcome::Change, backend: Some(vec![Backend::Own, Backend::Libfirm]), backend_asm: Some(vec![Backend::Own, Backend::Libfirm]), } } } impl IntoReferenceData for OptimizationTestData { fn into_reference_data(self, base: &PathBuf) -> ReferenceData { self.into_optimizing_compiler_reference_data(base) } } impl OptimizationTestData { fn into_optimizing_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_optimized_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_optimized_stdout 
.unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_optimized_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_reference_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_reference_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_reference_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_reference_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_binary_reference_data(self, _compiler_base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } } pub fn exec_optimization_test(input: PathBuf, backend: Backend) { let path_binary_optimized = input.with_extension(&format!("{}.optimized.out", backend.to_ascii_label())); let path_binary_reference = input.with_extension(&format!("{}.reference.out", backend.to_ascii_label())); let path_asm_optimized = input.with_extension(&format!("{}.optimized.S", backend.to_ascii_label())); let path_asm_reference = input.with_extension(&format!("{}.reference.S", backend.to_ascii_label())); let setup = TestSpec { references: input.clone(), input: input.clone(), generate_tentatives: true, }; let (input_without_yaml_path, test_data) = load_test_data::<OptimizationTestData>(&setup); if test_data.reference.optimizations.is_empty() { panic!("you MUST at least specify one optimization. 
none given."); } if !test_data .reference .backend .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring {} test for backend {:?}", input.display(), backend ); return; } let callinfo_actual = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_optimized.clone(), assembly: Some(path_asm_optimized.clone()), optimizations: optimization::Level::Custom( test_data .reference .optimizations .clone() .iter() .map(|kind| Optimization { kind: *kind, flags: vec![], }) .collect(), ), }); let mut cmd_actual = compiler_call(callinfo_actual, &input_without_yaml_path); println!("Executing: {:?}", cmd_actual); let output_actual = cmd_actual .output() .expect("failed to call compiler under test for actual input"); assert_output( &output_actual, test_data .reference .clone() .into_optimizing_compiler_reference_data(&path_binary_optimized), &TestSpec { input: path_binary_optimized.clone(), references: path_binary_optimized.clone(), generate_tentatives: true, }, ); let reference_input = match test_data.reference.expect { AsmComparisonOutcome::Change | AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => { input_without_yaml_path.clone() } AsmComparisonOutcome::IdenticalTo(ExpectedData::Inline(ref mj_str)) => { let path = add_extension(&setup.input, "reference"); write(&Some(path.clone()), mj_str).expect( "Failed to write reference mini java \ file to disk (required for input to the compiler under test)", ); path } AsmComparisonOutcome::IdenticalTo(ExpectedData::InFile(ref mj_rel_path)) => { reference_to_absolute_path(&setup, mj_rel_path) } }; let callinfo_reference = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_reference.clone(), assembly: Some(path_asm_reference.clone()), optimizations: optimization::Level::None, }); let mut cmd_reference = compiler_call(callinfo_reference, &reference_input); println!("Executing: {:?}", cmd_reference); let output_reference = 
cmd_reference .output() .expect("failed to call compiler under test for reference input"); assert_output( &output_reference, test_data .reference .clone() .into_reference_compiler_reference_data(&path_binary_reference), &TestSpec { input: path_binary_reference.clone(), references: path_binary_reference.clone(), generate_tentatives: true, }, ); assert_binary( &path_binary_optimized, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_optimized), ); assert_binary( &path_binary_reference, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_reference), ); let asm_optimized = read(&Some(path_asm_optimized.clone())).unwrap(); let asm_reference = read(&Some(path_asm_reference.clone())).unwrap(); let normalized_optimized_asm = normalize_asm(&asm_optimized); let normalized_reference_asm = normalize_asm(&asm_reference); write( &Some(add_extension(&path_asm_optimized, "normalized")), &normalized_optimized_asm, ) .unwrap(); write( &Some(add_extension(&path_asm_reference, "normalized")), &normalized_reference_asm, ) .unwrap(); if !test_data .reference .backend_asm .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring asm comparison in {} test for backend {:?}", input.display(), backend ); return; } match test_data.reference.expect { AsmComparisonOutcome::Change => { if normalized_reference_asm == normalized_optimized_asm { panic!( "asserted assembly to NOT be identical to the reference. \ But they are the same." ); } } AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => {} AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(_) => assert_changeset( &TestSpec { input: path_asm_optimized, references: path_asm_reference, generate_tentatives: false, }, "asm", &normalized_reference_asm, &normalized_optimized_asm, ) .unwrap_or_else(|msg| match test_data.reference.expect { AsmComparisonOutcome::Unchanged => { panic!("{}. 
expected asm to be unchanged.", msg.to_string()) } _ => panic!( "{}. expected asm to be identical to reference.", msg.to_string() ), }), }; } fn strip_comments(s: &str) -> String { let regex = regex::Regex::new("/\\*.*?\\*/").unwrap(); regex.replace_all(s, "").to_string() } fn remove_labels(s: &str) -> String { let regex = regex::RegexBuilder::new(r"^\.L[0-9]+:\n") .multi_line(true) .build() .unwrap(); let s = regex.replace_all(s, "").to_string(); let regex = regex::RegexBuilder::new(r"^.*j(mp|lt|gt|e|ne|ge|le) \.L[0-9]+\n") .multi_line(true) .build() .unwrap(); regex.replace_all(&s, "").to_string() } fn sort_functions(s: &str) -> String { let mut blocks = s .split("# -- Begin ") .map(|block| block.trim()) .collect::<Vec<&str>>(); blocks.sort(); format!("# -- Begin {}", blocks.join("\n# -- Begin ")) } fn remove_trailing_whitespace(s: &str) -> String { let mut lines: Vec<&str> = vec![]; for line in s.lines() { let trimmed = line.trim_end(); if !trimmed.is_empty() { lines.push(trimmed); } } lines.join("\n") } fn normalize_asm(asm: &str) -> String { [ strip_comments, remove_trailing_whitespace, remove_labels, sort_functions, ] .iter() .fold(asm.to_owned(), |acc, transform| transform(&acc)) } fn assert_binary( binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec, references: ReferenceData, ) { let output = run_binary(binary_path, stdin, &setup); assert_output( &output, references, &TestSpec { input: binary_path.clone(), references: binary_path.clone(), generate_tentatives: true, }, ); } fn run_binary(binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec) -> Output { let mut cmd = std::process::Command::new(&binary_path); let mut child = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .expect("failed to invoke generated binary"); if let Some(ref stdin_data) = stdin { match stdin_data { ExpectedData::Ignore => {} ExpectedData::Inline(stdin_str) => { let stdin = child.stdin.as_mut().expect("Failed to 
open stdin"); stdin .write_all(stdin_str.as_bytes()) .expect("Failed to write to stdin of generated binary"); } ExpectedData::InFile(rel_path) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin_path = reference_to_absolute_path(&setup, &rel_path); let mut stdin_reader = File::open(&stdin_path).expect("failed to open stdin file"); io::copy(&mut stdin_reader, stdin) .expect("failed to write to stdin of generated binary"); } } } child .wait_with_output() .expect("failed to invoke generated binary") }
use crate::*; use optimization::{self, Optimization}; use serde_derive::Deserialize; use std::{ fs::File, io::{self, Write}, path::PathBuf, process::Stdio, }; #[derive(Debug, Deserialize, Clone)] #[serde(rename_all = "kebab-case")] pub enum AsmComparisonOutcome { Unchanged, Change, IdenticalTo(ExpectedData), } #[derive(Debug, Deserialize, Clone)] pub struct OptimizationTestData { pub compiler_optimized_stderr: Option<ExpectedData>, pub compiler_optimized_stdout: Option<ExpectedData>, pub compiler_optimized_exitcode: Option<ExpectedData>, pub compiler_reference_stderr: Option<ExpectedData>, pub compiler_reference_stdout: Option<ExpectedData>, pub compiler_reference_exitcode: Option<ExpectedData>, pub stderr: Option<ExpectedData>, pub stdout: Option<ExpectedData>, pub exitcode: Option<ExpectedData>, pub stdin: Option<ExpectedData>, pub optimizations: Vec<optimization::Kind>, pub expect: AsmComparisonOutcome, pub backend_asm: Option<Vec<Backend>>, pub backend: Option<Vec<Backend>>, } impl FromReferencesPath<OptimizationTestData> for OptimizationTestData { fn from_reference_path(_base: &PathBuf) -> Self { Self { compiler_optimized_stderr: None, compiler_optimized_stdout: None, compiler_optimized_exitcode: None, compiler_reference_stderr: None, compiler_reference_stdout: None, compiler_reference_exitcode: None, stderr: None, stdout: None, exitcode: None, stdin: None, optimizations: vec![], expect: AsmComparisonOutcome::Change, backend: Some(vec![Backend::Own, Backend::Libfirm]), backend_asm: Some(vec![Backend::Own, Backend::Libfirm]), } } } impl IntoReferenceData for OptimizationTestData { fn into_reference_data(self, base: &PathBuf) -> ReferenceData { self.into_optimizing_compiler_reference_data(base) } } impl OptimizationTestData { fn into_optimizing_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_optimized_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_optimized_stdout 
.unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_optimized_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_reference_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_reference_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_reference_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_reference_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_binary_reference_data(self, _compiler_base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } } pub fn exec_optimization_test(input: PathBuf, backend: Backend) { let path_binary_optimized = input.with_extension(&format!("{}.optimized.out", backend.to_ascii_label())); let path_binary_reference = input.with_extension(&format!("{}.reference.out", backend.to_ascii_label())); let path_asm_optimized = input.with_extension(&format!("{}.optimized.S", backend.to_ascii_label())); let path_asm_reference = input.with_extension(&format!("{}.reference.S", backend.to_ascii_label())); let setup = TestSpec { references: input.clone(), input: input.clone(), generate_tentatives: true, }; let (input_without_yaml_path, test_data) = load_test_data::<OptimizationTestData>(&setup); if test_data.reference.optimizations.is_empty() { panic!("you MUST at least specify one optimization. 
none given."); } if !test_data .reference .backend .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring {} test for backend {:?}", input.display(), backend ); return; } let callinfo_actual = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_optimized.clone(), assembly: Some(path_asm_optimized.clone()), optimizations: optimization::Level::Custom( test_data .reference .optimizations .clone() .iter() .map(|kind| Optimization { kind: *kind, flags: vec![], }) .collect(), ), }); let mut cmd_actual = c
.into_optimizing_compiler_reference_data(&path_binary_optimized), &TestSpec { input: path_binary_optimized.clone(), references: path_binary_optimized.clone(), generate_tentatives: true, }, ); let reference_input = match test_data.reference.expect { AsmComparisonOutcome::Change | AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => { input_without_yaml_path.clone() } AsmComparisonOutcome::IdenticalTo(ExpectedData::Inline(ref mj_str)) => { let path = add_extension(&setup.input, "reference"); write(&Some(path.clone()), mj_str).expect( "Failed to write reference mini java \ file to disk (required for input to the compiler under test)", ); path } AsmComparisonOutcome::IdenticalTo(ExpectedData::InFile(ref mj_rel_path)) => { reference_to_absolute_path(&setup, mj_rel_path) } }; let callinfo_reference = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_reference.clone(), assembly: Some(path_asm_reference.clone()), optimizations: optimization::Level::None, }); let mut cmd_reference = compiler_call(callinfo_reference, &reference_input); println!("Executing: {:?}", cmd_reference); let output_reference = cmd_reference .output() .expect("failed to call compiler under test for reference input"); assert_output( &output_reference, test_data .reference .clone() .into_reference_compiler_reference_data(&path_binary_reference), &TestSpec { input: path_binary_reference.clone(), references: path_binary_reference.clone(), generate_tentatives: true, }, ); assert_binary( &path_binary_optimized, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_optimized), ); assert_binary( &path_binary_reference, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_reference), ); let asm_optimized = read(&Some(path_asm_optimized.clone())).unwrap(); let asm_reference = read(&Some(path_asm_reference.clone())).unwrap(); let 
normalized_optimized_asm = normalize_asm(&asm_optimized); let normalized_reference_asm = normalize_asm(&asm_reference); write( &Some(add_extension(&path_asm_optimized, "normalized")), &normalized_optimized_asm, ) .unwrap(); write( &Some(add_extension(&path_asm_reference, "normalized")), &normalized_reference_asm, ) .unwrap(); if !test_data .reference .backend_asm .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring asm comparison in {} test for backend {:?}", input.display(), backend ); return; } match test_data.reference.expect { AsmComparisonOutcome::Change => { if normalized_reference_asm == normalized_optimized_asm { panic!( "asserted assembly to NOT be identical to the reference. \ But they are the same." ); } } AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => {} AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(_) => assert_changeset( &TestSpec { input: path_asm_optimized, references: path_asm_reference, generate_tentatives: false, }, "asm", &normalized_reference_asm, &normalized_optimized_asm, ) .unwrap_or_else(|msg| match test_data.reference.expect { AsmComparisonOutcome::Unchanged => { panic!("{}. expected asm to be unchanged.", msg.to_string()) } _ => panic!( "{}. 
expected asm to be identical to reference.", msg.to_string() ), }), }; } fn strip_comments(s: &str) -> String { let regex = regex::Regex::new("/\\*.*?\\*/").unwrap(); regex.replace_all(s, "").to_string() } fn remove_labels(s: &str) -> String { let regex = regex::RegexBuilder::new(r"^\.L[0-9]+:\n") .multi_line(true) .build() .unwrap(); let s = regex.replace_all(s, "").to_string(); let regex = regex::RegexBuilder::new(r"^.*j(mp|lt|gt|e|ne|ge|le) \.L[0-9]+\n") .multi_line(true) .build() .unwrap(); regex.replace_all(&s, "").to_string() } fn sort_functions(s: &str) -> String { let mut blocks = s .split("# -- Begin ") .map(|block| block.trim()) .collect::<Vec<&str>>(); blocks.sort(); format!("# -- Begin {}", blocks.join("\n# -- Begin ")) } fn remove_trailing_whitespace(s: &str) -> String { let mut lines: Vec<&str> = vec![]; for line in s.lines() { let trimmed = line.trim_end(); if !trimmed.is_empty() { lines.push(trimmed); } } lines.join("\n") } fn normalize_asm(asm: &str) -> String { [ strip_comments, remove_trailing_whitespace, remove_labels, sort_functions, ] .iter() .fold(asm.to_owned(), |acc, transform| transform(&acc)) } fn assert_binary( binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec, references: ReferenceData, ) { let output = run_binary(binary_path, stdin, &setup); assert_output( &output, references, &TestSpec { input: binary_path.clone(), references: binary_path.clone(), generate_tentatives: true, }, ); } fn run_binary(binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec) -> Output { let mut cmd = std::process::Command::new(&binary_path); let mut child = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .expect("failed to invoke generated binary"); if let Some(ref stdin_data) = stdin { match stdin_data { ExpectedData::Ignore => {} ExpectedData::Inline(stdin_str) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); stdin .write_all(stdin_str.as_bytes()) .expect("Failed to 
write to stdin of generated binary"); } ExpectedData::InFile(rel_path) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin_path = reference_to_absolute_path(&setup, &rel_path); let mut stdin_reader = File::open(&stdin_path).expect("failed to open stdin file"); io::copy(&mut stdin_reader, stdin) .expect("failed to write to stdin of generated binary"); } } } child .wait_with_output() .expect("failed to invoke generated binary") }
ompiler_call(callinfo_actual, &input_without_yaml_path); println!("Executing: {:?}", cmd_actual); let output_actual = cmd_actual .output() .expect("failed to call compiler under test for actual input"); assert_output( &output_actual, test_data .reference .clone()
random
[ { "content": "pub fn default_reference_exitcode(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"exitcode\"))\n\n}\n\n\n\nimpl OptionalReferenceData {\n\n pub fn all_from_own_file(base: &PathBuf) -> Self {\n\n Self {\n\n stderr: Some(default_reference_stde...
Rust
game_plugin/src/loading.rs
will-hart/cloud-surfer
4363e0bdf506c3049b77ee110cebc08fcd9e828c
mod paths; use crate::loading::paths::PATHS; use crate::GameState; use bevy::asset::LoadState; use bevy::prelude::*; use bevy_kira_audio::AudioSource; pub struct LoadingPlugin; impl Plugin for LoadingPlugin { fn build(&self, app: &mut AppBuilder) { app.add_system_set( SystemSet::on_enter(GameState::Loading).with_system(start_loading.system()), ) .add_system_set(SystemSet::on_update(GameState::Loading).with_system(check_state.system())); } } pub struct LoadingState { textures: Vec<HandleUntyped>, fonts: Vec<HandleUntyped>, audio: Vec<HandleUntyped>, } pub struct FontAssets { pub fira_sans: Handle<Font>, } pub struct AudioAssets { pub collect: Handle<AudioSource>, pub music: Handle<AudioSource>, pub tether_break: Handle<AudioSource>, } pub struct TextureAssets { pub cloud_001: Handle<Texture>, pub player_left: Handle<Texture>, pub player_right: Handle<Texture>, pub laser: Handle<Texture>, pub grass: Handle<Texture>, } pub struct LoadingItem; pub struct LoadingText; fn start_loading( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { let mut fonts: Vec<HandleUntyped> = vec![]; fonts.push(asset_server.load_untyped(PATHS.fira_sans)); let mut audio: Vec<HandleUntyped> = vec![]; audio.push(asset_server.load_untyped(PATHS.audio_collect)); audio.push(asset_server.load_untyped(PATHS.audio_music)); let mut textures: Vec<HandleUntyped> = vec![]; textures.push(asset_server.load_untyped(PATHS.cloud_001)); textures.push(asset_server.load_untyped(PATHS.player_left)); textures.push(asset_server.load_untyped(PATHS.player_right)); textures.push(asset_server.load_untyped(PATHS.laser)); textures.push(asset_server.load_untyped(PATHS.grass)); commands.insert_resource(LoadingState { textures, fonts, audio, }); commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.), Val::Percent(100.)), align_items: AlignItems::Center, justify_content: JustifyContent::Center, flex_direction: FlexDirection::ColumnReverse, 
..Default::default() }, material: materials.add(Color::BLACK.into()), ..Default::default() }) .insert(LoadingItem) .with_children(|node| { node.spawn_bundle(TextBundle { text: Text { sections: vec![TextSection { value: "Loading".to_string(), style: TextStyle { font: asset_server.get_handle("fonts/FiraSans-Bold.ttf"), font_size: 30.0, color: Color::rgb(0.9, 0.9, 0.9), }, }], alignment: Default::default(), }, ..Default::default() }) .insert(LoadingText); }); } fn check_state( mut commands: Commands, mut state: ResMut<State<GameState>>, asset_server: Res<AssetServer>, loading_state: Res<LoadingState>, mut loading_text: Query<&mut Text, With<LoadingText>>, loading_items: Query<Entity, With<LoadingItem>>, ) { if LoadState::Loaded != asset_server.get_group_load_state(loading_state.fonts.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading fonts...".into(); return; } if LoadState::Loaded != asset_server.get_group_load_state(loading_state.textures.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading textures...".into(); return; } if LoadState::Loaded != asset_server.get_group_load_state(loading_state.audio.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading audio...".into(); return; } commands.insert_resource(FontAssets { fira_sans: asset_server.get_handle(PATHS.fira_sans), }); commands.insert_resource(AudioAssets { collect: asset_server.get_handle(PATHS.audio_collect), music: asset_server.get_handle(PATHS.audio_music), tether_break: asset_server.get_handle(PATHS.audio_game_over), }); commands.insert_resource(TextureAssets { cloud_001: asset_server.get_handle(PATHS.cloud_001), player_left: asset_server.get_handle(PATHS.player_left), player_right: asset_server.get_handle(PATHS.player_right), laser: asset_server.get_handle(PATHS.laser), grass: asset_server.get_handle(PATHS.grass), }); state.set(GameState::Menu).unwrap(); for item in 
loading_items.iter() { commands.entity(item).despawn_recursive(); } }
mod paths; use crate::loading::paths::PATHS; use crate::GameState; use bevy::asset::LoadState; use bevy::prelude::*; use bevy_kira_audio::AudioSource; pub struct LoadingPlugin; impl Plugin for LoadingPlugin { fn build(&self, app: &mut AppBuilder) { app.add_system_set( SystemSet::on_enter(GameState::Loading).with_system(start_loading.system()), ) .add_system_set(SystemSet::on_update(GameState::Loading).with_system(check_state.system())); } } pub struct LoadingState { textures: Vec<HandleUntyped>, fonts: Vec<HandleUntyped>, audio: Vec<HandleUntyped>, } pub struct FontAssets { pub fira_sans: Handle<Font>, } pub struct AudioAssets { pub collect: Handle<AudioSource>, pub music: Handle<AudioSource>, pub tether_break: Handle<AudioSource>, } pub struct TextureAssets { pub cloud_001: Handle<Texture>, pub player_left: Handle<Texture>, pub player_right: Handle<Texture>, pub laser: Handle<Texture>, pub grass: Handle<Texture>, } pub struct LoadingItem; pub struct LoadingText; fn start_loading( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { let mut fonts: Vec<HandleUntyped> = vec![]; fonts.push(asset_server.load_untyped(PATHS.fira_sans)); let mut audio: Vec<HandleUntyped> = vec![]; audio.push(asset_server.load_untyped(PATHS.audio_collect)); audio.push(asset_server.load_untyped(PATHS.audio_music)); let mut textures: Vec<HandleUntyped> = vec![]; textures.push(asset_server.load_untyped(PATHS.cloud_001)); textures.push(asset_server.load_untyped(PATHS.player_left)); textures.push(asset_server.load_untyped(PATHS.player_right)); textures.push(asset_server.load_untyped(PATHS.laser)); textures.push(asset_server.load_untyped(PATHS.grass)); commands.insert_resource(LoadingState { textures, fonts, audio, }); commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.), Val::Percent(100.)), align_items: AlignItems::Center, justify_content: JustifyContent::Center, flex_direction: FlexDirection::ColumnReverse, 
..Default::default() }, material: materials.add(Color::BLACK.into()), ..Default::default() }) .insert(LoadingItem) .with_children(|node| { node.spawn_bundle(TextBundle { text: Text { sections: vec![TextSection { value: "Loading".to_string(), style: TextStyle { font: asset_server.get_handle("fonts/FiraSans-Bold.ttf"), font_size: 30.0, color: Color::rgb(0.9, 0.9, 0.9), }, }], alignment: Default::default(), }, ..Default::default() }) .insert(LoadingText); }); }
fn check_state( mut commands: Commands, mut state: ResMut<State<GameState>>, asset_server: Res<AssetServer>, loading_state: Res<LoadingState>, mut loading_text: Query<&mut Text, With<LoadingText>>, loading_items: Query<Entity, With<LoadingItem>>, ) { if LoadState::Loaded != asset_server.get_group_load_state(loading_state.fonts.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading fonts...".into(); return; } if LoadState::Loaded != asset_server.get_group_load_state(loading_state.textures.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading textures...".into(); return; } if LoadState::Loaded != asset_server.get_group_load_state(loading_state.audio.iter().map(|handle| handle.id)) { loading_text.single_mut().unwrap().sections[0].value = "Loading audio...".into(); return; } commands.insert_resource(FontAssets { fira_sans: asset_server.get_handle(PATHS.fira_sans), }); commands.insert_resource(AudioAssets { collect: asset_server.get_handle(PATHS.audio_collect), music: asset_server.get_handle(PATHS.audio_music), tether_break: asset_server.get_handle(PATHS.audio_game_over), }); commands.insert_resource(TextureAssets { cloud_001: asset_server.get_handle(PATHS.cloud_001), player_left: asset_server.get_handle(PATHS.player_left), player_right: asset_server.get_handle(PATHS.player_right), laser: asset_server.get_handle(PATHS.laser), grass: asset_server.get_handle(PATHS.grass), }); state.set(GameState::Menu).unwrap(); for item in loading_items.iter() { commands.entity(item).despawn_recursive(); } }
function_block-full_function
[ { "content": "fn spawn_camera(mut commands: Commands) {\n\n commands.spawn_bundle(OrthographicCameraBundle::new_2d());\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 0, "score": 112530.4475544525 }, { "content": "fn play_game_music(audio: Res<Audio>, channels: Res<AudioCha...
Rust
src/bin/client/gui.rs
CreativeWolfies/sharmat
c139a0adffee32fa6541b284d71934156a57099b
use super::style::SharmatStyleSheet; use super::settings::*; use sharmat::{game::*, player::PlayerColor}; use std::cell::RefCell; use std::collections::HashMap; use std::hash::Hash; use std::rc::Rc; use iced::{ executor, Application, Background, Color, Command, Container, Element, Length, Point, Row, Size, }; use iced_native::{ layout, widget::{svg::Handle, Widget}, MouseCursor, Rectangle, }; use iced_wgpu::{Defaults, Primitive, Renderer}; #[derive(Debug)] pub struct Sharmat { pub game: Rc<RefCell<Game>>, pub stylesheet: SharmatStyleSheet, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, } #[derive(Debug)] pub enum SharmatMessage {} type Message = SharmatMessage; #[derive(Debug)] pub struct GBoard { pub game: Rc<RefCell<Game>>, pub fill_dark: Color, pub fill_light: Color, pub fill_dark_hl: Color, pub fill_light_hl: Color, pub highlight_border_ratio: f32, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, pub flip_board: bool, } impl Application for Sharmat { type Executor = executor::Null; type Message = Message; type Flags = (HashMap<String, Handle>, Game, HashMap<String, SharmatSettingType>); fn new(flags: Self::Flags) -> (Self, Command<Self::Message>) { ( Self { game: Rc::new(RefCell::new(flags.1)), stylesheet: SharmatStyleSheet::default(), piece_assets: Rc::new(flags.0), settings: SharmatSettings::new(flags.2), }, Command::none(), ) } fn title(&self) -> String { String::from("Sharmat") } fn view(&mut self) -> Element<Self::Message> { Container::new( Row::new().push( Container::new::<iced_native::Element<_, _>>( GBoard::new( self.game.clone(), self.piece_assets.clone(), self.settings.clone(), true, ) .into(), ) .width(Length::Units(600)) .height(Length::Units(600)) .padding(10), ), ) .padding(10) .width(Length::Fill) .height(Length::Fill) .center_x() .center_y() .style(self.stylesheet) .into() } fn update(&mut self, _message: Self::Message) -> Command<Self::Message> { Command::none() } } impl GBoard { pub 
fn new( game: Rc<RefCell<Game>>, piece_assets: Rc<HashMap<String, Handle>>, settings: SharmatSettings, flip_board: bool, ) -> GBoard { GBoard { game, fill_dark: Color::from_rgb8(226, 149, 120), fill_light: Color::from_rgb8(255, 221, 210), fill_dark_hl: Color::from_rgb8(113, 129, 120), fill_light_hl: Color::from_rgb8(128, 165, 165), piece_assets, settings: settings.clone(), highlight_border_ratio: 0.15, flip_board, } } #[inline] pub fn get_board_width(&self) -> usize { self.game.borrow().board().width.get() } #[inline] pub fn get_board_height(&self) -> usize { self.game.borrow().board().height.get() } #[inline] pub fn tile_size(&self, width: f32, height: f32) -> f32 { (width / self.get_board_width() as f32).min(height / self.get_board_height() as f32) } #[inline] pub fn get_raw(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } #[inline] pub fn get(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } fn get_hints(&self, m_x: usize, m_y: usize) -> Vec<(usize, usize)> { let hovered_piece_raw = if m_x == std::usize::MAX { None } else { self.get(m_x, m_y) }; if hovered_piece_raw.is_some() && (hovered_piece_raw.unwrap().1 == self .game .borrow() .current_player() .expect("No player?") .color || self.render_hints_opponent()) && self.render_hints() { let raw = &hovered_piece_raw.unwrap(); let game = self.game.borrow(); let hovered_piece = game .pieces() .get(raw.0) .expect(&format!("Couldn't find piece {}", raw.0)); let hovered_player = game .player(raw.1) .expect(&format!("Couldn't find player {:?}", raw.1)); hovered_piece.movement_type()[0] .flatten(self.game.borrow().board(), hovered_player, m_x, m_y) .unwrap() .into_iter() .map(|(dx, dy)| ((m_x as isize + dx) as usize, (m_y as isize + dy) as usize)) .collect() } else { vec![] } } fn get_mouse_pos(&self, bounds: Rectangle, mouse: Point, tile_size: f32) -> (usize, usize) { if 
bounds.contains(mouse) { ( ((mouse.x - bounds.x) / tile_size).floor() as usize, ((mouse.y - bounds.y) / tile_size).floor() as usize, ) } else { (std::usize::MAX, std::usize::MAX) } } pub fn render_hints(&self) -> bool { self.settings.get_bool("render_hints").unwrap_or(true) } pub fn render_hints_opponent(&self) -> bool { self.settings.get_bool("render_hints_opponent").unwrap_or(false) } } impl<'a, Message> Widget<Message, Renderer> for GBoard { fn width(&self) -> Length { Length::Fill } fn height(&self) -> Length { Length::Fill } fn layout(&self, _renderer: &Renderer, limits: &layout::Limits) -> layout::Node { layout::Node::new(Size::new( self.tile_size(limits.max().width, limits.max().height) * self.get_board_width() as f32, self.tile_size(limits.max().width, limits.max().height) * self.get_board_height() as f32, )) } fn hash_layout(&self, hasher: &mut iced_native::Hasher) { self.game.borrow().board().hash(hasher); } fn draw( &self, _renderer: &mut Renderer, _defaults: &Defaults, layout: layout::Layout<'_>, mouse: Point, ) -> (Primitive, MouseCursor) { let mut res: Vec<Primitive> = Vec::new(); let tile_size = self.tile_size(layout.bounds().width, layout.bounds().height); let hl_width = tile_size as f32 * self.highlight_border_ratio; let (m_x, m_y) = self.get_mouse_pos(layout.bounds(), mouse, tile_size); let hints = self.get_hints(m_x, m_y); for y in 0..self.get_board_height() { for x in 0..self.get_board_width() { let v_x = layout.bounds().x + tile_size * x as f32; let v_y = layout.bounds().y + tile_size * y as f32; let bounds = Rectangle { x: v_x, y: v_y, width: tile_size, height: tile_size, }; let sub_bounds = Rectangle { x: v_x + hl_width, y: v_y + hl_width, width: tile_size - 2.0 * hl_width, height: tile_size - 2.0 * hl_width, }; if let Some((piece_index, piece_color)) = self.get(x, y) { if let Some(piece) = self.game.borrow().pieces().get(piece_index) { res.push(Primitive::Svg { handle: self .piece_assets .get(if piece_color.white() { piece.display_white() } 
else { piece.display_black() }) .unwrap() .clone(), bounds, }); } else { panic!("Piece index {} out of bound!", piece_index); } } if hints.iter().find(|(x2, y2)| x == *x2 && y == *y2).is_some() { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light_hl) } else { Background::Color(self.fill_dark_hl) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); res.push(Primitive::Quad { bounds: sub_bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } else { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } } } ( Primitive::Group { primitives: res }, if m_x != std::usize::MAX && self.get(m_x, m_y).is_some() { MouseCursor::Pointer } else { MouseCursor::Idle }, ) } } impl<'a, Message> Into<iced_native::Element<'a, Message, Renderer>> for GBoard { fn into(self) -> iced_native::Element<'a, Message, Renderer> { iced_native::Element::new(self) } }
use super::style::SharmatStyleSheet; use super::settings::*; use sharmat::{game::*, player::PlayerColor}; use std::cell::RefCell; use std::collections::HashMap; use std::hash::Hash; use std::rc::Rc; use iced::{ executor, Application, Background, Color, Command, Container, Element, Length, Point, Row, Size, }; use iced_native::{ layout, widget::{svg::Handle, Widget}, MouseCursor, Rectangle, }; use iced_wgpu::{Defaults, Primitive, Renderer}; #[derive(Debug)] pub struct Sharmat { pub game: Rc<RefCell<Game>>, pub stylesheet: SharmatStyleSheet, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, } #[derive(Debug)] pub enum SharmatMessage {} type Message = SharmatMessage; #[derive(Debug)] pub struct GBoard { pub game: Rc<RefCell<Game>>, pub fill_dark: Color, pub fill_light: Color, pub fill_dark_hl: Color, pub fill_light_hl: Color, pub highlight_border_ratio: f32, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, pub flip_board: bool, } impl Application for Sharmat { type Executor = executor::Null; type Message = Message; type Flags = (HashMap<String, Handle>, Game, HashMap<String, SharmatSettingType>); fn new(flags: Self::Flags) -> (Self, Command<Self::Message>) { ( Self { game: Rc::new(RefCell::new(flags.1)), stylesheet: SharmatStyleSheet::default(), piece_assets: Rc::new(flags.0), settings: SharmatSettings::new(flags.2), }, Command::none(), ) } fn title(&self) -> String { String::from("Sharmat") } fn view(&mut self) -> Element<Self::Message> { Container::new( Row::new().push( Container::new::<iced_native::Element<_, _>>( GBoard::new( self.game.clone(), self.piece_assets.clone(), self.settings.clone(), true, ) .into(), ) .width(Length::Units(600)) .height(Length::Units(600)) .padding(10), ), ) .padding(10) .width(Length::Fill) .height(Length::Fill) .center_x() .center_y() .style(self.stylesheet) .into() } fn update(&mut self, _message: Self::Message) -> Command<Self::Message> { Command::none() } } impl GBoard { pub 
fn new( game: Rc<RefCell<Game>>, piece_assets: Rc<HashMap<String, Handle>>, settings: SharmatSettings, flip_board: bool, ) -> GBoard { GBoard { game, fill_dark: Color::from_rgb8(226, 149, 120), fill_light: Color::from_rgb8(255, 221, 210), fill_dark_hl: Color::from_rgb8(113, 129, 120), fill_light_hl: Color::from_rgb8(128, 165, 165), piece_assets, settings: settings.clone(), highlight_border_ratio: 0.15, flip_board, } } #[inline] pub fn get_board_width(&self) -> usize { self.game.borrow().board().width.get() } #[inline] pub fn get_board_height(&self) -> usize { self.game.borrow().board().height.get() } #[inline] pub fn tile_size(&self, width: f32, height: f32) -> f32 { (width / self.get_board_width() as f32).min(height / self.get_board_height() as f32) } #[inline] pub fn get_raw(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } #[inline] pub fn get(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } fn get_hints(&self, m_x: usize, m_y: usize) -> Vec<(usize, usize)> { let hovered_piece_raw = if m_x == std::usize::MAX { None } else { self.get(m_x, m_y) }; if hovered_piece_raw.is_some() && (hovered_piece_raw.unwrap().1 == self .game .borrow() .current_player() .expect("No player?") .color || self.render_hints_opponent()) && self.render_hints() { let raw = &hovered_piece_raw.unwrap(); let game = self.game.borrow(); let hovered_piece = game .pieces() .get(raw.0) .expect(&format!("Couldn't find piece {}", raw.0)); let hovered_player = game .player(raw.1) .expect(&format!("Couldn't find player {:?}", raw.1)); hovered_piece.movement_type()[0] .flatten(self.game.borrow().board(), hovered_player, m_x, m_y) .unwrap() .into_iter() .map(|(dx, dy)| ((m_x as isize + dx) as usize, (m_y as isize + dy) as usize)) .collect() } else { vec![] } } fn get_mouse_pos(&self, bounds: Rectangle, mouse: Point, tile_size: f32) -> (us
pub fn render_hints(&self) -> bool { self.settings.get_bool("render_hints").unwrap_or(true) } pub fn render_hints_opponent(&self) -> bool { self.settings.get_bool("render_hints_opponent").unwrap_or(false) } } impl<'a, Message> Widget<Message, Renderer> for GBoard { fn width(&self) -> Length { Length::Fill } fn height(&self) -> Length { Length::Fill } fn layout(&self, _renderer: &Renderer, limits: &layout::Limits) -> layout::Node { layout::Node::new(Size::new( self.tile_size(limits.max().width, limits.max().height) * self.get_board_width() as f32, self.tile_size(limits.max().width, limits.max().height) * self.get_board_height() as f32, )) } fn hash_layout(&self, hasher: &mut iced_native::Hasher) { self.game.borrow().board().hash(hasher); } fn draw( &self, _renderer: &mut Renderer, _defaults: &Defaults, layout: layout::Layout<'_>, mouse: Point, ) -> (Primitive, MouseCursor) { let mut res: Vec<Primitive> = Vec::new(); let tile_size = self.tile_size(layout.bounds().width, layout.bounds().height); let hl_width = tile_size as f32 * self.highlight_border_ratio; let (m_x, m_y) = self.get_mouse_pos(layout.bounds(), mouse, tile_size); let hints = self.get_hints(m_x, m_y); for y in 0..self.get_board_height() { for x in 0..self.get_board_width() { let v_x = layout.bounds().x + tile_size * x as f32; let v_y = layout.bounds().y + tile_size * y as f32; let bounds = Rectangle { x: v_x, y: v_y, width: tile_size, height: tile_size, }; let sub_bounds = Rectangle { x: v_x + hl_width, y: v_y + hl_width, width: tile_size - 2.0 * hl_width, height: tile_size - 2.0 * hl_width, }; if let Some((piece_index, piece_color)) = self.get(x, y) { if let Some(piece) = self.game.borrow().pieces().get(piece_index) { res.push(Primitive::Svg { handle: self .piece_assets .get(if piece_color.white() { piece.display_white() } else { piece.display_black() }) .unwrap() .clone(), bounds, }); } else { panic!("Piece index {} out of bound!", piece_index); } } if hints.iter().find(|(x2, y2)| x == *x2 && y == 
*y2).is_some() { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light_hl) } else { Background::Color(self.fill_dark_hl) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); res.push(Primitive::Quad { bounds: sub_bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } else { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } } } ( Primitive::Group { primitives: res }, if m_x != std::usize::MAX && self.get(m_x, m_y).is_some() { MouseCursor::Pointer } else { MouseCursor::Idle }, ) } } impl<'a, Message> Into<iced_native::Element<'a, Message, Renderer>> for GBoard { fn into(self) -> iced_native::Element<'a, Message, Renderer> { iced_native::Element::new(self) } }
ize, usize) { if bounds.contains(mouse) { ( ((mouse.x - bounds.x) / tile_size).floor() as usize, ((mouse.y - bounds.y) / tile_size).floor() as usize, ) } else { (std::usize::MAX, std::usize::MAX) } }
function_block-function_prefixed
[ { "content": "type RawPiece = Option<(usize, PlayerColor)>;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone)]\n\npub struct Board {\n\n pub width: NonZeroUsize,\n\n pub height: NonZeroUsize,\n\n board: Vec<Vec<RawPiece>>,\n\n name: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum Bo...
Rust
src/auto/misc.rs
talklittle/gtk
b3af34228bef07e0c22829437d73144857fa44d7
use Buildable; use Widget; use ffi; use glib; use glib::StaticType; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::boxed::Box as Box_; use std::mem; use std::mem::transmute; use std::ptr; glib_wrapper! { pub struct Misc(Object<ffi::GtkMisc, ffi::GtkMiscClass>): Widget, Buildable; match fn { get_type => || ffi::gtk_misc_get_type(), } } pub trait MiscExt { #[cfg_attr(feature = "v3_14", deprecated)] fn get_alignment(&self) -> (f32, f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_padding(&self) -> (i32, i32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_alignment(&self, xalign: f32, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_padding(&self, xpad: i32, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xalign(&self, xalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xpad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xpad(&self, xpad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yalign(&self, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_ypad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_ypad(&self, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_ypad_notify<F: Fn(&Self) + 
'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Misc> + IsA<glib::object::Object>> MiscExt for O { fn get_alignment(&self) -> (f32, f32) { unsafe { let mut xalign = mem::uninitialized(); let mut yalign = mem::uninitialized(); ffi::gtk_misc_get_alignment(self.to_glib_none().0, &mut xalign, &mut yalign); (xalign, yalign) } } fn get_padding(&self) -> (i32, i32) { unsafe { let mut xpad = mem::uninitialized(); let mut ypad = mem::uninitialized(); ffi::gtk_misc_get_padding(self.to_glib_none().0, &mut xpad, &mut ypad); (xpad, ypad) } } fn set_alignment(&self, xalign: f32, yalign: f32) { unsafe { ffi::gtk_misc_set_alignment(self.to_glib_none().0, xalign, yalign); } } fn set_padding(&self, xpad: i32, ypad: i32) { unsafe { ffi::gtk_misc_set_padding(self.to_glib_none().0, xpad, ypad); } } fn get_property_xalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xalign(&self, xalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xalign".to_glib_none().0, Value::from(&xalign).to_glib_none().0); } } fn get_property_xpad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xpad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xpad(&self, xpad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xpad".to_glib_none().0, Value::from(&xpad).to_glib_none().0); } } fn get_property_yalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yalign(&self, yalign: f32) { unsafe { 
gobject_ffi::g_object_set_property(self.to_glib_none().0, "yalign".to_glib_none().0, Value::from(&yalign).to_glib_none().0); } } fn get_property_ypad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "ypad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_ypad(&self, ypad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "ypad".to_glib_none().0, Value::from(&ypad).to_glib_none().0); } } fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xalign", transmute(notify_xalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xpad", transmute(notify_xpad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yalign", transmute(notify_yalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::ypad", transmute(notify_ypad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } unsafe extern "C" fn notify_xalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); 
f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_xpad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_ypad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) }
use Buildable; use Widget; use ffi; use glib; use glib::StaticType; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::boxed::Box as Box_; use std::mem; use std::mem::transmute; use std::ptr; glib_wrapper! { pub struct Misc(Object<ffi::GtkMisc, ffi::GtkMiscClass>): Widget, Buildable; match fn { get_type => || ffi::gtk_misc_get_type(), } } pub trait MiscExt { #[cfg_attr(feature = "v3_14", deprecated)] fn get_alignment(&self) -> (f32, f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_padding(&self) -> (i32, i32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_alignment(&self, xalign: f32, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_padding(&self, xpad: i32, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xalign(&self, xalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xpad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xpad(&self, xpad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yalign(&self, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_ypad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_ypad(&self, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_ypad_notify<F: Fn(&Self) + 
'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Misc> + IsA<glib::object::Object>> MiscExt for O { fn get_alignment(&self) -> (f32, f32) { unsafe { let mut xalign = mem::uninitialized(); let mut yalign = mem::uninitialized(); ffi::gtk_misc_get_alignment(self.to_glib_none().0, &mut xalign, &mut yalign); (xalign, yalign) } } fn get_padding(&self) -> (i32, i32) { unsafe { let mut xpad = mem::uninitialized(); let mut ypad = mem::uninitialized(); ffi::gtk_misc_get_padding(self.to_glib_none().0, &mut xpad, &mut ypad); (xpad, ypad) } } fn set_alignment(&self, xalign: f32, yalign: f32) { unsafe { ffi::gtk_misc_set_alignment(self.to_glib_none().0, xalign, yalign); } } fn set_padding(&self, xpad: i32, ypad: i32) { unsafe { ffi::gtk_misc_set_padding(self.to_glib_none().0, xpad, ypad); } } fn get_property_xalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xalign(&self, xalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xalign".to_glib_none().0, Value::from(&xalign).to_glib_none().0); } } fn get_property_xpad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xpad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xpad(&self, xpad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xpad".to_glib_none().0, Value::from(&xpad).to_glib_none().0); } } fn get_property_yalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yalign(&self, yalign: f32) { unsafe { 
gobject_ffi::g_object_set_property(self.to_glib_none().0, "yalign".to_glib_none().0, Value::from(&yalign).to_glib_none().0); } } fn get_property_ypad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "ypad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_ypad(&self, ypad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "ypad".to_glib_none().0, Value::from(&ypad).to_glib_none().0); } } fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xalign", transmute(notify_xalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xpad", transmute(notify_xpad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerI
fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::ypad", transmute(notify_ypad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } unsafe extern "C" fn notify_xalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_xpad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_ypad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) }
d { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yalign", transmute(notify_yalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } }
function_block-function_prefixed
[ { "content": "fn into_raw<F: FnMut() -> Continue + 'static>(func: F) -> gpointer {\n\n let func: Box<RefCell<Box<FnMut() -> Continue + 'static>>> =\n\n Box::new(RefCell::new(Box::new(func)));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 0, "scor...
Rust
src/arena/views/character_overlay.rs
chamons/ArenaGS
0d3c8d4ebc818198b21a8c99dc853286cc16b7c2
use sdl2::pixels::Color; use sdl2::rect::Point as SDLPoint; use sdl2::rect::Rect as SDLRect; use specs::prelude::*; use super::TILE_SIZE; use crate::after_image::prelude::*; use crate::atlas::prelude::*; use crate::clash::{ShortInfo, StatusInfo, StatusKind}; use crate::props::LifeBar; pub struct CharacterOverlay { cache: IconCache, lifebar: LifeBar, } enum OverlayStatus { Burning, Frozen, Static, Aimed, Armored, Regen, } impl OverlayStatus { fn get_file_name(&self) -> &'static str { match self { OverlayStatus::Burning => "fire.png", OverlayStatus::Frozen => "ice.png", OverlayStatus::Static => "shock.png", OverlayStatus::Aimed => "aimed.png", OverlayStatus::Armored => "armor.png", OverlayStatus::Regen => "regen.png", } } } impl CharacterOverlay { pub fn init(render_context: &RenderContext) -> BoxResult<CharacterOverlay> { Ok(CharacterOverlay { cache: IconCache::init_with_alpha( render_context, IconLoader::init_overlay_icons(), &[ "small_frame.png", "large_frame.png", "fire.png", "ice.png", "shock.png", "regen.png", "aimed.png", "armor.png", ], Some(212), )?, lifebar: LifeBar::init(render_context)?, }) } fn get_overlay_statuses(&self, ecs: &World, entity: Entity) -> Vec<OverlayStatus> { let mut status = vec![]; let temperature = ecs.get_temperature(entity); if temperature.is_burning() { status.push(OverlayStatus::Burning); } if temperature.is_freezing() { status.push(OverlayStatus::Frozen); } if ecs.has_status(entity, StatusKind::StaticCharge) { status.push(OverlayStatus::Static); } if ecs.has_status(entity, StatusKind::Armored) { status.push(OverlayStatus::Armored); } if ecs.has_status(entity, StatusKind::Regen) { status.push(OverlayStatus::Regen); } if ecs.has_status(entity, StatusKind::Aimed) { status.push(OverlayStatus::Aimed); } status } pub fn draw_character_overlay(&self, canvas: &mut RenderCanvas, ecs: &World, entity: Entity, screen_position: SDLPoint) -> BoxResult<()> { let size = { let position = ecs.get_position(entity); if position.width == 1 && 
position.height == 1 { 1 } else if position.width == 2 && position.height == 2 { 2 } else { panic!(); } }; let life_size = { match size { 1 => TILE_SIZE - 5, 2 => 2 * (TILE_SIZE - 5), _ => panic!("Unknown lifebar size"), } }; let lifebar_rect = SDLRect::new( screen_position.x() - (life_size as i32 / 2) + 2, screen_position.y() + ((4 * TILE_SIZE as i32) / 5) + 2, life_size - 4, 6, ); let defenses = ecs.get_defenses(entity); let health = defenses.health as f64 / defenses.max_health as f64; let absorb = f64::min(defenses.absorb as f64 / defenses.max_health as f64, 1.0); self.lifebar.render(lifebar_rect, canvas, health, absorb)?; canvas.set_draw_color(Color::RGBA(0, 0, 0, 128)); for (i, status) in self.get_overlay_statuses(ecs, entity).iter().enumerate().take(if size == 1 { 2 } else { 4 }) { let offset = { match size { 1 => SDLPoint::new(-17, 22), 2 => SDLPoint::new(-38, 22), _ => panic!("Unknown overlay width"), } }; let status_start = SDLPoint::new(screen_position.x() + offset.x() + (i as i32 * 18), screen_position.y() + offset.y()); canvas.fill_rect(SDLRect::new(status_start.x(), status_start.y(), 17, 17))?; canvas.copy( &self.cache.get(status.get_file_name()), None, SDLRect::new(status_start.x(), status_start.y(), 16, 16), )?; } match size { 1 => self.draw_small_bracket(canvas, screen_position)?, 2 => self.draw_large_bracket(canvas, screen_position)?, _ => panic!("Unknown bracket size"), } Ok(()) } fn draw_large_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE * 2, TILE_SIZE * 2); let screen_rect = SDLRect::new( screen_position.x() - TILE_SIZE as i32, screen_position.y() - TILE_SIZE as i32, TILE_SIZE * 2, TILE_SIZE * 2, ); canvas.copy(self.cache.get("large_frame.png"), image_rect, screen_rect)?; Ok(()) } fn draw_small_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE, TILE_SIZE); let screen_rect = 
SDLRect::new(screen_position.x() - (TILE_SIZE as i32 / 2), screen_position.y(), TILE_SIZE, TILE_SIZE); canvas.copy(self.cache.get("small_frame.png"), image_rect, screen_rect)?; Ok(()) } }
use sdl2::pixels::Color; use sdl2::rect::Point as SDLPoint; use sdl2::rect::Rect as SDLRect; use specs::prelude::*; use super::TILE_SIZE; use crate::after_image::prelude::*; use crate::atlas::prelude::*; use crate::clash::{ShortInfo, StatusInfo, StatusKind}; use crate::props::LifeBar; pub struct CharacterOverlay { cache: IconCache, lifebar: LifeBar, } enum OverlayStatus { Burning, Frozen, Static, Aimed, Armored, Regen, } impl OverlayStatus { fn get_file_name(&self) -> &'static str { match self { OverlayStatus::Burning => "fire.png", OverlayStatus::Frozen => "ice.png", OverlayStatus::Static => "shock.png", OverlayStatus::Aimed => "aimed.png", OverlayStatus::Armored => "armor.png", OverlayStatus::Regen => "regen.png", } } } impl CharacterOverlay { pub fn init(render_context: &RenderContext) -> BoxResult<CharacterOverlay> {
} fn get_overlay_statuses(&self, ecs: &World, entity: Entity) -> Vec<OverlayStatus> { let mut status = vec![]; let temperature = ecs.get_temperature(entity); if temperature.is_burning() { status.push(OverlayStatus::Burning); } if temperature.is_freezing() { status.push(OverlayStatus::Frozen); } if ecs.has_status(entity, StatusKind::StaticCharge) { status.push(OverlayStatus::Static); } if ecs.has_status(entity, StatusKind::Armored) { status.push(OverlayStatus::Armored); } if ecs.has_status(entity, StatusKind::Regen) { status.push(OverlayStatus::Regen); } if ecs.has_status(entity, StatusKind::Aimed) { status.push(OverlayStatus::Aimed); } status } pub fn draw_character_overlay(&self, canvas: &mut RenderCanvas, ecs: &World, entity: Entity, screen_position: SDLPoint) -> BoxResult<()> { let size = { let position = ecs.get_position(entity); if position.width == 1 && position.height == 1 { 1 } else if position.width == 2 && position.height == 2 { 2 } else { panic!(); } }; let life_size = { match size { 1 => TILE_SIZE - 5, 2 => 2 * (TILE_SIZE - 5), _ => panic!("Unknown lifebar size"), } }; let lifebar_rect = SDLRect::new( screen_position.x() - (life_size as i32 / 2) + 2, screen_position.y() + ((4 * TILE_SIZE as i32) / 5) + 2, life_size - 4, 6, ); let defenses = ecs.get_defenses(entity); let health = defenses.health as f64 / defenses.max_health as f64; let absorb = f64::min(defenses.absorb as f64 / defenses.max_health as f64, 1.0); self.lifebar.render(lifebar_rect, canvas, health, absorb)?; canvas.set_draw_color(Color::RGBA(0, 0, 0, 128)); for (i, status) in self.get_overlay_statuses(ecs, entity).iter().enumerate().take(if size == 1 { 2 } else { 4 }) { let offset = { match size { 1 => SDLPoint::new(-17, 22), 2 => SDLPoint::new(-38, 22), _ => panic!("Unknown overlay width"), } }; let status_start = SDLPoint::new(screen_position.x() + offset.x() + (i as i32 * 18), screen_position.y() + offset.y()); canvas.fill_rect(SDLRect::new(status_start.x(), status_start.y(), 17, 17))?; 
canvas.copy( &self.cache.get(status.get_file_name()), None, SDLRect::new(status_start.x(), status_start.y(), 16, 16), )?; } match size { 1 => self.draw_small_bracket(canvas, screen_position)?, 2 => self.draw_large_bracket(canvas, screen_position)?, _ => panic!("Unknown bracket size"), } Ok(()) } fn draw_large_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE * 2, TILE_SIZE * 2); let screen_rect = SDLRect::new( screen_position.x() - TILE_SIZE as i32, screen_position.y() - TILE_SIZE as i32, TILE_SIZE * 2, TILE_SIZE * 2, ); canvas.copy(self.cache.get("large_frame.png"), image_rect, screen_rect)?; Ok(()) } fn draw_small_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE, TILE_SIZE); let screen_rect = SDLRect::new(screen_position.x() - (TILE_SIZE as i32 / 2), screen_position.y(), TILE_SIZE, TILE_SIZE); canvas.copy(self.cache.get("small_frame.png"), image_rect, screen_rect)?; Ok(()) } }
Ok(CharacterOverlay { cache: IconCache::init_with_alpha( render_context, IconLoader::init_overlay_icons(), &[ "small_frame.png", "large_frame.png", "fire.png", "ice.png", "shock.png", "regen.png", "aimed.png", "armor.png", ], Some(212), )?, lifebar: LifeBar::init(render_context)?, })
call_expression
[ { "content": "pub fn get_elemental_summon_to_use(ecs: &World) -> &'static str {\n\n let mut elements = vec![ElementalKind::Water, ElementalKind::Fire, ElementalKind::Wind, ElementalKind::Earth];\n\n\n\n for e in find_all_characters(ecs).iter().filter(|&&c| is_elemental(ecs, c)) {\n\n match get_elem...
Rust
tachyon-core/src/routers/api/object.rs
LGU-Web3-0/Project-Tachyon
156b5af1dc6e2c84c0818f38d81c42a4886510e1
use crate::session::UserInfo; use crate::{IntoAnyhow, State, StatusCode}; use actix_multipart::Multipart; use actix_session::Session; use actix_web::error::{ErrorBadRequest, ErrorInternalServerError}; use actix_web::http::header::{ContentDisposition, ContentType, DispositionParam, DispositionType}; use actix_web::web::Bytes; use actix_web::{error, web, HttpResponse, Result}; use entity::sea_orm::DatabaseBackend::Postgres; use entity::sea_orm::QueryFilter; use entity::sea_orm::{ActiveModelTrait, ConnectionTrait, Statement}; use entity::sea_orm::{ActiveValue, ColumnTrait, EntityTrait}; use futures::{StreamExt, TryFutureExt}; use sled::IVec; use std::pin::Pin; use std::task::{Context, Poll}; use uuid::Uuid; #[derive(Debug, serde::Deserialize, serde::Serialize)] pub struct ObjectRequest { uuid: Option<Uuid>, name: Option<String>, } struct ObjectData { inner: Option<IVec>, } const CHUNK_SIZE: usize = 1024 * 1024; impl futures::Stream for ObjectData { type Item = Result<Bytes>; fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.get_mut(); if this.inner.is_none() { return Poll::Ready(None); } unsafe { let length = this.inner.as_ref().unwrap_unchecked().len(); match this .inner .as_ref() .unwrap_unchecked() .chunks(CHUNK_SIZE) .next() { None => Poll::Ready(None), Some(x) => { let result = Poll::Ready(Some(Ok(Bytes::copy_from_slice(x)))); if x.len() < length { let slice = this .inner .as_ref() .unwrap_unchecked() .subslice(x.len(), length - x.len()); this.inner.replace(slice); } else { this.inner = None; } result } } } } } #[derive(serde::Deserialize, serde::Serialize, Debug)] struct ObjectResult { success: bool, message: Option<String>, } pub async fn upload( session: Session, data: web::Data<State>, mut payload: Multipart, ) -> Result<HttpResponse> { async fn parse_data(payload: &mut Multipart) -> Result<(entity::object::ActiveModel, Vec<u8>)> { let mut model = entity::object::ActiveModel { uuid: ActiveValue::NotSet, 
name: ActiveValue::NotSet, visibility: ActiveValue::Set(false), upload_time: ActiveValue::Set(chrono::Utc::now()), mimetype: ActiveValue::NotSet, }; let mut content = Vec::new(); while let Some(item) = payload.next().await { let mut field = item?; match field.name() { "file" => { while let Some(x) = field.next().await { content.extend(x?); } model.mimetype = ActiveValue::Set(field.content_type().to_string()); } "visibility" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } log::error!("{}", String::from_utf8(data.clone()).unwrap()); model.visibility = ActiveValue::Set(data.as_slice() == b"on"); } "filename" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } model.name = ActiveValue::Set( String::from_utf8(data) .map_err(ErrorInternalServerError) .and_then(|x| { if x.trim().is_empty() { Err(ErrorBadRequest("filename cannot be empty")) } else { Ok(x.trim().to_string()) } })?, ); } _ => (), } } Ok((model, content)) } async fn insert_kv( mut model: entity::object::ActiveModel, content: Vec<u8>, data: &web::Data<State>, ) -> Result<entity::object::ActiveModel> { let mut uuid; loop { uuid = Uuid::new_v4(); match data .kv_db .compare_and_swap( uuid.as_bytes(), Option::<&[u8]>::None, Some(content.as_slice()), ) .map_err(ErrorInternalServerError) { Ok(Ok(_)) => { model.uuid = ActiveValue::Set(uuid); break; } Ok(Err(_)) => tokio::task::yield_now().await, Err(e) => return Err(e), } } if let Err(e) = data.kv_db.flush_async().await { log::error!("sled insertion error: {}", e); return Err(ErrorInternalServerError(e)); }; Ok(model) } match session.get::<UserInfo>("user")? 
{ None => simd_json::to_string(&ObjectResult { success: false, message: Some("unauthorized".to_string()), }) .map_err(ErrorInternalServerError) .map(|x| { HttpResponse::Ok() .content_type("application/json") .status(StatusCode::UNAUTHORIZED) .json(ObjectResult { success: false, message: Some(x), }) }), _ => Ok(parse_data(&mut payload) .and_then(|(model, content)| insert_kv(model, content, &data)) .and_then(|model| model.insert(&data.sql_db).map_err(ErrorInternalServerError)) .await .map(|_| { HttpResponse::Created() .content_type("application/json") .json(ObjectResult { success: true, message: None, }) }) .unwrap_or_else(|e| { HttpResponse::BadRequest() .content_type("application/json") .json(ObjectResult { success: false, message: Some(e.to_string()), }) })), } } #[derive(serde::Deserialize, serde::Serialize)] pub struct VisibilityChange { pub uuid: Uuid, } pub async fn change_visibility( info: web::Json<VisibilityChange>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!( r#"UPDATE object SET visibility = NOT visibility WHERE uuid = '{}'"#, info.uuid ), )) .await { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } #[derive(serde::Deserialize, serde::Serialize)] pub struct DeleteRequest { pub uuid: Uuid, } pub async fn delete( info: web::Json<DeleteRequest>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( 
Postgres, format!(r#"DELETE FROM object WHERE uuid = '{}'"#, info.uuid), )) .await .anyhow() .and_then(|x| { if x.rows_affected() != 0 { data.kv_db.remove(info.uuid.as_bytes()).anyhow().and(Ok(())) } else { Ok(()) } }) { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } pub async fn get_handler( info: web::Query<ObjectRequest>, data: web::Data<State>, ) -> Result<HttpResponse> { let metadata: entity::object::Model = if let Some(uuid) = info.uuid { entity::object::Entity::find_by_id(uuid) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else if let Some(name) = &info.name { entity::object::Entity::find() .filter(entity::object::Column::Name.eq(name.as_str())) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else { return Err(error::ErrorBadRequest("invalid request")); }; if !metadata.visibility { return Err(error::ErrorUnauthorized("target not authorized")); } let inner = data .kv_db .get(metadata.uuid.as_bytes()) .map_err(error::ErrorInternalServerError)? 
.ok_or_else(|| error::ErrorNotFound("not found"))?; let stream = ObjectData { inner: Some(inner) }; Ok(HttpResponse::Ok() .insert_header(ContentType( metadata .mimetype .parse() .map_err(error::ErrorInternalServerError)?, )) .insert_header(ContentDisposition { disposition: DispositionType::Attachment, parameters: vec![DispositionParam::Filename( metadata.name.as_str().to_string(), )], }) .streaming(stream)) } #[cfg(test)] mod test { #[cfg(all(not(miri), test))] #[actix_rt::test] async fn it_polls_fully() { use crate::routers::api::object::ObjectData; use futures::StreamExt; use rand::distributions::Alphanumeric; use rand::prelude::*; let uuid = uuid::Uuid::new_v4(); let db = sled::open(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); for i in [1, 2, 123, 555, 5261, 114514, 1024000] { let rand_string: String = thread_rng() .sample_iter(&Alphanumeric) .take(i) .map(char::from) .collect(); db.insert(format!("test-{}", i), rand_string.as_bytes()) .unwrap(); let data = db.get(format!("test-{}", i)).unwrap().unwrap(); let data = ObjectData { inner: Some(data) }; let data: Vec<u8> = data .collect::<Vec<_>>() .await .into_iter() .filter_map(Result::ok) .map(|x| x.to_vec()) .flatten() .collect(); assert_eq!(data, rand_string.as_bytes()) } std::fs::remove_dir_all(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); } }
use crate::session::UserInfo; use crate::{IntoAnyhow, State, StatusCode}; use actix_multipart::Multipart; use actix_session::Session; use actix_web::error::{ErrorBadRequest, ErrorInternalServerError}; use actix_web::http::header::{ContentDisposition, ContentType, DispositionParam, DispositionType}; use actix_web::web::Bytes; use actix_web::{error, web, HttpResponse, Result}; use entity::sea_orm::DatabaseBackend::Postgres; use entity::sea_orm::QueryFilter; use entity::sea_orm::{ActiveModelTrait, ConnectionTrait, Statement}; use entity::sea_orm::{ActiveValue, ColumnTrait, EntityTrait}; use futures::{StreamExt, TryFutureExt}; use sled::IVec; use std::pin::Pin; use std::task::{Context, Poll}; use uuid::Uuid; #[derive(Debug, serde::Deserialize, serde::Serialize)] pub struct ObjectRequest { uuid: Option<Uuid>, name: Option<String>, } struct ObjectData { inner: Option<IVec>, } const CHUNK_SIZE: usize = 1024 * 1024; impl futures::Stream for ObjectData { type Item = Result<Bytes>; fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.get_mut(); if this.inner.is_none() { return Poll::Ready(None); } unsafe { let length = this.inner.as_ref().unwrap_unchecked().len(); match this .inner .as_ref() .unwrap_unchecked() .chunks(CHUNK_SIZE) .next() { None => Poll::Ready(None), Some(x) => { let result = Poll::Ready(Some(Ok(Bytes::copy_from_slice(x)))); if x.len() < length { let slice = this .inner .as_ref() .unwrap_unchecked() .subslice(x.len(), length - x.len()); this.inner.replace(slice); } else { this.inner = None; } result } } } } } #[derive(serde::Deserialize, serde::Serialize, Debug)] struct ObjectResult { success: bool, message: Option<String>, } pub async fn upload( session: Session, data: web::Data<State>, mut payload: Multipart, ) -> Result<HttpResponse> { async fn parse_data(payload: &mut Multipart) -> Result<(entity::object::ActiveModel, Vec<u8>)> { let mut model = entity::object::ActiveModel { uuid: ActiveValue::NotSet, 
name: ActiveValue::NotSet, visibility: ActiveValue::Set(false), upload_time: ActiveValue::Set(chrono::Utc::now()), mimetype: ActiveValue::NotSet, }; let mut content = Vec::new(); while let Some(item) = payload.next().await { let mut field = item?; match field.name() { "file" => { while let Some(x) = field.next().await { content.extend(x?); } model.mimetype = ActiveValue::Set(field.content_type().to_string()); } "visibility" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } log::error!("{}", String::from_utf8(data.clone()).unwrap()); model.visibility = ActiveValue::Set(data.as_slice() == b"on"); } "filename" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } model.name = ActiveValue::Set( String::from_utf8(data) .map_err(ErrorInternalServerError) .and_then(|x| { if x.trim().is_empty() { Err(ErrorBadRequest("filename cannot be empty")) } else { Ok(x.trim().to_string()) } })?, ); } _ => (), } } Ok((model, content)) } async fn insert_kv( mut model: entity::object::ActiveModel, content: Vec<u8>, data: &web::Data<State>, ) -> Result<entity::object::ActiveModel> { let mut uuid; loop { uuid = Uuid::new_v4(); match data .kv_db .compare_and_swap( uuid.as_bytes(), Option::<&[u8]>::None, Some(content.as_slice()), ) .map_err(ErrorInternalServerError) { Ok(Ok(_)) => { model.uuid = ActiveValue::Set(uuid); break; } Ok(Err(_)) => tokio::task::yield_now().await, Err(e) => return Err(e), } } if let Err(e) = data.kv_db.flush_async().await { log::error!("sled insertion error: {}", e); return Err(ErrorInternalServerError(e)); }; Ok(model) } match session.get::<UserInfo>("user")? 
{ None => simd_json::to_string(&ObjectResult { success: false, message: Some("unauthorized".to_string()), }) .map_err(ErrorInternalServerError) .map(|x| { HttpResponse::Ok() .content_type("application/json") .status(StatusCode::UNAUTHORIZED) .json(ObjectResult { success: false, message: Some(x), }) }), _ => Ok(parse_data(&mut payload) .and_then(|(model, content)| insert_kv(model, content, &data)) .and_then(|model| model.insert(&data.sql_db).map_err(ErrorInternalServerError)) .await .map(|_| { HttpResponse::Created() .content_type("application/json") .json(ObjectResult { success: true, message: None, }) }) .unwrap_or_else(|e| { HttpResponse::BadRequest() .content_type("application/json") .json(ObjectResult { success: false, message: Some(e.to_string()), }) })), } } #[derive(serde::Deserialize, serde::Serialize)] pub struct VisibilityChange { pub uuid: Uuid, } pub async fn change_visibility( info: web::Json<VisibilityChange>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!( r#"UPDATE object SET visibility = NOT visibility WHERE uuid = '{}'"#, info.uuid ), )) .await { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } #[derive(serde::Deserialize, serde::Serialize)] pub struct DeleteRequest { pub uuid: Uuid, } pub async fn delete( info: web::Json<DeleteRequest>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( 
Postgres, format!(r#"DELETE FROM object WHERE uuid = '{}'"#, info.uuid), )) .await .anyhow() .and_then(|x| { if x.rows_affected() != 0 { data.kv_db.remove(info.uuid.as_bytes()).anyhow().and(Ok(())) } else { Ok(()) } }) { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } pub async fn get_handler( info: web::Query<ObjectRequest>, data: web::Data<State>, ) -> Result<HttpResponse> { let metadata: entity::object::Model = if let Some(uuid) = info.uuid { entity::object::Entity::find_by_id(uuid) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else if let Some(name) = &info.name { entity::object::Entity::find() .filter(entity::object::Column::Name.eq(name.as_str())) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else { return Err(error::ErrorBadRequest("invalid request")); }; if !metadata.visibility { return Err(error::ErrorUnauthorized("target not authorized")); } let inner = data .kv_db .get(metadata.uuid.as_bytes()) .map_err(error::ErrorInternalServerError)? .ok_or_else(|| error::ErrorNotFound("not found"))?; let stream = ObjectData { inner: Some(inner) }; Ok(HttpResponse::Ok() .insert_header(
) .insert_header(ContentDisposition { disposition: DispositionType::Attachment, parameters: vec![DispositionParam::Filename( metadata.name.as_str().to_string(), )], }) .streaming(stream)) } #[cfg(test)] mod test { #[cfg(all(not(miri), test))] #[actix_rt::test] async fn it_polls_fully() { use crate::routers::api::object::ObjectData; use futures::StreamExt; use rand::distributions::Alphanumeric; use rand::prelude::*; let uuid = uuid::Uuid::new_v4(); let db = sled::open(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); for i in [1, 2, 123, 555, 5261, 114514, 1024000] { let rand_string: String = thread_rng() .sample_iter(&Alphanumeric) .take(i) .map(char::from) .collect(); db.insert(format!("test-{}", i), rand_string.as_bytes()) .unwrap(); let data = db.get(format!("test-{}", i)).unwrap().unwrap(); let data = ObjectData { inner: Some(data) }; let data: Vec<u8> = data .collect::<Vec<_>>() .await .into_iter() .filter_map(Result::ok) .map(|x| x.to_vec()) .flatten() .collect(); assert_eq!(data, rand_string.as_bytes()) } std::fs::remove_dir_all(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); } }
ContentType( metadata .mimetype .parse() .map_err(error::ErrorInternalServerError)?, )
call_expression
[ { "content": "pub fn error_handler<B>(res: dev::ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>>\n\nwhere\n\n B: MessageBody + 'static,\n\n{\n\n if res.request().method() != Method::GET\n\n || res\n\n .response()\n\n .headers()\n\n .get(\"content-type\")\n\n ...
Rust
metalmq-client/src/channel_api.rs
jonasrichard/metalmq
6196238344e95bfc0d76a4b1363ba4b7e2bc8157
use crate::client_api::{ClientRequest, ClientRequestSink, Param, WaitFor}; use crate::model::ChannelNumber; use crate::processor; use anyhow::Result; use metalmq_codec::frame; use std::collections::HashMap; #[derive(Debug)] pub struct Channel { pub(crate) channel: ChannelNumber, pub(crate) sink: ClientRequestSink, pub(crate) consumers: HashMap<String, ClientRequest>, } #[derive(Debug)] pub struct Message { pub channel: ChannelNumber, pub consumer_tag: String, pub delivery_tag: u64, pub length: usize, pub body: Vec<u8>, } #[derive(Debug)] pub(crate) struct DeliveredContent { channel: ChannelNumber, consumer_tag: String, delivery_tag: u64, exchange_name: String, routing_key: String, body_size: Option<u64>, body: Option<Vec<u8>>, } impl Channel { pub(crate) fn new(channel: ChannelNumber, sink: ClientRequestSink) -> Channel { Channel { channel, sink, consumers: HashMap::new(), } } pub async fn exchange_declare( &self, exchange_name: &str, exchange_type: &str, flags: Option<frame::ExchangeDeclareFlags>, ) -> Result<()> { let frame = frame::exchange_declare(self.channel, exchange_name, exchange_type, flags); processor::call(&self.sink, frame).await } pub async fn exchange_delete(&self, exchange_name: &str, if_unused: bool) -> Result<()> { let mut flags = frame::ExchangeDeleteFlags::default(); if if_unused { flags.toggle(frame::ExchangeDeleteFlags::IF_UNUSED); } let frame = frame::exchange_delete(self.channel, exchange_name, Some(flags)); processor::call(&self.sink, frame).await } pub async fn queue_declare(&self, queue_name: &str, flags: Option<frame::QueueDeclareFlags>) -> Result<()> { let frame = frame::queue_declare(self.channel, queue_name, flags); processor::call(&self.sink, frame).await } pub async fn queue_bind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> { let frame = frame::queue_bind(self.channel, queue_name, exchange_name, routing_key); processor::call(&self.sink, frame).await } pub async fn queue_unbind(&self, queue_name: 
&str, exchange_name: &str, routing_key: &str) -> Result<()> { let frame = frame::queue_unbind(self.channel, queue_name, exchange_name, routing_key); processor::call(&self.sink, frame).await } pub async fn queue_purge(&self, queue_name: &str) -> Result<()> { Ok(()) } pub async fn queue_delete(&self, queue_name: &str, if_unused: bool, if_empty: bool) -> Result<()> { let mut flags = frame::QueueDeleteFlags::empty(); flags.set(frame::QueueDeleteFlags::IF_UNUSED, if_unused); flags.set(frame::QueueDeleteFlags::IF_EMPTY, if_empty); let frame = frame::queue_delete(self.channel, queue_name, Some(flags)); processor::call(&self.sink, frame).await } pub async fn basic_publish( &self, exchange_name: &str, routing_key: &str, payload: String, mandatory: bool, immediate: bool, ) -> Result<()> { let mut flags = frame::BasicPublishFlags::empty(); flags.set(frame::BasicPublishFlags::MANDATORY, mandatory); flags.set(frame::BasicPublishFlags::IMMEDIATE, immediate); let frame = frame::basic_publish(self.channel, exchange_name, routing_key, Some(flags)); self.sink .send(ClientRequest { param: Param::Publish(frame, payload.as_bytes().to_vec()), response: WaitFor::Nothing, }) .await?; Ok(()) } pub async fn close(&self) -> Result<()> { let (cid, mid) = frame::split_class_method(frame::CHANNEL_CLOSE); processor::call( &self.sink, frame::channel_close(self.channel, 200, "Normal close", cid, mid), ) .await } }
use crate::client_api::{ClientRequest, ClientRequestSink, Param, WaitFor}; use crate::model::ChannelNumber; use crate::processor; use anyhow::Result; use metalmq_codec::frame; use std::collections::HashMap; #[derive(Debug)] pub struct Channel { pub(crate) channel: ChannelNumber, pub(crate) sink: ClientRequestSink, pub(crate) consumers: HashMap<String, ClientRequest>, } #[derive(Debug)] pub struct Message { pub channel: ChannelNumber, pub consumer_tag: String, pub delivery_tag: u64, pub length: usize, pub body: Vec<u8>, } #[derive(Debug)] pub(crate) struct DeliveredContent { channel: ChannelNumber, consumer_tag: String, delivery_tag: u64, exchange_name: String, routing_key: String, body_size: Option<u64>, body: Option<Vec<u8>>, } impl Channel { pub(crate) fn new(channel: ChannelNumber, sink: ClientRequestSink) -> Channel { Channel { channel, sink, consumers: HashMap::new(), } } pub async fn exchange_declare( &self, exchange_name: &str, exchange_type: &str, flags: Option<frame::ExchangeDeclareFlags>, ) -> Result<()> { let frame = frame::exchange_declare(self.channel, exchange_name, exchange_type, flags); processor::call(&self.sink, frame).await } pub async fn exchange_delete(&self, exchange_name: &str, if_unused: bool) -> Result<()> { let mut flags = frame::ExchangeDeleteFlags::default(); if if_unused { flags.toggle(frame::ExchangeDeleteFlags::IF_UNUSED); } let frame = frame::exchange_delete(self.channel, exchange_name, Some(flags)); processor::call(&self.sink, frame).await } pub async fn queue_declare(&self, queue_name: &str, flags: Option<frame::QueueDeclareFlags>) -> Result<()> { let frame = frame::queue_declare(self.channel, queue_name, flags); processor::call(&self.sink, frame).await } pub async fn queue_bind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> { let frame = frame::queue_bind(self.channel, queue_name, exchange_name, routing_key); processor::call(&self.sink, frame).await } pub async fn queue_unbind(&self, queue_name: 
&str, exchange_name: &str, routing_key: &str) -> Result<()> { let frame = frame::queue_unbind(self.channel, queue_name, exchange_name, routing_key); processor::call(&self.sink, frame).await } pub async fn queue_purge(&self, queue_name: &str) -> Result<()> { Ok(()) }
pub async fn basic_publish( &self, exchange_name: &str, routing_key: &str, payload: String, mandatory: bool, immediate: bool, ) -> Result<()> { let mut flags = frame::BasicPublishFlags::empty(); flags.set(frame::BasicPublishFlags::MANDATORY, mandatory); flags.set(frame::BasicPublishFlags::IMMEDIATE, immediate); let frame = frame::basic_publish(self.channel, exchange_name, routing_key, Some(flags)); self.sink .send(ClientRequest { param: Param::Publish(frame, payload.as_bytes().to_vec()), response: WaitFor::Nothing, }) .await?; Ok(()) } pub async fn close(&self) -> Result<()> { let (cid, mid) = frame::split_class_method(frame::CHANNEL_CLOSE); processor::call( &self.sink, frame::channel_close(self.channel, 200, "Normal close", cid, mid), ) .await } }
pub async fn queue_delete(&self, queue_name: &str, if_unused: bool, if_empty: bool) -> Result<()> { let mut flags = frame::QueueDeleteFlags::empty(); flags.set(frame::QueueDeleteFlags::IF_UNUSED, if_unused); flags.set(frame::QueueDeleteFlags::IF_EMPTY, if_empty); let frame = frame::queue_delete(self.channel, queue_name, Some(flags)); processor::call(&self.sink, frame).await }
function_block-full_function
[ { "content": "pub fn basic_consume_ok(channel: u16, consumer_tag: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_CONSUME_OK,\n\n MethodFrameArgs::BasicConsumeOk(BasicConsumeOkArgs {\n\n consumer_tag: consumer_tag.to_string(),\n\n }),\n\n )\n\n}\n\...
Rust
src/model/area/foundation.rs
nakidixon/klondike-rs
b017ed3cc4645df7e48f137e0197252feef44b09
use crate::{ model::{ card::{Card, Rank, Suit}, settings::GameSettings, stack::{Orientation, Stack, StackDetails, StackSelection}, }, utils::vec::SplitOffBounded, }; use super::{ Action, Area, AreaId, Held, InvalidCard, MoveResult, NotSupported, NothingToSelect, Result, SelectedArea, SnafuSelectorExt, TooManyCards, UnselectedArea, }; #[derive(Copy, Clone, Debug)] pub struct Selection { held_from: Option<AreaId>, } #[derive(Debug)] pub struct Foundation<S> { suit: Suit, cards: Vec<Card>, take_from_foundation: bool, selection: S, } pub type UnselectedFoundation = Foundation<()>; pub type SelectedFoundation = Foundation<Selection>; impl<S> Foundation<S> { fn id(&self) -> AreaId { AreaId::Foundation(self.suit) } fn validate_cards(&self, held: &Held) -> Result { if held.source == self.id() { Ok(()) } else if let [card] = held.cards.as_slice() { ensure!( self.suit == card.suit, InvalidCard { message: format!("Wrong suit: card: {:?}, suit: {:?}", card, self.suit), } ); if let Some(foundation_card) = self.cards.last() { ensure!( foundation_card.rank.is_followed_by(card.rank), InvalidCard { message: format!( "Card does not follow: card: {:?}, top: {:?}", card, foundation_card ), } ); Ok(()) } else { ensure!( card.rank == Rank::Ace, InvalidCard { message: format!("Card does not follow: card: {:?}, top: empty", card), } ); Ok(()) } } else { ensure!( held.cards.is_empty(), TooManyCards { message: "Expected only one card", } ); Ok(()) } } fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> { match self.validate_cards(&held) { Ok(_) => { self.cards.append(&mut held.cards); MoveResult::Moved(()) } Err(error) => MoveResult::Unmoved(held, error), } } fn take_cards(&mut self, len: usize, source: AreaId) -> Held { let cards = self.cards.split_off_bounded(len); Held { source, cards } } fn as_stack(&self, selection: Option<Selection>) -> Stack<'_> { let cards_len = self.cards.len(); Stack { cards: &self.cards, details: StackDetails { orientation: Orientation::Horizontal, 
len: cards_len, face_up_len: cards_len, visible_len: 2, spread_len: 1, selection: selection.map(|selection| StackSelection { len: 1, held: selection.held_from.is_some(), }), }, } } fn with_selection<T>(self, selection: T) -> Foundation<T> { Foundation { suit: self.suit, cards: self.cards, take_from_foundation: self.take_from_foundation, selection, } } } impl UnselectedFoundation { pub fn create( suit: Suit, cards: Vec<Card>, settings: &GameSettings, ) -> Box<dyn UnselectedArea> { Box::new(Foundation { suit, cards, take_from_foundation: settings.take_from_foundation, selection: (), }) } } impl<'a> Area for UnselectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { false } fn is_held(&self) -> bool { false } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { Foundation::give_cards(self, held) } fn take_cards(&mut self, len: usize) -> Held { self.take_cards(len, self.id()) } fn take_all_cards(&mut self) -> Held { self.take_cards(self.cards.len(), self.id()) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(None) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl<'a> Area for SelectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { true } fn is_held(&self) -> bool { self.selection.held_from.is_some() } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { self.selection.held_from = None; Foundation::give_cards(self, held) } fn take_cards(&mut self, len: usize) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(len, source) } fn take_all_cards(&mut self) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(self.cards.len(), source) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { 
self.as_stack(Some(self.selection)) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl UnselectedArea for UnselectedFoundation { fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> { if !self.cards.is_empty() { MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: None }))) } else { NothingToSelect { message: "Empty area", } .fail_move(self) } } fn select_with_held( mut self: Box<Self>, held: Held, ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> { let source = held.source; match self.give_cards(held) { MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: Some(source), }))), MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error), } } } impl SelectedArea for SelectedFoundation { fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) { let held = if let Some(source) = self.selection.held_from { Some(self.take_cards(1, source)) } else { None }; let unselected = Box::new(self.with_selection(())); (unselected, held) } fn activate(&mut self) -> Result<Option<Action>> { if self.selection.held_from.is_some() { self.put_down()?; } else { self.pick_up()?; } Ok(None) } fn pick_up(&mut self) -> Result { if self.take_from_foundation { self.selection.held_from = Some(self.id()); } Ok(()) } fn put_down(&mut self) -> Result { self.selection.held_from = None; Ok(()) } fn select_more(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn select_less(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn held_from(&self) -> Option<AreaId> { self.selection.held_from } }
use crate::{ model::{ card::{Card, Rank, Suit}, settings::GameSettings, stack::{Orientation, Stack, StackDetails, StackSelection}, }, utils::vec::SplitOffBounded, }; use super::{ Action, Area, AreaId, Held, InvalidCard, MoveResult, NotSupported, NothingToSelect, Result, SelectedArea, SnafuSelectorExt, TooManyCards, UnselectedArea, }; #[derive(Copy, Clone, Debug)] pub struct Selection { held_from: Option<AreaId>, } #[derive(Debug)] pub struct Foundation<S> { suit: Suit, cards: Vec<Card>, take_from_foundation: bool, selection: S, } pub type UnselectedFoundation = Foundation<()>; pub type SelectedFoundation = Foundation<Selection>; impl<S> Foundation<S> { fn id(&self) -> AreaId { AreaId::Foundation(self.suit) } fn validate_cards(&self, held: &Held) -> Result { if held.source == self.id() { Ok(()) } else if let [card] = held.cards.as_slice() { ensure!( self.suit == card.suit, InvalidCard { message: format!("Wrong suit: card: {:?}, suit: {:?}", card, self.suit), } ); if let Some(foundation_card) = self.cards.last() { ensure!( foundation_card.rank.is_followed_by(card.rank), InvalidCard { message: format!( "Card does not follow: card: {:?}, top: {:?}", card, foundation_card ), } ); Ok(()) } else { ensure!( card.rank == Rank::Ace, InvalidCard { message: format!("Card does not follow: card: {:?}, top: empty", card), } ); Ok(()) } } else { ensure!( held.cards.is_empty(), TooManyCards { message: "Expected only one card", } ); Ok(()) } } fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> { match self.validate_cards(&held) { Ok(_) => { self.cards.append(&mut held.cards); MoveResult::Moved(()) } Err(error) => MoveResult::Unmoved(held, error), } } fn take_cards(&mut self, len: usize, source: AreaId) -> Held { let cards = self.cards.split_off_bounded(len); Held { source, cards } } fn as_stack(&self, selection: Option<Selection>) -> Stack<'_> { let cards_len = self.cards.len(); Stack { cards: &self.cards, details: StackDetails { orientation: Orientation::Horizontal, 
len: cards_len, face_up_len: cards_len, visible_len: 2, spread_len: 1, selection: selection.map(|selection| StackSelection { len: 1, held: selection.held_from.is_some(), }), }, } } fn with_selection<T>(self, selection: T) -> Foundation<T> { Foundation { suit: self.suit, cards: self.cards, take_from_foundation: self.take_from_foundation, selection, } } } impl UnselectedFoundation { pub fn create( suit: Suit, cards: Vec<Card>, settings: &GameSettings, ) -> Box<dyn UnselectedArea> { Box::new(Foundation { suit, cards, take_from_foundation: settings.take_from_foundation, selection: (), }) } } impl<'a> Area for UnselectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { false } fn is_held(&self) -> bool { false } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { Foundation::give_cards(self, held)
p_or_else(|| self.id()); self.take_cards(len, source) } fn take_all_cards(&mut self) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(self.cards.len(), source) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(Some(self.selection)) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl UnselectedArea for UnselectedFoundation { fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> { if !self.cards.is_empty() { MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: None }))) } else { NothingToSelect { message: "Empty area", } .fail_move(self) } } fn select_with_held( mut self: Box<Self>, held: Held, ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> { let source = held.source; match self.give_cards(held) { MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: Some(source), }))), MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error), } } } impl SelectedArea for SelectedFoundation { fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) { let held = if let Some(source) = self.selection.held_from { Some(self.take_cards(1, source)) } else { None }; let unselected = Box::new(self.with_selection(())); (unselected, held) } fn activate(&mut self) -> Result<Option<Action>> { if self.selection.held_from.is_some() { self.put_down()?; } else { self.pick_up()?; } Ok(None) } fn pick_up(&mut self) -> Result { if self.take_from_foundation { self.selection.held_from = Some(self.id()); } Ok(()) } fn put_down(&mut self) -> Result { self.selection.held_from = None; Ok(()) } fn select_more(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn select_less(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } 
.fail() } fn held_from(&self) -> Option<AreaId> { self.selection.held_from } }
} fn take_cards(&mut self, len: usize) -> Held { self.take_cards(len, self.id()) } fn take_all_cards(&mut self) -> Held { self.take_cards(self.cards.len(), self.id()) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(None) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl<'a> Area for SelectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { true } fn is_held(&self) -> bool { self.selection.held_from.is_some() } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { self.selection.held_from = None; Foundation::give_cards(self, held) } fn take_cards(&mut self, len: usize) -> Held { let source = self.selection.held_from.take().unwra
random
[ { "content": "pub trait SelectedArea: Area {\n\n fn deselect(self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>);\n\n\n\n fn activate(&mut self) -> Result<Option<Action>>;\n\n fn pick_up(&mut self) -> Result;\n\n fn put_down(&mut self) -> Result;\n\n fn select_more(&mut self) -> Result;\n...
Rust
benches/predict.rs
moisesmcardona/rav1e
5e9d1b0754879eba2f59192af9c0d016ef380eb5
use criterion::*; use rand::{ChaChaRng, Rng, SeedableRng}; use rav1e::partition::BlockSize; use rav1e::predict::{Block4x4, Intra}; pub const MAX_ITER: usize = 50000; pub const BLOCK_SIZE: BlockSize = BlockSize::BLOCK_32X32; pub fn generate_block(rng: &mut ChaChaRng) -> (Vec<u16>, Vec<u16>, Vec<u16>) { let block = vec![0u16; BLOCK_SIZE.width() * BLOCK_SIZE.height()]; let above_context: Vec<u16> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect(); let left_context: Vec<u16> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect(); (block, above_context, left_context) } pub fn generate_block_u8(rng: &mut ChaChaRng) -> (Vec<u8>, Vec<u8>, Vec<u8>) { let block = vec![0u8; BLOCK_SIZE.width() * BLOCK_SIZE.height()]; let above_context: Vec<u8> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect(); let left_context: Vec<u8> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect(); (block, above_context, left_context) } pub fn pred_bench(c: &mut Criterion) { c.bench_function("intra_dc_4x4", |b| intra_dc_4x4(b)); c.bench_function("intra_dc_128_4x4_u8", |b| intra_dc_128_4x4_u8(b)); c.bench_function("intra_dc_left_4x4", |b| intra_dc_left_4x4(b)); c.bench_function("intra_dc_top_4x4", |b| intra_dc_top_4x4(b)); c.bench_function("intra_h_4x4", |b| intra_h_4x4(b)); c.bench_function("intra_v_4x4", |b| intra_v_4x4(b)); c.bench_function("intra_paeth_4x4", |b| intra_paeth_4x4(b)); c.bench_function("intra_smooth_4x4", |b| intra_smooth_4x4(b)); c.bench_function("intra_smooth_h_4x4", |b| intra_smooth_h_4x4(b)); c.bench_function("intra_smooth_v_4x4", |b| intra_smooth_v_4x4(b)); c.bench_function("intra_cfl_4x4", |b| intra_cfl_4x4(b)); } pub fn intra_dc_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_dc_128_4x4_u8(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut 
block, above, left) = generate_block_u8(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_128( &mut block, BLOCK_SIZE.width(), 8 ); } }) } pub fn intra_dc_left_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_left( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_dc_top_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_top( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_h_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, _above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_h(&mut block, BLOCK_SIZE.width(), &left[..4]); } }) } pub fn intra_v_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, _left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_v(&mut block, BLOCK_SIZE.width(), &above[..4]); } }) } pub fn intra_paeth_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); let above_left = unsafe { *above.as_ptr().offset(-1) }; b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_paeth( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4], above_left ); } }) } pub fn intra_smooth_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_smooth_h_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth_h( &mut 
block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_smooth_v_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth_v( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_cfl_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, _above, _left) = generate_block(&mut rng); let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect(); let alpha = -1 as i16; b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_cfl(&mut block, BLOCK_SIZE.width(), &ac, alpha, 8); } }) }
use criterion::*; use rand::{ChaChaRng, Rng, SeedableRng}; use rav1e::partition::BlockSize; use rav1e::predict::{Block4x4, Intra}; pub const MAX_ITER: usize = 50000; pub const BLOCK_SIZE: BlockSize = BlockSize::BLOCK_32X32; pub fn generate_block(rng: &mut ChaChaRng) -> (Vec<u16>, Vec<u16>, Vec<u16>) { let block = vec![0u16; BLOCK_SIZE.width() * BLOCK_SIZE.height()]; let above_context: Vec<u16> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect(); let left_context: Vec<u16> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect(); (block, above_context, left_context) } pub fn generate_block_u8(rng: &mut ChaChaRng) -> (Vec<u8>, Vec<u8>, Vec<u8>) { let block = vec![0u8; BLOCK_SIZE.width() * BLOCK_SIZE.height()]; let above_context: Vec<u8> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect(); let left_context: Vec<u8> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect(); (block, above_context, left_context) }
pub fn intra_dc_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_dc_128_4x4_u8(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block_u8(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_128( &mut block, BLOCK_SIZE.width(), 8 ); } }) } pub fn intra_dc_left_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_left( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_dc_top_4x4(b: &mut Bencher) { let mut ra = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut ra); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_dc_top( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_h_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, _above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_h(&mut block, BLOCK_SIZE.width(), &left[..4]); } }) } pub fn intra_v_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, _left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_v(&mut block, BLOCK_SIZE.width(), &above[..4]); } }) } pub fn intra_paeth_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); let above_left = unsafe { *above.as_ptr().offset(-1) }; b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_paeth( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4], above_left ); } }) } pub fn intra_smooth_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = 
generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_smooth_h_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth_h( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_smooth_v_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, above, left) = generate_block(&mut rng); b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_smooth_v( &mut block, BLOCK_SIZE.width(), &above[..4], &left[..4] ); } }) } pub fn intra_cfl_4x4(b: &mut Bencher) { let mut rng = ChaChaRng::from_seed([0; 32]); let (mut block, _above, _left) = generate_block(&mut rng); let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect(); let alpha = -1 as i16; b.iter(|| { for _ in 0..MAX_ITER { Block4x4::pred_cfl(&mut block, BLOCK_SIZE.width(), &ac, alpha, 8); } }) }
pub fn pred_bench(c: &mut Criterion) { c.bench_function("intra_dc_4x4", |b| intra_dc_4x4(b)); c.bench_function("intra_dc_128_4x4_u8", |b| intra_dc_128_4x4_u8(b)); c.bench_function("intra_dc_left_4x4", |b| intra_dc_left_4x4(b)); c.bench_function("intra_dc_top_4x4", |b| intra_dc_top_4x4(b)); c.bench_function("intra_h_4x4", |b| intra_h_4x4(b)); c.bench_function("intra_v_4x4", |b| intra_v_4x4(b)); c.bench_function("intra_paeth_4x4", |b| intra_paeth_4x4(b)); c.bench_function("intra_smooth_4x4", |b| intra_smooth_4x4(b)); c.bench_function("intra_smooth_h_4x4", |b| intra_smooth_h_4x4(b)); c.bench_function("intra_smooth_v_4x4", |b| intra_smooth_v_4x4(b)); c.bench_function("intra_cfl_4x4", |b| intra_cfl_4x4(b)); }
function_block-full_function
[ { "content": "pub fn intra_bench(c: &mut Criterion) {\n\n c.bench_functions(\n\n \"intra_dc_4x4\",\n\n vec![\n\n Fun::new(\"native\", |b, _: &Option<usize>| {\n\n predict_native::intra_dc_4x4(b)\n\n }),\n\n Fun::new(\"aom\", |b, _: &Option<usize>| {\n\n predict_intra_4x4_aom(...
Rust
src/config.rs
video-audio/va-tool
e97918d0aa936ace07dfd3df84758b6a7646a4d2
use std::env; use regex::Regex; use url::Url; use crate::error::{Error, Result}; use crate::opt::{Match as OptMatch, Matcher as OptMatcher, Opt, OptKind, Opts}; #[rustfmt::skip] const OPTS: Opts = &[ &Opt(&"vv", &["verbose"], OptKind::NoArg), &Opt(&"vvv", &["very-verbose"], OptKind::NoArg), &Opt(&"help", &["h"], OptKind::NoArg), &Opt(&"version", &["v"], OptKind::NoArg), &Opt(&"print-config", &[], OptKind::NoArg), &Opt(&"config", &["c", "cfg"], OptKind::Arg), &Opt(&"input", &["i"], OptKind::Arg), &Opt(&"fifo-sz", &["udp-fifo-sz", "udp-fifo-size","fifo-size"], OptKind::Arg), &Opt(&"out", &["o", "output"], OptKind::Arg), ]; #[allow(dead_code)] pub struct ConfigOutput { url: Url, } pub struct ConfigInput { id: u64, pub url: Url, pub udp_fifo_sz: usize, } pub struct Config { pub print_help: bool, pub print_version: bool, pub print_config: bool, pub log_level: log::Level, pub inputs: Vec<ConfigInput>, } impl Config { pub(crate) fn parse() -> Result<Config> { let mut c = Config { print_help: false, print_version: false, print_config: false, log_level: log::Level::Info, inputs: Default::default(), }; let opt_matcher = OptMatcher::new(env::args().skip(1).collect(), OPTS); for (i, mtch) in opt_matcher.into_iter().enumerate() { match mtch { OptMatch::Key(key, _) => match key { "vv" => c.log_level = log::Level::Debug, "vvv" => c.log_level = log::Level::Trace, "help" => c.print_help = true, "version" => c.print_version = true, "print-config" => c.print_config = true, _ => {} }, OptMatch::KeyValue(key, value) => match key { "input" => c.push_input(value)?, "fifo-sz" => { let udp_fifo_sz = value.parse::<usize>().unwrap(); c.inputs.last_mut().and_then(|input| { input.udp_fifo_sz = udp_fifo_sz; Some(input) }); } _ => {} }, OptMatch::Positional(value) | OptMatch::ExtraPositional(value) => { if i == 0 && value == "analyze" { } else { c.push_input(value)? 
} } OptMatch::UnknownKey(key) => { log::warn!(r#"unrecognized option "{}""#, key); } OptMatch::UnknownKeyValue(key, value) => { log::warn!(r#"unrecognized option "{}" with argument "{}""#, key, value); } OptMatch::No(key) => { log::warn!(r#"unknown argument "{}""#, key); } _ => {} } } Ok(c) } pub(crate) fn print_help(&self) { println!("Video/Audio tool version {}", env!("CARGO_PKG_VERSION")); println!(); println!("Usage:"); println!(r#" va-tool [...] [-arg ...] [--arg[="..."]] [--] [...]"#); println!(); println!("Flags:"); println!(" -vv, --verbose | <bool> | ... "); println!(" -vvv, --very-verbose | <bool> | ... "); println!(" -i, --intput | <str/url> | Where to read from"); println!(" --fifo-sz | <size> | circular buffer size; result allocaed size"); println!(" . is $(mpeg-ts-packer-size) * $(fifo-size)"); println!(" . mpeg-ts-packer-size is 188"); println!(" -o, --output, --out | <str/url> | Where to write to"); println!(); } pub(crate) fn print_version(&self) { println!("version: {}", env!("CARGO_PKG_VERSION")); } pub(crate) fn print_config(&self) { println!("log-level: {}", self.log_level.to_string().to_lowercase()); println!("inputs:"); for input in self.inputs.iter() { println!(" - id: {}", input.id); println!(" url: {}", input.url); if input.url.scheme() == "udp" { println!(" udp-fifo-sz: {}", input.udp_fifo_sz); } } } pub(crate) fn validate(&self) -> Result<()> { Ok(()) } fn push_input(&mut self, url_raw: String) -> Result<()> { let cfg_input = ConfigInput { id: 0, url: url_parse(&url_raw)?, udp_fifo_sz: 5 * 1000, }; self.inputs.push(cfg_input); Ok(()) } } fn url_parse<UR: AsRef<str>>(url_raw: UR) -> Result<url::Url> { lazy_static! { static ref RE_UDP_MCAST_GROUP: Regex = Regex::new( r#"(?x) ^ 2(?:2[4-9]|3[0-9]) (?: \. 
(?: 25[0-5] | 2[0-4][0-9] | 1[0-9]{2} | [1-9][0-9] | [0-9] ) ){3} "#, ) .unwrap(); } let mut url_raw = url_raw.as_ref().to_string(); if RE_UDP_MCAST_GROUP.is_match(&url_raw) { url_raw.insert_str(0, "udp://"); } else if url_raw.starts_with('.') || url_raw.starts_with('/') { url_raw.insert_str(0, "file://"); } Url::parse(&url_raw).map_err(|err| Error::url_parse(err, url_raw)) }
use std::env; use regex::Regex; use url::Url; use crate::error::{Error, Result}; use crate::opt::{Match as OptMatch, Matcher as OptMatcher, Opt, OptKind, Opts}; #[rustfmt::skip] const OPTS: Opts = &[ &Opt(&"vv", &["verbose"], OptKind::NoArg), &Opt(&"vvv", &["very-verbose"], OptKind::NoArg), &Opt(&"help", &["h"], OptKind::NoArg), &Opt(&"version", &["v"], OptKind::NoArg), &Opt(&"print-config", &[], OptKind::NoArg), &Opt(&"config", &["c", "cfg"], OptKind::Arg), &Opt(&"input", &["i"], OptKind::Arg), &Opt(&"fifo-sz", &["udp-fifo-sz", "udp-fifo-size","fifo-size"], OptKind::Arg), &Opt(&"out", &["o", "output"], OptKind::Arg), ]; #[allow(dead_code)] pub struct ConfigOutput { url: Url, } pub struct ConfigInput { id: u64, pub url: Url, pub udp_fifo_sz: usize, } pub struct Config { pub print_help: bool, pub print_version: bool, pub print_config: bool, pub log_level: log::Level, pub inputs: Vec<ConfigInput>, } impl Config { pub(crate) fn parse() -> Result<Config> { let mut c = Config { print_help: false, print_version: false, print_config: false, log_level: log::Level::Info, inputs: Default::default(), }; let opt_matcher = OptMatcher::new(env::args().skip(1).collect(), OPTS); for (i, mtch) in opt_matcher.into_iter().enumerate() { match mtch { OptMatch::Key(key, _) => match key { "vv" => c.log_level = log::Level::Debug, "vvv" => c.log_level = log::Level::Trace, "help" => c.print_help = true, "version" => c.print_version = true, "print-config" => c.print_config = true, _ => {} }, OptMatch::KeyValue(key, value) => match key { "input" => c.push_input(value)?, "fifo-sz" => { let udp_fifo_sz = value.parse::<usize>().unwrap(); c.inputs.last_mut().and_then(|input| { input.udp_fifo_sz = udp_fifo_sz; Some(input) }); } _ => {} }, OptMatch::Positional(value) | OptMatch::ExtraPositional(value) => { if i == 0 && value == "analyze" { } else { c.push_input(value)? 
} } OptMatch::UnknownKey(key) => { log::warn!(r#"unrecognized option "{}""#, key); } OptMatch::UnknownKeyValue(key, value) => { log::warn!(r#"unrecognized option "{}" with argument "{}""#, key, value); } OptMatch::No(key) => { log::warn!(r#"unknown argument "{}""#, key); } _ => {} } } Ok(c) } pub(crate) fn print_help(&self) { println!("Video/Audio tool version {}", env!("CARGO_PKG_VERSION")); println!(); println!("Usage:"); println!(r#" va-tool [...] [-arg ...] [--arg[="..."]] [--] [...]"#); println!(); println!("Flags:"); println!(" -vv, --verbose | <bool> | ... "); println!(" -vvv, --very-verbose | <bool> | ... "); println!(" -i, --intput | <str/url> | Where to read from"); println!(" --fifo-sz | <size> | circular buffer size; result allocaed size"); println!("
| <str/url> | Where to write to"); println!(); } pub(crate) fn print_version(&self) { println!("version: {}", env!("CARGO_PKG_VERSION")); } pub(crate) fn print_config(&self) { println!("log-level: {}", self.log_level.to_string().to_lowercase()); println!("inputs:"); for input in self.inputs.iter() { println!(" - id: {}", input.id); println!(" url: {}", input.url); if input.url.scheme() == "udp" { println!(" udp-fifo-sz: {}", input.udp_fifo_sz); } } } pub(crate) fn validate(&self) -> Result<()> { Ok(()) } fn push_input(&mut self, url_raw: String) -> Result<()> { let cfg_input = ConfigInput { id: 0, url: url_parse(&url_raw)?, udp_fifo_sz: 5 * 1000, }; self.inputs.push(cfg_input); Ok(()) } } fn url_parse<UR: AsRef<str>>(url_raw: UR) -> Result<url::Url> { lazy_static! { static ref RE_UDP_MCAST_GROUP: Regex = Regex::new( r#"(?x) ^ 2(?:2[4-9]|3[0-9]) (?: \. (?: 25[0-5] | 2[0-4][0-9] | 1[0-9]{2} | [1-9][0-9] | [0-9] ) ){3} "#, ) .unwrap(); } let mut url_raw = url_raw.as_ref().to_string(); if RE_UDP_MCAST_GROUP.is_match(&url_raw) { url_raw.insert_str(0, "udp://"); } else if url_raw.starts_with('.') || url_raw.starts_with('/') { url_raw.insert_str(0, "file://"); } Url::parse(&url_raw).map_err(|err| Error::url_parse(err, url_raw)) }
. is $(mpeg-ts-packer-size) * $(fifo-size)"); println!(" . mpeg-ts-packer-size is 188"); println!(" -o, --output, --out
random
[ { "content": "fn opts_get<'opt, 's>(opts: Opts<'opt>, key: &'s str) -> Option<&'opt Opt<'opt>> {\n\n for opt in opts {\n\n if opt.0 == key {\n\n return Some(opt);\n\n }\n\n\n\n for k in opt.1 {\n\n if *k == key {\n\n return Some(opt);\n\n }...