// extern/typed-geometry/src/typed-geometry/functions/objects/intersection.hh
#pragma once
#include <typed-geometry/detail/optional.hh>
#include <typed-geometry/feature/assert.hh>
#include <typed-geometry/functions/basic/scalar_math.hh>
#include <typed-geometry/types/objects/aabb.hh>
#include <typed-geometry/types/objects/box.hh>
#include <typed-geometry/types/objects/capsule.hh>
#include <typed-geometry/types/objects/cylinder.hh>
#include <typed-geometry/types/objects/ellipse.hh>
#include <typed-geometry/types/objects/halfspace.hh>
#include <typed-geometry/types/objects/hemisphere.hh>
#include <typed-geometry/types/objects/inf_cone.hh>
#include <typed-geometry/types/objects/inf_cylinder.hh>
#include <typed-geometry/types/objects/line.hh>
#include <typed-geometry/types/objects/plane.hh>
#include <typed-geometry/types/objects/pyramid.hh>
#include <typed-geometry/types/objects/ray.hh>
#include <typed-geometry/types/objects/segment.hh>
#include <typed-geometry/types/objects/sphere.hh>
#include <typed-geometry/types/objects/triangle.hh>
#include <typed-geometry/functions/vector/cross.hh>
#include <typed-geometry/functions/vector/dot.hh>
#include <typed-geometry/functions/vector/length.hh>
#include "aabb.hh"
#include "closest_points.hh"
#include "contains.hh"
#include "coordinates.hh"
#include "direction.hh"
#include "faces.hh"
#include "normal.hh"
#include "plane.hh"
#include "project.hh"
#include <utility>
#include <vector>
// family of intersection functions:
// intersects(a, b) -> bool
// intersection(a, b) -> ???
// intersection_safe(a, b) -> optional<???>
// intersection_parameter(a, b) -> coords? (for a line or a ray: hits<N, ScalarT> or optional<hit_interval> (when b is solid))
// intersection_parameters(a, b) -> pair<coords, coords>?
// intersection_exact(a, b) -> variant
// closest_intersection(a, b) -> position/object (for a ray: optional<pos>)
// closest_intersection_parameter(a, b) -> coords (for a ray: optional<ScalarT>)
// "intersects" returns true iff any point lies in a and in b
// "intersection" returns an object describing the intersection (NOTE: does NOT handle degenerate cases)
// "intersection_safe" is the same as "intersection" but returns nullopt for degenerate cases
// "intersection_parameter" returns coordinates for the first object such that a[coords] == intersection(a, b)
// "intersection_parameters" returns coordinates for both objects
// "intersection_exact" returns a variant type describing all possible intersections, including degenerate cases
// the "closest_" variants only return the closest intersection for objects where that concept is applicable (e.g. for rays)
// Notes:
// - intersection_exact is currently unsupported
// - intersection_safe is currently unsupported
// - for more elaborate ray-tracing, a future ray_cast function will exist (which also returns the intersection normal)
// Implementation guidelines:
// if object has boundary_of(obj) defined
// explicit intersection_parameter(line, obj_boundary), which gives intersection_parameter(line, obj)
// if obj is infinite and contains(obj, pos) is not cheap, intersection_parameter(line, obj) can be implemented additionally
// else
// explicit intersection_parameter(line, obj)
// this gives intersection, intersects, closest_intersection_parameter, and closest_intersection for line and ray
//
// if closest ray intersection is faster than computing all line intersections
// explicit closest_intersection_parameter(ray, obj), same for obj_boundary
// this is then also used by closest_intersection and intersects
//
// explicit intersects(obj, aabb), which gives intersects(aabb, obj)
//
// for convex compound objects (like cylinder or pyramids), decompose the object into primitive shapes and pass them to a helper function:
// - call merge_hits(line, objPart1, objPart2, ...) in the implementation of intersection_parameter(line, obj_boundary)
// - call intersects_any(lineRayObj, objPart1, objPart2, ...) in the implementation of intersects(lineRayObj, obj<TraitsT>), which can shortcut and be faster than the default
namespace tg
{
// ====================================== Result Structs ======================================
/// ordered list of ray intersection hits (closest first)
/// behaves like a container with
///   .size()
///   operator[]
///   range-based-for
template <int MaxHits, class HitT>
struct hits
{
    static constexpr bool is_hits = true; // tag used by the generic overloads below for SFINAE dispatch
    static constexpr int max_hits = MaxHits;
    using hit_t = HitT;

    /// rebinds this hits type to another hit type (e.g. parameter -> position)
    template <class OtherT>
    using as_hits = hits<MaxHits, OtherT>;

    /// number of valid hits (0 <= size() <= MaxHits)
    [[nodiscard]] int size() const { return _size; }
    /// true iff at least one hit was recorded
    [[nodiscard]] bool any() const { return _size > 0; }

    HitT const& operator[](int idx) const
    {
        TG_ASSERT(0 <= idx && idx < _size);
        return _hit[idx];
    }
    /// first (closest) hit, requires size() > 0
    [[nodiscard]] HitT const& first() const
    {
        TG_ASSERT(_size > 0);
        return _hit[0];
    }
    /// last (furthest) hit, requires size() > 0
    [[nodiscard]] HitT const& last() const
    {
        TG_ASSERT(_size > 0);
        return _hit[_size - 1];
    }

    [[nodiscard]] HitT const* begin() const { return _hit; }
    [[nodiscard]] HitT const* end() const { return _hit + _size; }

    hits() = default;
    /// copies the first `size` entries from the given array
    hits(HitT* hits, int size) : _size(size)
    {
        for (auto i = 0; i < size; ++i)
            _hit[i] = hits[i];
    }
    /// constructs from an explicit list of hit values
    template <typename... HitTs>
    hits(HitTs... hits) : _size(sizeof...(HitTs)), _hit{hits...}
    {
    }

private:
    int _size = 0;           // number of valid entries in _hit
    HitT _hit[MaxHits] = {}; // storage; entries at index >= _size are value-initialized
};
/// describes a continuous interval on a line or ray between start and end
template <class ScalarT>
struct hit_interval
{
    static constexpr bool is_hit_interval = true; // tag

    ScalarT start; ///< parameter where the interval begins
    ScalarT end;   ///< parameter where the interval ends

    /// true iff the interval extends to infinity on at least one side
    [[nodiscard]] constexpr bool is_unbounded() const { return start == tg::min<ScalarT>() || end == tg::max<ScalarT>(); }
};
// ====================================== Helper functions ======================================
namespace detail
{
// intersects the given line with all given objects and returns the concatenated intersections. A maximal number of 2 intersections is assumed.
template <int D, class ScalarT, class... Objs>
[[nodiscard]] constexpr hits<2, ScalarT> merge_hits(line<D, ScalarT> const& line, Objs const&... objs)
{
    // running minimum / maximum parameter over all sub-object hits
    ScalarT hits[2] = {};
    hits[0] = tg::max<ScalarT>();
    hits[1] = tg::min<ScalarT>();
    auto numHits = 0;
    const auto find_hits = [&](const auto& obj) {
        const auto inters = intersection_parameter(line, obj);
        for (const auto& inter : inters)
        {
            hits[0] = tg::min(hits[0], inter);
            hits[1] = tg::max(hits[1], inter);
            numHits++;
        }
    };
    // fold expression: invoke for each object, in order
    (find_hits(objs), ...);
    TG_ASSERT(numHits <= 2); // more than 2 hits would mean the compound object is not convex
    return {hits, numHits};
}
// expands a compile-time array of sub-objects and forwards them to merge_hits.
// the array is taken by const& to avoid copying all sub-objects on every call.
template <int D, class ScalarT, class ObjT, u64 N, size_t... I>
[[nodiscard]] constexpr hits<2, ScalarT> merge_hits_array(line<D, ScalarT> const& line, array<ObjT, N> const& objs, std::index_sequence<I...>)
{
    return merge_hits(line, objs[I]...);
}
// returns true, iff the given line or ray object intersects any of the given other objects (with short-circuiting after the first intersection)
template <class Obj, class... Objs>
[[nodiscard]] constexpr bool intersects_any(Obj const& obj, Objs const&... objs)
{
    // || fold expression short-circuits after the first hit
    return (intersects(obj, objs) || ...);
}
// expands a compile-time array of objects and forwards them to intersects_any.
// the array is taken by const& to avoid copying all sub-objects on every call.
template <class Obj, class ObjT, u64 N, size_t... I>
[[nodiscard]] constexpr bool intersects_any_array(Obj const& obj, array<ObjT, N> const& objs, std::index_sequence<I...>)
{
    return intersects_any(obj, objs[I]...);
}
// Solves the quadratic equation ax^2 + bx + c = 0
// returns either no solution or both solutions in ascending order (a double root is reported twice)
// NOTE(review): a == 0 (a degenerate, linear equation) divides by zero here — callers appear to rely on a != 0; confirm
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> solve_quadratic(ScalarT const& a, ScalarT const& b, ScalarT const& c)
{
    const auto discriminant = b * b - ScalarT(4) * a * c;
    if (discriminant < ScalarT(0))
        return {}; // No solution
    const auto sqrtD = sqrt(discriminant);
    const auto t1 = (-b - sqrtD) / (ScalarT(2) * a);
    const auto t2 = (-b + sqrtD) / (ScalarT(2) * a);
    const auto [tMin, tMax] = minmax(t1, t2);
    return {tMin, tMax};
}
// detection idiom: well-formed iff a closest_intersection_parameter(A, B) overload exists for these types
template <class A, class B>
using try_closest_intersection_parameter = decltype(closest_intersection_parameter(std::declval<A const&>(), std::declval<B const&>()));
}
// ====================================== Default Implementations ======================================
// TODO: intersection_parameter from intersection_parameters
// returns whether two objects intersect
// if intersection is available and applicable, use that
// (disabled when a closest_intersection_parameter overload exists, to avoid ambiguity with the overload below)
template <class A, class B>
[[nodiscard]] constexpr auto intersects(A const& a, B const& b)
    -> std::enable_if_t<!can_apply<detail::try_closest_intersection_parameter, A, B>, decltype(intersection(a, b).has_value())>
{
    return intersection(a, b).has_value();
}
// if closest intersection parameter is available and applicable, use that
template <class A, class B>
[[nodiscard]] constexpr auto intersects(A const& a, B const& b) -> decltype(closest_intersection_parameter(a, b).has_value())
{
    // any closest intersection existing implies an intersection
    return closest_intersection_parameter(a, b).has_value();
}
// if A is a _boundary, check if B is completely contained within (then false), otherwise same as intersects solid_of(A)
template <class A, class B>
[[nodiscard]] constexpr auto intersects(A const& a, B const& b) -> enable_if<std::is_same_v<typename A::scalar_t, typename A::scalar_t> && std::is_same_v<typename object_traits<A>::tag_t, boundary_tag>, bool>
{
    using ScalarT = typename A::scalar_t;
    auto const solidA = solid_of(a);
    // the negative eps shrinks the containment test, so B touching the boundary still counts as intersecting
    if (contains(solidA, b, ScalarT(-16) * tg::epsilon<ScalarT>))
        return false;
    return intersects(solidA, b);
}
// parameters for intersects with aabb can switch order
template <int D, class ScalarT, class Obj>
[[nodiscard]] constexpr bool intersects(aabb<D, ScalarT> const& b, Obj const& obj)
{
    // forwards to the (Obj, aabb) overload so only one direction needs an implementation
    return intersects(obj, b);
}
// if a value-typed intersection parameter is available and applicable, use that
template <class A, class B>
[[nodiscard]] constexpr auto intersection(A const& a, B const& b) -> decltype(a[intersection_parameter(a, b)])
{
    // evaluate a at the intersection coordinates (e.g. line[t])
    return a[intersection_parameter(a, b)];
}
// if an optional intersection parameter is available and applicable, use that
template <class A, class B>
[[nodiscard]] constexpr auto intersection(A const& a, B const& b) -> optional<decltype(a[intersection_parameter(a, b).value()])>
{
    if (auto t = intersection_parameter(a, b); t.has_value())
        return a[t.value()];
    return {}; // no intersection
}
// if a value-typed closest intersection parameter is available and applicable, use that
template <class A, class B>
[[nodiscard]] constexpr auto closest_intersection(A const& a, B const& b) -> decltype(a[closest_intersection_parameter(a, b)])
{
    // evaluate a at the closest intersection coordinates
    return a[closest_intersection_parameter(a, b)];
}
// if an optional closest intersection parameter is available and applicable, use that
template <class A, class B>
[[nodiscard]] constexpr auto closest_intersection(A const& a, B const& b) -> optional<decltype(a[closest_intersection_parameter(a, b).value()])>
{
    if (auto t = closest_intersection_parameter(a, b); t.has_value())
        return a[t.value()];
    return {}; // no intersection
}
// if hits intersection parameter is available, use that
template <class A, class B>
[[nodiscard]] constexpr auto intersection(A const& a, B const& b) -> typename decltype(intersection_parameter(a, b))::template as_hits<typename A::pos_t>
{
    // map each parameter hit to the corresponding position on a
    auto ts = intersection_parameter(a, b);
    typename A::pos_t hits[ts.max_hits] = {};
    for (auto i = 0; i < ts.size(); ++i)
        hits[i] = a[ts[i]];
    return {hits, ts.size()};
}
// if an optional hit_interval intersection parameter is available, use that
template <class A, class B, std::enable_if_t<decltype(intersection_parameter(std::declval<A>(), std::declval<B>()).value())::is_hit_interval, int> = 0>
[[nodiscard]] constexpr auto intersection(A const& a, B const& b)
    -> optional<segment<object_traits<A>::domain_dimension, typename object_traits<A>::scalar_t>>
{
    // a parameter interval on a maps to the segment between its two endpoints
    // NOTE(review): an unbounded interval yields endpoints at tg::min/max parameters — confirm callers handle this
    using seg_t = segment<object_traits<A>::domain_dimension, typename object_traits<A>::scalar_t>;
    auto ts = intersection_parameter(a, b);
    if (ts.has_value())
        return seg_t{a[ts.value().start], a[ts.value().end]};
    return {};
}
// if hits intersection parameter is available, use that
template <class A, class B>
[[nodiscard]] constexpr auto closest_intersection_parameter(A const& a, B const& b) -> optional<typename decltype(intersection_parameter(a, b))::hit_t>
{
    const auto hits = intersection_parameter(a, b);
    // hits are ordered, so the first one is the closest
    if (hits.any())
        return hits.first();
    return {};
}
// if an optional hit_interval intersection parameter is available, use that
template <class A, class B>
[[nodiscard]] constexpr auto closest_intersection_parameter(A const& a, B const& b) -> optional<decltype(intersection_parameter(a, b).value().start)>
{
    const auto hits = intersection_parameter(a, b);
    // the interval start is the entry point, i.e. the closest intersection
    if (hits.has_value())
        return hits.value().start;
    return {};
}
// if boundary_of a solid object returns hits, use this to construct the hit_interval result of the solid intersection
template <int D, class ScalarT, class Obj>
[[nodiscard]] constexpr auto intersection_parameter(line<D, ScalarT> const& l, Obj const& obj)
    -> enable_if<!std::is_same_v<Obj, decltype(boundary_of(obj))>, optional<hit_interval<ScalarT>>>
{
    const hits<2, ScalarT> inter = intersection_parameter(l, boundary_of(obj));
    // two boundary hits enclose the solid interval
    if (inter.size() == 2)
        return {{inter[0], inter[1]}};
    if constexpr (object_traits<Obj>::is_finite)
    {
        TG_ASSERT(inter.size() == 0); // a line intersects a finite solid object either twice or not at all
        return {};
    }
    else
    {
        // infinite objects: 0 or 1 boundary hits are also possible
        if (inter.size() == 0)
        {
            // either completely inside (unbounded interval) or completely outside
            if (contains(obj, l.pos))
                return {{tg::min<ScalarT>(), tg::max<ScalarT>()}};
            return {};
        }
        TG_ASSERT(inter.size() == 1); // no other number remains
        // exactly one hit: the line either enters or leaves the object there
        if (contains(obj, l[inter.first() + ScalarT(1)])) // the line continues into the object
            return {{inter.first(), tg::max<ScalarT>()}};
        return {{tg::min<ScalarT>(), inter.first()}};
    }
}
// intersection between point and obj is same as contains
// NOTE: the SFINAE detection expression must mirror the call in the body, i.e. contains(obj, pos);
// the previous check tested contains(pos, obj), disabling/enabling the overload for the wrong types
template <int D, class ScalarT, class Obj, class = void_t<decltype(contains(std::declval<Obj>(), std::declval<pos<D, ScalarT>>()))>>
constexpr optional<pos<D, ScalarT>> intersection(pos<D, ScalarT> const& p, Obj const& obj)
{
    if (contains(obj, p))
        return p;
    return {}; // point lies outside of obj
}
// intersection between point and obj is same as contains
// NOTE: the SFINAE detection expression must mirror the call in the body, i.e. contains(obj, pos);
// the previous check tested contains(pos, obj), disabling/enabling the overload for the wrong types
template <int D, class ScalarT, class Obj, class = void_t<decltype(contains(std::declval<Obj>(), std::declval<pos<D, ScalarT>>()))>>
constexpr optional<pos<D, ScalarT>> intersection(Obj const& obj, pos<D, ScalarT> const& p)
{
    if (contains(obj, p))
        return p;
    return {}; // point lies outside of obj
}
// intersects between point and obj is same as contains
template <int D, class ScalarT, class Obj>
constexpr bool intersects(pos<D, ScalarT> const& p, Obj const& obj)
{
    // a point intersects obj iff obj contains it
    return contains(obj, p);
}
// ====================================== Ray Intersections from Line Intersections ======================================
// derives ray hits from the hits of the underlying infinite line by discarding hits behind the origin (t < 0)
template <int D, class ScalarT, class Obj>
[[nodiscard]] constexpr auto intersection_parameter(ray<D, ScalarT> const& ray, Obj const& obj)
    -> decltype(intersection_parameter(inf_of(ray), obj).is_hits, intersection_parameter(inf_of(ray), obj))
{
    const auto inter = intersection_parameter(inf_of(ray), obj);
    constexpr auto maxHits = inter.max_hits;
    if (!inter.any() || inter.last() < ScalarT(0))
        return {}; // no hit or all hits behind the origin
    if constexpr (maxHits == 2)
    {
        // first hit behind the origin: only the second hit remains
        if (inter.first() < ScalarT(0))
            return {inter[1]};
    }
    else
        static_assert(maxHits == 1, "Only up to two intersections implemented");
    return inter;
}
// derives the ray hit_interval from the interval of the underlying infinite line by clamping to t >= 0
template <int D, class ScalarT, class Obj>
[[nodiscard]] constexpr auto intersection_parameter(ray<D, ScalarT> const& ray, Obj const& obj)
    -> decltype(intersection_parameter(inf_of(ray), obj).value().is_hit_interval, intersection_parameter(inf_of(ray), obj))
{
    const auto inter = intersection_parameter(inf_of(ray), obj);
    if (!inter.has_value())
        return inter;
    auto interval = inter.value();
    if (interval.end < ScalarT(0))
        return {}; // interval lies completely behind the ray origin
    // the interval starts at or before the origin exactly when the origin is inside the object
    TG_ASSERT((interval.start <= ScalarT(0)) == contains(obj, ray.origin));
    interval.start = max(interval.start, ScalarT(0));
    return interval;
}
// ====================================== Line - Object Intersections ======================================
// line - point
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<1, ScalarT> const& l, pos<1, ScalarT> const& p)
{
    // in 1D the point always lies on the line; its coordinate is the hit parameter
    return coordinates(l, p);
}
// line - line
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<2, ScalarT> const& l0, line<2, ScalarT> const& l1)
{
    // l0.pos + l0.dir * t.x == l1.pos + l1.dir * t.y <=> (l0.dir | -l1.dir) * (t.x | t.y)^T == l1.pos - l0.pos
    auto M = mat<2, 2, ScalarT>::from_cols(l0.dir, -l1.dir);
    auto t = inverse(M) * (l1.pos - l0.pos);
    if (!tg::is_finite(t.x))
        return {}; // parallel lines: M is singular, so the inverse produced non-finite values
    return t.x;
}
// line - ray
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<2, ScalarT> const& l, ray<2, ScalarT> const& r)
{
    // solve 2x2 system as in line - line; t.y is the parameter on the ray
    auto M = mat<2, 2, ScalarT>::from_cols(l.dir, -r.dir);
    auto t = inverse(M) * (r.origin - l.pos);
    // discard hits behind the ray origin or parallel (non-finite) configurations
    if (t.y < ScalarT(0) || !tg::is_finite(t.x))
        return {};
    return t.x;
}
// line - segment
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<2, ScalarT> const& l, segment<2, ScalarT> const& s)
{
    // solve 2x2 system; t.y is the parameter on the segment and must stay within [0, 1]
    auto M = mat<2, 2, ScalarT>::from_cols(l.dir, s.pos0 - s.pos1);
    auto t = inverse(M) * (s.pos0 - l.pos);
    if (t.y < ScalarT(0) || t.y > ScalarT(1) || !tg::is_finite(t.x))
        return {};
    return t.x;
}
// line - plane
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<D, ScalarT> const& l, plane<D, ScalarT> const& p)
{
    // <l.pos + t * l.dir, p.normal> = p.dis  =>  t = (p.dis - <l.pos, p.normal>) / <l.dir, p.normal>
    const auto denom = dot(p.normal, l.dir);
    // orthogonal normal and direction: the line runs parallel to the plane, no hit
    if (denom == ScalarT(0))
        return {};
    const auto numer = p.dis - dot(p.normal, l.pos);
    return numer / denom;
}
// line - halfspace
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<hit_interval<ScalarT>> intersection_parameter(line<D, ScalarT> const& l, halfspace<D, ScalarT> const& h)
{
    const auto dotND = dot(h.normal, l.dir);
    const auto dist = signed_distance(l.pos, h);
    if (dotND == ScalarT(0)) // if halfspace normal and line direction are orthogonal, there is no intersection
    {
        if (dist <= ScalarT(0))
            return {{tg::min<ScalarT>(), tg::max<ScalarT>()}}; // completely contained
        return {}; // completely outside
    }
    // parameter where the line crosses the boundary plane
    const auto t = -dist / dotND;
    if (dotND < ScalarT(0))
        return {{t, tg::max<ScalarT>()}}; // line goes into the halfspace
    return {{tg::min<ScalarT>(), t}}; // line goes out of the halfspace
}
// ray - halfspace: dedicated overload that is cheaper than the full interval computation
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<ScalarT> closest_intersection_parameter(ray<D, ScalarT> const& r, halfspace<D, ScalarT> const& h)
{
    // an origin inside (or on) the halfspace intersects immediately at t = 0
    const auto originDist = signed_distance(r.origin, h);
    if (originDist <= ScalarT(0))
        return ScalarT(0);
    // origin outside: the ray must point towards the halfspace to ever enter it
    const auto denom = dot(h.normal, r.dir);
    if (denom >= ScalarT(0))
        return {}; // pointing away from (or parallel to) the halfspace
    return -originDist / denom;
}
// line - aabb
// slab test: intersect the per-axis parameter intervals
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, aabb_boundary<D, ScalarT> const& b)
{
    // based on ideas from https://gamedev.stackexchange.com/q/18436
    auto tFirst = tg::min<ScalarT>();
    auto tSecond = tg::max<ScalarT>();
    for (auto i = 0; i < D; ++i)
    {
        // only axes the line is not (nearly) parallel to contribute a slab interval
        if (abs(l.dir[i]) > ScalarT(100) * tg::epsilon<ScalarT>)
        {
            const auto tMin = (b.min[i] - l.pos[i]) / l.dir[i];
            const auto tMax = (b.max[i] - l.pos[i]) / l.dir[i];
            // intersect this slab's interval with the running interval
            tFirst = max(tFirst, min(tMin, tMax));
            tSecond = min(tSecond, max(tMin, tMax));
        }
        else if (l.pos[i] < b.min[i] || l.pos[i] > b.max[i])
            return {}; // ray parallel to this axis and outside of the aabb
    }
    // no intersection if line misses the aabb
    if (tFirst > tSecond)
        return {};
    return {tFirst, tSecond};
}
// line - box
// slab test like the aabb version, but the slabs are given by the (not necessarily axis-aligned) half_extents vectors
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, box_boundary<D, ScalarT> const& b)
{
    // corner offsets of the box relative to the line origin
    const auto bMin = b[comp<D, ScalarT>(-1)] - l.pos;
    const auto bMax = b[comp<D, ScalarT>(1)] - l.pos;
    auto tFirst = tg::min<ScalarT>();
    auto tSecond = tg::max<ScalarT>();
    for (auto i = 0; i < D; ++i)
    {
        // projection of the line direction onto this half_extents axis
        const auto rDir = dot(l.dir, b.half_extents[i]);
        if (abs(rDir) > ScalarT(100) * tg::epsilon<ScalarT>)
        {
            const auto tMin = dot(bMin, b.half_extents[i]) / rDir;
            const auto tMax = dot(bMax, b.half_extents[i]) / rDir;
            // intersect this slab's interval with the running interval
            tFirst = max(tFirst, min(tMin, tMax));
            tSecond = min(tSecond, max(tMin, tMax));
        }
        else if (dot(bMin, b.half_extents[i]) > ScalarT(0) || dot(bMax, b.half_extents[i]) < ScalarT(0))
            return {}; // ray parallel to this half_extents axis and outside of the box
    }
    // no intersection if line misses the box
    if (tFirst > tSecond)
        return {};
    return {tFirst, tSecond};
}
// line - box2in3 (a 2D box embedded in 3D)
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<3, ScalarT> const& l, box<2, ScalarT, 3> const& b)
{
    // first intersect with the supporting plane of the box
    const auto t = intersection_parameter(l, plane<3, ScalarT>(normal_of(b), b.center));
    if (!t.any()) // line parallel to box plane
        return {};
    // then check that the hit lies within both half-extent axes
    const auto p = l[t.first()] - b.center;
    if (abs(dot(b.half_extents[0], p)) > length_sqr(b.half_extents[0]) || abs(dot(b.half_extents[1], p)) > length_sqr(b.half_extents[1]))
        return {};
    return t;
}
// line - disk
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<3, ScalarT> const& l, sphere<2, ScalarT, 3> const& d)
{
    // first intersect with the supporting plane of the disk
    const auto planeHit = intersection_parameter(l, plane<3, ScalarT>(d.normal, d.center));
    if (!planeHit.any())
        return {}; // line parallel to disk plane
    // then check that the hit point lies within the disk radius
    const auto hitPos = l[planeHit.first()];
    if (distance_sqr(hitPos, d.center) > d.radius * d.radius)
        return {};
    return planeHit;
}
// line - sphere
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, sphere_boundary<D, ScalarT> const& s)
{
    // parameter of the point on the line closest to the sphere center
    auto tCenter = dot(s.center - l.pos, l.dir);
    // compare squared distance at the closest point against the squared radius
    auto distSqr = distance_sqr(l[tCenter], s.center);
    auto radiusSqr = s.radius * s.radius;
    if (distSqr > radiusSqr)
        return {}; // line misses the sphere
    // the two hits lie symmetrically around the closest point
    auto halfChord = sqrt(radiusSqr - distSqr);
    return {tCenter - halfChord, tCenter + halfChord};
}
// line - hemisphere
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, hemisphere_boundary<D, ScalarT> const& h)
{
    // decompose into the flat cap disk and the spherical part
    return detail::merge_hits(l, caps_of(h), boundary_no_caps_of(h));
}
// line - hemisphere (spherical part only, without the cap disk)
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, hemisphere_boundary_no_caps<D, ScalarT> const& h)
{
    ScalarT hits[2] = {};
    auto numHits = 0;
    // intersect with the full sphere, then keep only hits on the hemisphere side
    const auto sphereHits = intersection_parameter(l, sphere_boundary<D, ScalarT>(h.center, h.radius));
    const auto halfSpace = halfspace<D, ScalarT>(-h.normal, h.center); // the intersection of this halfspace and the sphere is exactly the hemisphere
    for (const auto& hit : sphereHits)
        if (contains(halfSpace, l[hit]))
            hits[numHits++] = hit;
    return {hits, numHits};
}
// line/ray - hemisphere intersects (all traits variants)
template <class Obj, int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr bool intersects(Obj const& lr, hemisphere<D, ScalarT, TraitsT> const& h)
{
    static_assert(object_traits<Obj>::is_infinite, "For finite objects, complete containment within boundary has to be considered as well");
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return intersection_parameter(lr, h).any();
    else
        // solid or full boundary: test cap disk and spherical part separately (can short-circuit)
        return detail::intersects_any(lr, caps_of(h), boundary_no_caps_of(h));
}
// line - capsule
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, capsule_boundary<3, ScalarT> const& c)
{
    // decompose into the two hemispherical end caps and the open cylinder mantle
    using caps_t = hemisphere_boundary_no_caps<3, ScalarT>;
    const auto n = direction(c);
    return detail::merge_hits(l, caps_t(c.axis.pos0, c.radius, -n), caps_t(c.axis.pos1, c.radius, n), cylinder_boundary_no_caps<3, ScalarT>(c.axis, c.radius));
}
// line/ray - capsule intersects (all traits variants)
template <class Obj, class ScalarT, class TraitsT>
[[nodiscard]] constexpr bool intersects(Obj const& lr, capsule<3, ScalarT, TraitsT> const& c)
{
    static_assert(object_traits<Obj>::is_infinite, "For finite objects, complete containment within boundary has to be considered as well");
    using caps_t = sphere_boundary<3, ScalarT>; // spheres are faster than hemispheres and equivalent for the yes/no decision
    return detail::intersects_any(lr, caps_t(c.axis.pos0, c.radius), caps_t(c.axis.pos1, c.radius), cylinder_boundary_no_caps<3, ScalarT>(c.axis, c.radius));
}
// line - cylinder
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, cylinder_boundary<3, ScalarT> const& c)
{
    // decompose into the two cap disks and the open mantle
    const auto caps = caps_of(c);
    return detail::merge_hits(l, caps[0], caps[1], boundary_no_caps_of(c));
}
// line/ray - cylinder intersects (all traits variants)
template <class Obj, class ScalarT, class TraitsT>
[[nodiscard]] constexpr bool intersects(Obj const& lr, cylinder<3, ScalarT, TraitsT> const& c)
{
    static_assert(object_traits<Obj>::is_infinite, "For finite objects, complete containment within boundary has to be considered as well");
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return intersection_parameter(lr, c).any();
    else
    {
        // caps and mantle are tested separately so the test can short-circuit
        const auto caps = caps_of(c);
        return detail::intersects_any(lr, caps[0], caps[1], boundary_no_caps_of(c));
    }
}
// line - cylinder mantle (no caps)
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, cylinder_boundary_no_caps<3, ScalarT> const& c)
{
    // intersect with the infinite cylinder, then clip hits to the finite axis range
    auto const infInter = intersection_parameter(l, inf_of(c));
    if (!infInter.any())
        return infInter;
    auto const d = c.axis.pos1 - c.axis.pos0;
    // projections of the hit points onto the (unnormalized) axis
    auto const lambda0 = dot(l[infInter[0]] - c.axis.pos0, d);
    auto const lambda1 = dot(l[infInter[1]] - c.axis.pos0, d);
    ScalarT hits[2] = {};
    auto numHits = 0;
    auto const dDotD = dot(d, d);
    // keep hits whose axis projection lies within [0, <d,d>]
    if (ScalarT(0) <= lambda0 && lambda0 <= dDotD)
        hits[numHits++] = infInter[0];
    if (ScalarT(0) <= lambda1 && lambda1 <= dDotD)
        hits[numHits++] = infInter[1];
    return {hits, numHits};
}
// line - inf_cylinder
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, inf_cylinder_boundary<3, ScalarT> const& c)
{
    // sine/cosine of the angle between line direction and cylinder axis
    auto const cosA = dot(c.axis.dir, l.dir);
    auto const sinA_sqr = 1 - cosA * cosA;
    if (sinA_sqr <= 0)
        return {}; // line and cylinder are parallel
    // compute closest points of line and cylinder axis
    auto const origDiff = l.pos - c.axis.pos;
    auto const fLine = dot(l.dir, origDiff);
    auto const fAxis = dot(c.axis.dir, origDiff);
    auto const tLine = (cosA * fAxis - fLine) / sinA_sqr;
    auto const tAxis = (fAxis - cosA * fLine) / sinA_sqr;
    auto const line_axis_dist_sqr = distance_sqr(l[tLine], c.axis[tAxis]);
    auto const cyl_radius_sqr = c.radius * c.radius;
    if (cyl_radius_sqr < line_axis_dist_sqr)
        return {}; // line misses the cylinder
    // radius in 2D slice = sqrt(cyl_radius_sqr - line_axis_dist_sqr)
    // infinite tube intersection
    // the two hits lie symmetrically around the point of closest approach
    auto const s = sqrt((cyl_radius_sqr - line_axis_dist_sqr) / sinA_sqr);
    return {tLine - s, tLine + s};
}
// line - inf_cylinder in 2D: the boundary is two lines parallel to the axis at distance +/- radius
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<2, ScalarT> const& l, inf_cylinder_boundary<2, ScalarT> const& c)
{
    const auto axisNormal = perpendicular(c.axis.dir);
    const auto dirDot = dot(l.dir, axisNormal);
    if (dirDot == ScalarT(0))
        return {}; // line parallel to inf_cylinder
    // signed distance of the axis from the line origin, measured along the axis normal
    const auto axisDist = dot(c.axis.pos - l.pos, axisNormal);
    const auto [tMin, tMax] = minmax((axisDist - c.radius) / dirDot, (axisDist + c.radius) / dirDot);
    return {tMin, tMax};
}
// line - inf_cone
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<2, ScalarT> const& l, inf_cone_boundary<2, ScalarT> const& c)
{
    // in 2D the cone boundary consists of the two rays spanning the opening angle
    auto ray1 = ray<2, ScalarT>(c.apex, rotate(c.opening_dir, c.opening_angle / ScalarT(2)));
    auto ray2 = ray<2, ScalarT>(c.apex, rotate(c.opening_dir, -c.opening_angle / ScalarT(2)));
    return detail::merge_hits(l, ray1, ray2);
}
// line - inf_cone in 3D (fixes typo "openinge" in the assert message)
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, inf_cone_boundary<3, ScalarT> const& ic)
{
    // see https://lousodrome.net/blog/light/2017/01/03/intersection-of-a-ray-and-a-cone/
    auto const dv = dot(l.dir, ic.opening_dir);
    auto const cos2 = pow2(cos(ic.opening_angle * ScalarT(0.5)));
    auto const co = l.pos - ic.apex;
    auto const cov = dot(co, ic.opening_dir);
    // quadratic coefficients of the double (two-sided) cone
    auto const a = dv * dv - cos2;
    auto const b = ScalarT(2) * (dv * cov - dot(l.dir, co) * cos2);
    auto const c = cov * cov - dot(co, co) * cos2;
    auto const inter = detail::solve_quadratic(a, b, c);
    if (!inter.any())
        return inter;
    // exclude intersections with mirrored cone
    ScalarT hits[2] = {};
    auto numHits = 0;
    TG_ASSERT(ic.opening_angle <= tg::angle::from_degree(ScalarT(180))
              && "Only convex objects are supported, but an inf_cone with opening angle > 180 degree is not convex.");
    // if it is not used for solid cones, this works:
    // auto const coneDir = ic.opening_angle > 180_deg ? -ic.opening_dir : ic.opening_dir;
    // if (dot(l[inter[0]] - ic.apex, coneDir) >= ScalarT(0)) ...
    // keep only hits on the cone half that opens along opening_dir
    if (dot(l[inter[0]] - ic.apex, ic.opening_dir) >= ScalarT(0))
        hits[numHits++] = inter[0];
    if (dot(l[inter[1]] - ic.apex, ic.opening_dir) >= ScalarT(0))
        hits[numHits++] = inter[1];
    return {hits, numHits};
}
// line - cone
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, cone_boundary_no_caps<3, ScalarT> const& cone)
{
    // reconstruct apex, axis direction, and half opening angle from base and height
    auto const apex = apex_of(cone);
    auto const openingDir = -normal_of(cone.base);
    auto const borderPos = any_point(boundary_of(cone.base));
    auto const openingAngleHalf = angle_between(openingDir, normalize(borderPos - apex));
    // see https://lousodrome.net/blog/light/2017/01/03/intersection-of-a-ray-and-a-cone/
    auto const dv = dot(l.dir, openingDir);
    auto const cos2 = pow2(cos(openingAngleHalf));
    auto const co = l.pos - apex;
    auto const cov = dot(co, openingDir);
    // quadratic coefficients of the double (two-sided) infinite cone
    auto const a = dv * dv - cos2;
    auto const b = ScalarT(2) * (dv * cov - dot(l.dir, co) * cos2);
    auto const c = cov * cov - dot(co, co) * cos2;
    auto const inter = detail::solve_quadratic(a, b, c);
    if (!inter.any())
        return inter;
    // exclude intersections with mirrored cone
    ScalarT hits[2] = {};
    auto numHits = 0;
    // height of each hit above the apex along the axis; must lie within [0, cone.height]
    auto const h0 = dot(l[inter[0]] - apex, openingDir);
    auto const h1 = dot(l[inter[1]] - apex, openingDir);
    if (ScalarT(0) <= h0 && h0 <= cone.height)
        hits[numHits++] = inter[0];
    if (ScalarT(0) <= h1 && h1 <= cone.height)
        hits[numHits++] = inter[1];
    return {hits, numHits};
}
// line - pyramid
// (disabled for disk bases, which have their own cone overloads)
template <class BaseT, typename = std::enable_if_t<!std::is_same_v<BaseT, sphere<2, typename BaseT::scalar_t, 3>>>>
[[nodiscard]] constexpr hits<2, typename BaseT::scalar_t> intersection_parameter(line<3, typename BaseT::scalar_t> const& l,
                                                                                 pyramid_boundary_no_caps<BaseT> const& py)
{
    // intersect with each side face and merge the hits
    auto const faces = faces_of(py);
    return detail::merge_hits_array(l, faces, std::make_index_sequence<faces.size()>{});
}
// line - pyramid boundary (side faces plus base)
template <class BaseT>
[[nodiscard]] constexpr hits<2, typename BaseT::scalar_t> intersection_parameter(line<3, typename BaseT::scalar_t> const& l, pyramid_boundary<BaseT> const& py)
{
    // base and side faces together close the boundary
    return detail::merge_hits(l, py.base, boundary_no_caps_of(py));
}
// line - triangle2
template <class ScalarT>
[[nodiscard]] constexpr optional<hit_interval<ScalarT>> intersection_parameter(line<2, ScalarT> const& l, triangle<2, ScalarT> const& t)
{
    // a line passing through a 2D triangle crosses exactly two of its edges
    ScalarT closestIntersection = tg::max<ScalarT>();
    ScalarT furtherIntersection = tg::min<ScalarT>();
    auto numIntersections = 0;
    for (const auto& edge : edges_of(t))
    {
        const auto inter = intersection_parameter(l, edge);
        if (inter.any())
        {
            numIntersections++;
            closestIntersection = min(closestIntersection, inter.first());
            furtherIntersection = max(furtherIntersection, inter.first());
        }
    }
    if (numIntersections == 0)
        return {}; // No intersection
    // NOTE(review): a line through a vertex may report 1 or 3 edge hits and trip this assert —
    // consistent with the file-level note that degenerate cases are not handled
    TG_ASSERT(numIntersections == 2);
    return {{closestIntersection, furtherIntersection}};
}
// line - triangle3
// Moeller-Trumbore-style test; eps rejects (near-)parallel or degenerate configurations
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<3, ScalarT> const& l,
                                                                triangle<3, ScalarT> const& t,
                                                                dont_deduce<ScalarT> eps = 100 * tg::epsilon<ScalarT>)
{
    auto e1 = t.pos1 - t.pos0;
    auto e2 = t.pos2 - t.pos0;
    auto pvec = tg::cross(l.dir, e2);
    auto det = dot(pvec, e1);
    // flip the winding so det is non-negative
    if (det < ScalarT(0))
    {
        std::swap(e1, e2);
        pvec = tg::cross(l.dir, e2);
        det = -det;
    }
    if (det < eps)
        return {}; // line (nearly) parallel to the triangle plane
    auto tvec = l.pos - t.pos0;
    auto u = dot(tvec, pvec);
    if (u < ScalarT(0) || u > det)
        return {}; // outside in the first (det-scaled) barycentric coordinate
    auto qvec = cross(tvec, e1);
    auto v = dot(l.dir, qvec);
    if (v < ScalarT(0) || v + u > det)
        return {}; // outside in the second barycentric coordinate
    // line parameter of the hit, scaled back by 1/det
    return (ScalarT(1) / det) * dot(e2, qvec);
}
// line - ellipse
template <class ScalarT>
[[nodiscard]] constexpr hits<1, ScalarT> intersection_parameter(line<3, ScalarT> const& l, ellipse<2, ScalarT, 3> const& e)
{
    // first intersect with the supporting plane of the ellipse
    const auto t = intersection_parameter(l, plane<3, ScalarT>(normal_of(e), e.center));
    if (!t.any()) // line parallel to ellipse plane
        return {};
    // simplified contains(e, p) without plane check and eps == 0
    auto pc = l[t.first()] - e.center;
    auto x = dot(pc, e.semi_axes[0]);
    auto y = dot(pc, e.semi_axes[1]);
    auto a = length_sqr(e.semi_axes[0]);
    auto b = length_sqr(e.semi_axes[1]);
    // ellipse equation in the (unnormalized) semi-axes basis
    if (pow2(x / a) + pow2(y / b) <= ScalarT(1))
        return t;
    return {};
}
// line - ellipse boundary
template <int D, class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<D, ScalarT> const& l, ellipse_boundary<D, ScalarT> const& e)
{
    // transform line to ellipse space (ellipse gets unit sphere at origin)
    // NOTE(review): assumes the semi-axes are pairwise orthogonal — TODO confirm
    auto const pc = l.pos - e.center;
    vec<D, ScalarT> p;
    vec<D, ScalarT> d; // in ellipse space, this is no longer a unit vector
    for (auto i = 0; i < D; ++i)
    {
        auto const axis2 = dot(e.semi_axes[i], e.semi_axes[i]);
        p[i] = dot(pc, e.semi_axes[i]) / axis2;
        d[i] = dot(l.dir, e.semi_axes[i]) / axis2;
    }
    // to find intersection with unit sphere, <t*d + p, t*d + p> == 1 has to be solved
    return detail::solve_quadratic(dot(d, d), ScalarT(2) * dot(d, p), dot(p, p) - ScalarT(1));
}
// line - quadric_boundary (as an isosurface, not error quadric)
template <class ScalarT>
[[nodiscard]] constexpr hits<2, ScalarT> intersection_parameter(line<3, ScalarT> const& l, quadric<3, ScalarT> const& q)
{
    const auto Ad = q.A() * l.dir;
    const auto p = l.pos;
    // Substituting x in Quadric equation x^TAx + 2b^Tx + c = 0 by ray equation x = t * dir + p yields
    // d^TAd t^2 + (2p^TAd + 2bd) t + p^TAp + 2bp + c = 0
    const auto a = dot(l.dir, Ad);
    const auto b = ScalarT(2) * (dot(p, Ad) + dot(q.b(), l.dir));
    const auto c = dot(p, q.A() * vec3(p)) + ScalarT(2) * dot(q.b(), p) + q.c;
    // quadratic solver returns 0, 1, or 2 real roots
    return detail::solve_quadratic(a, b, c);
}
// ====================================== Object - Object Intersections ======================================
// sphere boundary - sphere boundary
// returns intersection circle of sphere and sphere (normal points from a to b)
// for now does not work if spheres are identical (result would be a sphere3 again)
template <class ScalarT>
[[nodiscard]] constexpr optional<sphere_boundary<2, ScalarT, 3>> intersection(sphere_boundary<3, ScalarT> const& a, sphere_boundary<3, ScalarT> const& b)
{
    auto d2 = distance_sqr(a.center, b.center);
    // TODO: intersection sphere
    if (a.center == b.center && a.radius == b.radius)
        return {}; // identical spheres: not representable as a circle
    auto d = sqrt(d2);
    // no intersection
    if (d > a.radius + b.radius)
        return {};
    // radius and centers of larger sphere (ls) and smaller sphere (ss)
    auto lsr = a.radius;
    auto ssr = b.radius;
    auto lsc = a.center;
    auto ssc = b.center;
    if (b.radius > a.radius)
    {
        // TODO: tg::swap?
        lsr = b.radius;
        ssr = a.radius;
        lsc = b.center;
        ssc = a.center;
    }
    if (d + ssr < lsr)
    {
        // Smaller sphere inside larger one and not touching it
        return {};
    }
    TG_INTERNAL_ASSERT(d > ScalarT(0));
    // squared radii of a and b
    auto ar2 = a.radius * a.radius;
    auto br2 = b.radius * b.radius;
    // t is the relative position of the circle plane along the center-to-center axis
    auto t = ScalarT(0.5) + (ar2 - br2) / (ScalarT(2) * d2);
    // position and radius of intersection circle
    auto ipos = a.center + t * (b.center - a.center);
    auto irad = sqrt(ar2 - t * t * d2);
    // non-empty intersection (circle)
    return sphere_boundary<2, ScalarT, 3>{ipos, irad, dir<3, ScalarT>((b.center - a.center) / d)};
}
// sphere boundary - sphere boundary
// returns intersection points of two circles in 2D
// for now does not work if circles are identical (result would be a circle2 again)
template <class ScalarT>
[[nodiscard]] constexpr optional<pair<pos<2, ScalarT>, pos<2, ScalarT>>> intersection(sphere_boundary<2, ScalarT> const& a, sphere_boundary<2, ScalarT> const& b)
{
    if (a.center == b.center && a.radius == b.radius)
        return {}; // degenerate case
    auto d2 = distance_sqr(a.center, b.center);
    auto d = sqrt(d2);
    auto ar = a.radius;
    auto br = b.radius;
    if (ar + br < d) // no intersection
        return {};
    if (d < abs(ar - br)) // no intersection (one inside the other)
        return {};
    TG_INTERNAL_ASSERT(d > ScalarT(0));
    // t: distance from a.center to the radical line; h: half distance between the two hit points
    auto t = (ar * ar - br * br + d2) / (2 * d);
    auto h2 = ar * ar - t * t;
    TG_INTERNAL_ASSERT(h2 >= ScalarT(0));
    auto h = sqrt(h2);
    auto h_by_d = h / d;
    auto p_between = a.center + t / d * (b.center - a.center);
    auto a_to_b = b.center - a.center;
    // 90 degree rotation of a_to_b; its length is d, so h_by_d * a_to_b_swap has length h
    auto a_to_b_swap = tg::vec2(-a_to_b.y, a_to_b.x);
    // imagining circle a on the left side of circle b...
    auto p_above = p_between + h_by_d * a_to_b_swap;
    auto p_below = p_between - h_by_d * a_to_b_swap;
    return pair{p_above, p_below};
}
// sphere boundary - plane
// returns the intersection circle of a sphere boundary and a plane
// (the circle normal points from the plane towards the sphere center)
template <class ScalarT>
[[nodiscard]] constexpr optional<sphere_boundary<2, ScalarT, 3>> intersection(sphere_boundary<3, ScalarT> const& a, plane<3, ScalarT> const& b)
{
    // signed distance of the sphere center from the plane
    auto const d = dot(a.center, b.normal) - b.dis;
    if (d > a.radius)
        return {};
    if (d < -a.radius)
        return {};
    sphere_boundary<2, ScalarT, 3> r;
    r.center = a.center - b.normal * d; // sphere center projected onto the plane
    r.normal = d >= ScalarT(0) ? b.normal : -b.normal; // points from the plane towards the sphere center
    r.radius = sqrt(a.radius * a.radius - d * d); // Pythagoras
    return r;
}
// symmetric case: delegates to the sphere-plane overload and flips the circle normal
template <class ScalarT>
[[nodiscard]] constexpr optional<sphere_boundary<2, ScalarT, 3>> intersection(plane<3, ScalarT> const& a, sphere_boundary<3, ScalarT> const& b)
{
    auto res = intersection(b, a);
    if (!res.has_value())
        return res; // no intersection: pass the empty optional through unchanged
    auto circle = res.value();
    circle.normal = -circle.normal; // make sure to point from a to b
    return circle;
}
// circle - plane
template <class ScalarT>
[[nodiscard]] constexpr hits<2, pos<3, ScalarT>> intersection(sphere<2, ScalarT, 3, boundary_tag> const& a, plane<3, ScalarT> const& b)
{
    // intersect the circle's supporting plane with b; the resulting line lies in the circle's plane,
    // so clipping it against the circle's sphere yields points on the circle itself
    // NOTE(review): assumes the two planes are not parallel — intersection(plane, plane) is degenerate otherwise; TODO confirm
    auto const l = intersection(plane_of(a), b);
    return intersection(l, sphere_boundary<3, ScalarT>(a.center, a.radius));
}
// symmetric case: forwards to intersection(circle, plane)
template <class ScalarT>
[[nodiscard]] constexpr hits<2, pos<3, ScalarT>> intersection(plane<3, ScalarT> const& a, sphere<2, ScalarT, 3, boundary_tag> const& b)
{
    return intersection(b, a);
}
// circle - sphere
template <class ScalarT>
[[nodiscard]] constexpr hits<2, pos<3, ScalarT>> intersection(sphere<2, ScalarT, 3, boundary_tag> const& a, sphere_boundary<3, ScalarT> const& s)
{
    // reduce to circle-circle: b is the circle in which s cuts the plane of a
    auto const is = intersection(plane_of(a), s);
    if (!is.has_value())
        return {};
    auto const b = is.value();
    auto d2 = distance_sqr(a.center, b.center);
    auto d = sqrt(d2);
    auto ar = a.radius;
    auto br = b.radius;
    if (ar + br < d) // no intersection
        return {};
    if (d < abs(ar - br)) // no intersection (one inside the other)
        return {};
    TG_INTERNAL_ASSERT(d > ScalarT(0));
    // same construction as the 2D circle-circle case, carried out inside the plane of a
    auto t = (ar * ar - br * br + d2) / (2 * d);
    auto h2 = ar * ar - t * t;
    TG_INTERNAL_ASSERT(h2 >= ScalarT(0));
    auto h = sqrt(h2);
    auto h_by_d = h / d;
    auto p_between = a.center + t / d * (b.center - a.center);
    // b.center lies in a's plane, so the cross product has length d and h_by_d * bitangent has length h
    auto bitangent = cross(b.center - a.center, a.normal);
    // imagining circle a on the left side of circle b...
    auto p_above = p_between + h_by_d * bitangent;
    auto p_below = p_between - h_by_d * bitangent;
    return {p_above, p_below};
}
// symmetric case: forwards to intersection(circle, sphere boundary)
template <class ScalarT>
[[nodiscard]] constexpr hits<2, pos<3, ScalarT>> intersection(sphere_boundary<3, ScalarT> const& a, sphere<2, ScalarT, 3, boundary_tag> const& b)
{
    return intersection(b, a);
}
// plane - plane
// NOTE(review): assumes the planes are not parallel — the cross product of the normals would be zero; TODO confirm callers guarantee this
template <class ScalarT>
[[nodiscard]] constexpr line<3, ScalarT> intersection(plane<3, ScalarT> const& a, plane<3, ScalarT> const& b)
{
    // see http://mathworld.wolfram.com/Plane-PlaneIntersection.html
    auto dir = normalize(cross(a.normal, b.normal));
    // find a point on the line by fixing one coordinate to 0 and solving the remaining 2x2 system;
    // the fixed coordinate is chosen so the 2x2 normal matrix is invertible
    auto p = pos<3, ScalarT>::zero;
    if (abs(dir.z) > abs(dir.x)) // solve with p.z = 0
    {
        auto n0 = tg::vec<2, ScalarT>(a.normal.x, b.normal.x);
        auto n1 = tg::vec<2, ScalarT>(a.normal.y, b.normal.y);
        auto r = tg::vec<2, ScalarT>(a.dis, b.dis);
        auto p2 = inverse(mat<2, 2, ScalarT>::from_cols(n0, n1)) * r;
        p.x = p2.x;
        p.y = p2.y;
    }
    else if (abs(dir.y) > abs(dir.x)) // solve with p.y = 0
    {
        auto n0 = tg::vec<2, ScalarT>(a.normal.x, b.normal.x);
        auto n1 = tg::vec<2, ScalarT>(a.normal.z, b.normal.z);
        auto r = tg::vec<2, ScalarT>(a.dis, b.dis);
        auto p2 = inverse(mat<2, 2, ScalarT>::from_cols(n0, n1)) * r;
        p.x = p2.x;
        p.z = p2.y;
    }
    else // solve with p.x = 0
    {
        auto n0 = tg::vec<2, ScalarT>(a.normal.y, b.normal.y);
        auto n1 = tg::vec<2, ScalarT>(a.normal.z, b.normal.z);
        auto r = tg::vec<2, ScalarT>(a.dis, b.dis);
        auto p2 = inverse(mat<2, 2, ScalarT>::from_cols(n0, n1)) * r;
        p.y = p2.x;
        p.z = p2.y;
    }
    return {p, dir};
}
// segment - segment (2D): returns the parameter on each segment, or nothing
template <class ScalarT>
[[nodiscard]] constexpr optional<pair<ScalarT, ScalarT>> intersection_parameters(segment<2, ScalarT> const& seg_0, segment<2, ScalarT> const& seg_1)
{
    /// https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
    auto const denom = (seg_0.pos0.x - seg_0.pos1.x) * (seg_1.pos0.y - seg_1.pos1.y) - (seg_0.pos0.y - seg_0.pos1.y) * (seg_1.pos0.x - seg_1.pos1.x);
    // todo: might want to check == 0 with an epsilon corridor
    // todo: colinear line segments can still intersect in a point or a line segment.
    // This might require api changes, as either a point or a line segment can be returned!
    // Possible solution: return a segment where pos0 == pos1
    if (denom == ScalarT(0))
        return {}; // parallel (collinear overlap is not handled, see todo above)
    auto const num0 = (seg_0.pos0.x - seg_1.pos0.x) * (seg_1.pos0.y - seg_1.pos1.y) - (seg_0.pos0.y - seg_1.pos0.y) * (seg_1.pos0.x - seg_1.pos1.x);
    auto const num1 = (seg_0.pos0.x - seg_0.pos1.x) * (seg_0.pos0.y - seg_1.pos0.y) - (seg_0.pos0.y - seg_0.pos1.y) * (seg_0.pos0.x - seg_1.pos0.x);
    auto const t = num0 / denom;
    auto const u = -num1 / denom;
    // both parameters must lie within their segment for a real intersection
    if (ScalarT(0) <= t && t <= ScalarT(1) && ScalarT(0) <= u && u <= ScalarT(1))
        return pair<ScalarT, ScalarT>{t, u};
    return {};
}
// segment - segment (2D): parameter on seg_0 only
template <class ScalarT>
[[nodiscard]] constexpr optional<ScalarT> intersection_parameter(segment<2, ScalarT> const& seg_0, segment<2, ScalarT> const& seg_1)
{
    // forward to the two-parameter version and discard the parameter on seg_1
    auto const both = intersection_parameters(seg_0, seg_1);
    if (!both.has_value())
        return {};
    return both.value().first;
}
// line - line (2D): returns the parameter on each line
template <class ScalarT>
[[nodiscard]] constexpr pair<ScalarT, ScalarT> intersection_parameters(line<2, ScalarT> const& l0, line<2, ScalarT> const& l1)
{
    // solve l0.pos + t.x * l0.dir == l1.pos + t.y * l1.dir
    // NOTE(review): assumes the lines are not parallel — M would be singular; TODO confirm
    auto M = mat<2, 2, ScalarT>::from_cols(l0.dir, -l1.dir);
    auto t = inverse(M) * (l1.pos - l0.pos);
    return {t.x, t.y};
}
// aabb - aabb: the overlap region, or nothing
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<aabb<D, ScalarT>> intersection(aabb<D, ScalarT> const& a, aabb<D, ScalarT> const& b)
{
    // intersect the intervals axis by axis; an empty interval on any axis means no overlap
    // (touching boxes, i.e. lo == hi on some axis, still count as overlapping)
    aabb<D, ScalarT> isect;
    for (auto i = 0; i < D; ++i)
    {
        auto const lo = max(a.min[i], b.min[i]);
        auto const hi = min(a.max[i], b.max[i]);
        if (hi < lo)
            return {};
        isect.min[i] = lo;
        isect.max[i] = hi;
    }
    return {isect};
}
// intersection point of three planes
template <class ScalarT>
[[nodiscard]] constexpr pos<3, ScalarT> intersection(plane<3, ScalarT> const& a, plane<3, ScalarT> const& b, plane<3, ScalarT> const& c)
{
    // NOTE(review): assumes the three normals are linearly independent — otherwise both steps degenerate; TODO confirm
    return intersection(intersection(a, b), c);
}
// segment - plane: parameter in [0, 1] on the segment, or nothing
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<ScalarT> intersection_parameter(segment<D, ScalarT> const& a, plane<D, ScalarT> const& p)
{
    // projection of the segment direction onto the plane normal; zero means the segment is parallel to the plane
    auto const dir_dot_n = dot(p.normal, a.pos1 - a.pos0);
    if (dir_dot_n == ScalarT(0))
        return {};
    // solve dot(n, pos0 + t * (pos1 - pos0)) == dis for t
    auto const t = (p.dis - dot(p.normal, a.pos0)) / dir_dot_n;
    // only parameters inside the segment count as hits
    if (t < 0 || t > 1)
        return {};
    return t;
}
// segment - sphere: not implemented yet; instantiating this overload is a compile error
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<segment<D, ScalarT>> intersection(segment<D, ScalarT> const& a, sphere<D, ScalarT> const& b)
{
    static_assert(always_false<ScalarT>, "not implemented");
    (void)a;
    (void)b;
    return {}; // TODO
}
// symmetric case: forwards to intersection(segment, sphere)
template <int D, class ScalarT>
[[nodiscard]] constexpr optional<segment<D, ScalarT>> intersection(sphere<D, ScalarT> const& b, segment<D, ScalarT> const& a)
{
    // NOTE: the segment must come first here; the previous `intersection(b, a)`
    // re-invoked this very overload (sphere first) and recursed forever instead of
    // dispatching to the segment-sphere implementation above
    return intersection(a, b);
}
// ====================================== Checks if Object Intersects aabb ======================================
namespace detail
{
// Helper function that uses the separating axis theorem and the provided list of axes to determine whether a and b intersect
// (only conclusive if the axis list is complete for the given pair of convex objects)
template <class A, class B>
[[nodiscard]] constexpr bool intersects_SAT(A const& a, B const& b, std::vector<vec<object_traits<B>::domain_dimension, typename B::scalar_t>> const& axes)
{
    // if the projections ("shadows") onto any axis are disjoint, the objects cannot intersect
    for (auto const& axis : axes)
        if (are_separate(shadow(a, axis), shadow(b, axis)))
            return false;
    return true;
}
// true iff the two projection intervals are disjoint
template <class ScalarT>
[[nodiscard]] constexpr bool are_separate(hit_interval<ScalarT> const& a, hit_interval<ScalarT> const& b)
{
    return b.end < a.start || a.end < b.start;
}
// projection interval of an aabb onto the given (not necessarily normalized) axis
template <int D, class ScalarT>
[[nodiscard]] constexpr hit_interval<ScalarT> shadow(aabb<D, ScalarT> const& b, vec<D, ScalarT> const& axis)
{
    auto const center = centroid_of(b);
    auto const c = dot(center, axis);
    // abs(axis) makes each half-extent contribution positive
    auto const e = dot(b.max - center, abs(axis));
    return {c - e, c + e};
}
// projection interval of a (possibly lower-dimensional) box onto the given axis
template <int ObjectD, class ScalarT, int DomainD>
[[nodiscard]] constexpr hit_interval<ScalarT> shadow(box<ObjectD, ScalarT, DomainD> const& b, vec<DomainD, ScalarT> const& axis)
{
    auto const c = dot(b.center, axis);
    auto e = ScalarT(0);
    for (auto i = 0; i < ObjectD; ++i)
        e += abs(dot(b.half_extents[i], axis));
    return {c - e, c + e};
}
// projection interval of a pyramid onto the given axis (projects every vertex)
template <class BaseT>
[[nodiscard]] constexpr hit_interval<typename BaseT::scalar_t> shadow(pyramid<BaseT> const& p, vec<3, typename BaseT::scalar_t> const& axis)
{
    using ScalarT = typename BaseT::scalar_t;
    auto tMin = tg::max<ScalarT>();
    auto tMax = tg::min<ScalarT>();
    for (auto const& vertex : vertices_of(p))
    {
        auto const t = dot(vertex, axis);
        tMin = tg::min(tMin, t);
        tMax = tg::max(tMax, t);
    }
    return {tMin, tMax};
}
}
// in 1D every line covers the whole axis, so it always hits the aabb;
// the parameter names are commented out to avoid unused-parameter warnings
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(line<1, ScalarT> const& /*l*/, aabb<1, ScalarT> const& /*b*/)
{
    return true;
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(line<2, ScalarT> const& l, aabb<2, ScalarT> const& b)
{
    // the line hits the box iff the box's projected half-extent onto the line normal
    // is at least the distance from the box center to the line
    auto const c = centroid_of(b);
    auto const shadow = dot(b.max - c, abs(perpendicular(l.dir)));
    return pow2(shadow) >= distance_sqr(c, l); // squared on both sides to avoid a sqrt
}
// line3 and line4 is deduced from intersection_parameter(l, b).has_value()
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(ray<D, ScalarT> const& r, aabb<D, ScalarT> const& b)
{
    // per-axis early out: the origin lies beyond the box on this axis and the ray points away from it
    for (auto i = 0; i < D; ++i)
    {
        if ((r.origin[i] > b.max[i] && r.dir[i] >= ScalarT(0)) || (r.origin[i] < b.min[i] && r.dir[i] <= ScalarT(0)))
            return false;
    }
    // otherwise fall back to the test of the ray's supporting line
    return intersects(inf_of(r), b);
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(segment<D, ScalarT> const& s, aabb<D, ScalarT> const& b)
{
    // cheap bounding-box rejection first, then the exact test against the segment's supporting line
    return intersects(aabb_of(s), b) && intersects(inf_of(s), b);
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(plane<D, ScalarT> const& p, aabb<D, ScalarT> const& b)
{
    // box hits plane iff its projected half-extent onto the plane normal reaches the plane
    auto const c = centroid_of(b);
    auto const shadow = dot(b.max - c, abs(p.normal));
    return shadow >= distance(c, p); // Note: no square needed, since no sqrt involved
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(halfspace<D, ScalarT> const& h, aabb<D, ScalarT> const& b)
{
    auto const c = centroid_of(b);
    auto const dist = signed_distance(c, h);
    if (dist <= ScalarT(0))
        return true; // box center already inside the halfspace
    // otherwise the box's projected half-extent must bridge the gap to the boundary plane
    auto const shadow = dot(b.max - c, abs(h.normal));
    return shadow >= dist;
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(aabb<D, ScalarT> const& a, aabb<D, ScalarT> const& b)
{
    // two boxes overlap iff their intervals overlap on every single axis
    for (auto i = 0; i < D; ++i)
    {
        if (a.max[i] < b.min[i])
            return false;
        if (b.max[i] < a.min[i])
            return false;
    }
    return true;
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(aabb_boundary<D, ScalarT> const& a, aabb<D, ScalarT> const& b)
{
    // the boundary of a intersects b iff the solid boxes overlap AND b is not strictly inside a
    auto contained = true;
    for (auto i = 0; i < D; ++i)
    {
        if (b.max[i] < a.min[i] || a.max[i] < b.min[i])
            return false; // disjoint on this axis
        contained = contained && a.min[i] < b.min[i] && b.max[i] < a.max[i];
    }
    return !contained;
}
template <int ObjectD, class ScalarT, int DomainD>
[[nodiscard]] constexpr bool intersects(box<ObjectD, ScalarT, DomainD> const& box, aabb<DomainD, ScalarT> const& b)
{
    // early out: bounding-box overlap (this also covers the aabb's face normals as SAT axes)
    if (!intersects(aabb_of(box), b))
        return false;
    if constexpr (DomainD == 1)
        return true; // the only axis was already checked above
    using vec_t = vec<DomainD, ScalarT>;
    auto axes = std::vector<vec_t>();
    auto axisDirs = tg::array<vec_t, DomainD>();
    if constexpr (DomainD == 3)
        axisDirs = {vec_t::unit_x, vec_t::unit_y, vec_t::unit_z};
    // SAT axes: the box's own axes plus (in 3D) cross products of box axes and coordinate axes
    for (auto i = 0; i < DomainD; ++i)
    {
        vec_t d;
        if constexpr (ObjectD == 2 && DomainD == 3) // box2in3
            d = i == 2 ? normal_of(box) : box.half_extents[i]; // use the plane normal as the third axis
        else
            d = box.half_extents[i];
        axes.emplace_back(d);
        if constexpr (DomainD > 2)
            for (auto j = 0; j < DomainD; ++j)
                axes.push_back(cross(d, axisDirs[j]));
        static_assert(DomainD < 4 && "Not implemented for 4D");
    }
    return detail::intersects_SAT(box, b, axes);
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(box_boundary<2, ScalarT, 3> const& box, aabb<3, ScalarT> const& b)
{
    // the boundary of a 2D box in 3D consists of its four edges
    return detail::intersects_any_array(b, edges_of(box), std::make_index_sequence<4>{});
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere<D, ScalarT> const& s, aabb<D, ScalarT> const& b)
{
    // accumulate the squared distance from the sphere center to the box axis by axis;
    // only axes on which the center lies outside the box contribute
    auto const clamped_sqr = [](ScalarT v) {
        v = tg::max(ScalarT(0), v);
        return v * v;
    };
    auto dist_sqr = ScalarT(0);
    for (auto i = 0; i < D; ++i)
    {
        dist_sqr += clamped_sqr(b.min[i] - s.center[i]); // center below the box on this axis
        dist_sqr += clamped_sqr(s.center[i] - b.max[i]); // center above the box on this axis
    }
    // the sphere hits the box iff the closest box point is within the radius
    return dist_sqr <= s.radius * s.radius;
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere<1, ScalarT, 2> const& s, aabb<2, ScalarT> const& b)
{
    // a solid 1-sphere in 2D is the segment between its two extremal points
    auto const v = perpendicular(s.normal) * s.radius;
    return intersects(segment<2, ScalarT>(s.center - v, s.center + v), b);
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere_boundary<1, ScalarT, 2> const& s, aabb<2, ScalarT> const& b)
{
    // the boundary of a 1-sphere in 2D is just its two endpoints
    auto const v = perpendicular(s.normal) * s.radius;
    return contains(b, s.center - v) || contains(b, s.center + v);
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere<2, ScalarT, 3> const& s, aabb<3, ScalarT> const& b)
{
    // early out: the disk's supporting plane must hit the box
    auto const diskPlane = plane<3, ScalarT>(s.normal, s.center);
    if (!intersects(diskPlane, b))
        return false;
    // early out, contains SAT for each aabb axis
    if (!intersects(sphere<3, ScalarT>(s.center, s.radius), b))
        return false;
    // check if disk extrema are within aabb. cross(cross(axisDir, n)) yields the following vectors
    if (contains(b, s.center))
        return true;
    auto const c = s.center;
    auto const n = s.normal;
    using vec_t = vec<3, ScalarT>;
    // disk extrema along each coordinate axis (projected into the disk plane)
    auto const vx = s.radius * normalize(vec_t(-n.y * n.y - n.z * n.z, n.x * n.y, n.x * n.z));
    if (contains(b, c + vx) || contains(b, c - vx))
        return true;
    auto const vy = s.radius * normalize(vec_t(n.x * n.y, -n.x * n.x - n.z * n.z, n.y * n.z));
    if (contains(b, c + vy) || contains(b, c - vy))
        return true;
    auto const vz = s.radius * normalize(vec_t(n.x * n.z, n.y * n.z, -n.x * n.x - n.y * n.y));
    if (contains(b, c + vz) || contains(b, c - vz))
        return true;
    // intersection test with each aabb edge
    for (auto const& edge : edges_of(b))
        if (intersects(edge, s))
            return true;
    return false;
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere_boundary<2, ScalarT, 3> const& s, aabb<3, ScalarT> const& b)
{
    // early out: the circle's supporting plane must hit the box
    auto const diskPlane = plane<3, ScalarT>(s.normal, s.center);
    if (!intersects(diskPlane, b))
        return false;
    // early out, contains SAT for each aabb axis
    if (!intersects(sphere<3, ScalarT>(s.center, s.radius), b))
        return false;
    // check if disk extrema are within aabb. cross product of axis dir and two times with n yield the following vectors
    auto const c = s.center;
    auto const n = s.normal;
    using vec_t = vec<3, ScalarT>;
    auto const eps = ScalarT(16) * epsilon<ScalarT>; // tolerance for the boundary containment checks
    auto const vx = s.radius * normalize(vec_t(-n.y * n.y - n.z * n.z, n.x * n.y, n.x * n.z));
    if (contains(b, c + vx, eps) || contains(b, c - vx, eps))
        return true;
    auto const vy = s.radius * normalize(vec_t(n.x * n.y, -n.x * n.x - n.z * n.z, n.y * n.z));
    if (contains(b, c + vy, eps) || contains(b, c - vy, eps))
        return true;
    auto const vz = s.radius * normalize(vec_t(n.x * n.z, n.y * n.z, -n.x * n.x - n.y * n.y));
    if (contains(b, c + vz, eps) || contains(b, c - vz, eps))
        return true;
    // intersection test with each aabb edge:
    // the circle crosses the box surface iff some edges hit the disk plane inside the radius and others outside
    auto inside = 0, outside = 0;
    for (auto const& edge : edges_of(b))
    {
        auto const t = intersection(edge, diskPlane);
        if (!t.has_value())
            continue;
        if (distance_sqr(t.value(), s.center) <= pow2(s.radius))
            inside++;
        else
            outside++;
        if (inside > 0 && outside > 0)
            return true;
    }
    return false;
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(hemisphere<D, ScalarT> const& h, aabb<D, ScalarT> const& b)
{
    // either the box point closest to the center lies inside the solid hemisphere,
    // or the flat cap intersects the box
    auto const closestP = project(h.center, b);
    return contains(h, closestP) || intersects(caps_of(h), b);
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(hemisphere_boundary_no_caps<1, ScalarT> const& h, aabb<1, ScalarT> const& b)
{
    // in 1D the cap-less hemisphere boundary degenerates to the single apex point
    return contains(b, h.center + h.radius * h.normal);
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(hemisphere_boundary_no_caps<D, ScalarT> const& h, aabb<D, ScalarT> const& b)
{
    auto const fullSphere = sphere<D, ScalarT>(h.center, h.radius);
    if (!intersects(fullSphere, b))
        return false; // early out
    if (intersects(caps_of(h), b))
        return true; // the box crosses the round boundary at its rim
    // classify aabb vertices
    auto const spaceUnder = halfspace<D, ScalarT>(h.normal, h.center);
    auto inside = 0, outside = 0, under = 0;
    for (auto const& vertex : vertices_of(b))
    {
        if (contains(spaceUnder, vertex))
            under++;
        else if (contains(fullSphere, vertex))
            inside++;
        else
            outside++;
        if (inside > 0 && outside > 0)
            return true; // has to intersect the boundary
    }
    if (outside < 2)
        return false; // cannot cross the boundary without intersecting the caps_of(h)
    // note: outside and under cannot cross hemisphere through the inside due to Thales' theorem
    // now only a secant is left to check. Since inside == 0, we can check the closest projection onto the aabb
    auto const closestP = project(h.center, b);
    return contains(solid_of(h), closestP);
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(capsule<3, ScalarT> const& c, aabb<3, ScalarT> const& b)
{
    // early out: bounding boxes
    if (!intersects(aabb_of(c), b))
        return false;
    // check if the line through the axis intersects the aabb
    auto const line = inf_of(c.axis);
    auto const hits = intersection_parameter(line, boundary_of(b));
    if (hits.any())
    {
        auto const len = length(c.axis);
        auto const t = clamp(hits.first(), ScalarT(0), len);
        for (auto const& hit : hits)
        {
            if (ScalarT(0) - c.radius <= hit && hit <= len + c.radius)
                return true; // capsule axis intersects aabb
            if (t != clamp(hit, ScalarT(0), len))
                return true; // intersections before and after the axis can only occur if it lies within aabb
        }
        // all hits on one side of the axis: test the sphere at the nearer capsule end
        return intersects(sphere<3, ScalarT>(line[t], c.radius), b);
    }
    // test spheres at both capsule ends (cheap)
    if (intersects(sphere<3, ScalarT>(c.axis.pos0, c.radius), b) || intersects(sphere<3, ScalarT>(c.axis.pos1, c.radius), b))
        return true;
    // now only intersections between aabb edges and capsule mantle remain
    auto const r2 = c.radius * c.radius;
    for (auto const& edge : edges_of(b))
        if (distance_sqr(edge, c.axis) <= r2)
            return true;
    return false;
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(cylinder<3, ScalarT> const& c, aabb<3, ScalarT> const& b)
{
    // early out: bounding boxes
    if (!intersects(aabb_of(c), b))
        return false;
    // check if the line through the axis intersects the aabb
    auto const line = inf_of(c.axis);
    auto const len = length(c.axis);
    auto const hits = intersection_parameter(line, boundary_of(b));
    if (hits.any())
    {
        auto const t = clamp(hits.first(), ScalarT(0), len);
        for (auto const& hit : hits)
        {
            if (ScalarT(0) <= hit && hit <= len)
                return true; // cylinder axis intersects aabb
            if (t != clamp(hit, ScalarT(0), len))
                return true; // intersections before and after the axis can only occur if it lies within aabb
        }
        // all hits on one side: test the cap disk closest to the box
        return intersects(sphere<2, ScalarT, 3>(line[t], c.radius, line.dir), b);
    }
    // test disks at both cylinder ends
    if (intersects(sphere<2, ScalarT, 3>(c.axis.pos0, c.radius, line.dir), b) || //
        intersects(sphere<2, ScalarT, 3>(c.axis.pos1, c.radius, line.dir), b))
        return true;
    // now only intersections between aabb edges and cylinder mantle remain
    auto const r2 = c.radius * c.radius;
    for (auto const& edge : edges_of(b))
    {
        auto [te, tl] = closest_points_parameters(edge, line);
        // the closest axis point must lie strictly between the caps (caps were already tested)
        if (ScalarT(0) < tl && tl < len && distance_sqr(edge[te], line[tl]) <= r2)
            return true;
    }
    return false;
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(cylinder_boundary_no_caps<3, ScalarT> const& c, aabb<3, ScalarT> const& b)
{
    // alternative idea that is more efficient:
    // compute the polygon of the aabb from the projection in cylinder direction and intersect it with a circle
    //
    // line = inf_of(c.axis);
    // len = length(c.axis);
    // compute planes p1 and p2 spanned by both caps
    // foreach(vertex in vertices_of(b)) {
    //     t = coordinates(line, vertex);
    //     if (0 <= t <= len) add(vertex - t*line.dir);
    // }
    // foreach(edge in edges_of(b)) {
    //     p = intersection(edge, p1);
    //     if (p.has_value()) add(p.value());
    //     p = intersection(edge, p2);
    //     if (p.has_value()) add(p.value() - len*line.dir);
    // }
    // compute polygon as convex hull of all added vertices
    // return intersects(caps_of(c)[0], polygon); // maybe in 2D
    // early out: bounding boxes
    if (!intersects(aabb_of(c), b))
        return false;
    // check intersections between line through the axis and the aabb
    auto const line = inf_of(c.axis);
    auto const len = length(c.axis);
    // tests the mantle circle at parameter t against the box
    auto const intersects_at = [&](ScalarT t) { return intersects(sphere_boundary<2, ScalarT, 3>(line[t], c.radius, line.dir), b); };
    auto const hits = intersection_parameter(line, boundary_of(b));
    for (auto const& hit : hits)
        if (ScalarT(0) < hit && hit < len && intersects_at(hit))
            return true;
    // test disks at both cylinder ends
    if (intersects(sphere_boundary<2, ScalarT, 3>(c.axis.pos0, c.radius, line.dir), b) || //
        intersects(sphere_boundary<2, ScalarT, 3>(c.axis.pos1, c.radius, line.dir), b))
        return true;
    // now only intersections between aabb edges and cylinder mantle remain
    for (auto const& edge : edges_of(b))
    {
        auto [te, tl] = closest_points_parameters(edge, line);
        if (ScalarT(0) < tl && tl < len && intersects_at(tl))
            return true;
    }
    return false;
}
template <int D, class ScalarT>
[[nodiscard]] constexpr bool intersects(inf_cylinder<D, ScalarT> const& c, aabb<D, ScalarT> const& b)
{
    // either the axis itself pierces the box ...
    if (intersects(c.axis, b))
        return true;
    // ... or some box edge passes within the radius of the axis
    auto const radius_sqr = c.radius * c.radius;
    for (auto const& e : edges_of(b))
        if (distance_sqr(e, c.axis) <= radius_sqr)
            return true;
    return false;
}
template <class BaseT>
[[nodiscard]] constexpr auto intersects(pyramid<BaseT> const& p, aabb<3, typename BaseT::scalar_t> const& b) -> decltype(faces_of(p), true)
{
    // SAT: box faces
    if (!intersects(aabb_of(p), b))
        return false;
    // SAT: pyramid faces
    using vec_t = vec<3, typename BaseT::scalar_t>;
    auto axes = std::vector<vec_t>();
    auto const faces = faces_of(p);
    axes.emplace_back(normal_of(faces.base));
    for (auto const& face : faces.mantle)
        axes.emplace_back(normal_of(face));
    if (!detail::intersects_SAT(p, b, axes))
        return false;
    // SAT: cross product of edge pairs
    axes.clear();
    array<vec_t, 3> axisDirs = {vec_t::unit_x, vec_t::unit_y, vec_t::unit_z};
    for (auto const& edge : edges_of(p))
    {
        vec_t d = direction(edge);
        for (auto j = 0; j < 3; ++j)
            axes.push_back(cross(d, axisDirs[j]));
    }
    // all candidate axes checked — conclusive for two convex polytopes
    return detail::intersects_SAT(p, b, axes);
}
template <class BaseT>
[[nodiscard]] constexpr auto intersects(pyramid_boundary_no_caps<BaseT> const& p, aabb<3, typename BaseT::scalar_t> const& b) -> decltype(faces_of(p), true)
{
    // SAT: box faces
    if (!intersects(aabb_of(p), b))
        return false;
    // the cap-less boundary intersects iff any mantle face does
    auto const faces = faces_of(p);
    return detail::intersects_any_array(b, faces, std::make_index_sequence<faces.size()>{});
}
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(triangle<2, ScalarT> const& a, aabb<2, ScalarT> const& b)
{
    // early out: bounding boxes (covers the aabb's own axes for SAT)
    if (!intersects(aabb_of(a), b))
        return false;
    auto p0 = a.pos0;
    auto p1 = a.pos1;
    auto p2 = a.pos2;
    // early in: any triangle vertex inside the box
    if (contains(b, p0) || contains(b, p1) || contains(b, p2))
        return true;
    auto aabb_pts = vertices_of(b);
    // SAT helper: projects the edge (pa with normal n) and the opposite vertex pb as well as
    // all box vertices onto n and checks whether the intervals are disjoint
    auto const is_separate = [&](pos<2, ScalarT> pa, vec<2, ScalarT> n, pos<2, ScalarT> pb) {
        auto da = dot(n, pa);
        auto db = dot(n, pb);
        // TODO: faster
        auto a_min = min(da, db);
        auto a_max = max(da, db);
        auto b_min = dot(n, aabb_pts[0]);
        auto b_max = b_min;
        for (auto i = 1; i < 4; ++i)
        {
            auto d = dot(n, aabb_pts[i]);
            b_min = min(b_min, d);
            b_max = max(b_max, d);
        }
        if (b_max < a_min || b_min > a_max)
            return true;
        return false;
    };
    // SAT over the three triangle edge normals
    if (is_separate(p0, perpendicular(p1 - p0), p2))
        return false;
    if (is_separate(p1, perpendicular(p2 - p1), p0))
        return false;
    if (is_separate(p2, perpendicular(p0 - p2), p1))
        return false;
    return true;
}
// NOTE: does NOT work for integer objects
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(triangle<3, ScalarT> const& tri_in, aabb<3, ScalarT> const& bb_in)
{
    using pos_t = pos<3, ScalarT>;
    using vec_t = vec<3, ScalarT>;
    // move everything so the box is centered at the origin (simplifies all projections below)
    auto const center = centroid_of(bb_in);
    auto const amin = pos_t(bb_in.min - center);
    auto const amax = pos_t(bb_in.max - center);
    auto const bb = aabb<3, ScalarT>(amin, amax);
    auto const p0 = pos_t(tri_in.pos0 - center);
    auto const p1 = pos_t(tri_in.pos1 - center);
    auto const p2 = pos_t(tri_in.pos2 - center);
    // early out: AABB vs tri AABB
    auto tri_aabb = aabb_of(p0, p1, p2);
    if (tri_aabb.max.x < amin.x || tri_aabb.max.y < amin.y || tri_aabb.max.z < amin.z || //
        tri_aabb.min.x > amax.x || tri_aabb.min.y > amax.y || tri_aabb.min.z > amax.z)
        return false;
    // strict containment (all comparisons strict, unlike contains(b, p))
    auto const proper_contains = [](aabb<3, ScalarT> const& b, pos_t const& p) {
        return b.min.x < p.x && p.x < b.max.x && //
               b.min.y < p.y && p.y < b.max.y && //
               b.min.z < p.z && p.z < b.max.z;
    };
    // early in: tri points vs AABB
    if (proper_contains(bb, p0) || proper_contains(bb, p1) || proper_contains(bb, p2))
        return true;
    // get adjusted tri base plane
    auto plane = tg::plane<3, ScalarT>(normal_of(tri_in), p0);
    // fast plane / AABB test
    {
        auto pn = plane.normal;
        auto bn = dot(abs(pn), amax); // box projection radius onto the plane normal
        // min dis: d - bn
        if (bn < -plane.dis)
            return false;
        // max dis: d + bn
        if (-plane.dis < -bn)
            return false;
    }
    // 9 axis SAT test
    {
        // tests one cross-product axis: n = edge x unit axis; tp0, tp1 are the two
        // distinct projections of the triangle vertices onto n
        auto const is_seperating = [amax](vec<3, ScalarT> const& n, pos_t const& tp0, pos_t const& tp1) -> bool {
            if (tg::is_zero_vector(n))
                return false; // not a real candidate axis
            // fast point / AABB separation test
            auto bn = dot(abs(n), amax);
            auto tn0 = dot(n, tp0);
            auto tn1 = dot(n, tp1);
            auto tmin = min(tn0, tn1);
            auto tmax = max(tn0, tn1);
            auto bmin = -bn;
            auto bmax = bn;
            if (tmax < bmin)
                return true;
            if (bmax < tmin)
                return true;
            return false;
        };
        if (is_seperating(cross(p1 - p0, vec_t::unit_x), p0, p2))
            return false;
        if (is_seperating(cross(p1 - p0, vec_t::unit_y), p0, p2))
            return false;
        if (is_seperating(cross(p1 - p0, vec_t::unit_z), p0, p2))
            return false;
        if (is_seperating(cross(p2 - p0, vec_t::unit_x), p0, p1))
            return false;
        if (is_seperating(cross(p2 - p0, vec_t::unit_y), p0, p1))
            return false;
        if (is_seperating(cross(p2 - p0, vec_t::unit_z), p0, p1))
            return false;
        if (is_seperating(cross(p1 - p2, vec_t::unit_x), p0, p2))
            return false;
        if (is_seperating(cross(p1 - p2, vec_t::unit_y), p0, p2))
            return false;
        if (is_seperating(cross(p1 - p2, vec_t::unit_z), p0, p2))
            return false;
    }
    // found no separating axis? -> intersection
    return true;
}
// ====================================== Checks if Object Intersects Object ======================================
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(segment<3, ScalarT> const& seg, sphere<2, ScalarT, 3> const& disk)
{
    // intersect the segment with the disk's supporting plane, then check the hit point against the radius
    auto t = intersection(seg, tg::plane<3, ScalarT>(disk.normal, disk.center));
    return t.has_value() && distance_sqr(t.value(), disk.center) <= pow2(disk.radius);
}
// symmetric case: forwards to intersects(segment, disk)
template <class ScalarT>
[[nodiscard]] constexpr bool intersects(sphere<2, ScalarT, 3> const& disk, segment<3, ScalarT> const& seg)
{
    return intersects(seg, disk);
}
} // namespace tg
|
package co.infinum.goldfinger;
/**
 * Signals that Goldfinger could not be initialized.
 */
class InitializationException extends Exception {

    /** Creates the exception with a fixed descriptive message. */
    InitializationException() {
        super("Goldfinger failed to initialize.");
    }
}
|
package lister
import (
"fmt"
"strings"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/greengrass"
"github.com/trek10inc/awsets/context"
"github.com/trek10inc/awsets/resource"
)
// AWSGreengrassResourceDefinition lists AWS Greengrass resource definitions
// and their versions (see Types for the resource types it produces).
type AWSGreengrassResourceDefinition struct {
}
func init() {
i := AWSGreengrassResourceDefinition{}
listers = append(listers, i)
}
// Types returns the resource types this lister can produce.
func (l AWSGreengrassResourceDefinition) Types() []resource.ResourceType {
	return []resource.ResourceType{
		resource.GreengrassResourceDefinition,
		resource.GreengrassResourceDefinitionVersion,
	}
}
// List enumerates all Greengrass resource definitions in the region, and for
// each one enumerates its versions, fetching the full version document.
// Pagination is followed on both levels via NextToken.
func (l AWSGreengrassResourceDefinition) List(ctx context.AWSetsCtx) (*resource.Group, error) {
	svc := greengrass.New(ctx.AWSCfg)
	rg := resource.NewGroup()
	var nextToken *string
	for {
		resourcedefs, err := svc.ListResourceDefinitionsRequest(&greengrass.ListResourceDefinitionsInput{
			MaxResults: aws.String("100"),
			NextToken:  nextToken,
		}).Send(ctx.Context)
		if err != nil {
			// greengrass errors are not of type awserr.Error
			if strings.Contains(err.Error(), "TooManyRequestsException") {
				// If greengrass is not supported in a region, it returns a
				// "TooManyRequests exception" -- treat as "nothing to list".
				return rg, nil
			}
			return rg, fmt.Errorf("failed to list greengrass resource definitions: %w", err)
		}
		for _, v := range resourcedefs.Definitions {
			// Fix: tag the definition with its own type. The previous code used
			// resource.GreengrassGroup here, mislabeling every resource
			// definition (Types() declares GreengrassResourceDefinition).
			r := resource.New(ctx, resource.GreengrassResourceDefinition, v.Id, v.Name, v)
			var cdNextToken *string
			for {
				resourceDefVersions, err := svc.ListResourceDefinitionVersionsRequest(&greengrass.ListResourceDefinitionVersionsInput{
					ResourceDefinitionId: v.Id,
					MaxResults:           aws.String("100"),
					NextToken:            cdNextToken,
				}).Send(ctx.Context)
				if err != nil {
					return rg, fmt.Errorf("failed to list greengrass resource definition versions for %s: %w", *v.Id, err)
				}
				for _, rdId := range resourceDefVersions.Versions {
					// Fetch the full version document for each version stub.
					rd, err := svc.GetResourceDefinitionVersionRequest(&greengrass.GetResourceDefinitionVersionInput{
						ResourceDefinitionId:        rdId.Id,
						ResourceDefinitionVersionId: rdId.Version,
					}).Send(ctx.Context)
					if err != nil {
						return rg, fmt.Errorf("failed to list greengrass resource definition version for %s, %s: %w", *rdId.Id, *rdId.Version, err)
					}
					rdRes := resource.NewVersion(ctx, resource.GreengrassResourceDefinitionVersion, rd.Id, rd.Id, rd.Version, rd)
					rdRes.AddRelation(resource.GreengrassResourceDefinition, v.Id, "")
					// TODO relationships to resources
					r.AddRelation(resource.GreengrassResourceDefinitionVersion, rd.Id, rd.Version)
					rg.AddResource(rdRes)
				}
				if resourceDefVersions.NextToken == nil {
					break
				}
				cdNextToken = resourceDefVersions.NextToken
			}
			rg.AddResource(r)
		}
		if resourcedefs.NextToken == nil {
			break
		}
		nextToken = resourcedefs.NextToken
	}
	return rg, nil
}
|
def permutations(arr):
    """Return all permutations of ``arr`` as a list of lists.

    An empty or single-element input has exactly one permutation. The input
    list is never mutated, and (fix) the result never aliases the caller's
    list -- the old code returned ``[arr]`` for the base cases, so mutating
    the result would corrupt the caller's data.
    """
    # Base case: 0 or 1 elements -> a single permutation (copied, not aliased).
    if len(arr) <= 1:
        return [list(arr)]
    result = []
    # Fix each element in turn as the head, then permute the remainder.
    for i, head in enumerate(arr):
        # Everything except the element at position i.
        remain = arr[:i] + arr[i + 1:]
        for tail in permutations(remain):
            result.append([head] + tail)
    return result
<gh_stars>0
#include "MultiSelect.h"
// Constructs the multi-select action; it is marked non-undoable (SetUndo(false)).
MultiSelect::MultiSelect(ApplicationManager *pAppManager) :Action(pAppManager)
{
	SetUndo(false);
}
// Reads clicks in a loop, adding the clicked statement or connector to the
// multi-selection, until the user clicks an empty spot of the drawing area
// (neither a statement nor a connector).
void MultiSelect::ReadActionParameters(){
	Point p;
	Input *pIn = pManager->GetInput();
	Output *pOut = pManager->GetOutput();
	//deselecting any normally selected connector or statement
	if (pManager->GetSelectedStatement() != NULL)
		pManager->GetSelectedStatement()->SetSelected(false);
	pManager->SetSelectedStatement(NULL);
	if (pManager->GetSelectedConnector() != NULL)
		pManager->GetSelectedConnector()->set_selected(false);
	// NOTE(review): unlike the statement above, the selected connector is not
	// cleared via SetSelectedConnector(NULL) -- confirm whether that is intended.
	do{
		pOut->PrintMessage("Selecting Statements or connectors , after you Finish selecting click on an empty space of the drawing area");
		pIn->GetPointClicked(p);
		ptr = pManager->GetStatement(p);
		// statements that were cut cannot be multi-selected
		if (ptr != NULL && ptr->GetCut())
			ptr = NULL;
		if (ptr != NULL){
			pManager->addSelectedStatement(ptr);
			pManager->UpdateInterface();
		}
		else if (ptr == NULL)
		{
			con = pManager->GetConnector(p);
			if (con != NULL)
			{
				pManager->addSelectedConnector(con);
				pManager->UpdateInterface();
			}
		}
		// NOTE(review): when a statement is hit, `con` is not reassigned in that
		// iteration, so the loop condition reads its previous value -- verify the
		// member is initialized before first use.
	} while (!(ptr == NULL && con ==NULL));
}
// Gathers the multi-selection from user clicks, then prompts for the next action.
void MultiSelect::Execute()
{
	ReadActionParameters();
	pManager->GetOutput()->PrintMessage("statements selected now choose an action");
}
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# Runs tests related to kubectl create --dry-run.
run_kubectl_create_dry_run_tests() {
  set -o nounset
  set -o errexit

  create_and_use_new_namespace
  kube::log::status "Testing kubectl create dry-run"

  # Pre-Condition: no POD exists
  kube::test::get_object_assert pods "{{range.items}}{{${id_field:?}}}:{{end}}" ''
  # dry-run create: neither client- nor server-side dry-run may persist the object
  kubectl create --dry-run=client -f hack/testdata/pod.yaml "${kube_flags[@]:?}"
  kubectl create --dry-run=server -f hack/testdata/pod.yaml "${kube_flags[@]:?}"
  # check no POD exists
  kube::test::get_object_assert pods "{{range.items}}{{${id_field:?}}}:{{end}}" ''

  set +o nounset
  set +o errexit
}
# Runs tests related to kubectl create --filename(-f) --selector(-l).
run_kubectl_create_filter_tests() {
  set -o nounset
  set -o errexit

  create_and_use_new_namespace
  kube::log::status "Testing kubectl create filter"

  ## kubectl create -f with label selector should only create matching objects
  # Pre-Condition: no POD exists
  kube::test::get_object_assert pods "{{range.items}}{{${id_field:?}}}:{{end}}" ''
  # create: the directory contains both a matching and a non-matching pod
  kubectl create -l unique-label=bingbang -f hack/testdata/filter "${kube_flags[@]:?}"
  # check right pod exists
  kube::test::get_object_assert 'pods selector-test-pod' "{{${labels_field:?}.name}}" 'selector-test-pod'
  # check wrong pod doesn't exist
  output_message=$(! kubectl get pods selector-test-pod-dont-apply 2>&1 "${kube_flags[@]}")
  kube::test::if_has_string "${output_message}" 'pods "selector-test-pod-dont-apply" not found'
  # cleanup
  kubectl delete pods selector-test-pod

  set +o nounset
  set +o errexit
}
# Runs tests for error cases of kubectl create.
run_kubectl_create_error_tests() {
  set -o nounset
  set -o errexit

  create_and_use_new_namespace
  kube::log::status "Testing kubectl create with error"

  # Passing no arguments to create is an error
  ! kubectl create || exit 1

  # Posting a pod to namespaces should fail. Also tests --raw forcing the post location
  grep -q 'the object provided is unrecognized (must be of type Namespace)' <<< "$( kubectl create "${kube_flags[@]}" --raw /api/v1/namespaces -f test/fixtures/doc-yaml/admin/limitrange/valid-pod.yaml --v=8 2>&1 )"
  # --raw and --edit cannot be combined
  grep -q "raw and --edit are mutually exclusive" <<< "$( kubectl create "${kube_flags[@]}" --raw /api/v1/namespaces -f test/fixtures/doc-yaml/admin/limitrange/valid-pod.yaml --edit 2>&1 )"

  set +o nounset
  set +o errexit
}
# Runs kubectl create job tests
run_create_job_tests() {
  set -o nounset
  set -o errexit

  create_and_use_new_namespace

  # Test kubectl create job
  kubectl create job test-job --image=registry.k8s.io/nginx:test-cmd
  # Post-Condition: job nginx is created
  kube::test::get_object_assert 'job test-job' "{{${image_field0:?}}}" 'registry.k8s.io/nginx:test-cmd'
  # Clean up
  kubectl delete job test-job "${kube_flags[@]}"

  # Test kubectl create job with command
  kubectl create job test-job-pi "--image=$IMAGE_PERL" -- perl -Mbignum=bpi -wle 'print bpi(20)'
  kube::test::get_object_assert 'job test-job-pi' "{{$image_field0}}" "$IMAGE_PERL"
  # Clean up
  kubectl delete job test-job-pi

  # Test kubectl create job from cronjob
  # Pre-Condition: create a cronjob
  kubectl create cronjob test-pi --schedule="* */5 * * *" "--image=$IMAGE_PERL" -- perl -Mbignum=bpi -wle 'print bpi(10)'
  kubectl create job my-pi --from=cronjob/test-pi
  # Post-condition: container args contain expected command
  output_message=$(kubectl get job my-pi -o go-template='{{(index .spec.template.spec.containers 0).command}}' "${kube_flags[@]}")
  kube::test::if_has_string "${output_message}" "perl -Mbignum=bpi -wle print bpi(10)"

  # Clean up
  kubectl delete job my-pi
  kubectl delete cronjob test-pi

  set +o nounset
  set +o errexit
}
# Runs tests for kubectl create -k (kustomization directory).
run_kubectl_create_kustomization_directory_tests() {
  set -o nounset
  set -o errexit

  ## kubectl create -k <dir> for kustomization directory
  # Pre-Condition: No configmaps with name=test-the-map, no Deployment, Service exist
  kube::test::get_object_assert 'configmaps --field-selector=metadata.name=test-the-map' "{{range.items}}{{${id_field:?}}}:{{end}}" ''
  kube::test::get_object_assert deployment "{{range.items}}{{$id_field}}:{{end}}" ''
  kube::test::get_object_assert services "{{range.items}}{{$id_field}}:{{end}}" ''

  # Command
  kubectl create -k hack/testdata/kustomize

  # Post-condition: test-the-map, test-the-deployment, test-the-service exist
  # Check that all items in the list are printed
  kube::test::get_object_assert 'configmap test-the-map' "{{${id_field}}}" 'test-the-map'
  kube::test::get_object_assert 'deployment test-the-deployment' "{{${id_field}}}" 'test-the-deployment'
  kube::test::get_object_assert 'service test-the-service' "{{${id_field}}}" 'test-the-service'

  # cleanup
  kubectl delete -k hack/testdata/kustomize

  set +o nounset
  set +o errexit
}
# has_one_of_error_message MESSAGE MATCH1 MATCH2
# Succeeds (status 0) when MESSAGE contains MATCH1 or MATCH2; otherwise prints a
# failure report (including the caller's location) and returns 1.
has_one_of_error_message() {
  local message=$1 match1=$2 match2=$3

  if grep -q "${match1}" <<< "${message}" || grep -q "${match2}" <<< "${message}"; then
    echo "Successful"
    echo "message:${message}"
    echo "has either:${match1}"
    echo "or:${match2}"
    return 0
  fi

  echo "FAIL!"
  echo "message:${message}"
  echo "has neither:${match1}"
  echo "nor:${match2}"
  caller
  return 1
}
# Runs tests related to kubectl create --validate
run_kubectl_create_validate_tests() {
  set -o nounset
  set -o errexit

  create_and_use_new_namespace

  # Each case feeds a deployment manifest with unknown and duplicate fields and
  # checks whether validation rejects it (strict) or lets it through (warn/ignore/false).

  ## test --validate no value expects default strict is used
  kube::log::status "Testing kubectl create --validate"
  # create and verify
  output_message=$(! kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate 2>&1)
  has_one_of_error_message "${output_message}" 'strict decoding error' 'error validating data'

  ## test --validate=true
  kube::log::status "Testing kubectl create --validate=true"
  # create and verify
  output_message=$(! kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=true 2>&1)
  has_one_of_error_message "${output_message}" 'strict decoding error' 'error validating data'

  ## test --validate=false
  kube::log::status "Testing kubectl create --validate=false"
  # create and verify
  output_message=$(kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=false)
  kube::test::if_has_string "${output_message}" "deployment.apps/invalid-nginx-deployment created"
  # cleanup
  kubectl delete deployment invalid-nginx-deployment

  ## test --validate=strict
  kube::log::status "Testing kubectl create --validate=strict"
  # create and verify
  output_message=$(! kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=strict 2>&1)
  has_one_of_error_message "${output_message}" 'strict decoding error' 'error validating data'

  ## test --validate=warn
  kube::log::status "Testing kubectl create --validate=warn"
  # create and verify
  output_message=$(kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=warn)
  kube::test::if_has_string "${output_message}" "deployment.apps/invalid-nginx-deployment created"
  # cleanup
  kubectl delete deployment invalid-nginx-deployment

  ## test --validate=ignore
  kube::log::status "Testing kubectl create --validate=ignore"
  # create and verify
  output_message=$(kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=ignore)
  kube::test::if_has_string "${output_message}" "deployment.apps/invalid-nginx-deployment created"
  # cleanup
  kubectl delete deployment invalid-nginx-deployment

  ## test default is strict validation
  kube::log::status "Testing kubectl create"
  # create and verify
  output_message=$(! kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml 2>&1)
  has_one_of_error_message "${output_message}" 'strict decoding error' 'error validating data'

  ## test invalid validate value
  kube::log::status "Testing kubectl create --validate=foo"
  # create and verify
  output_message=$(! kubectl create -f hack/testdata/invalid-deployment-unknown-and-duplicate-fields.yaml --validate=foo 2>&1)
  kube::test::if_has_string "${output_message}" 'invalid - validate option "foo"'

  set +o nounset
  set +o errexit
}
|
# Run fio with the workload definitions in jobfile.fio.
fio jobfile.fio
|
#!/bin/bash
# annotate_references.sh
# Seungsoo Kim
#
# For each reference genome listed in ../refs.txt and each requested bin size,
# generates bin BED files and per-chromosome / per-bin annotation tables
# (plus subtelomere and pericentromere BED files) under $dir.
# NOTE(review): assumes $dir/$ref.chrom_lengths already exists for each
# reference -- confirm which upstream step produces it.

# load modules
module load bowtie2/2.2.3

# PARAMETERS
# directory for output
out="../nobackup"
# directory for reference files
dir="../nobackup/annotations"
if [ ! -d "$dir" ]; then
  mkdir "$dir"
fi
# subdirectory for bowtie2 indices
bt2="../nobackup/bowtie2"
if [ ! -d "$bt2" ]; then
  mkdir "$bt2"
fi
# file with restriction enzymes - tab delimited file with following columns:
# 1) restriction enzyme name, 2) restriction site, 3) number of nt offset for cut
# NOTE(review): $restriction_enzymes, $out, and the bowtie2 module are set up but
# not used in this script -- presumably consumed by companion scripts; verify.
restriction_enzymes="../restr_enz.txt"
# file with references - tab-delimited file with following columns:
# 1) reference name, 2) reference fasta file path, 3) reference centromere annotation file path, 4) list of binsizes
refs="../refs.txt"

while read ref ref_file ref_cens binsizes
do
  # binsizes is a comma-separated list; process each one
  for binsize in $(echo $binsizes | tr "," "\n")
  do
    #bed file of bins: one numbered interval of width binsize per chromosome chunk
    paste <(awk -v b=$binsize 'BEGIN{OFS="\t"; i=0}
      {for (j = 0; j < $2; j = j + b){
        if ($2 < j + b)
          print $1, j, $2, i;
        else print $1, j, j + b, i;
        i = i + 1}}' $dir/$ref.chrom_lengths) > $dir/$ref.$binsize.bed
    #annotations by chromosome - chromosome name, bin start, bin end, bin cen, chromosome number
    paste <(awk -v b=$binsize 'BEGIN{OFS="\t"; start = 0; end = 0}
      {end = end + int($2/b) + 1; print $1, start, end; start = start + int($2/b) + 1}' $dir/$ref.chrom_lengths) \
      <(awk -v b=$binsize '{OFS="\t"; print int(($2+$3)/(2*b))}' ../${ref_cens}) | \
      awk '{OFS="\t"; print $1, $1, $2, $3, $2+$4}' | \
      sed 's/_/ /' | \
      awk '{OFS="\t"; print $3, $4, $5, $6, $2}' > $dir/$ref.$binsize.chr_annotations
    #annotations by bin - bin number, chromosome name, distance from centromere, chromosome arm length
    awk '{OFS="\t"; for (i=$2; i<$3; i++)
      if (i<$4) print i, $1, $4-i, $4-$2;
      else if (i==$4) print i, $1, 0, 0;
      else print i, $1, i-$4, $3-$4-1}' $dir/$ref.$binsize.chr_annotations | \
      awk '{OFS="\t"; print $2, $0}' | \
      sed 's/_/ /' | \
      awk '{OFS="\t"; print $3, $4, $5, $6, $7, $2}' > $dir/$ref.$binsize.bin_annotations
    #create subtelomere bed file: first and last bin-sized window of each chromosome
    awk -v b=$binsize '{OFS="\t"; print $1, 0, b; print $1, $2-b, $2}' $dir/$ref.chrom_lengths | awk '{OFS="\t"; print $1, $2, $3, NR-1}' > $dir/$ref.$binsize.subtel.bed
    #create pericentromeric bed file: bin-sized window centered on each centromere midpoint
    awk -v b=$binsize '{OFS="\t"; print $1, int(($2+$3-b)/2), int(($2+$3+b)/2), NR-1}' ../${ref_cens} > $dir/$ref.$binsize.pericen.bed
  done
done < $refs
|
<filename>docs/html/search/variables_c.js<gh_stars>0
var searchData=
[
['passed_1402',['passed',['../structCatch_1_1Counts.html#ad28daaf3de28006400208b6dd0c631e6',1,'Catch::Counts']]],
['precision_1403',['precision',['../structCatch_1_1StringMaker_3_01float_01_4.html#a54ebebe76a755dbe2dd8ad409c329378',1,'Catch::StringMaker< float >::precision()'],['../structCatch_1_1StringMaker_3_01double_01_4.html#a15fa2b093c532ece7f1d0c713ebaee67',1,'Catch::StringMaker< double >::precision()']]],
['prevassertions_1404',['prevAssertions',['../structCatch_1_1SectionEndInfo.html#ae70b154cbc05b5dd2901d97f89303d8c',1,'Catch::SectionEndInfo']]],
['properties_1405',['properties',['../structCatch_1_1TestCaseInfo.html#afc1e84bd7a2e180895a06d9131302af0',1,'Catch::TestCaseInfo']]]
];
|
# Homebrew cask for the napari image viewer.
cask "napari" do
  version "0.4.8"
  sha256 "95c77f0d35d97a720d9afc1c803a6e0ab575c267b76aa873f7332ef2d1aab9c2"

  url "https://github.com/napari/napari/releases/download/v#{version}/napari-#{version}-macOS.zip",
      verified: "github.com/napari/napari/"
  name "napari"
  desc "Multi-dimensional image viewer for Python"
  homepage "https://napari.org/"

  # This cask is restricted to macOS Catalina or earlier.
  depends_on macos: "<= :catalina"
  # The downloaded zip wraps a dmg that contains the actual app bundle.
  container nested: "napari-#{version}.dmg"

  app "napari.app"

  zap trash: "~/.config/napari/"
end
|
from abc import ABCMeta, abstractmethod
from pathlib import Path
from typing import TYPE_CHECKING, Generic, Optional, Type, TypeVar, Union
from avilla.core.metadata.model import Metadata, MetadataModifies
from avilla.core.selectors import mainline
if TYPE_CHECKING:
from avilla.core.relationship import Relationship
# T: the value type produced when a Resource is fetched (see ResourceProvider.fetch).
T = TypeVar("T")


class Resource(Generic[T]):
    """A typed resource handle; ``T`` is the type of the fetched value."""

    # mainline selector this resource is bound to, if any
    mainline: Optional[mainline] = None


# R: any Resource subtype; M: any Metadata subtype.
R = TypeVar("R", bound=Resource)
M = TypeVar("M", bound=Metadata)
class ResourceProvider(metaclass=ABCMeta):
    """Abstract provider capable of fetching the value behind a Resource."""

    @abstractmethod
    async def fetch(self, resource: Resource[T], relationship: Optional["Relationship"] = None) -> T:
        # TODO: guide developers to use Relationship as a Guest, to implement authorization.
        pass
|
# Build the project without running tests, then start the locate API frontend
# service with its YAML configuration.
mvn -DskipTests=true package && java -jar locate-api-frontend-service/target/locate-api-frontend-service-0.0.1-SNAPSHOT.jar server locateApiFrontendConfiguration.yml
|
#!/bin/bash
# Build darknet-based detection models for the "surveillance" YOLOv3 networks
# in FP32, FP16 and INT8 precision, for the leaky (plugin/native) and relu
# variants. All nine builds share the same batch/size/class options, so the
# common invocation is factored into build_model below.
NAME="surveillance"
WEIGHTS_LEAKY="yolov3_leaky.weights"
WEIGHTS_RELU="yolov3_relu.weights"
INPUT_WIDTH=608
INPUT_HEIGHT=352
CLASSES=6
BATCH=1
INT8BATCH=50
INT8CALFILES="cal_coco_visdrone.txt"

# build_model ARCH WEIGHTS SUFFIX [EXTRA_OPTS...]
# Runs jetnet_build_darknet_model with the shared options plus EXTRA_OPTS and
# writes "<NAME>_<W>x<H>_<SUFFIX>.model".
build_model() {
  local arch=$1 weights=$2 suffix=$3
  shift 3
  jetnet_build_darknet_model --maxbatch=$BATCH --width=$INPUT_WIDTH --height=$INPUT_HEIGHT --classes=$CLASSES "$@" \
    "$arch" "$weights" "${NAME}_${INPUT_WIDTH}x${INPUT_HEIGHT}_${suffix}.model"
}

# FP32 models
build_model yolov3_leaky_plugin "$WEIGHTS_LEAKY" leaky_plugin_fp32
build_model yolov3_leaky_native "$WEIGHTS_LEAKY" leaky_native_fp32
build_model yolov3_relu "$WEIGHTS_RELU" relu_fp32

# FP16 models
build_model yolov3_leaky_plugin "$WEIGHTS_LEAKY" leaky_plugin_fp16 --fp16
build_model yolov3_leaky_native "$WEIGHTS_LEAKY" leaky_native_fp16 --fp16
build_model yolov3_relu "$WEIGHTS_RELU" relu_fp16 --fp16

# INT8 models (not for TX2)
build_model yolov3_leaky_plugin "$WEIGHTS_LEAKY" leaky_plugin_int8 --int8batch=$INT8BATCH --int8cache="${NAME}_leaky_plugin.cache" --int8calfiles=$INT8CALFILES
build_model yolov3_leaky_native "$WEIGHTS_LEAKY" leaky_native_int8 --int8batch=$INT8BATCH --int8cache="${NAME}_leaky_native.cache" --int8calfiles=$INT8CALFILES
build_model yolov3_relu "$WEIGHTS_RELU" relu_int8 --int8batch=$INT8BATCH --int8cache="${NAME}_relu.cache" --int8calfiles=$INT8CALFILES
|
#!/bin/bash
# Webmail with Roundcube
# ----------------------
source setup/functions.sh # load our functions
source /etc/mailinabox.conf # load global vars
# ### Installing Roundcube
# We install Roundcube from sources, rather than from Ubuntu, because:
#
# 1. Ubuntu's `roundcube-core` package has dependencies on Apache & MySQL, which we don't want.
#
# 2. The Roundcube shipped with Ubuntu is consistently out of date.
#
# 3. It's packaged incorrectly --- it seems to be missing a directory of files.
#
# So we'll use apt-get to manually install the dependencies of roundcube that we know we need,
# and then we'll manually install roundcube from source.
# These dependencies are from `apt-cache showpkg roundcube-core`.
echo "Installing Roundcube (webmail)..."
apt_install \
	dbconfig-common \
	php-cli php-sqlite3 php-intl php-json php-common php-curl php-ldap \
	php-gd php-pspell tinymce libjs-jquery libjs-jquery-mousewheel libmagic1 php-mbstring
# Install Roundcube from source if it is not already present or if it is out of date.
# Combine the Roundcube version number with the commit hash of plugins to track
# whether we have the latest version of everything.
VERSION=1.4.11
# HASH is used by wget_verify (below) to check the downloaded release tarball.
HASH=3877f0e70f29e7d0612155632e48c3db1e626be3
PERSISTENT_LOGIN_VERSION=6b3fc450cae23ccb2f393d0ef67aa319e877e435 # version 5.2.0
HTML5_NOTIFIER_VERSION=68d9ca194212e15b3c7225eb6085dbcf02fd13d7 # version 0.6.4+
CARDDAV_VERSION=3.0.3
CARDDAV_HASH=d1e3b0d851ffa2c6bd42bf0c04f70d0e1d0d78f8
# UPDATE_KEY is written to ${RCM_DIR}/version after install and compared on the
# next run to decide whether a re-install is needed.
UPDATE_KEY=$VERSION:$PERSISTENT_LOGIN_VERSION:$HTML5_NOTIFIER_VERSION:$CARDDAV_VERSION
# paths that are often reused.
RCM_DIR=/usr/local/lib/roundcubemail
RCM_PLUGIN_DIR=${RCM_DIR}/plugins
RCM_CONFIG=${RCM_DIR}/config/config.inc.php
needs_update=0 #NODOC
if [ ! -f /usr/local/lib/roundcubemail/version ]; then
	# not installed yet #NODOC
	needs_update=1 #NODOC
elif [[ "$UPDATE_KEY" != `cat /usr/local/lib/roundcubemail/version` ]]; then
	# checks if the version is what we want
	needs_update=1 #NODOC
fi
if [ $needs_update == 1 ]; then
	# if upgrading from 1.3.x, clear the temp_dir
	if [ -f /usr/local/lib/roundcubemail/version ]; then
		if [ "$(cat /usr/local/lib/roundcubemail/version | cut -c1-3)" == '1.3' ]; then
			find /var/tmp/roundcubemail/ -type f ! -name 'RCMTEMP*' -delete
		fi
	fi
	# install roundcube
	# NOTE(review): the old tree is removed before the new one is moved into
	# place; a failure between the two steps leaves no installation behind.
	wget_verify \
		https://github.com/roundcube/roundcubemail/releases/download/$VERSION/roundcubemail-$VERSION-complete.tar.gz \
		$HASH \
		/tmp/roundcube.tgz
	tar -C /usr/local/lib --no-same-owner -zxf /tmp/roundcube.tgz
	rm -rf /usr/local/lib/roundcubemail
	mv /usr/local/lib/roundcubemail-$VERSION/ $RCM_DIR
	rm -f /tmp/roundcube.tgz
	# install roundcube persistent_login plugin
	git_clone https://github.com/mfreiholz/Roundcube-Persistent-Login-Plugin.git $PERSISTENT_LOGIN_VERSION '' ${RCM_PLUGIN_DIR}/persistent_login
	# install roundcube html5_notifier plugin
	git_clone https://github.com/kitist/html5_notifier.git $HTML5_NOTIFIER_VERSION '' ${RCM_PLUGIN_DIR}/html5_notifier
	# download and verify the full release of the carddav plugin
	wget_verify \
		https://github.com/blind-coder/rcmcarddav/releases/download/v${CARDDAV_VERSION}/carddav-${CARDDAV_VERSION}.zip \
		$CARDDAV_HASH \
		/tmp/carddav.zip
	# unzip and cleanup
	unzip -q /tmp/carddav.zip -d ${RCM_PLUGIN_DIR}
	rm -f /tmp/carddav.zip
	# record the version we've installed so the next run can skip this block
	echo $UPDATE_KEY > ${RCM_DIR}/version
fi
# ### Configuring Roundcube
# Generate a safe 24-character secret key of safe characters.
# Used below as Roundcube's des_key (cookie/session encryption); regenerated on
# every run, which invalidates existing sessions.
SECRET_KEY=$(dd if=/dev/urandom bs=1 count=18 2>/dev/null | base64 | fold -w 24 | head -n 1)
# Create a configuration file.
#
# For security, temp and log files are not stored in the default locations
# which are inside the roundcube sources directory. We put them instead
# in normal places.
cat > $RCM_CONFIG <<EOF;
<?php
/*
 * Do not edit. Written by Mail-in-a-Box. Regenerated on updates.
 */
\$config = array();
\$config['log_dir'] = '/var/log/roundcubemail/';
\$config['temp_dir'] = '/var/tmp/roundcubemail/';
\$config['db_dsnw'] = 'sqlite:///$STORAGE_ROOT/mail/roundcube/roundcube.sqlite?mode=0640';
\$config['default_host'] = 'ssl://localhost';
\$config['default_port'] = 993;
\$config['imap_conn_options'] = array(
  'ssl'         => array(
   'verify_peer'  => false,
   'verify_peer_name'  => false,
 ),
);
\$config['imap_timeout'] = 15;
\$config['smtp_server'] = 'tls://127.0.0.1';
\$config['smtp_conn_options'] = array(
  'ssl'         => array(
   'verify_peer'  => false,
   'verify_peer_name'  => false,
 ),
);
\$config['support_url'] = 'https://mailinabox.email/';
\$config['product_name'] = '$PRIMARY_HOSTNAME Webmail';
\$config['des_key'] = '$SECRET_KEY';
\$config['plugins'] = array('html5_notifier', 'archive', 'zipdownload', 'password', 'managesieve', 'jqueryui', 'persistent_login', 'carddav');
\$config['skin'] = 'elastic';
\$config['login_autocomplete'] = 2;
\$config['password_charset'] = 'UTF-8';
\$config['junk_mbox'] = 'Spam';
?>
EOF
# Configure CardDav to sync contacts against the local ownCloud/Nextcloud
# CardDAV endpoint using the user's own mail credentials.
cat > ${RCM_PLUGIN_DIR}/carddav/config.inc.php <<EOF;
<?php
/* Do not edit. Written by Mail-in-a-Box. Regenerated on updates. */
\$prefs['_GLOBAL']['hide_preferences'] = true;
\$prefs['_GLOBAL']['suppress_version_warning'] = true;
\$prefs['ownCloud'] = array(
	 'name'         =>  'ownCloud',
	 'username'     =>  '%u', // login username
	 'password'     =>  '%p', // login password
	 'url'          =>  'https://${PRIMARY_HOSTNAME}/cloud/remote.php/carddav/addressbooks/%u/contacts',
	 'active'       =>  true,
	 'readonly'     =>  false,
	 'refresh_time' => '02:00:00',
	 'fixed'        =>  array('username','password'),
	 'preemptive_auth' => '1',
	 'hide'        =>  false,
);
?>
EOF
# Create writable directories.
mkdir -p /var/log/roundcubemail /var/tmp/roundcubemail $STORAGE_ROOT/mail/roundcube
chown -R www-data.www-data /var/log/roundcubemail /var/tmp/roundcubemail $STORAGE_ROOT/mail/roundcube
# Ensure the log file monitored by fail2ban exists, or else fail2ban can't start.
sudo -u www-data touch /var/log/roundcubemail/errors.log
# Password changing plugin settings
# The config comes empty by default, so we need the settings
# we're not planning to change in config.inc.dist...
cp ${RCM_PLUGIN_DIR}/password/config.inc.php.dist \
	${RCM_PLUGIN_DIR}/password/config.inc.php
tools/editconf.py ${RCM_PLUGIN_DIR}/password/config.inc.php \
	"\$config['password_minimum_length']=8;" \
	"\$config['password_db_dsn']='sqlite:///$STORAGE_ROOT/mail/users.sqlite';" \
	"\$config['password_query']='UPDATE users SET password=%D WHERE email=%u';" \
	"\$config['password_dovecotpw']='/usr/bin/doveadm pw';" \
	"\$config['password_dovecotpw_method']='SHA512-CRYPT';" \
	"\$config['password_dovecotpw_with_method']=true;"
# so PHP can use doveadm, for the password changing plugin
usermod -a -G dovecot www-data
# set permissions so that PHP can use users.sqlite
# could use dovecot instead of www-data, but not sure it matters
chown root.www-data $STORAGE_ROOT/mail
chmod 775 $STORAGE_ROOT/mail
chown root.www-data $STORAGE_ROOT/mail/users.sqlite
chmod 664 $STORAGE_ROOT/mail/users.sqlite
# Fix Carddav permissions:
chown -f -R root.www-data ${RCM_PLUGIN_DIR}/carddav
# root.www-data need all permissions, others only read
chmod -R 774 ${RCM_PLUGIN_DIR}/carddav
# Run Roundcube database migration script (database is created if it does not exist)
${RCM_DIR}/bin/updatedb.sh --dir ${RCM_DIR}/SQL --package roundcube
chown www-data:www-data $STORAGE_ROOT/mail/roundcube/roundcube.sqlite
chmod 664 $STORAGE_ROOT/mail/roundcube/roundcube.sqlite
# Enable PHP modules.
# NOTE(review): `phpenmod -v php mcrypt imap` -- mcrypt was dropped from PHP
# core in 7.2 and the php-fpm version is pinned below; confirm these module
# names against the installed PHP version.
phpenmod -v php mcrypt imap
restart_service php7.2-fpm
|
<filename>ribose.gemspec
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "ribose/version"
# Gem metadata for the Ribose API Ruby client.
Gem::Specification.new do |spec|
  spec.name = "ribose"
  spec.version = Ribose::VERSION
  spec.authors = ["Ribose Inc."]
  spec.email = ["<EMAIL>"]
  spec.summary = "The Ruby interface for Ribose API"
  spec.description = "The Ruby interface for Ribose API"
  spec.homepage = "https://github.com/riboseinc/ribose-ruby"
  spec.license = "MIT"
  spec.require_paths = ["lib"]
  # Package every git-tracked file; spec/ files are listed as test_files.
  spec.files = `git ls-files`.split("\n")
  spec.test_files = `git ls-files -- {spec}/*`.split("\n")
  spec.required_ruby_version = Gem::Requirement.new(">= 2.1.9")
  # Runtime dependencies
  spec.add_dependency "id_pack", "~> 1.0.1"
  spec.add_dependency "mime-types", "~> 3.1"
  spec.add_dependency "sawyer", "~> 0.8.1"
  # Development-only dependencies
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "webmock", "~> 3.0"
end
|
import {trigger, animate, style, group, query, transition} from '@angular/animations';
// Angular route-change animation: the entering view slides in from the right,
// and the leaving view slides back out to the right.
export const routerTransition = trigger('routerTransition', [
  // route 'enter' transition: start off-screen right, slide to in place
  transition(':enter', [
    style({ transform: 'translateX(100%)' }),
    animate('0.5s ease-in-out', style({ transform: 'translateX(0%)' })),
  ]),
  // route 'leave' transition: slide from in place back off-screen right
  transition(':leave', [
    style({ transform: 'translateX(0%)' }),
    animate('0.5s ease-in-out', style({ transform: 'translateX(100%)' }))
    // css styles at start of transition
    //style({ opacity: 0 }),
    // animation and styles at end of transition
    //animate('3s', style({ opacity: 1 }))
  ]),
]);
/* export const routerTransition = trigger('routerTransition', [
transition('about => home', [
query(':enter, :leave', style({ position: 'fixed', width:'100%' })
, { optional: true }),
group([
query(':enter', [
style({ transform: 'translateX(-100%)' }),
animate('0.5s ease-in-out', style({ transform: 'translateX(0%)' }))
], { optional: true }),
query(':leave', [
style({ transform: 'translateX(0%)' }),
animate('0.5s ease-in-out', style({ transform: 'translateX(100%)' }))
], { optional: true }),
])
]),
transition('home => about', [
group([
query(':enter, :leave', style({ position: 'fixed', width:'100%' })
, { optional: true }),
query(':enter', [
style({ transform: 'translateX(100%)' }),
animate('0.5s ease-in-out', style({ transform: 'translateX(0%)' }))
], { optional: true }),
query(':leave', [
style({ transform: 'translateX(0%)' }),
animate('0.5s ease-in-out', style({ transform: 'translateX(-100%)' }))
], { optional: true }),
])
])
]) */ |
<reponame>awesome-archive/webrecorder<filename>webrecorder/admin/src/components/RadialGraph/index.js
import React, { Component, PropTypes } from 'react';
import classNames from 'classnames';
/**
* for the time being this uses a hackish css method.. TODO: move this to svg
* http://www.cssscript.com/demo/pure-css-circular-percentage-bar/
*/
import './style.scss';
/**
 * Circular percentage readout with an optional legend. Rendering relies on the
 * css-only circle technique imported from style.scss (classes `c100`/`p<NN>`).
 * When `showWarning` is set, the ring is color coded by thresholds:
 * green < 75 <= yellow < 90 <= red; otherwise it is always green.
 *
 * NOTE(review): `PropTypes` is imported from 'react' at the top of this file;
 * that export was removed in React >= 15.5 -- migrate to the 'prop-types'
 * package when upgrading React.
 */
class RadialGraph extends Component {
  static propTypes = {
    legend: PropTypes.string,
    label: PropTypes.string,
    percentage: PropTypes.number,
    /* color coding for disk space readouts */
    showWarning: PropTypes.bool,
    className: PropTypes.string,
  }
  static defaultProps = {
    showWarning: false,
  }
  render() {
    const { className, label, legend, percentage, showWarning } = this.props;
    // p<NN> selects the css rule that draws the filled arc for that percentage
    const classes = classNames('c100', `p${percentage}`);
    const perctClass = classNames({
      'green': !showWarning || percentage < 75,
      'yellow': showWarning && percentage >= 75 && percentage < 90,
      'red': showWarning && percentage >= 90,
    })
    return (
      <div className={`radial-graph ${className}`}>
        <div className={classes}>
          <span className={perctClass}>{label}</span>
          <div className='slice'>
            <div className={`bar ${perctClass}`} />
            <div className={`fill ${perctClass}`} />
          </div>
        </div>
        {
          legend &&
          <span className='legend'>{ legend }</span>
        }
      </div>
    );
  }
}
|
def sortByLength(input_list, length):
    """Return the strings in ``input_list`` whose length equals ``length``.

    Despite the name (kept for backward compatibility), this filters rather
    than sorts; the relative order of the input is preserved and the input
    list is not modified.
    """
    return [s for s in input_list if len(s) == length]
<reponame>TheTastyGravy/Double-Linked-List<filename>Project1/DLList.cpp
#include "DLList.h"
// Constructs an empty list: both end pointers are null.
DLList::DLList() :
	first(nullptr),
	last(nullptr)
{
}
// Destroys the list, freeing every node.
DLList::~DLList()
{
	// Pop until the list is empty. The previous loop compared an increasing
	// index against count(), which shrinks with every pop, so it destroyed
	// only about half of the nodes and leaked the rest.
	while (!isEmpty())
	{
		popFront();
	}
}
void DLList::pushFront(int value)
{
// Create new node with value to be added to the list
DLListNode* newNode = new DLListNode(value);
// If the list is empty, just set first and last
if (isEmpty())
{
first = last = newNode;
return;
}
// Set up the pointers, and change the first node
first->previous = newNode;
newNode->next = first;
first = newNode;
}
void DLList::pushEnd(int value)
{
// Create new node with value to be added to the list
DLListNode* newNode = new DLListNode(value);
// If the list is empty, just set first and last
if (isEmpty())
{
first = last = newNode;
return;
}
// Set up the pointers, and change the last node
last->next = newNode;
newNode->previous = last;
last = newNode;
}
// Insert 'value' immediately before 'node'.
// Fixes two defects: the original allocated the new node before the
// empty/first checks, which (a) leaked the allocation on the node == first
// path (pushFront allocated a second node) and (b) dereferenced
// node->previous when the list was empty / node was null.
void DLList::insert(int value, DLListNode* node)
{
	// Empty list, no anchor, or inserting before the head: all reduce to
	// making the value the new front element
	if (isEmpty() || node == nullptr || node == first)
	{
		pushFront(value);
		return;
	}

	// Create the new node already wired to its neighbours
	// (next = node, previous = node->previous)
	DLListNode* newNode = new DLListNode(value, node, node->previous);

	// As the value is inserted before a node, it can't be at the end;
	// update the pointers of the two adjacent nodes
	newNode->previous->next = newNode;
	newNode->next->previous = newNode;
}
void DLList::popFront()
{
// If the list is empty, do nothing
if (isEmpty())
return;
// If there is only one node,
if (first == last)
{
delete first;
first = last = nullptr;
return;
}
// Delete first, and replace it with the next node
DLListNode* temp = first->next;
delete first;
first = temp;
first->previous = nullptr;
}
void DLList::popEnd()
{
// If the list is empty, do nothing
if (isEmpty())
return;
// If there is only one node
if (first == last)
{
delete first;
first = last = nullptr;
return;
}
// Delete last, and replace it with the previous node
DLListNode* temp = last->previous;
delete last;
last = temp;
last->next = nullptr;
}
// Unlink 'node' from the list WITHOUT deleting it.
// NOTE: ownership passes to the caller -- move() relies on this so it can
// re-insert the node elsewhere. A caller that drops the node must delete it
// or it leaks.
void DLList::removeFromList(DLListNode* node)
{
	// If the list is empty or node is nullptr, do nothing
	if (isEmpty() || node == nullptr)
		return;

	// If it is the only node in the list, the list becomes empty
	if (node == first && node == last)
	{
		first = last = nullptr;
	}
	// Removing the head: next node becomes first
	else if (node == first)
	{
		first = node->next;
		first->previous = nullptr;
	}
	// Removing the tail: previous node becomes last
	else if (node == last)
	{
		last = node->previous;
		last->next = nullptr;
	}
	// Interior node: make the adjacent nodes point to each other
	else
	{
		node->next->previous = node->previous;
		node->previous->next = node->next;
	}
}
int DLList::count()
{
if (isEmpty())
return 0;
// Pointer to the current node
DLListNode* current = first;
int count = 1;
// Move to the next node until the last node is reached
while (current != last)
{
current = current->next;
count++;
}
return count;
}
// True when the list holds no nodes; a well-formed list has both end
// pointers null exactly in that case.
bool DLList::isEmpty()
{
	return (first == nullptr && last == nullptr);
}
void DLList::sort()
{
// There is nothing infront of the first node, so start at the second
DLListNode* current = first->next;
// Iterate through all nodes
while (current != nullptr)
{
// The start of the loop moves down to the next node
DLListNode* comp = current;
while (true)
{
// Get the next node down the list
comp = comp->previous;
if (comp->value < current->value)
{
// Insert it after the node smaller than it
comp = comp->next;
break;
}
else if (comp == first)
{
// Insert at the front as it is the smallest
break;
}
}
// Get the next node before moving current
DLListNode* nextNode = current->next;
// Insert the current node in its correct spot
if (current != comp)
move(current, comp);
current = nextNode;
}
}
// Relocate movedNode so it sits immediately before baseNode.
// Order matters here: movedNode is unlinked first (removeFromList does not
// delete it), and only then rewired around baseNode.
void DLList::move(DLListNode* movedNode, DLListNode* baseNode)
{
	// Remove references to movedNode from its current position
	removeFromList(movedNode);

	// Point movedNode at its new neighbours
	movedNode->next = baseNode;
	movedNode->previous = baseNode->previous;

	// If the base is the head, this is effectively a pushFront of the node
	if (baseNode == first)
	{
		// Set up the pointers, and change the first node
		first->previous = movedNode;
		movedNode->next = first;
		first = movedNode;
		return;
	}

	// As the node is inserted before another, it can't be at the end;
	// update the pointers of the two adjacent nodes
	movedNode->previous->next = movedNode;
	movedNode->next->previous = movedNode;
}
#!/bin/bash
# Release-build launcher: detects the host OS and delegates to package.sh.

# Determine the operating system
if [[ "$OSTYPE" == "darwin"* ]]; then
  # MacOS
  # NOTE(review): build_command is assigned here but never used below -- the
  # actual build is delegated to package.sh. Confirm whether this variable
  # is consumed elsewhere (e.g. sourced) or is dead code.
  build_command="npm run android_release"
elif [[ "$OSTYPE" == "msys" ]]; then
  # Windows (Git Bash / MSYS)
  build_command="npm run win_android_release"
else
  echo "Unsupported operating system"
  exit 1
fi

# Set the project path: the directory containing this script
project_path=$(cd "$(dirname "$0")"; pwd)

# Set the package script path
package="$project_path/package.sh"

# Execute the packaging script with the platform and configuration
if [[ "$OSTYPE" == "darwin"* ]]; then
  # MacOS
  /bin/sh "$package" android release
elif [[ "$OSTYPE" == "msys" ]]; then
  # Windows: run the same script through PowerShell
  powershell -Command "$package android release"
fi
package examenFinal;
/**
 * Simple mutable data holder pairing a numeric code with a name.
 *
 * NOTE(review): Java class names are conventionally PascalCase ("Agregar"),
 * but renaming would break existing callers, so the name is left as-is.
 */
public class agregar {

    /** Numeric identifier. */
    private int codigo;

    /** Display name. */
    private String nombre;

    /**
     * Creates the holder with both fields set.
     *
     * @param codigo numeric identifier
     * @param nombre display name
     */
    public agregar(int codigo, String nombre) {
        this.codigo=codigo;
        this.nombre=nombre;
    }

    /** @return the numeric identifier */
    public int getCodigo() {
        return codigo;
    }

    /** @param codigo the numeric identifier to set */
    public void setCodigo(int codigo) {
        this.codigo = codigo;
    }

    /** @return the display name */
    public String getNombre() {
        return nombre;
    }

    /** @param nombre the display name to set */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }
}
|
<gh_stars>1-10
#!/usr/bin/env node
// Bootstraps the Closure environment under node so goog.* is available.
require('nclosure').nclosure();

goog.require('goog.testing.jsunit');
goog.require('nclosure.examples.simple.Example');
goog.provide('nclosure.examples.simple.tests.syncTests');

// jsunit discovers global functions named test*/...Function*.
// Checks that the global `example_` exists -- presumably created by the
// Example module's initialization; confirm against Example's source.
testFunction1 = function() {
  assertNotEquals(typeof(example_), 'undefined');
};
|
from typing import List
import re
def extract_html_tags(html_content: str) -> List[str]:
    """Return every HTML tag found in the content, brackets included.

    A "tag" here is any ``<...>`` span with no ``>`` inside it; opening,
    closing, and self-closing tags are all matched, in document order.
    """
    tag_pattern = re.compile(r'<[^>]+>')
    return tag_pattern.findall(html_content)
def sieve_of_eratosthenes(n):
    """Return all primes <= n, ascending, via the Sieve of Eratosthenes.

    Fixes: the original indexed ``prime[1]`` unconditionally, raising
    IndexError for n < 1; it also crossed off multiples starting at ``2*p``
    instead of ``p*p`` (redundant work already done by smaller primes).
    """
    if n < 2:
        return []
    prime = [True] * (n + 1)
    prime[0] = prime[1] = False
    p = 2
    while p * p <= n:
        if prime[p]:
            # Start at p*p: smaller multiples were marked by smaller primes.
            for i in range(p * p, n + 1, p):
                prime[i] = False
        p += 1
    return [i for i in range(n + 1) if prime[i]]
<reponame>harrytiendanube/nexo-app-sample
import React, { useEffect, useState } from 'react';
import { useHistory } from 'react-router';
import { DataList, ImageItem, Page } from '@tiendanube/components';
import axios from '../axios';
// Product listing page: fetches '/products' once on mount and renders one
// row per product, with five skeleton rows while the list is still empty.
function ProductList() {
  const { push } = useHistory();
  const [products, setProducts] = useState([]);

  // Header back-navigation: return to the app's main route.
  const handleGoToMain = () => {
    push('/');
  }

  // Load the product list from the backend and store it in state.
  const getProducts = async () => {
    const { data } = await axios.get('/products');
    setProducts(data);
  }

  // Empty dependency array: fetch exactly once on mount.
  useEffect(() => {
    getProducts();
  }, [])

  return (
    <Page title='Products' headerNavigation={{ onClick: handleGoToMain }}>
      <DataList ruled >
        {/* Loading state: an empty list renders five skeleton placeholders. */}
        {products.length === 0 && Array.from(Array(5).keys()).map((index) => (
          <DataList.Row id={`${index}`} key={index}>
            <DataList.Cell>
              <ImageItem.Skeleton />
            </DataList.Cell>
          </DataList.Row>
        ))}
        {products.map((product: any) => (
          <DataList.Row id={product.id} key={product.id}>
            <DataList.Cell>
              {/* Name falls back from Spanish to Portuguese; clicking opens
                  the product's storefront page in a new window. */}
              <ImageItem thumbnail={product.images[0]?.src} link={{
                children: product.name.es || product.name.pt,
                onClick: () => { window.open(product.canonical_url) }
              }} />
            </DataList.Cell>
          </DataList.Row>
        ))}
      </DataList>
    </Page>
  )
}
export default ProductList; |
<reponame>ChristopherChudzicki/mathbox<filename>build/esm/primitives/types/present/track.js
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS104: Avoid inline assignments
* DS205: Consider reworking code to avoid use of IIFEs
* DS206: Consider reworking classes to avoid initClass
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import * as Ease from "../../../util/ease.js";
import { Primitive } from "../../primitive.js";
// Recursively clone a plain-object graph. Arrays are copied with slice()
// (one level deep), nested objects are cloned recursively, and primitives
// (plus null/undefined) are copied by value.
const deepCopy = function (source) {
  const clone = {};
  for (const key in source) {
    const value = source[key];
    if (value instanceof Array) {
      clone[key] = value.slice();
    } else if (value != null && typeof value === "object") {
      clone[key] = deepCopy(value);
    } else {
      clone[key] = value;
    }
  }
  return clone;
};
export class Track extends Primitive {
static initClass() {
this.traits = ["node", "track", "seek", "bind"];
}
init() {
this.handlers = {};
this.script = null;
this.values = null;
this.playhead = 0;
this.velocity = null;
this.section = null;
return (this.expr = null);
}
make() {
// Bind to attached data sources
let ref;
this._helpers.bind.make([
{ to: "track.target", trait: "node", callback: null },
]);
const { script } = this.props;
const { node } = this.bind.target;
this.targetNode = node;
return (([this.script, this.values, this.start, this.end] = Array.from((ref = this._process(node, script)))),
ref);
}
unmake() {
this.unbindExpr();
this._helpers.bind.unmake();
this.script =
this.values =
this.start =
this.end =
this.section =
this.expr =
null;
return (this.playhead = 0);
}
// Bind animated expressions
bindExpr(expr) {
this.unbindExpr();
this.expr = expr;
this.targetNode.bind(expr, true);
// Measure playhead velocity on attribute computation
const { clock } = this.targetNode;
const self = this;
return this._attributes.bind((this.measure = (function () {
let playhead = null;
return () => {
const { step } = clock.getTime();
if (playhead != null) {
self.velocity = (self.playhead - playhead) / step;
}
return (playhead = self.playhead);
};
})()));
}
unbindExpr() {
if (this.expr != null) {
this.targetNode.unbind(this.expr, true);
}
if (this.measure != null) {
this._attributes.unbind(this.measure);
}
return (this.expr = this.measure = null);
}
// Process script steps by filling out missing props
_process(object, script) {
let k, key, last, message, s, step, v;
if (script instanceof Array) {
// Normalize array to numbered dict
s = {};
for (let i = 0; i < script.length; i++) {
step = script[i];
s[i] = step;
}
script = s;
}
// Normalize keyed steps to array of step objects
s = [];
for (key in script) {
step = script[key];
if (step == null) {
step = [];
}
if (step instanceof Array) {
// [props, expr] array
step = {
key: +key,
props: step[0] != null ? deepCopy(step[0]) : {},
expr: step[1] != null ? deepCopy(step[1]) : {},
};
}
else {
if (step.key == null && !step.props && !step.expr) {
// Direct props object (iffy, but people will do this anyhow)
step = { props: deepCopy(step) };
}
else {
// Proper step
step = deepCopy(step);
}
// Prepare step object
step.key = step.key != null ? +step.key : +key;
if (step.props == null) {
step.props = {};
}
if (step.expr == null) {
step.expr = {};
}
}
s.push(step);
}
script = s;
if (!script.length) {
return [[], {}, 0, 0];
}
// Sort by keys
script.sort((a, b) => a.key - b.key);
const start = script[0].key;
const end = script[script.length - 1].key;
// Connect steps
for (key in script) {
step = script[key];
if (last != null) {
last.next = step;
}
last = step;
}
// Last step leads to itself
last.next = last;
script = s;
// Determine starting props
const props = {};
const values = {};
for (key in script) {
step = script[key];
for (k in step.props) {
v = step.props[k];
props[k] = true;
}
}
for (key in script) {
step = script[key];
for (k in step.expr) {
v = step.expr[k];
props[k] = true;
}
}
for (k in props) {
props[k] = object.get(k);
}
try {
// Need two sources and one destination value for correct mixing of live expressions
for (k in props) {
values[k] = [
object.attribute(k).T.make(),
object.attribute(k).T.make(),
object.attribute(k).T.make(),
];
}
}
catch (error) {
console.warn(this.node.toMarkup());
message = `${this.node.toString()} - Target ${object} has no \`${k}\` property`;
throw new Error(message);
}
const result = [];
// Normalize script props, insert held values
for (step of Array.from(script)) {
for (k in props) {
v = props[k];
v = object.validate(k, step.props[k] != null ? step.props[k] : v);
props[k] = step.props[k] = v;
if (step.expr[k] != null && typeof step.expr[k] !== "function") {
console.warn(this.node.toMarkup());
message = `${this.node.toString()} - Expression \`${step.expr[k]}\` on property \`${k}\` is not a function`;
throw new Error(message);
}
}
result.push(step);
}
return [result, values, start, end];
}
update() {
let { playhead } = this;
const { script } = this;
const { ease, seek } = this.props;
const node = this.targetNode;
if (seek != null) {
playhead = seek;
}
if (script.length) {
let k;
const find = function () {
let last = script[0];
for (let i = 0; i < script.length; i++) {
const step = script[i];
if (step.key > playhead) {
break;
}
last = step;
}
return last;
};
let { section } = this;
if (!section || playhead < section.key || playhead > section.next.key) {
section = find(script, playhead);
}
if (section === this.section) {
return;
}
this.section = section;
const from = section;
const to = section.next;
const start = from.key;
const end = to.key;
// Easing of playhead along track
const easeMethod = (() => {
switch (ease) {
case "linear":
case 0:
return Ease.clamp;
case "cosine":
case 1:
return Ease.cosine;
case "binary":
case 2:
return Ease.binary;
case "hold":
case 3:
return Ease.hold;
default:
return Ease.cosine;
}
})();
// Callback for live playhead interpolator (linear approx time travel)
const { clock } = node;
const getPlayhead = (time) => {
if (this.velocity == null) {
return this.playhead;
}
const now = clock.getTime();
return this.playhead + this.velocity * (time - now.time);
};
const getLerpFactor = (function () {
const scale = 1 / Math.max(0.0001, end - start);
return (time) => easeMethod((getPlayhead(time) - start) * scale, 0, 1);
})();
// Create prop expression interpolator
const live = (key) => {
const fromE = from.expr[key];
const toE = to.expr[key];
const fromP = from.props[key];
const toP = to.props[key];
const invalid = function () {
console.warn(node.toMarkup());
throw new Error(`${this.node.toString()} - Invalid expression result on track \`${key}\``);
};
const attr = node.attribute(key);
const values = this.values[key];
const animator = this._animator;
// Lerp between two expressions
if (fromE && toE) {
return ((values, _from, _to) => function (time, delta) {
let _from, _to;
values[0] = _from = attr.T.validate(fromE(time, delta), values[0], invalid);
values[1] = _to = attr.T.validate(toE(time, delta), values[1], invalid);
return (values[2] = animator.lerp(attr.T, _from, _to, getLerpFactor(time), values[2]));
})(values, from, to);
// Lerp between an expression and a constant
}
else if (fromE) {
return ((values, _from, _to) => function (time, delta) {
let _from;
values[0] = _from = attr.T.validate(fromE(time, delta), values[0], invalid);
return (values[1] = animator.lerp(attr.T, _from, toP, getLerpFactor(time), values[1]));
})(values, from, to);
// Lerp between a constant and an expression
}
else if (toE) {
return ((values, _from, _to) => function (time, delta) {
let _to;
values[0] = _to = attr.T.validate(toE(time, delta), values[0], invalid);
return (values[1] = animator.lerp(attr.T, fromP, _to, getLerpFactor(time), values[1]));
})(values, from, to);
// Lerp between two constants
}
else {
return ((values, _from, _to) => (time, _delta) => (values[0] = animator.lerp(attr.T, fromP, toP, getLerpFactor(time), values[0])))(values, from, to);
}
};
// Handle expr / props on both ends
const expr = {};
for (k in from.expr) {
if (expr[k] == null) {
expr[k] = live(k);
}
}
for (k in to.expr) {
if (expr[k] == null) {
expr[k] = live(k);
}
}
for (k in from.props) {
if (expr[k] == null) {
expr[k] = live(k);
}
}
for (k in to.props) {
if (expr[k] == null) {
expr[k] = live(k);
}
}
// Bind node props
return this.bindExpr(expr);
}
}
change(changed, touched, init) {
if (changed["track.target"] ||
changed["track.script"] ||
changed["track.mode"]) {
return this.rebuild();
}
if (changed["seek.seek"] || init) {
return this.update();
}
}
}
Track.initClass();
|
echo "### Sending 'get structured record' request to GPCC adaptor"
echo ""
curl --location --request POST 'http://localhost:8090/B86041/STU3/1/gpconnect/fhir/Patient/$gpc.migratestructuredrecord' \
--header 'Ssp-From: 200000000359' \
--header 'Ssp-To: 918999198738' \
--header 'Ssp-InteractionID: urn:nhs:names:services:gpconnect:fhir:operation:gpc.migratestructuredrecord-1' \
--header 'Ssp-TraceID: 5fefd21d-17dd-4009-b595-0b9d953a286f' \
--header 'Authorization: Bearer some_token' \
--header 'Content-Type: application/fhir+json' \
--data-raw '{
"resourceType": "Parameters",
"parameter": [
{
"name": "patientNHSNumber",
"valueIdentifier": {
"system": "https://fhir.nhs.uk/Id/nhs-number",
"value": "9690937286"
}
},
{
"name": "includeFullRecord",
"part": [
{
"name": "includeSensitiveInformation",
"valueBoolean": true
}
]
}
]
}'
echo ""
echo "### Done."
sleep 1
echo "### Sending 'get document' request to GPCC adaptor"
echo ""
curl --location --request GET 'http://localhost:8090/B82617/STU3/1/gpconnect/documents/fhir/Binary/07a6483f-732b-461e-86b6-edb665c45510' \
--header 'Ssp-From: 200000000359' \
--header 'Ssp-To: 918999198738' \
--header 'Ssp-InteractionID: urn:nhs:names:services:gpconnect:documents:fhir:rest:migrate:binary-1' \
--header 'Ssp-TraceID: 5fefd21d-17dd-4009-b595-0b9d953a286f' \
--header 'Authorization: Bearer some_token'
echo ""
echo "### Done" |
#!/bin/bash
# Attention, there is no "-x" to avoid problems on Wercker
set -e
# Clone (or refresh) the given git repo into .ci-temp/<project-name>.
# Retries the clone up to 5 times to ride out transient network failures.
# Fix: quote all variable expansions so URLs/paths survive word splitting.
function checkout_from {
  CLONE_URL=$1
  PROJECT=$(echo "$CLONE_URL" | sed -nE 's/.*\/(.*).git/\1/p')
  mkdir -p .ci-temp
  cd .ci-temp
  if [ -d "$PROJECT" ]; then
    echo "Target project $PROJECT is already cloned, latest changes will be fetched"
    cd "$PROJECT"
    git fetch
    cd ../
  else
    for i in 1 2 3 4 5; do git clone "$CLONE_URL" && break || sleep 15; done
  fi
  cd ../
}
# Build and install checkstyle locally, but only when running on Shippable
# CI (SHIPPABLE=true); other environments skip the build.
function build_checkstyle {
  if [[ "$SHIPPABLE" == "true" ]]; then
    echo "Build checkstyle ..."
    mvn clean install -Pno-validations
  fi
}
# Shared prologue for every no-exception run: resolve the checkstyle version,
# build it if needed, fetch checkstyle-tester, and disable the guava default.
function prepare_tester {
  CS_POM_VERSION=$(mvn -e -q -Dexec.executable='echo' -Dexec.args='${project.version}' \
                     --non-recursive org.codehaus.mojo:exec-maven-plugin:1.3.1:exec)
  echo 'CS_POM_VERSION='${CS_POM_VERSION}
  build_checkstyle
  checkout_from https://github.com/checkstyle/contribution.git
  cd .ci-temp/contribution/checkstyle-tester
  sed -i'' 's/^guava/#guava/' projects-for-circle.properties
}

# Enable the given project keys in the properties file ("#name" -> "name").
function enable_projects {
  for project in "$@"; do
    sed -i'' "s/#${project}/${project}/" projects-for-circle.properties
  done
}

# Run the tester against the enabled projects with the resolved version.
function run_tester {
  groovy launch.groovy --listOfProjects projects-for-circle.properties \
      --config checks-nonjavadoc-error.xml --checkstyleVersion ${CS_POM_VERSION}
}

case $1 in

no-exception-openjdk7-openjdk8)
  prepare_tester
  enable_projects openjdk7 openjdk8
  run_tester
  ;;

no-exception-openjdk9-lucene-and-others)
  prepare_tester
  # till hg is installed
  #enable_projects openjdk9
  enable_projects infinispan protonpack jOOL lucene-solr
  run_tester
  ;;

no-exception-cassandra-storm-tapestry)
  prepare_tester
  enable_projects tapestry-5 storm cassandra
  run_tester
  ;;

no-exception-hadoop-apache-groovy-scouter)
  prepare_tester
  enable_projects apache-commons hadoop groovy scouter
  run_tester
  ;;

*)
  echo "Unexpected argument: $1"
  sleep 5s
  false
  ;;

esac
|
// config.js
// Central place for runtime configuration read from the environment.
//const dotenv = require('dotenv');
//dotenv.config();

module.exports = {
  // Base URL of the backend API (REACT_APP_* vars are injected at build time).
  endpoint: process.env.REACT_APP_API_URL,
};
package com.telenav.osv.obd;
import android.util.Log;
/**
* Created by dianat on 3/25/2016.
*/
/**
 * Helpers for talking to an ELM327-style OBD-II adapter: the command
 * strings sent to the adapter, plus parsing of its raw responses.
 * Created by dianat on 3/25/2016.
 */
public class OBDHelper {

    /**
     * Command for vehicle speed (mode 01, PID 0D). The trailing "1" is
     * presumably the ELM327 "expected number of response lines" hint --
     * TODO confirm against the adapter documentation.
     */
    public static final String CMD_SPEED = "010D1";

    /** ELM327 warm start. */
    public static final String CMD_WARM_START = "AT WS";

    /** ELM327 fast init. */
    public static final String CMD_FAST_INIT = "AT FI";

    /** Let the adapter select the protocol automatically. */
    public static final String CMD_SET_AUTO = "AT SP 00";

    /** Query the adapter's device description string. */
    public static final String CMD_DEVICE_DESCRIPTION = "AT @1";

    /** Ask which protocol the adapter is currently using. */
    public static final String CMD_DESCRIBE_PROTOCOL = "AT DP";

    private static final String TAG = "OBDHelper";

    /**
     * converts the characteristic result to integer value according to the command type formula
     * the formulas used for calculation where taken for: https://en.wikipedia.org/wiki/OBD-II_PIDs
     * The raw result is expected as "&lt;echoed command&gt;\r&lt;response bytes&gt;";
     * only CMD_SPEED is currently handled, other commands log an error.
     * @param charResult - the characteristic result
     * @param vehicleDataListener - optional callback notified with the parsed speed
     * @return - the integer value, or -1 if the result could not be parsed
     */
    public static int convertResult(String charResult, VehicleDataListener vehicleDataListener) {
        String request = null;
        String response = null;
        // First line is the echoed request, second line the response payload.
        String[] arrayRes = charResult.split("\r");
        if (arrayRes.length > 1) {
            request = arrayRes[0];
            response = arrayRes[1];
        }
        String[] splitResponse = response != null ? response.split(" ") : new String[0];
        if (request != null) {
            switch (request) {
                case CMD_SPEED:
                    if (splitResponse.length >= 3) {
                        // formula used: A, where A is the first byte
                        // (index 2: tokens 0-1 are the "41 0D" response header)
                        Integer speed = Integer.parseInt(splitResponse[2], 16);
                        if (vehicleDataListener != null) {
                            vehicleDataListener.onSpeed(speed);
                        }
                        return speed;
                    }
                    break;
                default:
                    Log.e(TAG, "convertResult: cannot parse");
            }
        }
        return -1;
    }
}
|
from collections import defaultdict
def return_default():
    # Default factory for defaultdict: missing keys start at 0.
    return 0

def dd():
    # Convenience constructor for an int-defaulting defaultdict.
    return defaultdict(return_default)

CHALLENGE_DAY = "7"

# Puzzle input and worked sample for Advent of Code day 7, read at import.
REAL = open(CHALLENGE_DAY + ".txt").read()
SAMPLE = open(CHALLENGE_DAY + ".sample.txt").read()

SAMPLE_EXPECTED = 4
# SAMPLE_EXPECTED =
def parse_lines(raw):
    """Parse bag rules into ``{bag name: {inner bag name: qty} or None}``.

    Each input line looks like
    ``"light red bags contain 1 bright white bag, 2 muted yellow bags."``;
    bags that hold nothing map to ``None``.
    """
    rules = {}
    for line in raw.split("\n"):
        container, _, contents = line.partition(" contain ")
        name = container.replace(" bags", "")
        if contents == "no other bags.":
            rules[name] = None
            continue
        inner = {}
        for entry in contents.split(","):
            # Entry shape: "<qty> <adjective> <colour> bag(s)[.]"
            qty, adjective, colour, _ = entry.strip().split(" ")
            inner[adjective + " " + colour] = int(qty)
        rules[name] = inner
    return rules
def contains(target, bags, at, _visited=None):
    """Return True if bag ``at`` is ``target`` or transitively holds it.

    ``bags`` maps bag name -> {inner bag: qty} or None (holds nothing).
    Fixes: ``_visited`` (internal, default None) guards against cyclic rules
    so recursion always terminates, and unknown bag names no longer raise
    KeyError; behavior on well-formed acyclic input is unchanged.
    """
    if at == target:
        return True
    if _visited is None:
        _visited = set()
    if at in _visited:
        return False
    _visited.add(at)
    if bags.get(at) is None:
        return False
    return any(contains(target, bags, inner, _visited) for inner in bags[at])
def solve(raw):
    """Count bags that can (directly or transitively) hold a shiny gold bag.

    Fix: removed the unused per-iteration ``seen`` set from the original.
    """
    parsed = parse_lines(raw)
    # Debug here to make sure parsing is good.
    SHINY = "shiny gold"
    total = 0
    for bag in parsed.keys():
        if bag == SHINY:
            continue
        if contains(SHINY, parsed, bag):
            total += 1
    return total
# Self-check against the sample input first, then solve the real puzzle.
sample = solve(SAMPLE)
if SAMPLE_EXPECTED is None:
    print("*** SKIPPING SAMPLE! ***")
else:
    assert sample == SAMPLE_EXPECTED
    print("*** SAMPLE PASSED ***")

solved = solve(REAL)
print("SOLUTION: ", solved)
# assert solved
|
import type { FileHandle } from 'fs/promises';
import { fstatSync } from 'fs';
import type { DataValue, Read, Struct } from '@nishin/reader';
import { BinaryReader, ByteOrder, DataType, Encoding } from '@nishin/reader';
import { assertInt } from '@nishin/reader/assert';
import { DataArray, DataBigInt, DataBoolean, DataChar, DataFloat, DataInt, DataString } from '@nishin/reader/data';
import { repeatAsync } from './repeat-async.js';
// Mutable view of a type whose properties are declared readonly.
type ReadWrite<T> = {
  -readonly [P in keyof T]: T[P];
};

interface Config {
  // Size in bytes of the internal read buffer (constructor default: 10 MiB).
  readonly bufferSize: number;
}
// Buffered, promise-based binary reader over a FileHandle. Keeps a
// BinaryReader window over part of the file and refills it on demand as the
// logical offset moves; next() decodes the supported DataType/Struct values.
export class AsyncReader {
  #fileHandle: FileHandle;
  #bufferSize: number;
  #reader: BinaryReader;
  #byteLength: number;
  // Whether the window currently holds bytes read from the file.
  #dataRead = false;
  // Absolute position within the file (not within the buffer window).
  #offset = 0;

  get offset(): number {
    return this.#offset;
  }

  get byteLength(): number {
    return this.#byteLength;
  }

  get byteOrder(): ByteOrder | undefined {
    return this.#reader.byteOrder;
  }

  get buffer(): Uint8Array {
    return this.#reader.buffer;
  }

  get hasNext(): boolean {
    return this.#offset < this.#byteLength;
  }

  constructor(fileHandle: FileHandle, byteOrder?: ByteOrder, { bufferSize }?: Config);
  constructor(fileHandle: FileHandle, { bufferSize }: Config);
  constructor(fileHandle: FileHandle, byteOrderOrConfig?: ByteOrder | Config, { bufferSize = 2 ** 20 * 10 } = {}) {
    // Disambiguate the overloads: second arg is either a ByteOrder or a Config.
    const byteOrder = byteOrderOrConfig instanceof ByteOrder ? byteOrderOrConfig : undefined;
    this.#bufferSize = byteOrderOrConfig instanceof ByteOrder ? bufferSize : byteOrderOrConfig?.bufferSize ?? bufferSize;
    this.#byteLength = fstatSync(fileHandle.fd).size;
    this.#fileHandle = fileHandle;
    this.#reader = new BinaryReader(Buffer.alloc(this.#bufferSize), byteOrder);
  }

  // Ensure the window contains the bytes at `position` (default: current
  // offset moved by `delta`); refills from the file when the target falls
  // outside the current window or nothing has been read yet.
  async #prepareOffset(delta: number, position = this.#offset + delta) {
    if (!this.#dataRead || this.#reader.offset + delta < 0 || this.#reader.offset + delta >= this.#reader.buffer.length) {
      this.#reader = new BinaryReader(Buffer.alloc(this.#bufferSize), this.byteOrder);
      const { bytesRead } = await this.#fileHandle.read(this.#reader.buffer, 0, this.#bufferSize, position);
      if (bytesRead < this.#bufferSize) {
        this.#reader = this.#reader.slice(bytesRead);
      }
      this.#dataRead = true;
    }
  }

  setByteOrder(byteOrder: ByteOrder): void {
    this.#reader.setByteOrder(byteOrder);
  }

  // Consume `size` bytes and return them as a standalone BinaryReader
  // (skips forward first, then reads the skipped span back from the file).
  async slice(size: number): Promise<BinaryReader> {
    await this.skip(size);
    const buffer = Buffer.alloc(size);
    await this.#fileHandle.read({
      buffer,
      length: size,
      position: this.#offset - size,
    });
    return new BinaryReader(buffer, this.byteOrder);
  }

  async seek(offset: number): Promise<void> {
    assertInt(offset, { min: 0, max: this.#byteLength });
    await this.#prepareOffset(offset - this.#offset);
    this.#offset = offset;
  }

  async skip(bytes: number): Promise<void> {
    assertInt(bytes, { min: 0 });
    await this.seek(this.#offset + bytes);
  }

  // Advance to the next multiple of `to` (no-op if already aligned).
  async align(to: number): Promise<void> {
    assertInt(to, { min: 0 });
    await this.skip(((-this.offset % to) + to) % to);
  }

  // Read a 16-bit BOM at `offset` and adopt the byte order it encodes.
  async readByteOrderMark(offset = this.#offset): Promise<void> {
    await this.seek(offset);
    const { value } = await this.next(DataType.int({ signed: false, byteLength: 2 }, ByteOrder.BigEndian));
    const byteOrder = ByteOrder.lookupValue(value);
    if (!byteOrder) {
      throw new TypeError(`invalid byte order mark`);
    }
    this.setByteOrder(byteOrder);
  }

  // Verify the file's magic signature (ASCII string or raw bytes) at `offset`.
  async assertMagic(magic: string | Uint8Array, offset = this.#offset): Promise<void> {
    await this.seek(offset);
    if (typeof magic === 'string') {
      const { value } = await this.next(DataType.string(Encoding.ASCII, { count: magic.length }));
      if (magic !== value) {
        throw new TypeError(`invalid magic: expected '${magic}', got '${value}`);
      }
    } else {
      const { value } = await this.next(DataType.array(DataType.Uint8, magic.length));
      for (let i = 0; i < value.length; i++) {
        if (value[i] !== magic[i]) {
          throw new TypeError(
            // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
            `invalid magic: expected 0x${magic[i]?.toString(16).padStart(2, '0')} at position ${i}, got 0x${value[i]
              ?.toString(16)
              .padStart(2, '0')}`,
          );
        }
      }
    }
  }

  // Decode one value of the given DataType (or a Struct of them) at the
  // current offset. On any failure the offset is restored so the read can
  // be retried; dispatches on the concrete Data* class of `type`.
  async next<T extends DataType | Struct>(type: T): Promise<Read<T>> {
    /* eslint-disable @typescript-eslint/consistent-type-assertions */
    const initialOffset = this.#offset;
    try {
      if (type instanceof DataBoolean) {
        await this.#prepareOffset(1, this.#offset);
        const result = this.#reader.next(type) as DataValue<unknown>;
        this.#offset += result.byteLength;
        return result as Read<T>;
      }

      if (type instanceof DataInt || type instanceof DataBigInt || type instanceof DataFloat) {
        await this.#prepareOffset(type.byteLength, this.#offset);
        const result = this.#reader.next(type) as DataValue<unknown>;
        this.#offset += result.byteLength;
        return result as Read<T>;
      }

      if (type instanceof DataChar) {
        await this.#prepareOffset(type.encoding.maxBytes, this.#offset);
        const result = this.#reader.next(type) as DataValue<unknown>;
        this.#offset += result.byteLength;
        return result as Read<T>;
      }

      if (type instanceof DataString) {
        const { encoding, byteOrder, terminator, count } = type;
        const charType = DataType.char(encoding, byteOrder);
        // Fixed-length string: read `count` chars and join them.
        if (count > 0) {
          const { value, byteLength } = await this.next(DataType.array(charType, count));
          return {
            value: value.join(''),
            byteLength,
          } as Read<T>;
        }
        // Terminated string: accumulate chars until the terminator or EOF.
        // eslint-disable-next-line @typescript-eslint/no-shadow
        const result: ReadWrite<DataValue<string>> = {
          value: '',
          byteLength: 0,
        };
        let char = await this.next(charType);
        // eslint-disable-next-line no-await-in-loop
        while (char.value !== terminator && this.#offset < this.#byteLength) {
          result.value += char.value;
          result.byteLength += char.byteLength;
          // eslint-disable-next-line no-await-in-loop
          char = await this.next(charType);
        }
        // EOF without terminator: keep the final char as part of the value.
        if (char.value !== terminator) {
          result.value += char.value;
        }
        result.byteLength += char.byteLength;
        return result as Read<T>;
      }

      if (type instanceof DataArray) {
        const items = await repeatAsync(type.count, async () => this.next(type.type) as Promise<DataValue<unknown>>);
        return items.reduce<ReadWrite<DataValue<unknown[]>>>(
          (result, item) => {
            result.value.push(item.value);
            result.byteLength += item.byteLength;
            return result;
          },
          {
            value: [],
            byteLength: 0,
          },
        ) as Read<T>;
      }

      if (type instanceof DataType) {
        throw new TypeError(`unsupported data type`);
      }

      // Struct as array: ordered list of DataTypes.
      if (Array.isArray(type)) {
        const result = [];
        for (const item of type) {
          if (!(item instanceof DataType)) {
            throw new TypeError(`struct array contains items which are not an instance of DataType`);
          }
          // eslint-disable-next-line no-await-in-loop
          result.push(await this.next(item));
        }
        return result as Read<T>;
      }

      // Struct as object: read each property's DataType in key order.
      const entries = [];
      for (const [key, item] of Object.entries(type)) {
        if (!(item instanceof DataType)) {
          throw new TypeError(`struct object contains items which are not an instance of DataType`);
        }
        // eslint-disable-next-line no-await-in-loop
        entries.push([key, await this.next(item)] as const);
      }
      return Object.fromEntries(entries) as Read<T>;
    } catch (error) {
      // Roll the logical offset back so a failed read leaves state intact.
      this.#offset = initialOffset;
      throw error;
    }
    /* eslint-enable @typescript-eslint/consistent-type-assertions */
  }

  async close(): Promise<void> {
    await this.#fileHandle.close();
  }
}
|
<filename>trello/board.go
package trello
import "fmt"
// Board mirrors the JSON shape of a Trello board as returned by the REST
// API. Fields are decoded verbatim; interface{} fields are payloads whose
// concrete shape this package does not interpret.
type Board struct {
	ID string `json:"id"`
	Name string `json:"name"`
	Desc string `json:"desc"`
	// NOTE(review): the API's descData is typically an object, not a plain
	// string -- confirm this field decodes correctly for boards that set it.
	DescData string `json:"descData"`
	Closed bool `json:"closed"`
	IDOrganization string `json:"idOrganization"`
	IDEnterprise string `json:"idEnterprise"`
	IDBoardSource string `json:"idBoardSource"`
	PremiumFeatures []interface{} `json:"premiumFeatures"`
	Pinned bool `json:"pinned"`
	URL string `json:"url"`
	ShortURL string `json:"shortUrl"`
	// Prefs holds the board's display and permission preferences.
	Prefs struct {
		PermissionLevel string `json:"permissionLevel"`
		HideVotes bool `json:"hideVotes"`
		Voting string `json:"voting"`
		Comments string `json:"comments"`
		SelfJoin bool `json:"selfJoin"`
		CardCovers bool `json:"cardCovers"`
		IsTemplate bool `json:"isTemplate"`
		CardAging string `json:"cardAging"`
		CalendarFeedEnabled bool `json:"calendarFeedEnabled"`
		Background string `json:"background"`
		BackgroundImage string `json:"backgroundImage"`
		BackgroundImageScaled []struct {
			Width int `json:"width"`
			Height int `json:"height"`
			URL string `json:"url"`
		} `json:"backgroundImageScaled"`
	} `json:"prefs"`
	// LabelNames maps each built-in label color to its user-assigned name.
	LabelNames struct {
		Green string `json:"green"`
		Yellow string `json:"yellow"`
		Orange string `json:"orange"`
		Red string `json:"red"`
		Purple string `json:"purple"`
		Blue string `json:"blue"`
		Sky string `json:"sky"`
		Lime string `json:"lime"`
		Pink string `json:"pink"`
		Black string `json:"black"`
	} `json:"labelNames"`
	Limits struct {
		Attachments struct {
			PerBoard struct {
				Status string `json:"status"`
				DisableAt int `json:"disableAt"`
				WarnAt int `json:"warnAt"`
			} `json:"perBoard"`
		} `json:"attachments"`
	} `json:"limits"`
	Starred bool `json:"starred"`
	Memberships []struct {
		ID string `json:"id"`
		IDMember string `json:"idMember"`
		MemberType string `json:"memberType"`
		Unconfirmed bool `json:"unconfirmed"`
		Deactivated bool `json:"deactivated"`
	} `json:"memberships"`
	ShortLink string `json:"shortLink"`
	Subscribed bool `json:"subscribed"`
	PowerUps []interface{} `json:"powerUps"`
	DateLastActivity string `json:"dateLastActivity"`
	DateLastView string `json:"dateLastView"`
	IDTags []interface{} `json:"idTags"`
	DatePluginDisable string `json:"datePluginDisable"`
	CreationMethod string `json:"creationMethod"`
	IxUpdate int `json:"ixUpdate"`
	TemplateGallery string `json:"templateGallery"`
	EnterpriseOwned bool `json:"enterpriseOwned"`
}
// List models a Trello list (a column of cards on a board) as returned by
// the Trello REST API.
type List struct {
	ID         string      `json:"id"`
	Name       string      `json:"name"`
	IDBoard    string      `json:"idBoard"` // ID of the board this list belongs to
	Closed     bool        `json:"closed"`  // true when the list is archived
	Subscribed bool        `json:"subscribed"`
	Pos        int         `json:"pos"` // ordering position within the board
	SoftLimit  interface{} `json:"softLimit"`
}
// Boards retrieves every board visible to the authenticated member.
func (t *Trello) Boards() ([]*Board, error) {
	var result []*Board
	if err := t.handler.getJSON(BoardsEndpoint(), &result); err != nil {
		return nil, err
	}
	return result, nil
}
// SearchBoardByName returns the first board whose name matches boardName
// exactly. It returns an error if the board list cannot be fetched or if no
// board with that name exists.
func (t *Trello) SearchBoardByName(boardName string) (*Board, error) {
	boards, err := t.Boards()
	if err != nil {
		return nil, err
	}
	for _, b := range boards {
		if b.Name == boardName {
			return b, nil
		}
	}
	// Error strings are lower-cased per Go convention (staticcheck ST1005).
	return nil, fmt.Errorf("can't find board %s", boardName)
}
// SearchBoardByID fetches a single board by its Trello ID.
func (t *Trello) SearchBoardByID(boardID string) (*Board, error) {
	endpoint := BoardEndpoint(boardID)
	var board *Board
	err := t.handler.getJSON(endpoint, &board)
	if err != nil {
		return nil, err
	}
	return board, nil
}
// BoardLists returns every list belonging to the given board.
func (t *Trello) BoardLists(boardID string) ([]*List, error) {
	var lists []*List
	if err := t.handler.getJSON(BoardListsEndpoint(boardID), &lists); err != nil {
		return nil, err
	}
	return lists, nil
}
// ErrListNotFound is returned when no list with the requested name exists on
// a board. Its underlying value is the list name that was searched for.
type ErrListNotFound string

// Error implements the error interface.
func (e ErrListNotFound) Error() string {
	return fmt.Sprint("Cannot find list ", string(e))
}
// SearchListByName scans the lists of a board for one whose name matches
// listName exactly, returning ErrListNotFound when no match exists.
func (t *Trello) SearchListByName(boardID, listName string) (*List, error) {
	lists, err := t.BoardLists(boardID)
	if err != nil {
		return nil, err
	}
	for _, candidate := range lists {
		if candidate.Name != listName {
			continue
		}
		return candidate, nil
	}
	return nil, ErrListNotFound(listName)
}
// CreateList creates a new list named listName on the given board and returns
// the API's representation of it.
func (t *Trello) CreateList(boardID, listName string) (*List, error) {
	payload := map[string]string{"name": listName}
	var created *List
	if err := t.handler.postJSON(CreateListEndpoint(boardID), payload, &created); err != nil {
		return nil, err
	}
	return created, nil
}
|
//给你一个按递增顺序排序的数组 arr 和一个整数 k 。数组 arr 由 1 和若干 素数 组成,且其中所有整数互不相同。
//
// 对于每对满足 0 < i < j < arr.length 的 i 和 j ,可以得到分数 arr[i] / arr[j] 。
//
// 那么第 k 个最小的分数是多少呢? 以长度为 2 的整数数组返回你的答案, 这里 answer[0] == arr[i] 且 answer[1] ==
//arr[j] 。
//
//
// 示例 1:
//
//
//输入:arr = [1,2,3,5], k = 3
//输出:[2,5]
//解释:已构造好的分数,排序后如下所示:
//1/5, 1/3, 2/5, 1/2, 3/5, 2/3
//很明显第三个最小的分数是 2/5
//
//
// 示例 2:
//
//
//输入:arr = [1,7], k = 1
//输出:[1,7]
//
//
//
//
// 提示:
//
//
// 2 <= arr.length <= 1000
// 1 <= arr[i] <= 3 * 10⁴
// arr[0] == 1
// arr[i] 是一个 素数 ,i > 0
// arr 中的所有数字 互不相同 ,且按 严格递增 排序
// 1 <= k <= arr.length * (arr.length - 1) / 2
//
// Related Topics 数组 二分查找 堆(优先队列) 👍 170 👎 0
package algorithm_700
import (
"reflect"
"testing"
)
// Test_kthSmallestPrimeFraction exercises kthSmallestPrimeFraction against
// the LeetCode examples plus one larger case.
func Test_kthSmallestPrimeFraction(t *testing.T) {
	type args struct {
		arr []int
		k   int
	}
	cases := []struct {
		name string
		args args
		want []int
	}{
		{"t1", args{[]int{1, 2, 3, 5}, 3}, []int{2, 5}},
		{"t2", args{[]int{1, 7}, 1}, []int{1, 7}},
		{"t3", args{[]int{1, 2, 3, 5, 11, 13, 19}, 4}, []int{2, 19}},
	}
	for _, tc := range cases {
		tc := tc // capture range variable for the subtest closure
		t.Run(tc.name, func(t *testing.T) {
			got := kthSmallestPrimeFraction(tc.args.arr, tc.args.k)
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("kthSmallestPrimeFraction() = %v, want %v", got, tc.want)
			}
		})
	}
}
|
Test case:
Input: array = [1, -2, 3, 4, -5, 6]
Expected output: 8
The expected output can be verified by running the algorithm on the given input: the maximum-sum subarray of [1, -2, 3, 4, -5, 6] is [3, 4, -5, 6], whose sum is 3 + 4 - 5 + 6 = 8. Therefore, the expected output is 8.
package com.lambdaschool.shoppingcart.services;
import com.lambdaschool.shoppingcart.ShoppingCartTestApplication;
import com.lambdaschool.shoppingcart.models.*;
import com.lambdaschool.shoppingcart.repository.CartItemRepository;
import com.lambdaschool.shoppingcart.repository.ProductRepository;
import com.lambdaschool.shoppingcart.repository.UserRepository;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
import static junit.framework.Assert.assertNull;
import static junit.framework.TestCase.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**************************************************************************************************
********************* REMEMBER TO TURN OFF SEEDDATA (both in test and main) *********************
**************************************************************************************************/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ShoppingCartTestApplication.class)
public class CartItemServiceImplNoDBTest
{
private List<User> userList = new ArrayList<>();
private List<Product> prodList = new ArrayList<>();
@Autowired
CartItemService cartItemService;
@MockBean
private UserRepository userrepos;
@MockBean
private ProductRepository prodrepos;
@MockBean
private CartItemRepository cartitemrepos;
@Before
public void setUp() throws Exception
{
Role r1 = new Role("admin");
r1.setRoleid(1);
Role r2 = new Role("user");
r2.setRoleid(2);
User u1 = new User("barnbarn",
"LambdaLlama",
"<EMAIL>",
"added via seed data");
u1.getRoles()
.add(new UserRoles(u1,
r1));
u1.getRoles()
.add(new UserRoles(u1,
r2));
u1.setUserid(1);
User u2 = new User("cinnamon",
"LambdaLlama",
"<EMAIL>",
"added via seed data");
u2.getRoles()
.add(new UserRoles(u2,
r2));
u2.setUserid(2);
User u3 = new User("stumps",
"LambdaLlama",
"<EMAIL>",
"added via seed data");
u3.getRoles()
.add(new UserRoles(u3,
r2));
u3.setUserid(3);
// Adding Products
Product p1 = new Product();
p1.setProductid(1);
p1.setName("PEN");
p1.setDescription("MAKES WORDS");
p1.setPrice(2.50);
p1.setComments("added via seed data");
Product p2 = new Product();
p1.setProductid(2);
p2.setName("PENCIL");
p2.setDescription("DOES MATH");
p2.setPrice(1.50);
p2.setComments("added via seed data");
Product p3 = new Product();
p1.setProductid(3);
p3.setName("COFFEE");
p3.setDescription("EVERYONE NEEDS COFFEE");
p3.setPrice(4.00);
p3.setComments("added via seed data");
prodList.add(p1);
prodList.add(p2);
prodList.add(p3);
// Creating Carts
CartItem cart1 = new CartItem();
cart1.setUser(u1);
cart1.setProduct(p1);
cart1.setComments("added via seed data");
cart1.setQuantity(4);
u1.getCarts()
.add(cart1);
CartItem cart2 = new CartItem();
cart2.setUser(u1);
cart2.setProduct(p2);
cart2.setComments("added via seed data");
cart2.setQuantity(3);
u1.getCarts()
.add(cart2);
CartItem cart3 = new CartItem();
cart3.setUser(u1);
cart3.setProduct(p3);
cart3.setComments("added via seed data");
cart3.setQuantity(2);
u1.getCarts()
.add(cart3);
CartItem cart4 = new CartItem();
cart4.setUser(u2);
cart4.setProduct(p3);
cart4.setComments("added via seed data");
cart4.setQuantity(1);
u2.getCarts()
.add(cart4);
CartItem cart5 = new CartItem();
cart5.setUser(u3);
cart5.setProduct(p3);
cart5.setComments("added via seed data");
cart5.setQuantity(17);
u3.getCarts()
.add(cart5);
userList.add(u1);
userList.add(u2);
userList.add(u3);
MockitoAnnotations.initMocks(this);
}
@After
public void tearDown() throws Exception
{
}
@Test
public void addToCart()
{
CartItemId cartItemId = new CartItemId(1, 1);
CartItem cart3 = new CartItem();
cart3.setUser(userList.get(0));
cart3.setProduct(prodList.get(0));
cart3.setComments("");
cart3.setQuantity(2);
Mockito.when(userrepos.findById(1L)).thenReturn(Optional.of(userList.get(0)));
Mockito.when(prodrepos.findById(1L)).thenReturn(Optional.of(prodList.get(0)));
Mockito.when(cartitemrepos.findById(any(CartItemId.class))).thenReturn(Optional.of(cart3));
Mockito.when(cartitemrepos.save(any(CartItem.class))).thenReturn(cart3);
assertEquals(3,
cartItemService.addToCart(1L,
1L,
"Hello")
.getQuantity());
}
@Test
public void removeFromCart()
{
CartItemId cartItemId = new CartItemId(1, 1);
CartItem cart3 = new CartItem();
cart3.setUser(userList.get(0));
cart3.setProduct(prodList.get(0));
cart3.setComments("");
cart3.setQuantity(3);
Mockito.when(userrepos.findById(1L)).thenReturn(Optional.of(userList.get(0)));
Mockito.when(prodrepos.findById(1L)).thenReturn(Optional.of(prodList.get(0)));
Mockito.when(cartitemrepos.findById(any(CartItemId.class))).thenReturn(Optional.of(cart3));
Mockito.when(cartitemrepos.save(any(CartItem.class))).thenReturn(cart3);
assertEquals(2,
cartItemService.removeFromCart(1L,
1L,
"Bye")
.getQuantity());
}
@Test
public void emptyFromCart()
{
CartItemId cartItemId = new CartItemId(1, 1);
CartItem cart3 = new CartItem();
cart3.setUser(userList.get(0));
cart3.setProduct(prodList.get(0));
cart3.setComments("");
cart3.setQuantity(1);
Mockito.when(userrepos.findById(1L)).thenReturn(Optional.of(userList.get(0)));
Mockito.when(prodrepos.findById(1L)).thenReturn(Optional.of(prodList.get(0)));
Mockito.when(cartitemrepos.findById(any(CartItemId.class))).thenReturn(Optional.of(cart3));
Mockito.when(cartitemrepos.save(any(CartItem.class))).thenReturn(null);
assertNull(cartItemService.removeFromCart(1L,
1L,
"Bye"));
}
} |
<filename>src/config/index.ts
import * as dotEnvSafe from 'dotenv-safe'
import * as path from 'path'
// Load environment variables for the current NODE_ENV (e.g. `.env.test`),
// validating that every key declared in `.env.example` is present.
const envPath = `.env.${process.env.NODE_ENV}`
dotEnvSafe.config({
  allowEmptyValues: true,
  example: path.resolve(__dirname, '../../.env.example'),
  path: path.resolve(process.cwd(), envPath),
})
// Shape of the fully-assembled application configuration.
interface IConfig {
  // JWT signing settings.
  readonly AUTH: {
    readonly TOKEN_SECRET: string
    readonly TOKEN_EXPIRATION_TIME: string
  }
  // Database connection settings.
  readonly DB: {
    readonly AUDIT_SCHEMA: string
    readonly HOST: string
    readonly MAIN_SCHEMA: string
    readonly NAME: string
    readonly PASSWORD: string
    readonly PORT: number
    readonly USER: string
  }
  // LogDNA log-shipping settings.
  readonly LOGDNA: {
    readonly KEY: string
    readonly HOSTNAME: string
    readonly APPNAME: string
  }
  readonly NODE_ENV: string
  readonly SERVER_PORT: number
}
// Pull the raw string values out of the environment; dotenv-safe above has
// already verified they exist (empty values are allowed).
const {
  AUTH_TOKEN_EXPIRATION_TIME,
  AUTH_TOKEN_SECRET,
  DB_HOST,
  DB_AUDIT_SCHEMA,
  DB_MAIN_SCHEMA,
  DB_PASSWORD,
  DB_PORT,
  DB_NAME,
  DB_USER,
  LOGDNA_KEY,
  LOGDNA_HOSTNAME,
  LOGDNA_APPNAME,
  NODE_ENV,
  SERVER_PORT,
} = process.env
const config: IConfig = {
AUTH: {
TOKEN_EXPIRATION_TIME: AUTH_TOKEN_EXPIRATION_TIME,
TOKEN_SECRET: AUTH_TOKEN_SECRET,
},
DB: {
AUDIT_SCHEMA: DB_AUDIT_SCHEMA,
HOST: DB_HOST,
MAIN_SCHEMA: DB_MAIN_SCHEMA,
NAME: DB_NAME,
PASSWORD: <PASSWORD>,
PORT: parseInt(DB_PORT, 10),
USER: DB_USER,
},
LOGDNA: {
KEY: LOGDNA_KEY,
HOSTNAME: LOGDNA_HOSTNAME,
APPNAME: LOGDNA_APPNAME,
},
NODE_ENV,
SERVER_PORT: parseInt(SERVER_PORT, 10),
}
export default config
|
/*!
* Copyright (c) 2015-present, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*/
import { Client} from '@okta/okta-sdk-nodejs';
import { getConfig } from '../../util';
import deleteUser from './deleteUser';
/**
 * Deletes the self-enrolled test user whose Okta profile matches `username`.
 * Lookup and deletion errors are logged and swallowed so cleanup never fails
 * the surrounding test run.
 */
export default async (username: string): Promise<void> => {
  const config = getConfig();
  const oktaClient = new Client({
    orgUrl: config.orgUrl,
    token: config.oktaAPIKey,
  });
  try {
    // listUsers returns an async collection; take only the first match.
    const {value: user} = await oktaClient.listUsers({
      q: username
    }).next();
    if (user) {
      await deleteUser(user);
    }
  } catch (err) {
    // Fixed typo in the log message: "occured" -> "occurred".
    console.log(`An error occurred during self-enrolled user cleanup: ${err}`);
  }
};
|
import React, { useState, useEffect } from 'react';
import { FlatList, Text, View } from 'react-native';
const App = () => {
const [data, setData] = useState([]);
useEffect(() => {
const fetchData = async () => {
const response = await fetch('https://jsonplaceholder.typicode.com/users');
const users = await response.json();
setData(users);
};
fetchData();
}, []);
return (
<View>
<FlatList
data={data}
renderItem={({ item }) => <Text>{item.name}</Text>}
keyExtractor={item => item.id}
/>
</View>
);
};
export default App; |
<filename>LZUISDK/SDK/LSDeviceManagerFramework.framework/Headers/LSESportHRSectionCfg.h
//
// LSESportHRSectionCfg.h
// LSWearable
//
// Created by lifesense-mac on 17/3/7.
// Copyright © 2017年 lifesense. All rights reserved.
//
#import <Foundation/Foundation.h>
/// Heart-rate section (zone) configuration for e-sport mode: a single
/// [min, max] interval. Units are presumably beats per minute — confirm
/// against the SDK documentation.
@interface LSESportHRSectionCfg : NSObject

// Lower bound of the heart-rate section (original comment: 最小, "minimum").
@property (nonatomic, assign) NSUInteger min;

// Upper bound of the heart-rate section (original comment: 最大, "maximum").
@property (nonatomic, assign) NSUInteger max;

@end
|
#!/bin/bash
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
set -e

echo "Script that assembles all you need to make an RC."
echo "It generates source and binary tar in release directory"
echo "Presumes that you can sign a release as described at https://www.apache.org/dev/release-signing.html"
echo "Starting...";sleep 2s

# Set directory variables
DIR_ROOT="$(cd $(dirname $0);pwd)/.."
# Quote the path so a checkout under a directory containing spaces works.
cd "$DIR_ROOT"
PHOENIX="$(xmllint --xpath "//*[local-name()='project']/*[local-name()='version']/text()" pom.xml)"
DIR_REL_BASE=$DIR_ROOT/release
DIR_REL_ROOT=$DIR_REL_BASE/apache-phoenix-$PHOENIX
DIR_REL_BIN=apache-phoenix-$PHOENIX-bin
DIR_REL_BIN_PATH=$DIR_REL_ROOT/$DIR_REL_BIN
REL_SRC=apache-phoenix-$PHOENIX-src
DIR_REL_SRC_TAR_PATH=$DIR_REL_ROOT/src
DIR_REL_BIN_TAR_PATH=$DIR_REL_ROOT/bin
DIR_BIN=$DIR_REL_BIN_PATH/bin
DIR_PHERF_CONF=phoenix-pherf/config
DIR_EXAMPLES=$DIR_REL_BIN_PATH/examples
DIR_DOCS=dev/release_files

# Verify no target exists
mvn clean; rm -rf $DIR_REL_BASE;
# Pass the start directory explicitly: `find` with no path is a GNU
# extension and fails on BSD/macOS find.
RESULT=$(find . -iname target)
if [ -z "$RESULT" ]
then
  echo "Verified target directory does not exist.";
else
  echo "Target directory exists at: $RESULT. Please use a clean repo.";
  # `exit -1` is out of the 0-255 range and non-portable; use 1.
  exit 1;
fi

# Generate src tar
ln -s . $REL_SRC; tar cvzf $REL_SRC.tar.gz --exclude="$REL_SRC/$REL_SRC" $REL_SRC/*; rm $REL_SRC;

# Generate directory structure
mkdir $DIR_REL_BASE;
mkdir $DIR_REL_ROOT;
mkdir $DIR_REL_BIN_PATH;
mkdir $DIR_REL_BIN_TAR_PATH;
mkdir $DIR_REL_SRC_TAR_PATH;
mkdir $DIR_EXAMPLES;
mkdir $DIR_BIN;

# Move src tar
mv $REL_SRC.tar.gz $DIR_REL_SRC_TAR_PATH;

# Copy common jars
mvn clean apache-rat:check package -DskipTests -Dcheckstyle.skip=true -q;
rm -rf $(find . -type d -name archive-tmp);

# Copy all phoenix-*.jars to release dir. The pattern is quoted so the
# shell cannot glob-expand it before find sees it.
phx_jars=$(find . -iname "phoenix-*.jar")
cp $phx_jars $DIR_REL_BIN_PATH;

# Copy bin
cp bin/* $DIR_BIN;
cp -R $DIR_PHERF_CONF $DIR_BIN;

# Copy release docs
cp $DIR_DOCS/* $DIR_REL_BIN_PATH;

# Copy examples
cp -r examples/* $DIR_EXAMPLES

# Generate bin tar
tar cvzf $DIR_REL_BIN_TAR_PATH/$DIR_REL_BIN.tar.gz -C $DIR_REL_ROOT apache-phoenix-$PHOENIX-bin;
rm -rf $DIR_REL_BIN_PATH;
echo "DONE generating binary and source tars in release directory."
echo "Now signing source and binary tars"

# Sign: produce detached ASCII signature plus md5/sha512/sha256 digests for
# the single apache-phoenix tarball in the current directory.
function_sign() {
  phoenix_tar=$(find apache-phoenix-*.gz);
  gpg --armor --output $phoenix_tar.asc --detach-sig $phoenix_tar;
  md5sum -b $phoenix_tar > $phoenix_tar.md5;
  sha512sum -b $phoenix_tar > $phoenix_tar.sha;
  sha256sum -b $phoenix_tar >> $phoenix_tar.sha;
}
cd $DIR_REL_BIN_TAR_PATH; function_sign;
cd $DIR_REL_SRC_TAR_PATH; function_sign;

# Tag
read -p "Do you want add tag for this RC in GIT? (Y for yes or any other key to continue)" prompt
if [[ $prompt =~ [yY](es)* ]]
then
  echo "Tagging..."
  read -p "Enter tag (Example 5.0.0-rc0):" prompt
  echo "Setting tag: $prompt";sleep 5s
  git tag -a $prompt -m "$prompt"; git push origin $prompt
  mv $DIR_REL_ROOT $DIR_REL_BASE/phoenix-$prompt
fi

echo "DONE."
echo "If all looks good in release directory then commit RC at https://dist.apache.org/repos/dist/dev/phoenix"
|
impl ApiHandle {
    /// Retrieves a reference to the underlying XCB connection from the API.
    ///
    /// The connection is owned by the wrapped `api` object; the returned
    /// reference borrows from `self` and must not outlive this handle.
    pub fn xcb_connection(&self) -> &xcb::Connection {
        self.api.conn()
    }
}
package input
import (
"encoding/json"
"errors"
"fmt"
"sync/atomic"
"time"
"github.com/Jeffail/benthos/v3/lib/condition"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/x/docs"
)
//------------------------------------------------------------------------------
// init registers the read_until input constructor, its documentation, and a
// config sanitiser with the global Constructors table.
func init() {
	Constructors[TypeReadUntil] = TypeSpec{
		constructor: NewReadUntil,
		Summary: `
Reads messages from a child input until a consumed message passes a condition,
at which point the input closes.`,
		Description: `
Messages are read continuously while the condition returns false, when the
condition returns true the message that triggered the condition is sent out and
the input is closed. Use this type to define inputs where the stream should end
once a certain message appears.

Sometimes inputs close themselves. For example, when the ` + "`file`" + ` input
type reaches the end of a file it will shut down. By default this type will also
shut down. If you wish for the input type to be restarted every time it shuts
down until the condition is met then set ` + "`restart_input` to `true`." + `

### Metadata

A metadata key ` + "`benthos_read_until` containing the value `final`" + ` is
added to the first part of the message that triggers the input to stop.`,
		Footnotes: `
## Examples

This input is useful when paired with the
` + "[`count`](/docs/components/conditions/count)" + ` condition, as it can be
used to cut the input stream off once a certain number of messages have been
read:

` + "```yaml" + `
# Only read 100 messages, and then exit.
input:
  read_until:
    input:
      kafka_balanced:
        addresses: [ TODO ]
        topics: [ foo, bar ]
        consumer_group: foogroup
    condition:
      not:
        count:
          arg: 100
` + "```" + ``,
		// sanitiseConfigFunc strips the config down to the documented fields,
		// sanitising the nested condition and (optional) child input configs.
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			condSanit, err := condition.SanitiseConfig(conf.ReadUntil.Condition)
			if err != nil {
				return nil, err
			}
			// A nil child input is rendered as an empty object.
			var inputSanit interface{} = struct{}{}
			if conf.ReadUntil.Input != nil {
				if inputSanit, err = SanitiseConfig(*conf.ReadUntil.Input); err != nil {
					return nil, err
				}
			}
			return map[string]interface{}{
				"input":         inputSanit,
				"restart_input": conf.ReadUntil.Restart,
				"condition":     condSanit,
			}, nil
		},
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("input", "The child input to consume from."),
			docs.FieldCommon("condition", "The [condition](/docs/components/conditions/about) to test messages against."),
			docs.FieldCommon("restart_input", "Whether the input should be reopened if it closes itself before the condition has resolved to true."),
		},
	}
}
//------------------------------------------------------------------------------
// ReadUntilConfig contains configuration values for the ReadUntil input type.
type ReadUntilConfig struct {
	// Input is the child input to consume from; nil means unset.
	Input *Config `json:"input" yaml:"input"`
	// Restart controls whether the child input is re-created when it closes
	// itself before the condition has passed.
	Restart bool `json:"restart_input" yaml:"restart_input"`
	// Condition is tested against every consumed message.
	Condition condition.Config `json:"condition" yaml:"condition"`
}
// NewReadUntilConfig creates a new ReadUntilConfig with default values.
func NewReadUntilConfig() ReadUntilConfig {
	var conf ReadUntilConfig
	conf.Input = nil
	conf.Restart = false
	conf.Condition = condition.NewConfig()
	return conf
}
//------------------------------------------------------------------------------
// dummyReadUntilConfig mirrors ReadUntilConfig but with an interface-typed
// Input field, so marshalling can substitute an empty object for nil.
type dummyReadUntilConfig struct {
	Input     interface{}      `json:"input" yaml:"input"`
	Restart   bool             `json:"restart_input" yaml:"restart_input"`
	Condition condition.Config `json:"condition" yaml:"condition"`
}
// MarshalJSON prints an empty object instead of nil.
func (r ReadUntilConfig) MarshalJSON() ([]byte, error) {
	shadow := dummyReadUntilConfig{
		Restart:   r.Restart,
		Condition: r.Condition,
	}
	if r.Input != nil {
		shadow.Input = r.Input
	} else {
		shadow.Input = struct{}{}
	}
	return json.Marshal(shadow)
}
// MarshalYAML prints an empty object instead of nil.
func (r ReadUntilConfig) MarshalYAML() (interface{}, error) {
	shadow := dummyReadUntilConfig{
		Restart:   r.Restart,
		Condition: r.Condition,
	}
	if r.Input != nil {
		shadow.Input = r.Input
	} else {
		shadow.Input = struct{}{}
	}
	return shadow, nil
}
//------------------------------------------------------------------------------
// ReadUntil is an input type that continuously reads another input type until a
// condition returns true on a message consumed.
type ReadUntil struct {
	running int32 // 1 while running; CASed to 0 by CloseAsync (atomic access)
	conf    ReadUntilConfig

	wrapped Type           // current child input; set to nil by loop() when it closes
	cond    condition.Type // condition checked against every consumed message

	// Retained so the child input can be re-created when restart_input is set.
	wrapperMgr   types.Manager
	wrapperLog   log.Modular
	wrapperStats metrics.Type

	stats metrics.Type
	log   log.Modular

	transactions chan types.Transaction // output channel served by loop()
	closeChan    chan struct{}          // closed by CloseAsync to request shutdown
	closedChan   chan struct{}          // closed by loop() once shutdown completes
}
// NewReadUntil creates a new ReadUntil input type. It requires a child input
// config and a condition config; the read loop is started immediately on a
// separate goroutine.
func NewReadUntil(
	conf Config,
	mgr types.Manager,
	log log.Modular,
	stats metrics.Type,
) (Type, error) {
	// A child input is mandatory: without one there is nothing to read.
	if conf.ReadUntil.Input == nil {
		return nil, errors.New("cannot create read_until input without a child")
	}
	wrapped, err := New(
		*conf.ReadUntil.Input, mgr, log, stats,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to create input '%v': %v", conf.ReadUntil.Input.Type, err)
	}
	var cond condition.Type
	if cond, err = condition.New(
		conf.ReadUntil.Condition, mgr,
		log.NewModule(".read_until.condition"),
		metrics.Namespaced(stats, "read_until.condition"),
	); err != nil {
		return nil, fmt.Errorf("failed to create condition '%v': %v", conf.ReadUntil.Condition.Type, err)
	}
	rdr := &ReadUntil{
		running: 1,
		conf:    conf.ReadUntil,
		// The undecorated mgr/log/stats are kept so restarted child inputs
		// are constructed exactly like the first one.
		wrapperLog:   log,
		wrapperStats: stats,
		wrapperMgr:   mgr,
		log:          log.NewModule(".read_until"),
		stats:        metrics.Namespaced(stats, "read_until"),
		wrapped:      wrapped,
		cond:         cond,
		transactions: make(chan types.Transaction),
		closeChan:    make(chan struct{}),
		closedChan:   make(chan struct{}),
	}
	go rdr.loop()
	return rdr, nil
}
//------------------------------------------------------------------------------
// loop is the single reader goroutine: it consumes transactions from the
// wrapped input, forwards them until the condition passes, then propagates
// the final message and shuts down once it is successfully acknowledged.
func (r *ReadUntil) loop() {
	var (
		mRunning         = r.stats.GetGauge("running")
		mRestartErr      = r.stats.GetCounter("restart.error")
		mRestartSucc     = r.stats.GetCounter("restart.success")
		mInputClosed     = r.stats.GetCounter("input.closed")
		mCount           = r.stats.GetCounter("count")
		mPropagated      = r.stats.GetCounter("propagated")
		mFinalPropagated = r.stats.GetCounter("final.propagated")
		mFinalResSent    = r.stats.GetCounter("final.response.sent")
		mFinalResSucc    = r.stats.GetCounter("final.response.success")
		mFinalResErr     = r.stats.GetCounter("final.response.error")
	)
	defer func() {
		// Tear down the child input (if still open) before signalling that
		// this input is fully closed.
		if r.wrapped != nil {
			r.wrapped.CloseAsync()
			err := r.wrapped.WaitForClose(time.Second)
			// Retry until the child confirms closure; each attempt waits up
			// to a second.
			for ; err != nil; err = r.wrapped.WaitForClose(time.Second) {
			}
		}
		mRunning.Decr(1)
		close(r.transactions)
		close(r.closedChan)
	}()
	mRunning.Incr(1)
	var open bool

runLoop:
	for atomic.LoadInt32(&r.running) == 1 {
		// The child is nil after it closed itself; either re-create it
		// (restart_input: true) or end the stream.
		if r.wrapped == nil {
			if r.conf.Restart {
				var err error
				if r.wrapped, err = New(
					*r.conf.Input, r.wrapperMgr, r.wrapperLog, r.wrapperStats,
				); err != nil {
					mRestartErr.Incr(1)
					r.log.Errorf("Failed to create input '%v': %v\n", r.conf.Input.Type, err)
					return
				}
				mRestartSucc.Incr(1)
			} else {
				return
			}
		}

		var tran types.Transaction
		select {
		case tran, open = <-r.wrapped.TransactionChan():
			if !open {
				mInputClosed.Incr(1)
				r.wrapped = nil
				continue runLoop
			}
		case <-r.closeChan:
			return
		}
		mCount.Incr(1)

		// While the condition fails, pass the transaction straight through.
		if !r.cond.Check(tran.Payload) {
			select {
			case r.transactions <- tran:
				mPropagated.Incr(1)
			case <-r.closeChan:
				return
			}
			continue
		}

		// Condition passed: mark the message as the stream terminator.
		tran.Payload.Get(0).Metadata().Set("benthos_read_until", "final")

		// If this transaction succeeds we shut down. A substitute response
		// channel is used so the ack can be observed here before being
		// relayed to the child input.
		tmpRes := make(chan types.Response)
		select {
		case r.transactions <- types.NewTransaction(tran.Payload, tmpRes):
			mFinalPropagated.Incr(1)
		case <-r.closeChan:
			return
		}

		var res types.Response
		select {
		case res, open = <-tmpRes:
			if !open {
				return
			}
			streamEnds := res.Error() == nil
			// Relay the downstream response to the child input before
			// (possibly) terminating.
			select {
			case tran.ResponseChan <- res:
				mFinalResSent.Incr(1)
			case <-r.closeChan:
				return
			}
			if streamEnds {
				mFinalResSucc.Incr(1)
				return
			}
			// The final message was nacked; keep running so it can be
			// redelivered.
			mFinalResErr.Incr(1)
		case <-r.closeChan:
			return
		}
	}
}
// TransactionChan returns a transactions channel for consuming messages from
// this input type. The channel is closed once the input shuts down.
func (r *ReadUntil) TransactionChan() <-chan types.Transaction {
	return r.transactions
}
// Connected returns a boolean indicating whether this input is currently
// connected to its target.
//
// The read loop sets r.wrapped to nil once the child input closes itself,
// which previously caused a nil-pointer panic here; report not-connected in
// that case instead. Note the field is written by the loop goroutine, so
// this is a best-effort snapshot.
func (r *ReadUntil) Connected() bool {
	if r.wrapped == nil {
		return false
	}
	return r.wrapped.Connected()
}
// CloseAsync shuts down the ReadUntil input and stops processing requests.
// It is safe to call multiple times: the CAS guarantees closeChan is closed
// exactly once.
func (r *ReadUntil) CloseAsync() {
	if atomic.CompareAndSwapInt32(&r.running, 1, 0) {
		close(r.closeChan)
	}
}
// WaitForClose blocks until the ReadUntil input has closed down, returning
// types.ErrTimeout if the timeout elapses first.
func (r *ReadUntil) WaitForClose(timeout time.Duration) error {
	select {
	case <-time.After(timeout):
		return types.ErrTimeout
	case <-r.closedChan:
		return nil
	}
}
//------------------------------------------------------------------------------
|
<reponame>kariminf/KSimpleNLG
/*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* The Original Code is "Simplenlg".
*
* The Initial Developer of the Original Code is <NAME>, <NAME> and <NAME>.
* Portions created by <NAME>, <NAME> and <NAME> are Copyright (C) 2010-11 The University of Aberdeen. All Rights Reserved.
*
* Contributor(s): <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
*/
package simplenlg.syntax;
import java.util.List;
import simplenlg.features.DiscourseFunction;
import simplenlg.features.Feature;
import simplenlg.features.InternalFeature;
import simplenlg.features.LexicalFeature;
import simplenlg.framework.InflectedWordElement;
import simplenlg.framework.LexicalCategory;
import simplenlg.framework.ListElement;
import simplenlg.framework.NLGElement;
import simplenlg.framework.PhraseElement;
import simplenlg.framework.WordElement;
import simplenlg.phrasespec.NPPhraseSpec;
/**
* Abstract class for the NounPhrase syntax helper.
* Based on English NounPhrase syntax helper.
*
* @author vaudrypl
*/
public abstract class AbstractNounPhraseHelper {
/**
* The main method for realising noun phrases.
*
* @param phrase
* the <code>PhraseElement</code> to be realised.
* @return the realised <code>NLGElement</code>.
*/
public ListElement realise(PhraseElement phrase) {
ListElement realisedElement = null;
if (phrase != null
&& !phrase.getFeatureAsBoolean(Feature.ELIDED)) {
// vaudrypl added phrase argument to ListElement constructor
// to copy all features from the PhraseElement
realisedElement = new ListElement(phrase);
// Creates the appropriate pronoun if the noun phrase
// is pronominal.
if (phrase.getFeatureAsBoolean(Feature.PRONOMINAL)) {
realisedElement.addComponent(createPronoun(phrase));
} else {
realiseSpecifier(phrase, realisedElement);
realisePreModifiers(phrase, realisedElement);
realiseHeadNoun(phrase, realisedElement);
phrase.getPhraseHelper().realiseList( realisedElement,
phrase.getFeatureAsElementList(InternalFeature.COMPLEMENTS ),
DiscourseFunction.COMPLEMENT);
phrase.getPhraseHelper().realiseList( realisedElement,
phrase.getPostModifiers(), DiscourseFunction.POST_MODIFIER );
}
}
return realisedElement;
}
/**
* Realises the head noun of the noun phrase.
*
* @param phrase
* the <code>PhraseElement</code> representing this noun phrase.
* @param realisedElement
* the current realisation of the noun phrase.
*/
protected void realiseHeadNoun(PhraseElement phrase,
ListElement realisedElement) {
NLGElement headElement = phrase.getHead();
if (headElement != null) {
// vaudrypl changed : features are given to currentElement
// after realiseSyntax() instead of headElement before realiseSyntax()
NLGElement currentElement = headElement.realiseSyntax();
if (currentElement != null) {
currentElement.setFeature(LexicalFeature.GENDER, phrase
.getFeature(LexicalFeature.GENDER));
currentElement.setFeature(InternalFeature.ACRONYM, phrase
.getFeature(InternalFeature.ACRONYM));
currentElement.setFeature(Feature.NUMBER, phrase
.getFeature(Feature.NUMBER));
currentElement.setFeature(Feature.PERSON, phrase
.getFeature(Feature.PERSON));
currentElement.setFeature(Feature.POSSESSIVE, phrase
.getFeature(Feature.POSSESSIVE));
currentElement.setFeature(Feature.PASSIVE, phrase
.getFeature(Feature.PASSIVE));
currentElement.setFeature(InternalFeature.DISCOURSE_FUNCTION,
DiscourseFunction.SUBJECT);
realisedElement.addComponent(currentElement);
}
}
}
/**
* Realises the pre-modifiers of the noun phrase. Before being realised,
* pre-modifiers undergo some basic sorting based on adjective ordering.
*
* @param phrase
* the <code>PhraseElement</code> representing this noun phrase.
* @param realisedElement
* the current realisation of the noun phrase.
*/
protected void realisePreModifiers(PhraseElement phrase,
ListElement realisedElement) {
List<NLGElement> preModifiers = phrase.getPreModifiers();
if (phrase.getFeatureAsBoolean(Feature.ADJECTIVE_ORDERING)
.booleanValue()) {
preModifiers = sortNPPreModifiers(preModifiers);
}
phrase.getPhraseHelper().realiseList(realisedElement, preModifiers,
DiscourseFunction.PRE_MODIFIER);
}
/**
* Realises the specifier of the noun phrase.
*
* @param phrase
* the <code>PhraseElement</code> representing this noun phrase.
* @param realisedElement
* the current realisation of the noun phrase.
*/
protected void realiseSpecifier(PhraseElement phrase,
ListElement realisedElement) {
NLGElement specifierElement =
phrase.getFeatureAsElement(InternalFeature.SPECIFIER);
if (specifierElement != null
&& !phrase.getFeatureAsBoolean(InternalFeature.RAISED)) {
NLGElement currentElement = specifierElement.realiseSyntax();
// number feature given to currentElement instead of specifierElement
// by vaudrypl
if (!specifierElement.isA(LexicalCategory.PRONOUN)) {
currentElement.setFeature(Feature.NUMBER,
phrase.getFeature(Feature.NUMBER));
}
if (currentElement != null) {
currentElement.setFeature(InternalFeature.DISCOURSE_FUNCTION,
DiscourseFunction.SPECIFIER);
realisedElement.addComponent(currentElement);
}
}
}
/**
* Sorts the list of premodifiers for this noun phrase.
* The default implementation returns the argument unchanged.
* It should be kept this way if premodifiers sorting doesn't apply
* to a particular language, like French, but overridden if it does,
* like in English.
*
* @param originalModifiers
* the original listing of the premodifiers.
* @return the sorted <code>List</code> of premodifiers.
*
* @author vaudrypl
*/
protected List<NLGElement> sortNPPreModifiers(List<NLGElement> originalModifiers) {
return originalModifiers;
}
/**
* Retrieves the correct representation of the word from the element. This
* method will find the <code>WordElement</code>, if it exists, for the
* given phrase or inflected word.
*
* @param element
* the <code>NLGElement</code> from which the head is required.
* @return the <code>WordElement</code>
*/
protected WordElement getHeadWordElement(NLGElement element) {
WordElement head = null;
if (element instanceof WordElement)
head = (WordElement) element;
else if (element instanceof InflectedWordElement) {
head = (WordElement) element.getFeature(InternalFeature.BASE_WORD);
} else if (element instanceof PhraseElement) {
head = getHeadWordElement(((PhraseElement) element).getHead());
}
return head;
}
/**
* Creates the appropriate pronoun for the noun phrase.
*
* @param phrase
* the <code>PhraseElement</code> representing this noun phrase.
* @return the <code>NLGElement</code> representing the pronominal.
*
* @author vaudrypl
*/
abstract protected NLGElement createPronoun(PhraseElement phrase);
/**
* Add a modifier to a noun phrase. Use heuristics to decide where it goes.
*
* @param nounPhrase
* @param modifier
*
*/
abstract public void addModifier(NPPhraseSpec nounPhrase, Object modifier);
} |
def extract_words(string, words):
    """Return the words that contain `string` as a substring, preserving order.

    :param string: substring to search for
    :param words: iterable of candidate words
    :return: list of words in which `string` occurs
    """
    return [word for word in words if string in word]
# Demo: collect and print every word containing the substring 'ing'.
string = 'ing'
words = ["listening", "seeing", "singing"]
result = extract_words(string, words)
print(result)
#!/bin/bash
# Create a new empty-password testnet wallet via the veronite CLI.
# $1 - filename for the generated wallet.
# Fix: quote "$1"/"$wallet_name" so names with spaces/globs don't word-split.
function create_wallet {
  wallet_name="$1"
  # 'echo 0' answers the CLI's interactive prompt; --restore-height=1 skips a chain rescan.
  echo 0 | veronite-wallet-cli --testnet --trusted-daemon --daemon-address localhost:15248 --generate-new-wallet "$wallet_name" --password "" --restore-height=1
}
# Generate six numbered test wallets (wallet_m left disabled below).
create_wallet wallet_01.bin
create_wallet wallet_02.bin
create_wallet wallet_03.bin
create_wallet wallet_04.bin
create_wallet wallet_05.bin
create_wallet wallet_06.bin
# create_wallet wallet_m
|
// Gruntfile: compiles Sass, minifies CSS/HTML, precompiles Handlebars
// templates, concatenates JS, then starts the app's web server and watches
// for changes. Several minify/compress steps are kept but disabled below.
module.exports = function(grunt) {
  var
    path = require("path");
  grunt.initConfig({
    // Settings consumed by the custom "webServer" task registered below.
    httpServer: {
      wwwRoot: "app/www",
      jsRoot: "app/www/js",
      cssRoot: "app/www/css",
      mediaRoot: "app/www/media",
      imageRoot: "app/www/i",
      libsRoot: "app/www/libs",
      port: 8080,
      callback: function() {
        grunt.log.writeln("web server started on port " + this.port);
      }
    },
    mongoServer: {
      host: "localhost",
      port: 27017,
      dbName: "cms"
    },
    loggerConfig: {
      transports: {
        console: { level: "debug"},
        file: { level: "debug"}
        //mongodb: { level: "debug", db: "mongodb://localhost/logs"}
        //papertrail: {host: "logs2.papertrailapp.com", port: "43476", level: "debug"}
      }
    },
    // Precompile the htmlmin-minified templates; template names are the
    // file basenames with the ".min.hbs" suffix stripped.
    handlebars: {
      compile: {
        options: {
          namespace: "templates",
          processName: function(filePath) {
            return path.basename(filePath, ".min.hbs");
          },
          processPartialName: function(filePath) {
            return path.basename(filePath, ".min.hbs");
          }
        },
        files: {
          "app/templates.js": "app/templates-min/**/*.min.hbs"
        }
      }
    },
    // Minify raw templates into app/templates-min/ before precompilation.
    htmlmin: {
      templates: {
        options: {
          removeComments: true,
          collapseWhitespace: true
        },
        expand: true,
        cwd: 'app/templates',
        src: '*.hbs',
        dest: 'app/templates-min/',
        ext: ".min.hbs"
      }
    },
    sass: {
      main: {
        options: {
          sourcemap: "none"
        },
        files: {
          "app/www/css/site.css": "app/sass/site.scss"
        }
      }
    },
    // Bundle vendor + site CSS into a single minified stylesheet.
    cssmin: {
      main: {
        options: {
          keepSpecialComments: 0,
          sourceMap: false
        },
        files: {
          "app/www/css/site.min.css": [
            'app/www/libs/bootstrap/dist/css/bootstrap.css',
            'app/www/libs/bootstrap/dist/css/bootstrap-theme.css',
            'app/www/css/site.css',
          ]
        }
      }
    },
    // "combine" only concatenates (compress/mangle off); load order matters:
    // vendor libs first, then templates, models, views, routers, app entry.
    uglify: {
      combine: {
        options: {
          compress: false,
          beautify: {
            beautify: true,
            indent_level: 2,
            comments: true
          },
          mangle: false,
        },
        files: {
          "app/www/js/site.js": [
            "app/www/libs/jquery/dist/jquery.js",
            "app/www/libs/bootstrap/dist/js/bootstrap.js",
            "app/www/libs/underscore/underscore.js",
            "app/www/libs/backbone/backbone.js",
            "app/www/libs/handlebars/handlebars.js",
            "app/www/libs/localforage/dist/localforage.js",
            'app/www/js/templates.js',
            'app/www/js/models/pages_model.js',
            'app/www/js/models/donations_model.js',
            'app/www/js/views/**/*.js',
            'app/www/js/routers/**/*.js',
            'app/www/js/app.js'
          ]
        }
      }
      /* minify: {
        options: {
          compress: {
            drop_debugger: true,
            unsafe: true,
            drop_console: true
          },
          beautify: false,
          mangle: {},
          screwIE8: true
        },
        files: {
          "app/www/js/site.min.js": "app/www/js/site.js"
        }
      } */
    },
    /* compress: {
      js: {
        options: {
          mode: 'gzip'
        },
        files: {
          "app/www/js/site.min.gz.js": "app/www/js/site.min.js"
        }
      },
      css: {
        options: {
          mode: 'gzip'
        },
        files: {
          "app/www/css/site.min.gz.css": "app/www/css/site.min.css"
        }
      }
    }, */
    // Rebuild templates/CSS on change; JS watch currently has no task wired.
    watch: {
      templates: {
        files: ["app/templates/**/*.hbs"],
        tasks: ["htmlmin", "handlebars"],
        options: {
          spawn: false
        }
      },
      css: {
        files: "app/sass/**/*.scss",
        tasks: ["sass"]
        //tasks: ["sass","cssmin","compress:css"]
      },
      js: {
        files: ["app/www/js/**/*.js", "!app/www/js/*.min.js"],
        //tasks: ["uglify","compress:js"]
      }
    }
  });
  grunt.loadNpmTasks("grunt-contrib-watch");
  grunt.loadNpmTasks("grunt-contrib-handlebars");
  grunt.loadNpmTasks("grunt-contrib-htmlmin");
  grunt.loadNpmTasks("grunt-contrib-cssmin");
  grunt.loadNpmTasks("grunt-contrib-sass");
  grunt.loadNpmTasks("grunt-contrib-uglify");
  // grunt.loadNpmTasks("grunt-contrib-compress");
  // Custom task: wires config sections above into the app's HTTP server.
  grunt.registerTask("webServer", "Start web server", function() {
    var
      httpServer = require("./app/http-server"),
      app = require("./app/app"),
      config = {
        webSockets: require("./app/web-sockets"),
        httpServer: grunt.config("httpServer"),
        mongoServer: grunt.config("mongoServer"),
        loggerConfig: grunt.config("loggerConfig")
      };
    // not needed because we are running watch task
    //this.async();
    config.app = app(config);
    httpServer(config);
  });
  //grunt.registerTask("default", ["webServer"]);
  grunt.registerTask("default", ["sass","cssmin","htmlmin","handlebars","uglify","webServer","watch"]);
};
|
<gh_stars>0
import React from 'react';
import { render, within, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import '@testing-library/jest-dom';
import Add7Dice from "../Add7Dice";
// Tests for the Add7Dice stat-rolling component: stats start at 8 and seven
// dice rolls are assigned to them, capped at 18 per stat.
describe('Add7Dice component', () => {
  // Deterministic gateway stub: always "rolls" [1,2,3,4,5,6,1].
  function mockGateway() {
    return {
      rollStats: () => {
        return [[1], [2], [3], [4], [5], [6], [1]]
      }
    }
  }
  it('displays the expected initial state', () => {
    render(<Add7Dice />);
    expect(screen.getByRole('button', { name: /Roll Stats/ })).toBeInTheDocument();
    expect(screen.getByText(/All stats start at 8, and 7 dice are added/)).toBeInTheDocument();
    // noinspection DuplicatedCode
    expect(screen.getByText(/Selected Stat: , Selected Roll:/)).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /STR/ })).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /DEX/ })).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /CON/ })).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /INT/ })).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /WIS/ })).toBeInTheDocument();
    expect(screen.getByRole('button', { name: /CHR/ })).toBeInTheDocument();
    // Assign/Reset stay disabled until a roll exists / an assignment is made.
    expect(screen.getByRole('button', { name: /Assign/ })).toBeDisabled();
    expect(screen.getByRole('button', { name: /Reset/ })).toBeDisabled();
    expect(screen.getByRole('button', { name: /Save Stats/ })).toBeInTheDocument();
  });
  it('displays an alert if Save Stats is clicked before rolling stats', () => {
    render(<Add7Dice />);
    const alertMock = jest.spyOn(window, 'alert').mockImplementation();
    userEvent.click(screen.getByRole('button', { name: /Save Stats/ }));
    expect(alertMock).toHaveBeenCalledTimes(1);
    expect(alertMock).toHaveBeenCalledWith('must roll stats and assign all to save');
  });
  it('displays an alert if Save Stats is clicked before assigning all rolls', async () => {
    render(<Add7Dice gateway={ mockGateway() } />);
    const alertMock = jest.spyOn(window, 'alert').mockImplementation();
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    await waitFor(() => expect(screen.getByText(/Selected Stat:/)).toBeInTheDocument());
    userEvent.click(screen.getByRole('button', { name: /Save Stats/ }));
    expect(alertMock).toHaveBeenCalledTimes(1);
    expect(alertMock).toHaveBeenCalledWith('must assign all rolls to save');
  });
  it('displays the expected output after rolling stats', async () => {
    render(<Add7Dice gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    // Mocked rolls [1,2,3,4,5,6,1] render as 7 buttons, with "1" twice.
    await waitFor(() => expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button')).toHaveLength(7));
    expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })).toHaveLength(2);
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ })).toBeInTheDocument();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ })).toBeInTheDocument();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /4/ })).toBeInTheDocument();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ })).toBeInTheDocument();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ })).toBeInTheDocument();
  });
  it('allows selecting and deselecting rolls and stats', async () => {
    render(<Add7Dice gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    await waitFor(() => expect(screen.getByText(/Selected Stat: , Selected Roll:/)).toBeInTheDocument());
    // First click selects; second click on the same button deselects.
    userEvent.click(screen.getByRole('button', { name: /INT/ }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ }));
    expect(screen.getByText(/Selected Stat: INT, Selected Roll: 2/)).toBeInTheDocument();
    userEvent.click(screen.getByRole('button', { name: /INT/ }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ }));
    expect(screen.getByText(/Selected Stat: , Selected Roll:/)).toBeInTheDocument()
  });
  it('enables Assign button after selecting a stat and roll', async () => {
    render(<Add7Dice gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    await waitFor(() => expect(screen.getByRole('button', { name: /Assign/ })).toBeDisabled());
    userEvent.click(screen.getByRole('button', { name: /WIS/ }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ }));
    expect(screen.getByRole('button', { name: /Assign/ })).toBeEnabled();
  });
  it('enables Reset button and disables assigned roll button after assigning a roll to a stat', async () => {
    render(<Add7Dice gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    await waitFor(() => userEvent.click(screen.getByRole('button', { name: 'CHR' })));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(screen.getByRole('button', { name: /Reset/ })).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ })).toBeDisabled();
  });
  it('prevents a stat from exceeding 18 when assigning a roll and deselects offending roll', async () => {
    render(<Add7Dice selectedChar={ testChar } gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    await waitFor(() => expect(screen.getByText(/Selected Stat: , Selected Roll:/)).toBeInTheDocument());
    // Exercise the 18-cap behavior for every stat via the shared helper below.
    preventStatFromExceeding18('STR');
    preventStatFromExceeding18('DEX');
    preventStatFromExceeding18('CON');
    preventStatFromExceeding18('INT');
    preventStatFromExceeding18('WIS');
    preventStatFromExceeding18('CHR');
  });
  it('resets stat assignments as expected', async () => {
    render(<Add7Dice selectedChar={ testChar } gateway={ mockGateway() } />);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    // Assign all seven rolls, then verify Reset re-enables every roll button.
    await waitFor(() => userEvent.click(screen.getByRole('button', { name: 'STR' })))
    userEvent.click(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[0]);
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[0]).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'DEX' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ })).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'CON' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ })).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'INT' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /4/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /4/ })).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'WIS' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ })).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'CHR' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ })).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: 'STR' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[1]);
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[1]).toBeDisabled();
    userEvent.click(screen.getByRole('button', { name: /Reset/ }));
    expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[0]).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[1]).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ })).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ })).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /4/ })).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ })).toBeEnabled();
    expect(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ })).toBeEnabled();
  });
  it('updates the character as expected when clicking Save Stats', async () => {
    const updateFn = jest.fn();
    render(<Add7Dice selectedChar={ testChar } gateway={ mockGateway() } onUpdate={ updateFn }/>);
    userEvent.click(screen.getByRole('button', { name: /Roll Stats/ }));
    // Assign every roll, save, and verify onUpdate receives updatedChar.
    await waitFor(() => userEvent.click(screen.getByRole('button', { name: 'CHR' })));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[0]);
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'DEX' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /2/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'CON' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /3/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'INT' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /4/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'WIS' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'CHR' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ }));
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: 'STR' }));
    userEvent.click(within(screen.getByTestId('add7Rolls')).getAllByRole('button', { name: /1/ })[1]);
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    userEvent.click(screen.getByRole('button', { name: /Save Stats/ }));
    expect(updateFn).toHaveBeenCalledTimes(1);
    expect(updateFn).toHaveBeenCalledWith(updatedChar);
  });
  // Shared helper: assigning 6 then 5 to `stat` would push it past 18; the
  // component must alert, refuse the assignment, and deselect the roll.
  const preventStatFromExceeding18 = (stat) => {
    const alertMock = jest.spyOn(window, 'alert').mockImplementation();
    const selectStatOnly = new RegExp(`Selected Stat: ${stat}, Selected Roll:`);
    const selectStatAnd6 = new RegExp(`Selected Stat: ${stat}, Selected Roll: 6`);
    const selectStatAnd5 = new RegExp(`Selected Stat: ${stat}, Selected Roll: 5`);
    userEvent.click(screen.getByRole('button', { name: `${stat}` }));
    expect(screen.getByText(selectStatOnly)).toBeInTheDocument();
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /6/ }));
    expect(screen.getByText(selectStatAnd6)).toBeInTheDocument();
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(screen.getByText(selectStatOnly)).toBeInTheDocument();
    userEvent.click(within(screen.getByTestId('add7Rolls')).getByRole('button', { name: /5/ }));
    expect(screen.getByText(selectStatAnd5)).toBeInTheDocument();
    userEvent.click(screen.getByRole('button', { name: /Assign/ }));
    expect(alertMock).toHaveBeenCalledTimes(1);
    expect(alertMock).toHaveBeenCalledWith('Stat cannot exceed 18');
    expect(screen.getByText(selectStatOnly)).toBeInTheDocument();
    userEvent.click(screen.getByRole('button', { name: /Reset/ }));
    alertMock.mockReset();
  };
  // Fixture: character before stat assignment (all stats at the base of 8).
  const testChar = {
    id: 1,
    name: '<NAME>',
    completionStep: 1,
    str: 8,
    dex: 8,
    con: 8,
    int: 8,
    wis: 8,
    chr: 8
  };
  // Fixture: same character after the mocked rolls are applied (base 8 + rolls).
  const updatedChar = {
    id: 1,
    name: '<NAME>',
    completionStep: 2,
    str: 9,
    dex: 10,
    con: 11,
    int: 12,
    wis: 13,
    chr: 15
  };
});
|
<filename>core/src/main/java/com/javatest/javassist/Point.java
package com.javatest.javassist;
// Empty marker class; presumably a target for Javassist bytecode-generation
// tests (per the package name) — TODO confirm against the test code.
public class Point {
}
|
#!/usr/bin/env bash
# Install dotfiles, submodule-provided tools, and (optionally) VSCodium config.
# NOTE(review): $MAC, $WSL, $WIN_USER, $DO_UPDATE, has_arg and run_cmd come
# from helper_scripts/local-helpers.sh — confirm against that script.
cd "$(dirname "$0")" || exit
# shellcheck disable=SC1091
source helper_scripts/local-helpers.sh
# Plain files
FILES_DIR=system_files
WSL_FILES_DIR=wsl
install -m 644 $FILES_DIR/zshrc.zsh ~/.zshrc
install -m 644 $FILES_DIR/bashrc.sh ~/.bashrc
install -m 644 $FILES_DIR/bash_profile.sh ~/.bash_profile
install -m 644 $FILES_DIR/bash_aliases.sh ~/.bash_aliases
install -m 644 $FILES_DIR/bash_functions.sh ~/.bash_functions
mkdir -p ~/.config
install -m 644 $FILES_DIR/starship.toml ~/.config/starship.toml
# On macOS, suppress the "last login" banner.
! $MAC || touch ~/.hushlogin
install -m 644 $FILES_DIR/dircolors.sh ~/.dircolors
install -m 644 $FILES_DIR/vimrc ~/.vimrc
mkdir -p ~/.config/nvim
install -m 644 $FILES_DIR/init.vim ~/.config/nvim/init.vim
install -m 644 $FILES_DIR/xinitrc ~/.xinitrc
install -m 644 $FILES_DIR/Xmodmap ~/.Xmodmap
install -m 644 $FILES_DIR/tmux.conf ~/.tmux.conf
install -m 644 $FILES_DIR/gerrit_functions.sh ~/.gerrit_functions.sh
install -m 644 $FILES_DIR/gitconfig ~/.gitconfig
install -m 644 $FILES_DIR/gitignore_global ~/.gitignore_global
install -m 644 $FILES_DIR/unibeautifyrc.json ~/.unibeautifyrc.json
# WSL files: copy Windows-side git config with CRLF line endings.
if $WSL; then
  install -m 644 $WSL_FILES_DIR/bashrc_wsl.sh ~/.bashrc_wsl
  unix2dos -n $FILES_DIR/gitconfig_windows "$(wslpath 'C:/ProgramData/Git/config')" 2>/dev/null
  unix2dos -n $FILES_DIR/gitignore_global "$(wslpath 'C:/ProgramData/Git/gitignore_global')" \
    2>/dev/null
fi
# Submodules files: (re)fetch when requested or when submodules/ is empty.
if has_arg "submodules" || [[ 0 == $(find submodules/ -type f | wc -l) ]]; then
  git submodule init
  git submodule update --init --force --remote
fi
mkdir -p ~/bin
# fzf installer prompts: yes to keybindings and completion, no to rc update.
printf 'y\ny\nn\n' | ./submodules/fzf/install &> /dev/null
install -m 755 submodules/diff-so-fancy/diff-so-fancy ~/bin/diff-so-fancy
cp -r submodules/diff-so-fancy/lib ~/bin/
install -m 755 submodules/git-log-compact/git-log-compact \
  ~/bin/git-log-compact
install -m 755 submodules/tldr/tldr ~/bin/tldr
if $DO_UPDATE; then
  pushd submodules/autojump > /dev/null || true
  ./install.py > /dev/null
  popd > /dev/null || true
fi
install -m 644 submodules/rails_completion/rails.bash ~/.rails.bash
install -m 644 submodules/forgit/forgit.plugin.sh ~/.forgit.plugin.sh
install -m 755 submodules/git-heatmap/git-heatmap ~/bin/
# VSCodium: sync settings/keybindings two-way (newer copy wins) and track
# the installed extension list.
if has_arg "codium" && [[ $(which codium) ]]; then
  if $MAC; then
    VSC_CONF_DIR=~/Library/Application\ Support/VSCodium/User
  else
    VSC_CONF_DIR=~/.config/VSCodium/User
  fi
  RUN_VSC=codium
  function INST_FILE() { install -m 644 "$@"; }
  function GET_FILE() { install -m 644 "$@"; }
  if $WSL; then
    # Windows-side VSCodium: convert line endings in both directions.
    VSC_CONF_DIR="/mnt/c/Users/$WIN_USER/AppData/Roaming/VSCodium/User"
    RUN_VSC='run_cmd codium'
    function INST_FILE() { unix2dos -n "$1" "$2" 2>/dev/null; }
    function GET_FILE() { dos2unix -n "$1" "$2" 2>/dev/null; chmod 644 "$2"; }
  fi
  mkdir -p "$VSC_CONF_DIR"
  for FILE in keybindings.json settings.json; do
    if [[ ! -f "$VSC_CONF_DIR"/$FILE ]]; then
      INST_FILE $FILES_DIR/VSCodium/$FILE "$VSC_CONF_DIR"/$FILE
    elif [[ "" != "$(diff --strip-trailing-cr "$VSC_CONF_DIR"/$FILE \
      $FILES_DIR/VSCodium/$FILE)" ]]
    then
      # Files differ: keep whichever has the newer mtime.
      if [[ $(stat -c %Y "$VSC_CONF_DIR"/$FILE) < \
        $(stat -c %Y system_files/VSCodium/$FILE) ]]
      then
        INST_FILE $FILES_DIR/VSCodium/$FILE "$VSC_CONF_DIR"/$FILE
      else
        GET_FILE "$VSC_CONF_DIR"/$FILE $FILES_DIR/VSCodium/$FILE
        echo "Local VSCodium $FILE settings newer than tracked. Settings copied here."
      fi
    fi
  done
  if ! $WSL; then
    EXTENSIONS=$($RUN_VSC --list-extensions | grep -v simple-vim | sed 's/\r//g')
    if [[ "$EXTENSIONS" != "$(cat $FILES_DIR/VSCodium/extensions.txt)" ]]
    then
      echo "$EXTENSIONS" > $FILES_DIR/VSCodium/extensions.txt
      echo "Local VSCodium extensions different than tracked. List updated here."
      echo "Determine desired list of extensions and run"
      echo "  cat system_files/VSCodium/extensions.txt | "
      echo "  xargs -n 1 -I {} bash -c \"$RUN_VSC --install-extension \\\$1\" _ {}"
    fi
  fi
fi
|
def update_axis(axis_index, value, axis_dict):
    """Store `value` under `axis_index` in `axis_dict`; warn if the index is unknown."""
    if axis_index not in axis_dict:
        print(f"Axis index {axis_index} does not exist in the axis dictionary")
        return
    axis_dict[axis_index] = value
def update_button(button_index, button_dict):
    """Toggle the boolean state stored under `button_index`; warn if the index is unknown."""
    if button_index not in button_dict:
        print(f"Button index {button_index} does not exist in the button dictionary")
        return
    button_dict[button_index] = not button_dict[button_index]
#!/bin/bash -x
#
# Generated - do not edit!
#
# NetBeans packaging step: copies the built binary into a staging layout
# (ue18statistik/bin/) and tars it into dist/<conf>/<platform>/package/.
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux
CND_CONF=Release
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/ue_18_statistik
OUTPUT_BASENAME=ue_18_statistik
PACKAGE_TOP_DIR=ue18statistik/
# Functions
# Abort the whole script if the previous command failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/ue18statistik/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/ue18statistik.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/ue18statistik.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
|
import os
import socket
class Config:
    """Base configuration shared by all environments."""
    SERVICE_NAME = 'The_Manual'
    RUN_SETTING = {
        'threaded': True
    }
    MYSQL_SETTING = {
        'database': SERVICE_NAME,
        'user': os.getenv('MYSQL_USERNAME', 'root'),
        'password': os.getenv('MYSQL_PW', '')
    }
    # -> Normally the database instance would live elsewhere, so ProductionConfig
    # would hold the remote (real) database settings and DevConfig the local ones;
    # but in our case the database runs on the same instance as the server, so the
    # local MySQL connection settings are managed globally here.
    SECRET_KEY = os.getenv('SECRET_KEY', '85c145a16bd6f6e1f3e104ca78c6a102')
class DevConfig(Config):
    """Local development configuration: debug server on localhost:5000."""
    HOST = 'localhost'
    PORT = 5000
    DEBUG = True
    # Merge host/port/debug into a copy of the base RUN_SETTING (base is not mutated).
    RUN_SETTING = dict(Config.RUN_SETTING, **{
        'host': HOST,
        'port': PORT,
        'debug': DEBUG
    })
class ProductionConfig(Config):
    """Production configuration: serve on port 80 with debug off."""
    # NOTE(review): resolved once at import time; requires the hostname to be
    # resolvable or the import will raise — confirm deployment environment.
    HOST = socket.gethostbyname(socket.gethostname())
    PORT = 80
    DEBUG = False
    # Merge host/port/debug into a copy of the base RUN_SETTING (base is not mutated).
    RUN_SETTING = dict(Config.RUN_SETTING, **{
        'host': HOST,
        'port': PORT,
        'debug': DEBUG
    })
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package controller;
import com.opencsv.bean.BeanVerifier;
import com.opencsv.bean.CsvToBean;
import com.opencsv.bean.CsvToBeanBuilder;
import com.opencsv.bean.HeaderColumnNameMappingStrategy;
import com.opencsv.exceptions.CsvConstraintViolationException;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import model.NodeBIp;
import readgxportcsvdb.ReadGxportCsvDB;
/**
*
* @author Miguelangel
*/
/**
 * Reads NODEBIP.csv from the configured DB directory and filters its rows
 * while parsing.
 */
public class ReadNodeBIpCsv {
    /**
     * Returns the NodeB IP rows whose filename column contains the given RNC
     * name and whose NodeB id matches exactly.
     *
     * @param _rnc    RNC identifier that must appear in the row's filename column
     * @param node_id NodeB id the row must equal
     * @return matching rows; empty list if the CSV is missing or unreadable
     */
    public static List<NodeBIp> getNodeBDstip(String _rnc, int node_id) {
        Path myPath = Paths.get(ReadGxportCsvDB.getDb_dir() + "/NODEBIP.csv");
        List<NodeBIp> nodeB = new ArrayList<>();
        try (BufferedReader br = Files.newBufferedReader(myPath,
                StandardCharsets.UTF_8)) {
            HeaderColumnNameMappingStrategy<NodeBIp> strategy
                    = new HeaderColumnNameMappingStrategy<>();
            strategy.setType(NodeBIp.class);
            // Typed verifier (was a raw-typed lambda with an Object cast):
            // keep only beans for the requested RNC/NodeB pair.
            BeanVerifier<NodeBIp> beanVerifier = node ->
                    node.getFilename().contains(_rnc)
                    && node.getNodebid() == node_id;
            // Generic builder/bean replace the previous raw types, removing
            // unchecked-conversion warnings without changing behavior.
            CsvToBean<NodeBIp> csvToBean = new CsvToBeanBuilder<NodeBIp>(br)
                    .withType(NodeBIp.class)
                    .withMappingStrategy(strategy)
                    .withIgnoreLeadingWhiteSpace(true)
                    .withVerifier(beanVerifier)
                    .build();
            nodeB.addAll(csvToBean.parse());
        } catch (IOException ex) {
            System.out.println("Error general " + ex.getMessage());
        }
        return nodeB;
    }
}
|
package com.vmware.spring.workshop.dto.banking;
import javax.xml.bind.annotation.XmlRegistry;
/**
* @author lgoldstein
*/
@XmlRegistry
// JAXB object factory: provides no-arg creators for the banking DTO types so
// the JAXB runtime can instantiate them during unmarshalling.
public final class ObjectFactory {
    public ObjectFactory() {
        super();
    }
    /** @return a new, empty {@link BankDTO} */
    public BankDTO createBankDTO () {
        return new BankDTO();
    }
    /** @return a new, empty {@link BankDTOList} */
    public BankDTOList createBankDTOList () {
        return new BankDTOList();
    }
    /** @return a new, empty {@link BranchDTO} */
    public BranchDTO createBranchDTO () {
        return new BranchDTO();
    }
    /** @return a new, empty {@link BranchDTOList} */
    public BranchDTOList createBranchDTOList () {
        return new BranchDTOList();
    }
    /** @return a new, empty {@link AccountDTO} */
    public AccountDTO createAccountDTO () {
        return new AccountDTO();
    }
}
|
package res
import "math"
// sdomUtil holds the working state for semidominator computation over a Graph.
type sdomUtil struct {
	// sdom[n] is the semidominator computed for node n; -1 while unknown.
	sdom []int
	// reverse preorder of the nodes
	preOrdered []int
}
// swap exchanges the elements at indices i and j of c.
// Uses Go's parallel assignment instead of a temporary variable.
func swap(c []int, i, j int) {
	c[i], c[j] = c[j], c[i]
}
// partition places the pivot c[s] into its final position for a DESCENDING
// sort of c[s..e] (Hoare-style scans), applying every swap to the companion
// slice b as well so the two stay index-aligned. Returns the pivot's final
// index. The previous `if e-s == 1 { if c[s] > c[e] {} }` special case had an
// empty body (a no-op) and has been removed; the general scans already handle
// two-element ranges.
func partition(c []int, b []int, s, e int) int {
	t := c[s] // pivot value
	j := s
	k := e + 1
	for j < k && (j <= e && k >= s) {
		// scan right while elements are greater than the pivot
		for {
			j++
			if j > e || c[j] <= t {
				break
			}
		}
		// scan left while elements are smaller than the pivot
		for {
			k--
			if k < s || c[k] >= t {
				break
			}
		}
		if j < k {
			swap(c, j, k)
			swap(b, j, k)
		}
	}
	// move the pivot into its final slot (mirrored in b)
	swap(c, k, s)
	swap(b, k, s)
	return k
}
// qsort recursively quicksorts a[s..e] (order decided by partition), keeping
// the companion slice b permuted in lockstep.
func qsort(a []int, b []int, s, e int) {
	if s >= e {
		return
	}
	pivot := partition(a, b, s, e)
	qsort(a, b, s, pivot-1)
	qsort(a, b, pivot+1, e)
}
// sort quicksorts a in place while a throwaway copy of b is permuted
// alongside; the caller's b is never modified.
// NOTE(review): because b is copied first, the permutation applied to the
// copy is discarded and the ordering is keyed by a's own values — confirm
// this is intended rather than sorting a by b's values.
func sort(a []int, b []int) {
	c := make([]int, len(b))
	copy(c, b)
	qsort(a, c, 0, len(b)-1)
}
// init sizes the work slices, marks every semidominator as unknown (-1)
// except the root's (node 0 is its own semidominator), and orders preOrdered
// by sorting it against a copy of the DFS numbering.
func (s *sdomUtil) init(g Graph, d *dfsUtil) {
	s.preOrdered = make([]int, len(d.dfnum))
	s.sdom = make([]int, len(d.dfnum))
	for i := range g {
		s.preOrdered[i] = i
		s.sdom[i] = -1
	}
	// the root is its own semidominator
	s.sdom[0] = 0
	sort(s.preOrdered, d.dfnum)
}
// evalNode computes, caches (in s.sdom[r]) and returns the semidominator of
// node r, recursing into predecessors with a higher DFS number. The root
// (node 0) is always its own semidominator.
func (s *sdomUtil) evalNode(g Graph, d *dfsUtil, r int) int {
	min := math.MaxInt32
	if r == 0 {
		return 0
	}
	// single predecessor: that predecessor is r's semidominator
	if len(g[r].Preds) == 1 {
		s.sdom[r] = g[r].Preds[0]
		return s.sdom[r]
	}
	for _, p := range g[r].Preds {
		if d.dfnum[p] > d.dfnum[r] {
			// predecessor comes after r in DFS order: use its (possibly
			// recursively computed) semidominator as the candidate
			if s.sdom[p] == -1 {
				s.sdom[p] = s.evalNode(g, d, p)
			}
			if min > s.sdom[p] {
				min = s.sdom[p]
			}
			continue
		} else if p == 0 {
			// if root is predecessor, root is sdom
			min = 0
			break
		} else if min > p {
			// earlier predecessor: the node itself is the candidate
			min = p
		}
	}
	s.sdom[r] = min
	return min
}
// computeSdom evaluates every node in preOrdered order, skipping nodes whose
// semidominator was already filled in by an earlier recursive evaluation.
func (s *sdomUtil) computeSdom(g Graph, d *dfsUtil) {
	for _, node := range s.preOrdered {
		if s.sdom[node] != -1 {
			continue
		}
		s.evalNode(g, d, node)
	}
}
// sdom computes and returns the semidominator of every node in g, given the
// DFS information in d.
func sdom(g Graph, d *dfsUtil) []int {
	util := &sdomUtil{}
	util.init(g, d)
	util.computeSdom(g, d)
	return util.sdom
}
|
<filename>Tweets_Display_Filter-app/TweetsDisplay/app/src/main/java/com/example/ee5453/tweetsdisplay/MainActivity.java
package com.example.ee5453.tweetsdisplay;
import android.annotation.SuppressLint;
import android.content.ContentResolver;
import android.content.Intent;
import android.database.ContentObserver;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.ListView;
import android.widget.SimpleCursorAdapter;
// Displays tweets from the com.ee5453.mytwitter content provider in a list,
// refreshing automatically when the provider's data changes.
public class MainActivity extends ActionBarActivity {
    // Cursor over the tweets provider, ordered newest-first; backs the adapter.
    Cursor cursor;
    ListView listView;
    SimpleCursorAdapter simpleCursorAdapter;
    // Provider columns to show, mapped onto the stock two-line row layout.
    String FROM[]={"user_name","status_text"};
    int TO[]={android.R.id.text1,android.R.id.text2};
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        listView=(ListView)findViewById(R.id.listView);
        ContentResolver resolver=getContentResolver();
        // Initial query: all tweets, newest first.
        cursor=resolver.query(Uri.parse("content://com.ee5453.mytwitter"),
                null,null,null,"created_at"+" DESC");
        // Re-query and swap the adapter's cursor whenever the provider changes.
        ContentObserver observer = new ContentObserver(new Handler()) {
            @Override
            public void onChange(boolean selfChange) {
                super.onChange(selfChange);
                Cursor tweets;
                tweets=getContentResolver().query(Uri.parse("content://com.ee5453.mytwitter"),
                        null,null,null,"created_at"+" DESC");
                simpleCursorAdapter.changeCursor(tweets);
            }
            @SuppressLint("NewApi")
            @Override
            public void onChange(boolean selfChange, Uri uri) {
                super.onChange(selfChange, uri);
                Cursor tweets;
                tweets=getContentResolver().query(Uri.parse("content://com.ee5453.mytwitter"),
                        null,null,null,"created_at"+" DESC");
                simpleCursorAdapter.changeCursor(tweets);
            }
        };
        resolver.registerContentObserver(Uri.parse("content://com.ee5453.mytwitter"),true,observer);
        simpleCursorAdapter=new SimpleCursorAdapter(getApplicationContext(),
                android.R.layout.two_line_list_item,cursor,FROM,TO);
        listView.setAdapter(simpleCursorAdapter);}
    /*ContentResolver resolver=getContentResolver();
    cursor=resolver.query(ContactsContract.Contacts.CONTENT_URI,null,null,null,null);
    while(cursor.moveToNext()){
        cursor.getString()
    }*/
    // Launches the filter-criteria screen (wired to a button in the layout).
    public void filtercriteria(View view){
        Intent intent= new Intent(this,FilterCriteria.class );
        // NOTE(review): this Button lookup is unused — candidate for removal.
        Button button = (Button) findViewById(R.id.FilterButton);
        startActivity(intent);
    }
    /* ContentResolver resolver = getApplicationContext().getContentResolver();
    cursor=resolver.query(Uri.parse("content://com.ee5453.mytwitter"),
            null,null,null,"created_at"+" DESC");
    while(cursor.moveToNext()){
        String name=cursor.getString(cursor.getColumnIndex("user_name"));
        String status=cursor.getString(cursor.getColumnIndex("status_text"));
        tv.append(String.format("%s\n%s\n",name,status));
    }*/
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
#!/usr/bin/env expect
# Drives `openssl req` interactively to generate a self-signed RSA-4096
# certificate/key pair into crypt/, answering each subject-field prompt.
spawn openssl req -nodes -x509 -newkey rsa:4096 -keyout crypt/key.pem -out crypt/cert.pem
expect -re "Country Name .*:"
send -- "GB\n"
expect -re "State or Province Name .*:"
send -- "West Midlands\n"
expect -re "Locality Name .*:"
send -- "Birmingham\n"
expect -re "Organization Name .*:"
send -- "Banana Town\n"
expect -re "Organizational Unit Name .*:"
send -- "41464e4f4d7b314d5f6333727431463134384c595f38346e344e34737d\n"
expect -re "Common Name .*:"
send -- "wt.ctf\n"
expect -re "Email Address .*:"
send -- "monkey@wt.ctf\n"
# Hand control back to the user for any remaining interaction.
interact
<filename>client/test/domain-settings.test.js<gh_stars>0
// Copyright (c) 2017-2022 Uber Technologies Inc.
//
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
describe('Domain Settings', () => {
  // Expected <dt> labels and matching <dd> values rendered from the
  // domain description API response.
  const EXPECTED_PROPERTY_NAMES = [
    'description',
    'owner',
    'Global?',
    'Retention Period',
    'Emit Metrics',
    'History Archival',
    'Visibility Archival',
    'Failover Version',
    'clusters',
  ];
  const EXPECTED_PROPERTY_VALUES = [
    'A cool domain',
    '<EMAIL>',
    'No',
    '21 days',
    'Yes',
    'Enabled',
    'Disabled',
    '0',
    'ci-test-cluster (active)',
  ];

  // Boot a scenario on the ci-test settings page and wait for the
  // settings section to render. Returns the section element + scenario.
  async function domainConfigTest(mochaTest, desc) {
    const [testEl, scenario] = new Scenario(mochaTest)
      .withDomain('ci-test')
      .startingAt('/domains/ci-test/settings')
      .withFeatureFlags()
      .withEmptyNewsFeed()
      .withDomainDescription('ci-test', desc)
      .go();
    const configEl = await testEl.waitUntilExists('section.domain-settings');
    return [configEl, scenario];
  }

  // Named function (not arrow) so mocha's `this.test` is available.
  it('should show properties in a readable form from the domain description API', async function test() {
    const [configEl] = await domainConfigTest(this.test);
    await configEl.waitUntilExists('dl.details dt');
    configEl.should.have.descendant('header h3').with.text('ci-test');
    configEl.textNodes('dl.details dt').should.deep.equal(EXPECTED_PROPERTY_NAMES);
    configEl.textNodes('dl.details dd').should.deep.equal(EXPECTED_PROPERTY_VALUES);
  });
});
|
<reponame>dfuentes/collectord
package main
import (
"log"
)
// Event is the unit of data moved through the pipeline: string headers
// plus an opaque byte payload.
type Event struct {
	Headers map[string]string
	Body    []byte
}

// Channel buffers events between sources (producers) and sinks (consumers).
type Channel interface {
	// Supporting Sources
	AddEvent(Event) error
	AddEvents([]Event) error
	// Supporting Sinks
	// GetOldest/GetAll return an int alongside the events — presumably a
	// batch/ack id consumed by ConfirmGet; confirm against implementations.
	GetOldest(int) (int, []Event, error)
	GetAll() (int, []Event, error)
	ConfirmGet(int) error
	Start() error
	ReloadConfig(config ComponentSettings) bool
}

// Sink consumes events from a Channel.
type Sink interface {
	SetChannel(Channel) error
	Start() error
	ReloadConfig(config ComponentSettings) bool
}

// Source produces events into a Channel.
type Source interface {
	SetChannel(Channel) error
	Start() error
	ReloadConfig(config ComponentSettings) bool
}
// NewEvent returns an empty Event with a non-nil header map and a
// zero-length (but allocated) body.
func NewEvent() Event {
	return Event{
		Headers: map[string]string{},
		Body:    []byte{},
	}
}
// Global source registry: maps a source type name to its constructor.
// Populated via RegisterSource (typically from init functions) and
// consulted by NewSource.
var registeredSources = make(map[string]func(ComponentSettings) Source)

// RegisterSource makes a source constructor available under the given name.
func RegisterSource(name string, constructor func(ComponentSettings) Source) {
	registeredSources[name] = constructor
}

// NewSource builds the source registered under name, configured with config.
// The process exits (log.Fatalf) when no source was registered for name.
func NewSource(name string, config ComponentSettings) Source {
	constructor, ok := registeredSources[name]
	if !ok {
		log.Fatalf("No source registered for name [%s]", name)
	}
	return constructor(config)
}
// Global channel registry: maps a channel type name to its constructor.
var registeredChannels = make(map[string]func(ComponentSettings) Channel)

// RegisterChannel makes a channel constructor available under the given name.
func RegisterChannel(name string, constructor func(ComponentSettings) Channel) {
	registeredChannels[name] = constructor
}

// NewChannel builds the channel registered under name, configured with config.
// The process exits (log.Fatalf) when no channel was registered for name.
func NewChannel(name string, config ComponentSettings) Channel {
	constructor, ok := registeredChannels[name]
	if !ok {
		log.Fatalf("No channel registered for name [%s]", name)
	}
	return constructor(config)
}
// Global sink registry: maps a sink type name to its constructor.
var registeredSinks = make(map[string]func(ComponentSettings) Sink)

// RegisterSink makes a sink constructor available under the given name.
func RegisterSink(name string, constructor func(ComponentSettings) Sink) {
	registeredSinks[name] = constructor
}

// NewSink builds the sink registered under name, configured with config.
// The process exits (log.Fatalf) when no sink was registered for name.
func NewSink(name string, config ComponentSettings) Sink {
	constructor, ok := registeredSinks[name]
	if !ok {
		log.Fatalf("No sink registered for name [%s]", name)
	}
	return constructor(config)
}
|
#!/usr/bin/env zsh
# Build the musl release binary, then shrink it with strip + UPX.
# Fail fast: otherwise strip/upx would run against a stale or missing
# binary when the build fails.
set -e
BIN=target/x86_64-unknown-linux-musl/release/amdfand
cargo build --release
strip "$BIN"
upx --best --lzma "$BIN"
|
#!/bin/bash
# Environment: conda env + CUDA/cuDNN/gcc modules for StyleGAN2-ADA training.
source /data/$USER/conda/etc/profile.d/conda.sh
conda activate py37
module load CUDA/11.0
module load cuDNN/8.0.3/CUDA-11.0
module load gcc/8.3.0

# ! set up path, resolution etc...
outputdir=/data/duongdb/FH_OCT_08172021
resolution=256
# ! tf dataset
imagedata=$outputdir/Classify/Tf256RmFold3+EyePos+FH
# ! resume ?
resume=ffhq$resolution

# ! train (long command split across lines for readability; identical args)
cd /data/duongdb/stylegan2-ada-EyeOct
python3 train_with_labels.py \
  --data=$imagedata \
  --gpus=2 \
  --target=0.8 \
  --aug=ada \
  --outdir=$outputdir/Stylegan2 \
  --resume=$resume \
  --cfg=paper$resolution \
  --snap=10 \
  --oversample_prob=0 \
  --mix_labels=0 \
  --metrics=fid350_full \
  --kimg 3000 \
  --split_label_emb_at 4
|
You can use a for loop with range()'s step parameter to print every other element of a list. For example, for a list lst of length n:
for i in range(0, n, 2):
    print(lst[i])
Equivalently, the slice lst[::2] yields the same elements.
#! /bin/bash
# Create a 1 GiB ext4 disk image ("pen") named after $1, mount it, and seed
# it from ./template. Must run under sudo: mount/mke2fs need root, and
# $SUDO_USER is used to hand ownership back to the invoking user.
set -e
if [ "$#" -eq 0 ]; then
    echo "You must specify the pen to create"
    exit 1
fi
# Change to the folder this script is in
cd "$(dirname "$0")"
# Quote everything: a pen name with spaces would otherwise word-split.
diskFile="$1.img"
diskDir="$1"
touch "$diskFile"
mkdir -p "$diskDir"   # -p: do not fail when the mount point already exists
truncate -s 1G "$diskFile"
mke2fs -t ext4 -F "$diskFile"
mount "$diskFile" "$diskDir"
cp -r ./template/* "./$diskDir/"
#change ownership to current user so it can access them
echo "Changing ownership: chown -R $SUDO_USER:$SUDO_USER ./$diskDir"
chown -R "$SUDO_USER:$SUDO_USER" "./$diskDir"
# docker run -d -it --name=$diskDir --rm -v ${PWD}/$diskDir:/golem/work golem-slate bash -C ./init.sh
#!/usr/bin/env bash
# Generate rawframe and video file lists for the HMDB51 dataset.
# Bug fix: the original shebang was "#! /usr/bin/bash env", which runs bash
# with "env" as the script to execute; the conventional env-lookup form is
# "#!/usr/bin/env bash".
set -e
cd ../../
PYTHONPATH=. python data_tools/build_file_list.py hmdb51 data/hmdb51/rawframes/ --level 2 --format rawframes
echo "Filelist for rawframes generated."
PYTHONPATH=. python data_tools/build_file_list.py hmdb51 data/hmdb51/videos/ --level 2 --format videos
echo "Filelist for videos generated."
cd data_tools/hmdb51/
/**
* Copyright 2012 <NAME>. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. <NAME> licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.afspq.model;
import java.io.File;
import java.util.ArrayList;
import com.bah.bahdit.main.plugins.fulltextindex.data.SearchResults;
import com.bah.bahdit.main.search.Search;
/**
 * Accumulates image search results as an HTML fragment, plus metadata
 * (elapsed time, result count) for display by the search front end.
 */
public class ImageResults {

  /** The last query (String keyword or File image), set by getResults. */
  private Object query;
  /** Accumulated HTML; note getResults APPENDS across calls. */
  private String resultStr;
  private long timeElapsed;
  private int numberOfResults = 0;

  public ImageResults() {
    resultStr = "";
  }

  /**
   * Returns the display form of the query: the portion after the last '/'
   * of the keyword (String) or the file's absolute path (File); empty
   * string for any other type.
   */
  public String getQuery(){
    if (query instanceof String) {
      String q = (String)query;
      return q.substring(q.lastIndexOf("/") + 1);
    }
    else if (query instanceof File) {
      File f = (File)query;
      String fs = f.getAbsolutePath();
      return fs.substring(fs.lastIndexOf("/") + 1);
    }
    else
      return "";
  }

  /** Search time in the units reported by SearchResults.getTime(). */
  public long getTimeElapsed() {
    return timeElapsed;
  }

  public void setTimeElapsed(long timeElapsed) {
    this.timeElapsed = timeElapsed;
  }

  /** The accumulated HTML fragment. */
  public String getResult() {
    return resultStr;
  }

  public int getNumResults() {
    return numberOfResults;
  }

  /**
   * Runs the search and appends the rendered result HTML to this object.
   *
   * @param query       keyword (String) or image (File) to search for
   * @param search      search backend to query
   * @param similar     1 to run a similar-image search, 0 for a normal one
   * @param dragAndDrop true when the query came from drag-and-drop (hides
   *                    the "Search similar images" link)
   * @return this, for chaining
   */
  public ImageResults getResults(Object query, Search search, int similar, boolean dragAndDrop) {
    // Rank , URL, title
    if (query instanceof String) {
      query = ((String) query).toLowerCase();
    }
    this.query = query;
    SearchResults searchResults = (similar == 1) ? search.search(query, 1, 0) : search.search(query, 0, 0);
    ArrayList<String> results = searchResults.getResults();
    timeElapsed = searchResults.getTime();
    numberOfResults = searchResults.getNumResults();
    // Build onto the existing HTML with a StringBuilder instead of
    // repeated String concatenation in the result loop.
    StringBuilder html = new StringBuilder(resultStr);
    if (results == null || results.isEmpty()) {
      html.append("No results found for <b>").append(this.getQuery()).append("</b>.<br><br>")
          .append("Suggestions: <br>")
          .append("<ul>")
          .append("<li>Make sure all words are spelled correctly.</li>")
          .append("<li>Try different keywords.</li>")
          .append("<li>Try more general keywords.</li>")
          .append("<li>Try fewer keywords.</li>")
          .append("</ul>");
    } else {
      if (!dragAndDrop && similar == 0) {
        html.append("<a href='ProcessQuery?imgQuery=").append(this.getQuery())
            .append("&searchType=image&similar=1'>Search similar images</a><br>");
      }
      html.append("Showing ").append(numberOfResults)
          .append(" results for <b>").append(this.getQuery())
          .append("</b>.<br><br>");
      for (String result : results) {
        String[] tmp = result.split("\\[ \\]");
        String location = tmp[0];
        String url = tmp[1];
        // Bug fix: the anchor was closed with "<a>" instead of "</a>",
        // producing unbalanced HTML for every result.
        // NOTE(review): target='blank' (no underscore) names a window
        // "blank" rather than opening a fresh tab — confirm intent.
        html.append("<a href='").append(url)
            .append("' target='blank' style='padding:10px;'><img src='")
            .append(location)
            .append("' style='max-height:100px;max-width:100px;' title='").append(url).append("'/></a>");
      }
    }
    resultStr = html.toString();
    return this;
  }
}
|
#!/bin/sh
# Release helper: bump the version, regenerate the changelog, tag and push.
# Requires: gobump, git-chglog, perl, go.
set -eu
BASEDIR=$(dirname "$0")/..
# Releases may only be cut from master.
if [ "$(git symbolic-ref --short HEAD)" != "master" ]; then
	echo "branch is not master"
	exit 1
fi
# Refuse to release with uncommitted changes. (Brace group instead of a
# subshell so the exit status handling is explicit.)
git diff --quiet || { echo "diff exists"; exit 1; }
VERSION=$(gobump show -r "${BASEDIR}/version")
echo "current version: ${VERSION}"
# Bug fix: 'read -p' is a bashism and fails under POSIX /bin/sh (dash);
# print the prompt explicitly instead.
printf "input next version: "
read -r NEXT_VERSION
gobump set "${NEXT_VERSION}" -w "${BASEDIR}/version"
git-chglog --next-tag "v${NEXT_VERSION}" -o "${BASEDIR}/CHANGELOG.md"
# Extract the third field of `go version` output, e.g. "go1.21.0".
GO_VERSION=$(go version | perl -waln -e 'print $F[2]')
perl -pi -e "s/go_version='.+'/go_version='${GO_VERSION}'/" README.md
perl -pi -e "s/@v${VERSION}/@v${NEXT_VERSION}/" README.md
printf "release v%s? (y/N): " "${NEXT_VERSION}"
read -r yn
case "$yn" in
[yY]*) ;;
*) echo abort; exit 1;;
esac
git commit -am "release v${NEXT_VERSION}"
git tag "v${NEXT_VERSION}"
git push && git push --tags
|
package main
import "testing"
import "time"
// TestDurationUnmarshalJSON exercises Duration's JSON decoding: a quoted
// Go-style duration string must parse; a bare number must be rejected.
func TestDurationUnmarshalJSON(t *testing.T) {
	var d Duration

	// Well-formed duration string.
	if err := d.UnmarshalJSON([]byte(`"24h"`)); err != nil {
		t.Error(`"24h" should not produce error.`)
	}
	if d != Duration(24*time.Hour) {
		t.Error(`Cannot convert "24h" to Duration...`)
	}

	// A number without a unit is not a valid duration.
	if err := d.UnmarshalJSON([]byte(`"1"`)); err == nil {
		t.Error("Should not be able to convert 1 to Duration...")
	}
}
|
<filename>pooch/processors.py
# pylint: disable=line-too-long
"""
Post-processing hooks
"""
import os
import bz2
import gzip
import lzma
import shutil
from zipfile import ZipFile
from tarfile import TarFile
from .utils import get_logger
class ExtractorProcessor:  # pylint: disable=too-few-public-methods
    """
    Base class for extractions from compressed archives.

    Subclasses can be used with :meth:`pooch.Pooch.fetch` and
    :func:`pooch.retrieve` to unpack a downloaded archive into a folder in
    the local data store. :meth:`~pooch.Pooch.fetch` will then return the
    names of the extracted files instead of the archive.

    Parameters
    ----------
    members : list or None
        If None, all files in the archive are unpacked. Otherwise, *members*
        must be a list of file names to unpack from the archive; only those
        files are extracted.
    """

    # String appended to the unpacked archive folder name.
    # Must be overridden by subclasses.
    suffix = None

    def __init__(self, members=None):
        self.members = members

    def __call__(self, fname, action, pooch):
        """
        Extract all (or the selected) files from the given archive.

        Parameters
        ----------
        fname : str
            Full path of the archive in local storage.
        action : str
            Action reported by :meth:`pooch.Pooch.fetch` /
            :func:`pooch.retrieve`: ``"download"`` (new file),
            ``"update"`` (re-downloaded), or ``"fetch"`` (unchanged).
        pooch : :class:`pooch.Pooch`
            The calling :class:`pooch.Pooch` instance (unused here).

        Returns
        -------
        fnames : list of str
            Full paths of every file in the extracted archive folder.
        """
        if self.suffix is None:
            raise NotImplementedError(
                "Derived classes must define the 'suffix' attribute."
            )
        extract_dir = fname + self.suffix
        # Re-extract when the archive is new/updated, or when the output
        # folder is missing entirely.
        archive_changed = action in ("update", "download")
        if archive_changed or not os.path.exists(extract_dir):
            if not os.path.exists(extract_dir):
                os.makedirs(extract_dir)
            self._extract_file(fname, extract_dir)
        # Collect every file (recursing into subdirectories) under the
        # extraction folder.
        extracted = []
        for root, _, files in os.walk(extract_dir):
            for name in files:
                extracted.append(os.path.join(root, name))
        return extracted

    def _extract_file(self, fname, extract_dir):
        """
        Unpack the archive *fname* into *extract_dir*.
        MUST BE IMPLEMENTED BY CHILD CLASSES.
        """
        raise NotImplementedError
class Unzip(ExtractorProcessor):  # pylint: disable=too-few-public-methods
    """
    Processor that unpacks a zip archive and returns a list of all files.
    Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to unzip a
    downloaded data file into a folder in the local data store. The
    method/function will return a list with the names of the unzipped files
    instead of the zip archive.
    The output folder is ``{fname}.unzip``.
    Parameters
    ----------
    members : list or None
        If None, will unpack all files in the zip archive. Otherwise, *members*
        must be a list of file names to unpack from the archive. Only these
        files will be unpacked.
    """

    suffix = ".unzip"

    def _extract_file(self, fname, extract_dir):
        """
        Extract all (or the selected *members*) of the zip archive *fname*
        into *extract_dir*.
        """
        with ZipFile(fname, "r") as zip_file:
            if self.members is None:
                get_logger().info(
                    "Unzipping contents of '%s' to '%s'", fname, extract_dir
                )
                # Unpack all files from the archive into our new folder
                zip_file.extractall(path=extract_dir)
            else:
                for member in self.members:
                    get_logger().info(
                        "Extracting '%s' from '%s' to '%s'", member, fname, extract_dir
                    )
                    # Bug fix: members inside subdirectories (e.g.
                    # "folder/file.txt") previously failed because the
                    # parent directory under extract_dir was never created.
                    output_path = os.path.join(extract_dir, member)
                    os.makedirs(os.path.dirname(output_path), exist_ok=True)
                    # Extract the data file from within the archive
                    with zip_file.open(member) as data_file:
                        # Save it to our desired file name
                        with open(output_path, "wb") as output:
                            output.write(data_file.read())
class Untar(ExtractorProcessor):  # pylint: disable=too-few-public-methods
    """
    Processor that unpacks a tar archive and returns a list of all files.
    Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to untar a
    downloaded data file into a folder in the local data store. The
    method/function will return a list with the names of the extracted files
    instead of the archive.
    The output folder is ``{fname}.untar``.
    Parameters
    ----------
    members : list or None
        If None, will unpack all files in the archive. Otherwise, *members*
        must be a list of file names to unpack from the archive. Only these
        files will be unpacked.
    """

    suffix = ".untar"

    def _extract_file(self, fname, extract_dir):
        """
        Extract all (or the selected *members*) of the tar archive *fname*
        into *extract_dir*.
        """
        with TarFile.open(fname, "r") as tar_file:
            if self.members is None:
                get_logger().info(
                    "Untarring contents of '%s' to '%s'", fname, extract_dir
                )
                # Unpack all files from the archive into our new folder
                tar_file.extractall(path=extract_dir)
            else:
                for member in self.members:
                    get_logger().info(
                        "Extracting '%s' from '%s' to '%s'", member, fname, extract_dir
                    )
                    # Bug fix: members inside subdirectories (e.g.
                    # "folder/file.txt") previously failed because the
                    # parent directory under extract_dir was never created.
                    output_path = os.path.join(extract_dir, member)
                    os.makedirs(os.path.dirname(output_path), exist_ok=True)
                    # Extract the data file from within the archive
                    # Python 2.7: extractfile doesn't return a context manager
                    data_file = tar_file.extractfile(member)
                    try:
                        # Save it to our desired file name
                        with open(output_path, "wb") as output:
                            output.write(data_file.read())
                    finally:
                        data_file.close()
class Decompress:  # pylint: disable=too-few-public-methods
    """
    Processor that decompress a file and returns the decompressed version.
    Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to decompress
    a downloaded data file so that it can be easily opened. Useful for data
    files that take a long time to decompress (exchanging disk space for
    speed).
    Supported decompression methods are LZMA (``.xz``), bzip2 (``.bz2``), and
    gzip (``.gz``).
    File names with the standard extensions (see above) can use
    ``method="auto"`` to automatically determine the compression method. This
    can be overwritten by setting the *method* argument.
    .. note::
        To unpack zip and tar archives with one or more files, use
        :class:`pooch.Unzip` and :class:`pooch.Untar` instead.
    The output file is ``{fname}.decomp`` by default but it can be changed by
    setting the ``name`` parameter.
    .. warning::
        Passing in ``name`` can cause existing data to be lost! For example, if
        a file already exists with the specified name it will be overwritten
        with the new decompressed file content. **Use this option with
        caution.**
    Parameters
    ----------
    method : str
        Name of the compression method. Can be "auto", "lzma", "xz", "bzip2",
        or "gzip".
    name : None or str
        Defines the decompressed file name. The file name will be
        ``{fname}.decomp`` if ``None`` (default) or the given name otherwise.
        Note that the name should **not** include the full (or relative) path,
        it should be just the file name itself.
    """

    # Map of method name -> stdlib module ("auto" is resolved per file).
    modules = {"auto": None, "lzma": lzma, "xz": lzma, "gzip": gzip, "bzip2": bz2}
    # Map of recognized file extension -> method name, used by "auto".
    extensions = {".xz": "lzma", ".gz": "gzip", ".bz2": "bzip2"}

    def __init__(self, method="auto", name=None):
        self.method = method
        self.name = name

    def __call__(self, fname, action, pooch):
        """
        Decompress the given file.
        The output file will be either ``{fname}.decomp`` or the given *name*
        class attribute.
        Parameters
        ----------
        fname : str
            Full path of the compressed file in local storage.
        action : str
            Action reported by :meth:`pooch.Pooch.fetch` /
            :func:`pooch.retrieve`: ``"download"`` (new file),
            ``"update"`` (re-downloaded), or ``"fetch"`` (unchanged).
        pooch : :class:`pooch.Pooch`
            The instance of :class:`pooch.Pooch` that is calling this.
        Returns
        -------
        fname : str
            The full path to the decompressed file.
        """
        if self.name is not None:
            decompressed = os.path.join(os.path.dirname(fname), self.name)
        else:
            decompressed = fname + ".decomp"
        # Nothing to do when the source is unchanged and the output exists.
        if action not in ("update", "download") and os.path.exists(decompressed):
            return decompressed
        get_logger().info(
            "Decompressing '%s' to '%s' using method '%s'.",
            fname,
            decompressed,
            self.method,
        )
        module = self._compression_module(fname)
        with open(decompressed, "w+b") as output:
            with module.open(fname) as compressed:
                shutil.copyfileobj(compressed, output)
        return decompressed

    def _compression_module(self, fname):
        """
        Get the Python module compatible with fname and the chosen method.
        If the *method* attribute is "auto", will select a method based on the
        extension. If no recognized extension is in the file name, will raise a
        ValueError.
        """
        error_archives = "To unpack zip/tar archives, use pooch.Unzip/Untar instead."
        if self.method not in self.modules:
            message = (
                f"Invalid compression method '{self.method}'. "
                f"Must be one of '{list(self.modules.keys())}'."
            )
            if self.method in {"zip", "tar"}:
                message = " ".join([message, error_archives])
            raise ValueError(message)
        if self.method != "auto":
            return self.modules[self.method]
        # "auto": infer the method from the file extension.
        ext = os.path.splitext(fname)[-1]
        if ext not in self.extensions:
            message = (
                f"Unrecognized file extension '{ext}'. "
                f"Must be one of '{list(self.extensions.keys())}'."
            )
            if ext in {".zip", ".tar"}:
                message = " ".join([message, error_archives])
            raise ValueError(message)
        return self.modules[self.extensions[ext]]
|
<filename>randomizer/fates/model/processors/prep/FatesVerifier.java
package randomizer.fates.model.processors.prep;
import randomizer.common.enums.ChapterType;
import randomizer.common.structures.Chapter;
import randomizer.fates.singletons.FatesChapters;
import randomizer.fates.singletons.FatesFiles;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
/**
 * Verifies that an extracted Fates ROM directory contains every file the
 * randomizer needs, recording the verified file list and per-route status
 * in {@link FatesFiles}.
 */
public class FatesVerifier {
    private static final String JOIN_PATH = "/castle/castle_join.bin.lz";
    private static final String GAMEDATA_PATH = "/GameData/GameData.bin.lz";
    private static final String BEV_PATH = "/Scripts/bev";
    private static final String DISPOS_PATH = "/GameData/Dispos";
    private static final String BIRTHRIGHT_DISPOS_PATH = "/GameData/Dispos/A";
    private static final String CONQUEST_DISPOS_PATH = "/GameData/Dispos/B";
    private static final String REVELATION_DISPOS_PATH = "/GameData/Dispos/C";
    private static final String PERSON_PATH = "/GameData/Person";
    private static final String BIRTHRIGHT_PERSON_PATH = "/GameData/Person/A";
    private static final String CONQUEST_PERSON_PATH = "/GameData/Person/B";
    private static final String REVELATION_PERSON_PATH = "/GameData/Person/C";
    private static final String BIRTHRIGHT_SCRIPTS_PATH = "/Scripts/A";
    private static final String CONQUEST_SCRIPTS_PATH = "/Scripts/B";
    private static final String REVELATION_SCRIPTS_PATH = "/Scripts/C";

    /**
     * Verify that the given ROM directory contains every file necessary to
     * perform randomization.
     *
     * Chapter-independent files and all-route chapter files must all exist.
     * Route-specific files (Birthright/Conquest/Revelation) may be partially
     * missing: a route is marked verified only when every one of its files
     * exists, and verification fails outright only when a shared file is
     * missing or no route is complete.
     *
     * @param dir The ROM directory.
     * @param region The region layout that the ROM directory uses.
     * @return A boolean value indicating whether or not the ROM verified successfully.
     */
    public static boolean verify(File dir, String region) {
        if (dir == null)
            throw new IllegalArgumentException("Violation of precondition: " +
                    "verify. dir must not be null.");
        if (region == null)
            throw new IllegalArgumentException("Violation of precondition: " +
                    "verify. region must not be null.");

        // Verification trackers.
        List<File> verified = new ArrayList<>();
        List<File> failures = new ArrayList<>();
        List<File> routeFailures = new ArrayList<>();

        // Path-specific files, kept separate until we know the route is complete.
        List<File> birthright = new ArrayList<>();
        List<File> conquest = new ArrayList<>();
        List<File> revelation = new ArrayList<>();

        // Route flags: set when any file of that route is missing.
        boolean birthrightFlag = false;
        boolean conquestFlag = false;
        boolean revelationFlag = false;

        // Text folder locations depend on the ROM region.
        File[] textDirs = resolveTextDirs(dir, region);
        File mainText = textDirs[0];
        File birthrightText = textDirs[1];
        File conquestText = textDirs[2];
        File revelationText = textDirs[3];

        // Verify that chapter-independent files exist.
        check(new File(dir.getAbsolutePath() + GAMEDATA_PATH), verified, failures);
        check(new File(dir.getAbsolutePath() + JOIN_PATH), verified, failures);
        check(new File(dir.getAbsolutePath() + BEV_PATH), verified, failures);
        check(new File(mainText, "GMap.bin.lz"), verified, failures);
        File textGameData = new File(mainText, "GameData.bin.lz");
        if (textGameData.exists())
            NameMatcher.matchNames(textGameData); // Get names from IDs (not tracked in verified, as before).
        else
            failures.add(textGameData);

        // Check chapter specific files.
        for (Chapter c : FatesChapters.getInstance().getChapters()) {
            String cid = c.getCid();
            if (c.getType() == ChapterType.AllRoutes || c.getType() == ChapterType.Child
                    || c.getType() == ChapterType.Amiibo) { // Chapters available on all routes.
                check(new File(mainText, cid + ".bin.lz"), verified, failures);
                check(new File(dir.getAbsolutePath() + DISPOS_PATH, cid + ".bin.lz"), verified, failures);
                check(new File(dir.getAbsolutePath() + PERSON_PATH, cid + ".bin.lz"), verified, failures);
                check(new File(dir.getAbsolutePath() + "/Scripts", cid + ".cmb"), verified, failures);
            }
            else if (c.getType() == ChapterType.Birthright) {
                birthrightFlag |= checkRouteChapter(dir, birthrightText, BIRTHRIGHT_DISPOS_PATH,
                        BIRTHRIGHT_PERSON_PATH, BIRTHRIGHT_SCRIPTS_PATH, cid, birthright, routeFailures);
            }
            else if (c.getType() == ChapterType.Conquest) {
                conquestFlag |= checkRouteChapter(dir, conquestText, CONQUEST_DISPOS_PATH,
                        CONQUEST_PERSON_PATH, CONQUEST_SCRIPTS_PATH, cid, conquest, routeFailures);
            }
            else if (c.getType() == ChapterType.Revelation) {
                revelationFlag |= checkRouteChapter(dir, revelationText, REVELATION_DISPOS_PATH,
                        REVELATION_PERSON_PATH, REVELATION_SCRIPTS_PATH, cid, revelation, routeFailures);
            }
            // Terrain scripts are optional: tracked when present, never a failure.
            File terrain = new File(dir.getAbsolutePath() + "/Scripts", cid + "_Terrain.cmb");
            if (terrain.exists())
                verified.add(terrain);
        }

        // Run checks based off of failures and successes.
        if (birthrightFlag && conquestFlag && revelationFlag) { // No route verified completely.
            failures.addAll(routeFailures);
        }
        if (failures.size() > 0) {
            outputErrorLog(failures);
            return false;
        }
        if (!birthrightFlag) {
            verified.addAll(birthright);
            FatesFiles.getInstance().setBirthrightVerified(true);
        }
        if (!conquestFlag) {
            verified.addAll(conquest);
            FatesFiles.getInstance().setConquestVerified(true);
        }
        if (!revelationFlag) {
            verified.addAll(revelation);
            FatesFiles.getInstance().setRevelationVerified(true);
        }
        for (File f : routeFailures)
            System.out.println(f.getName());
        FatesFiles.getInstance().setOriginalFileList(verified);
        FatesFiles.getInstance().setRom(dir);
        return true;
    }

    /**
     * Resolve the four message-text folders (main, Birthright, Conquest,
     * Revelation) for the given region layout.
     *
     * @throws IllegalArgumentException for unknown regions.
     */
    private static File[] resolveTextDirs(File dir, String region) {
        String suffix;
        switch (region) {
            case "North America": suffix = "/@E"; break;
            case "Japan": suffix = ""; break; // Japanese ROMs have no language subfolder.
            case "Europe - English": suffix = "/@U"; break;
            case "Europe - Spanish": suffix = "/@S"; break;
            case "Europe - German": suffix = "/@G"; break;
            case "Europe - Italian": suffix = "/@I"; break;
            case "Europe - French": suffix = "/@F"; break;
            default:
                throw new IllegalArgumentException("Error : verify. Illegal region specified.");
        }
        String base = dir.getAbsolutePath();
        return new File[] {
                new File(base + "/m" + suffix),
                new File(base + "/m/A" + suffix),
                new File(base + "/m/B" + suffix),
                new File(base + "/m/C" + suffix)
        };
    }

    /**
     * Track a single file: add it to ok when it exists, to bad otherwise.
     *
     * @return true when the file exists.
     */
    private static boolean check(File file, List<File> ok, List<File> bad) {
        if (file.exists()) {
            ok.add(file);
            return true;
        }
        bad.add(file);
        return false;
    }

    /**
     * Check the four per-chapter files (text, dispo, person, script) of a
     * route-exclusive chapter.
     *
     * @return true when at least one of the files is missing.
     */
    private static boolean checkRouteChapter(File dir, File text, String disposPath,
                                             String personPath, String scriptsPath, String cid,
                                             List<File> routeFiles, List<File> routeFailures) {
        boolean missing = false;
        missing |= !check(new File(text, cid + ".bin.lz"), routeFiles, routeFailures);
        missing |= !check(new File(dir.getAbsolutePath() + disposPath, cid + ".bin.lz"), routeFiles, routeFailures);
        missing |= !check(new File(dir.getAbsolutePath() + personPath, cid + ".bin.lz"), routeFiles, routeFailures);
        missing |= !check(new File(dir.getAbsolutePath() + scriptsPath, cid + ".cmb"), routeFiles, routeFailures);
        return missing;
    }

    /**
     * Write the list of missing files to VerificationFailures.txt in the
     * current working directory. I/O errors are printed, not rethrown.
     */
    private static void outputErrorLog(List<File> failures) {
        if (failures == null)
            throw new IllegalArgumentException("Violation of precondition: " +
                    "outputErrorLog. failures must not be null.");
        List<String> out = new ArrayList<>();
        out.add("The following files failed to verify: ");
        for (File f : failures)
            out.add(f.getAbsolutePath());
        try {
            Files.write(Paths.get(System.getProperty("user.dir") + "/VerificationFailures.txt"), out);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
<gh_stars>1-10
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/google_client/version', __FILE__)

Gem::Specification.new do |gem|
  # Identity
  gem.name          = "google_client"
  gem.version       = GoogleClient::VERSION
  gem.authors       = ["juandebravo"]
  gem.email         = ["<EMAIL>"]
  gem.homepage      = "http://www.github.com/juandebravo/google_client"

  # Description
  gem.summary       = %q{Ease way to get access to Google API.}
  gem.description   = %q{This gem is a wrapper on top of the Google API that allows a developer to handle calendars, events, contacts on behalf of the user.}

  # Packaged contents (driven by git's file listing)
  gem.files         = `git ls-files`.split("\n")
  gem.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  gem.executables   = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  gem.require_paths = ["lib"]

  # Runtime and development dependencies
  gem.add_dependency("rest-client")
  gem.add_dependency("addressable")
  gem.add_development_dependency("rake")
  gem.add_development_dependency("rspec")
  gem.add_development_dependency("webmock")
end
|
-- Register the helper procedures in the README table so that selecting
-- from README documents the available tooling.
-- NOTE(review): assumes a README table with five columns exists elsewhere;
-- also, several procedures below (freqdist, create_index, utf8_unicode,
-- less20, info) are not listed here -- confirm whether that is intentional.
insert into README values
('memory', 'procedure', 'mem', 'call mem', 'get schema +memory usage'),
('process', 'procedure', 'ps', 'call ps', 'show current sql process'),
('process', 'procedure', 'freq', 'call freq(tablename,colname)', 'show frequency of a term');
-- mem: per-table size report for the current database -- row counts plus
-- data/index sizes in MB, ordered by schema/engine/table.
drop procedure if exists mem;
delimiter //
create procedure mem()
begin
select
table_schema,
ENGINE,
TABLE_NAME,
TABLE_ROWS,
-- concat(..., '') casts the rounded number to a string for display.
concat( round( TABLE_ROWS / ( 1000 *1000 ) , 2 ) , '' ) million,
concat( round( data_length / ( 1024 *1024 ) , 2 ) , 'M' ) data_MB,
concat( round( index_length / ( 1024 *1024 ) , 2 ) , 'M' ) index_MB,
TABLE_COLLATION
from
information_schema.TABLES
where
TABLE_SCHEMA = DATABASE()
order by
table_schema, engine, table_name;
end//
delimiter ;
-- alias for "call mem"
drop procedure if exists info;
delimiter //
create procedure info()
begin
call mem;
end//
delimiter ;
-- ps: show the active processes of the current database, excluding this
-- query's own processlist lookup.
drop procedure if exists ps;
delimiter //
create procedure ps()
begin
select * from information_schema.processlist
where
information_schema.processlist.DB=DATABASE() and
information_schema.processlist.INFO not like '%information_schema.processlist%';
end//
delimiter ;
-- freq: frequency distribution of colname in tablename (group-by count,
-- descending). WARNING: the statement is built by string concatenation of
-- identifiers -- SQL injection if ever exposed to untrusted callers; keep
-- this as a DBA-only helper.
drop procedure if exists freq;
delimiter //
create procedure freq( tablename varchar(100), colname varchar(100))
begin
select concat('select ', colname, ',' ,'count(*) as cnt', ' from ', tablename, ' group by ' , colname, ' order by cnt desc') into @sql_cnt;
-- Echo the generated statement before running it.
select @sql_cnt;
prepare stmt from @sql_cnt; execute stmt;
end//
delimiter ;
-- freqdist: like freq, but counts distinct values of coldistinct per group
-- of colname. Same caveat: identifiers concatenated into dynamic SQL --
-- injection risk if exposed to untrusted callers.
drop procedure if exists freqdist;
delimiter //
create procedure freqdist( tablename varchar(100), colname varchar(100), coldistinct varchar(100))
begin
select concat('select ', colname, ',' ,'count(distinct ', coldistinct,' ) as cnt', ' from ', tablename, ' group by ' , colname, ' order by cnt desc') into @sql_cnt;
-- Echo the generated statement before running it.
select @sql_cnt;
prepare stmt from @sql_cnt; execute stmt;
end//
delimiter ;
-- create_index: add an index over indexcols to tablename, then show the
-- table's indexes. Calls a `log` procedure that is defined elsewhere --
-- presumably an audit-log helper; confirm it exists before deploying.
-- Dynamic-SQL identifier concatenation: DBA-only helper (injection risk).
drop procedure if exists create_index;
delimiter //
create procedure create_index( tablename varchar(100), indexcols varchar(100) )
begin
call log( concat(tablename,':', indexcols), 'index begin');
select concat('alter table ', tablename, ' add index (', indexcols, ')') into @idx;
prepare stmt from @idx; execute stmt;
select concat('show index from ', tablename) into @show;
prepare stmt from @show; execute stmt;
call log( concat(tablename,':', indexcols), 'index done');
end//
delimiter ;
-- utf8_unicode: convert a table to utf8 / utf8_unicode_ci and record the
-- conversion via the external `log` procedure. Dynamic-SQL identifier
-- concatenation: DBA-only helper (injection risk).
drop procedure if exists utf8_unicode;
delimiter //
create procedure utf8_unicode( tablename varchar(100))
begin
select concat('alter table ', tablename, ' convert to CHARSET utf8 collate utf8_unicode_ci') into @idx;
prepare stmt from @idx; execute stmt;
call log(tablename, 'utf8_unicode_ci');
end//
delimiter ;
-- less20: preview the first 20 rows of a table. Dynamic-SQL identifier
-- concatenation: DBA-only helper (injection risk).
drop procedure if exists less20;
delimiter //
create procedure less20(tablename varchar(100))
begin
select concat('select * from ', tablename, ' limit 20') into @stmt;
prepare stmt from @stmt;
execute stmt;
end//
delimiter ;
|
import { NestFactory } from '@nestjs/core';
import { ValidationPipe } from '@nestjs/common';
import { AppModule } from './app.module';
import { EntityNotFoundInterceptor } from './interceptors/entity-not-found.interceptor';
import { UnauthorizedInterceptor } from './interceptors/unauthorized.interceptor';
import { DatabaseInterceptor } from './interceptors/database.interceptor';
import { ConflictInterceptor } from './interceptors/conflict.interceptor';
import { Transport } from '@nestjs/microservices';
/**
 * Application entry point: configures global validation and exception-mapping
 * interceptors, attaches the Kafka microservice transport, and starts the
 * HTTP server on port 4000.
 */
async function bootstrap() {
  const app = await NestFactory.create(AppModule);

  // Global validation: coerce payloads to DTO instances, strip unknown
  // properties, and reject requests that contain any.
  app.useGlobalPipes(
    new ValidationPipe({
      transform: true,
      whitelist: true,
      forbidNonWhitelisted: true,
    }),
  );

  // Exception-mapping interceptors, registered in the same order as before.
  app.useGlobalInterceptors(
    new ConflictInterceptor(),
    new EntityNotFoundInterceptor(),
    new UnauthorizedInterceptor(),
    new DatabaseInterceptor(),
  );

  // Kafka microservice transport.
  app.connectMicroservice({
    transport: Transport.KAFKA,
    options: {
      client: {
        brokers: ['kafka:9092'],
      },
    },
  });

  await app.startAllMicroservices();
  await app.listen(4000);
}
bootstrap();
|
/*-
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _ARM_STM_STM32F4_USART_H_
#define _ARM_STM_STM32F4_USART_H_
/*
 * Register offsets and bit definitions for the STM32F4 USART peripheral,
 * plus the minimal polled-I/O driver interface.
 *
 * NOTE(review): this header uses uint32_t but does not include <stdint.h>;
 * presumably a types header is included first by every consumer -- confirm.
 */
#define USART_SR 0x00 /* Status register */
#define USART_SR_RXNE (1 << 5) /* Read data register not empty */
#define USART_SR_TXE (1 << 7) /* Transmit data register empty */
#define USART_DR 0x04 /* Data register */
#define USART_BRR 0x08 /* Baud rate register */
#define USART_CR1 0x0C /* Control register 1 */
#define USART_CR1_UE (1 << 13) /* USART enable */
#define USART_CR1_RXNEIE (1 << 5) /* RXNE interrupt enable */
#define USART_CR1_TE (1 << 3) /* Transmitter enable */
#define USART_CR1_RE (1 << 2) /* Receiver enable */
#define USART_CR2 0x10 /* Control register 2 */
#define USART_CR3 0x14 /* Control register 3 */
#define USART_GTPR 0x18 /* Guard time and prescaler register */
/* Per-instance driver state: the peripheral's base address. */
struct stm32f4_usart_softc {
uint32_t base;
};
/* Initialize the USART at `base` for `baud_rate` given `cpu_freq`. */
int stm32f4_usart_init(struct stm32f4_usart_softc *sc, uint32_t base,
uint32_t cpu_freq, uint32_t baud_rate);
/* Transmit a single character (blocking). */
void stm32f4_usart_putc(struct stm32f4_usart_softc *sc, char c);
#endif /* !_ARM_STM_STM32F4_USART_H_ */
|
<gh_stars>1000+
/*jslint nomen: true */
// Coverage-instrumentation tests: each scenario instruments a small snippet
// (the `code` array is joined into a source file by the helper) and asserts
// the exact statement/line/branch/function hit counts plus the value left in
// the global `output` by running the snippet.
var helper = require('../helper'),
code,
verifier;
module.exports = {
"with a function expression that uses strict": {
setUp: function (cb) {
// "use strict" makes the Object.freeze violation throw, so the
// "fail" assignment (statement 5) is never executed.
code = [
'(function () {',
' "use strict";',
' var x = Object.freeze({ foo: 1 });',
' try {',
' x.foo = 2;',
' output = "fail";',
' } catch (ex) {',
' output = "pass";',
' }',
'}());'
];
verifier = helper.verifier(__filename, code);
cb();
},
"should cover one statement less": function (test) {
verifier.verify(test, [], "pass", {
statements: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 0, 6: 1 },
lines: { 1: 1, 3: 1, 4: 1, 5: 1, 6: 0, 8: 1 },
branches: {},
functions: { 1: 1}
});
test.done();
}
},
"with a function declaration that uses strict": {
setUp: function (cb) {
// Same expectation as above, but with a declared (hoisted) function.
code = [
'function foo() {',
' "use strict";',
' var x = Object.freeze({ foo: 1 });',
' try {',
' x.foo = 2;',
' output = "fail";',
' } catch (ex) {',
' output = "pass";',
' }',
'}',
'foo();'
];
verifier = helper.verifier(__filename, code);
cb();
},
"should cover one statement less": function (test) {
verifier.verify(test, [], "pass", {
statements: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 0, 6: 1, 7: 1 },
lines: { 1: 1, 3: 1, 4: 1, 5: 1, 6: 0, 8: 1, 11: 1 },
branches: {},
functions: { 1: 1}
});
test.done();
}
},
"with a function declaration that looks like strict but is not": {
setUp: function (cb) {
// The leading `1;` statement means "use strict" is no longer the
// directive prologue, so the assignment silently succeeds.
code = [
'function foo() {',
' 1;',
' "use strict";',
' var x = Object.freeze({ foo: 1 });',
' try {',
' x.foo = 2;',
' output = "fail";',
' } catch (ex) {',
' output = "pass";',
' }',
'}',
'foo();'
];
verifier = helper.verifier(__filename, code);
cb();
},
"should cover all statements as usual": function (test) {
verifier.verify(test, [], "fail", {
statements: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 0, 9: 1 },
lines: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 9: 0, 12: 1 },
branches: {},
functions: { 1: 1}
});
test.done();
}
},
"with a file-level strict declaration": {
setUp: function (cb) {
// Documents a known bug: instrumentation displaces the file-level
// "use strict" directive, so strict semantics do not apply.
code = [
' "use strict";',
' var x = Object.freeze({ foo: 1 });',
' try {',
' x.foo = 2;',
' output = "fail";',
' } catch (ex) {',
' output = "pass";',
' }'
];
verifier = helper.verifier(__filename, code);
cb();
},
"should not change behavor (this is a bug!)": function (test) {
verifier.verify(test, [], "fail", {
statements: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 0 },
lines: { 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 7: 0 },
branches: {},
functions: {}
});
test.done();
}
}
};
|
use chrono::{DateTime, Utc};
use serde::Deserialize;
/// Opaque URI of the group's charter document.
#[derive(Deserialize)]
struct DocumentUri;

/// Opaque URI of a person (the area director).
#[derive(Deserialize)]
struct PersonUri;

/// NOTE(review): `Group` references `GroupUri` for its `parent` field, but the
/// type was never declared; declared here so the module compiles. Confirm
/// against the real API schema.
#[derive(Deserialize)]
struct GroupUri;

/// Group categories matched in `Group::validate_state`.
/// The original declared this as an empty unit struct, which cannot carry the
/// `Public` / `Private` / `Secret` variants the `match` below requires.
#[derive(Deserialize, PartialEq)]
enum GroupTypeUri {
    Public,
    Private,
    Secret,
}

/// Lifecycle states compared with `==` in `Group::validate_state`, so the
/// type needs `PartialEq`.
#[derive(Deserialize, PartialEq)]
enum GroupStateUri {
    Active,
    Inactive,
}
/// Deserializes a timestamp field into a `DateTime<Utc>`.
///
/// The original code carried an invalid `#[derive(Deserialize)]` attribute on
/// a function (derives apply only to types) and had an empty body that never
/// produced the declared `Result`; delegate to chrono's serde support instead.
fn deserialize_time<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    serde::Deserialize::deserialize(deserializer)
}
// A working group record as fetched from the remote datatracker-style API.
// NOTE(review): `GroupUri` is not declared anywhere in this file and the
// struct lacks a Deserialize derive despite the custom `deserialize_time`
// helper above -- this module appears to be an unfinished scaffold; confirm
// intended schema before use.
struct Group {
description: String,
charter: DocumentUri,
ad: Option<PersonUri>,
time: DateTime<Utc>,
group_type: GroupTypeUri,
comments: String,
parent: GroupUri,
state: GroupStateUri,
}
impl Group {
// Returns true when the group's state is permitted for its type:
// Public -> Active only; Private -> Active or Inactive; Secret -> Inactive.
// NOTE(review): this matches GroupTypeUri/GroupStateUri as enums with
// variants and PartialEq, which the unit-struct declarations above do not
// provide -- the file does not compile as written; confirm intended types.
fn validate_state(&self) -> bool {
match &self.group_type {
GroupTypeUri::Public => self.state == GroupStateUri::Active,
GroupTypeUri::Private => {
self.state == GroupStateUri::Active || self.state == GroupStateUri::Inactive
}
GroupTypeUri::Secret => self.state == GroupStateUri::Inactive,
}
}
// Prints a human-readable summary of the group (currently description only).
fn display_group_info(&self) {
println!("Description: {}", self.description);
// Print other group information
}
}
// Entry point; intentionally left as a stub in this scaffold.
fn main() {
// Create and use Group instances
}
#!/bin/bash -x
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Java version
java -version
# Only run integration tests if needed
# RUN_INTEGRATION_TESTS defaults to "run them" unless explicitly set to the
# literal string "false". The `&& exit 0 || exit 1` normalizes Maven's exit
# status to exactly 0 or 1 for the CI runner.
if [ "$RUN_INTEGRATION_TESTS" != false ]; then
mvn test -B -P github-actions,integration-tests-only && exit 0 || exit 1
else
mvn test -B -P github-actions,no-integration-tests && exit 0 || exit 1
fi
|
import java.util.Stack;
public class Solution {
    /**
     * Returns true when every bracket in {@code s} is closed by its matching
     * bracket in the correct (LIFO) order; any non-opening character is
     * treated as a closer and must match the most recent unmatched opener.
     */
    public boolean isValid(String s){
        Stack<Character> expectedClosers = new Stack<Character>();
        for (int i = 0; i < s.length(); i++) {
            char current = s.charAt(i);
            switch (current) {
                case '[':
                    expectedClosers.push(']');
                    break;
                case '(':
                    expectedClosers.push(')');
                    break;
                case '{':
                    expectedClosers.push('}');
                    break;
                default:
                    // A closer with no pending opener, or a mismatched one,
                    // invalidates the string immediately.
                    if (expectedClosers.isEmpty() || expectedClosers.pop() != current) {
                        return false;
                    }
            }
        }
        // Valid only if every opener was matched.
        return expectedClosers.isEmpty();
    }
}
#!/store/1-stage1/protobusybox/bin/ash
#> FETCH 820d9724f020a3e69cb337893a0b63c2db161dadcb0e06fc11dc29eb1e84a32c
#> FROM https://ftp.gnu.org/gnu/binutils/binutils-2.37.tar.xz
# Bootstrap-stage build of a statically linked binutils using the stage-1
# tinycc toolchain and protobusybox shell.
# NOTE(review): with `set -u`, the bare `$NPROC` below aborts the script if
# the builder environment does not export NPROC -- presumably it always does;
# confirm.
set -uex
export PATH='/store/1-stage1/protobusybox/bin'
export PATH="$PATH:/store/1-stage1/tinycc/wrappers"
export PATH="$PATH:/store/2a0-static-gnumake/bin"
mkdir -p /tmp/2a1-static-binutils; cd /tmp/2a1-static-binutils
if [ -e /ccache/setup ]; then . /ccache/setup; fi
echo "### $0: unpacking binutils sources..."
tar --strip-components=1 -xf /downloads/binutils-2.37.tar.xz
echo "### $0: building static binutils..."
# Point autotools helper scripts at the protobusybox shell.
sed -i 's|/bin/sh|/store/1-stage1/protobusybox/bin/ash|' \
missing install-sh mkinstalldirs
export lt_cv_sys_max_cmd_len=32768
# see libtool's 74c8993c178a1386ea5e2363a01d919738402f30
sed -i 's/| \$NL2SP/| sort | $NL2SP/' ltmain.sh
ash configure \
CONFIG_SHELL=/store/1-stage1/protobusybox/bin/ash \
SHELL=/store/1-stage1/protobusybox/bin/ash \
CFLAGS='-D__LITTLE_ENDIAN__=1' \
--enable-deterministic-archives \
--host x86_64-linux --build x86_64-linux \
--prefix=/store/2a1-static-binutils
make -j $NPROC
echo "### $0: installing static binutils..."
make -j $NPROC install
import java.util.ArrayList;
/**
 * Mutable record of a player: display name, level label, recorded times,
 * and a flag tracking whether {@link #times} has been sorted.
 */
public class Player {
    String name;
    String level;
    ArrayList<Integer> times = new ArrayList<Integer>();
    boolean sorted = false;

    /** Sets the player's display name. */
    public void setName(String name) {
        this.name = name;
    }

    /** Sets the player's level label. */
    public void setLevel(String level) {
        this.level = level;
    }

    /** Replaces the list of recorded times. */
    public void setTimes(ArrayList<Integer> times) {
        this.times = times;
    }

    /** Marks whether the times list is currently sorted. */
    public void setSorted(boolean val) {
        this.sorted = val;
    }

    /** Returns true when the times list has been marked sorted. */
    public boolean isSorted() {
        return this.sorted;
    }

    /** A player renders as its name. */
    @Override
    public String toString() {
        return name;
    }
}
|
<gh_stars>0
#include <stdio.h>
int main(void)
{
int i=2;
int x =3;
while(1){
x = x+x;
break;
}
for(i = i+i; i<x; x++){
i = i+i;
}
printf("%d %d\n", x, i);
return 0;
}
|
#!/bin/sh
# Serve the Jekyll site in the current directory at http://127.0.0.1:4000,
# rebuilding on file changes and including drafts; the container is labeled
# "jekyll" and removed on exit.
docker run -it --rm --label=jekyll -p 127.0.0.1:4000:4000 --volume=$(pwd):/srv/jekyll jekyll/jekyll:pages jekyll serve --watch --drafts
|
# For each line of the input G-code ($1) that starts with X and contains an
# S word, emit a "Z0.<S-value>" move first and the X portion on the next
# line, then preview the rewritten file in CAMotics.
# Fixes: drop the useless `cat | sed` pipeline and quote "$1" so filenames
# with spaces work.
sed 's/^\(X.*\)S\(.*\)/Z0.\2\n\1/' "$1" > gcode_camotics.gcode
camotics gcode_camotics.gcode
|
<reponame>Agilicus/incubator-druid<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.testing.utils;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.StatusResponseHandler;
import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import org.apache.druid.testing.clients.AbstractQueryResourceTestClient;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import javax.annotation.Nullable;
import javax.ws.rs.core.MediaType;
import java.net.URL;
public class HttpUtil
{
  // Fix: the original logged under AbstractQueryResourceTestClient.class,
  // which misattributed every message emitted by this utility.
  private static final Logger LOG = new Logger(HttpUtil.class);
  private static final StatusResponseHandler RESPONSE_HANDLER = StatusResponseHandler.getInstance();
  // Auth config can take time to propagate; allow up to this many retries,
  // sleeping RETRY_SLEEP_MILLIS between attempts.
  private static final int MAX_RETRIES = 10;
  private static final long RETRY_SLEEP_MILLIS = 3000;

  /**
   * Issues {@code method} {@code url} with the given JSON body (may be null)
   * and retries until HTTP 200 OK is returned.
   */
  public static StatusResponseHolder makeRequest(HttpClient httpClient, HttpMethod method, String url, byte[] content)
  {
    return makeRequestWithExpectedStatus(
        httpClient,
        method,
        url,
        content,
        HttpResponseStatus.OK
    );
  }

  /**
   * Issues the request and retries (with a fixed sleep) until the response
   * status equals {@code expectedStatus}, throwing ISE after the retry
   * budget is exhausted. Checked failures are rethrown as RuntimeException.
   */
  public static StatusResponseHolder makeRequestWithExpectedStatus(
      HttpClient httpClient,
      HttpMethod method,
      String url,
      @Nullable byte[] content,
      HttpResponseStatus expectedStatus
  )
  {
    try {
      Request request = new Request(method, new URL(url));
      if (content != null) {
        request.setContent(MediaType.APPLICATION_JSON, content);
      }
      int retryCount = 0;
      StatusResponseHolder response;
      while (true) {
        response = httpClient.go(request, RESPONSE_HANDLER).get();
        if (response.getStatus().equals(expectedStatus)) {
          break;
        }
        String errMsg = StringUtils.format(
            "Error while making request to url[%s] status[%s] content[%s]",
            url,
            response.getStatus(),
            response.getContent()
        );
        // it can take time for the auth config to propagate, so we retry
        if (retryCount > MAX_RETRIES) {
          throw new ISE(errMsg);
        }
        LOG.error(errMsg);
        LOG.error("retrying in 3000ms, retryCount: " + retryCount);
        retryCount++;
        Thread.sleep(RETRY_SLEEP_MILLIS);
      }
      return response;
    }
    catch (InterruptedException e) {
      // Fix: restore the interrupt flag instead of silently swallowing it
      // inside the generic catch, so callers can observe the interruption.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  private HttpUtil()
  {
    // static utility class; not instantiable
  }
}
|
import React from 'react'
import BgImage from "../../images/bg/parallax5.jpg"
// Full-width parallax call-to-action band with a heading, filler copy, and a
// primary button, rendered over BgImage.
// NOTE(review): the heading text "Develope" is user-visible copy with a typo
// ("Develop"); left unchanged here because it is runtime output -- confirm
// with design before fixing.
const CallToAction3 = () => {
return (
<section className="pad80 parallax" style={{backgroundImage: `url(${BgImage})`, backgroundPosition: "100% 50%"}}>
<div className="container">
<div className="row">
<div className="col-md-12">
<div className="call-to-action text-center white">
<h3>Design. Develope. Dedicate</h3>
<p>Praesent sapien massa, convallis a pellentesque nec, egestas non nisi. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi. Praesent sapien massa, convallis a pellentesque nec, egestas non nisi.</p>
<a href="#" className="btn btn-primary">Buy This Now</a>
</div>
</div>
</div>
</div>
</section>
)
}
|
package com.lbs.server.exception
/** Thrown when no Luxmed username is stored for the given chat. The original message was truncated ("Luxmed username for chat with id N") and did not state the failure. */
case class UserNotFoundException(chatId: Long) extends Exception(s"Luxmed username for chat with id $chatId not found")
|
#!/bin/bash
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
#
#
# SOFTWARE HISTORY
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# Dec 05, 2013 #2593 rjpeter Fix getPidsOfMyRunningCaves
# Dec 05, 2013 #2590 dgilling Modified extendLibraryPath() to export a
# var if it's already been run.
# Jan 24, 2014 #2739 bsteffen Add method to log exit status of process.
# Jan 30, 2014 #2593 bclement extracted generic part of getPidsOfMyRunningCaves into forEachRunningCave
# added methods for reading max memory from .ini files
# fixes for INI files with spaces
# Feb 20, 2014 #2780 bclement added site type ini file check
#
# Mar 13 2014 #15348 kjohnson added function to remove logs
# Jun 20, 2014 #3245 bclement forEachRunningCave now accounts for child processes
# Jul 02, 2014 #3245 bclement account for memory override in vm arguments
# Jul 10, 2014 #3363 bclement fixed precedence order for ini file lookup
# Jul 11, 2014 #3371 bclement added killSpawn()
# Oct 13, 2014 #3675 bclement logExitStatus() waits for child to start and renames log with child PID
# Jul 23, 2015 ASM#13849 D. Friedman Use a unique Eclipse configuration directory
# Aug 03, 2015 #4694 dlovely Fixed path for log file cleanup
# Sep 16, 2015 #18041 lshi Purge CAVE logs after 30 days instead of 7
# Apr 20, 2016 #18910 lshi Change CAVE log purging to add check for find commands
# already running
# May 27, 2016 ASM#18971 dfriedman Fix local variable usage in deleteOldEclipseConfigurationDirs
# Aug 09, 2016 ASM#18911 D. Friedman Add minimum purge period of 24 hours. Use a lock file to prevent
# simultaneous purges. Allow override of days to keep.
# Jan 26,2017 #6092 randerso return exitCode so it can be propagated back to through the calling processes
# Apr 17, 2018 M. James Cleanup for containerization
########################
# Pull in retrieveAssociatedINI(); abort if the helper library is missing,
# since lookupINI() below depends on it.
source /awips2/cave/iniLookup.sh
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: unable to find and/or access /awips2/cave/iniLookup.sh."
exit 1
fi
# This script will be sourced by cave.sh.
export CAVE_INI_ARG=
# Unit multipliers used by the memory-parsing helpers below.
BYTES_IN_KB=1024
BYTES_IN_MB=1048576
BYTES_IN_GB=1073741824
# Looks up ini file first by component/perspective
# then by SITE_TYPE before falling back to cave.ini.
# Sets ini file cave argument string in $CAVE_INI_ARG.
# Returns 0 if component/perspective found in args, else 1.
function lookupINI()
{
# only check for component/perspective if arguments aren't empty
if [[ "${1}" != "" ]]; then
position=1
for arg in $@; do
if [ "${arg}" == "-component" ] ||
[ "${arg}" == "-perspective" ]; then
# Get The Next Argument.
position=$(( $position + 1 ))
nextArg=${!position}
# retrieveAssociatedINI (from iniLookup.sh) sets ASSOCIATED_INI
# on success.
retrieveAssociatedINI ${arg} "${nextArg}"
RC=$?
if [ ${RC} -eq 0 ]; then
export CAVE_INI_ARG="--launcher.ini /awips2/cave/${ASSOCIATED_INI}"
return 0
fi
fi
position=$(( $position + 1 ))
done
fi
# if ini wasn't found through component or perspective
if [[ -z $CAVE_INI_ARG ]]
then
# attempt to fall back to site type specific ini
siteTypeIni="/awips2/cave/${SITE_TYPE}.ini"
if [[ -e ${siteTypeIni} ]]
then
export CAVE_INI_ARG="--launcher.ini ${siteTypeIni}"
else
# cave.ini if all else fails
export CAVE_INI_ARG="--launcher.ini /awips2/cave/cave.ini"
fi
fi
return 1
}
# Prepends the CAVE Mesa library directory to LD_LIBRARY_PATH when called
# with -noX (software rendering); prefers lib64 over lib when both exist.
# NOTE(review): the file history says this should "export a var if it's
# already been run", but CALLED_EXTEND_LIB_PATH below is not exported --
# presumably the caller sources this script so a plain variable suffices;
# confirm.
function extendLibraryPath()
{
# Arguments:
#
# ${1} == -noX {optional}
local CAVE_LIB_DIRECTORY=
if [ -d /awips2/cave/lib ]; then
local CAVE_LIB_DIRECTORY="/awips2/cave/lib"
fi
if [ -d /awips2/cave/lib64 ]; then
local CAVE_LIB_DIRECTORY="/awips2/cave/lib64"
fi
if [ "${1}" = "-noX" ]; then
export LD_LIBRARY_PATH="${CAVE_LIB_DIRECTORY}/lib_mesa:$LD_LIBRARY_PATH"
fi
CALLED_EXTEND_LIB_PATH="true"
}
# Installs the current awips2VisualizeUtility.sh into the user's KDE
# shutdown hook directory (replacing any older copy) so it runs when the
# KDE session ends. Users without a ~/.kde directory are skipped.
function copyVizShutdownUtilIfNecessary()
{
   local script_name="awips2VisualizeUtility.sh"
   local kde_dir="${HOME}/.kde"

   # No KDE configuration directory: nothing to install.
   if [ ! -d ${kde_dir} ]; then
      return 0
   fi

   # Make sure the shutdown hook directory exists.
   if [ ! -d ${kde_dir}/shutdown ]; then
      mkdir ${kde_dir}/shutdown
   fi

   # Replace any previously installed copy with the current version.
   if [ -f ${kde_dir}/shutdown/${script_name} ]; then
      rm -f ${kde_dir}/shutdown/${script_name}
   fi
   cp /awips2/cave/${script_name} ${kde_dir}/shutdown/${script_name}
   chmod a+x ${kde_dir}/shutdown/${script_name}
}
# takes a function as an argument and calls the function passing in the ps string of the process
function forEachRunningCave()
{
local user=`whoami`
for parent in $(pgrep -u $user '^cave$')
do
# the cave process starts a new JVM as a child process
# find all children of the cave process
children=$(pgrep -P $parent)
if [[ -z $children ]]
then
# no children, assume that this is a main cave process
"$@" "$(ps --no-header -fp $parent)"
else
for child in $children
do
"$@" "$(ps --no-header -fp $child)"
done
fi
done
}
# takes in ps string of cave process, stores pid in _pids and increments _numPids
# (callback for forEachRunningCave; field 2 of the ps -f line is the PID)
function processPidOfCave()
{
_pids[$_numPids]=`echo $1 | awk '{print $2}'`
let "_numPids+=1"
}
# returns _numPids and array _pids containing the pids of the currently running cave sessions.
function getPidsOfMyRunningCaves()
{
_numPids=0
forEachRunningCave processPidOfCave
}
# takes a name of an ini file as an argument, echos the memory (in bytes) from file (or default)
# Scans for the first non-comment -Xmx<amount><unit> line; convertMemToBytes
# supplies the 1G default when none is found.
function readMemFromIni()
{
local inifile="$1"
local mem
local unit
local regex='^[^#]*-Xmx([0-9]+)([bBkKmMgG])?'
# read ini file line by line looking for Xmx arg
while read -r line
do
if [[ $line =~ $regex ]]
then
mem=${BASH_REMATCH[1]}
unit=${BASH_REMATCH[2]}
break
fi
done < "$inifile"
convertMemToBytes $mem $unit
}
# takes in integer amount and string units (K|M|G, case-insensitive), echos
# the amount converted to bytes; an absent/unrecognized unit leaves the
# amount untouched (treated as bytes already).
function convertMemToBytes()
{
    local mem=$1
    local unit=$2
    # Fix: `regex` was assigned without `local` and leaked into the caller's
    # (sourced) environment.
    local regex
    # convert to bytes
    case "$unit" in
        [kK])
            mem=$(($mem * $BYTES_IN_KB))
            ;;
        [mM])
            mem=$(($mem * $BYTES_IN_MB))
            ;;
        [gG])
            mem=$(($mem * $BYTES_IN_GB))
            ;;
    esac
    regex='^[0-9]+$'
    if [[ ! $mem =~ $regex ]]
    then
        # we couldn't find a valid Xmx value
        # java default is usually 1G
        mem=1073741824
    fi
    echo $mem
}
# takes in ps string of cave process, reads Xmx from ini and adds bytes to _totalRunninMem
# An explicit -Xmx in the process's vm arguments takes precedence over the
# value from its --launcher.ini file (memory override).
function addMemOfCave()
{
local inifile
# get ini file from process string
local iniRegex='--launcher.ini\s(.+\.ini)'
local xmxRegex='-Xmx([0-9]*)([^\s]*)'
if [[ $1 =~ $xmxRegex ]]
then
local mem="${BASH_REMATCH[1]}"
local unit="${BASH_REMATCH[2]}"
let "_totalRunningMem+=$(convertMemToBytes $mem $unit)"
else
if [[ $1 =~ $iniRegex ]]
then
inifile="${BASH_REMATCH[1]}"
else
inifile="/awips2/cave/cave.ini"
fi
let "_totalRunningMem+=$(readMemFromIni "$inifile")"
fi
}
# finds total max memory of running caves in bytes and places it in _totalRunningMem
function getTotalMemOfRunningCaves()
{
_totalRunningMem=0
forEachRunningCave addMemOfCave
}
# Removes per-PID GFE disk cache directories (pid_<N>) left behind by CAVE
# sessions that are no longer running. The workstation cache directory is
# resolved by short hostname, then short hostname minus a trailing suffix,
# then full hostname.
function deleteOldCaveDiskCaches()
{
local curDir=`pwd`
local user=`whoami`
local caches="diskCache/GFE"
local cacheDir="$HOME/caveData/etc/workstation"
local host=`hostname -s`
if [ -d "$cacheDir/$host" ]; then
cacheDir="$cacheDir/$host"
else
host=${host%-} # remove the -testbed
if [ -d "$cacheDir/$host" ]; then
cacheDir="$cacheDir/$host"
else
host=`hostname`
cacheDir="$cacheDir/$host"
fi
fi
if [ -d "$cacheDir" ]; then
# found cache dir for workstation
cd $cacheDir
# grab the current cave pids
getPidsOfMyRunningCaves
for cache in $caches; do
if [ -d "$cache" ]; then
cd $cache
diskPids=`ls -d pid_* 2> /dev/null`
for dPid in $diskPids; do
# strip the pid_ and compare to pids of running caves
dPidNum="${dPid#pid_}"
found=0
for pid in ${_pids[*]}; do
if [ "$pid" == "$dPidNum" ]; then
found=1
break
fi
done
# only delete caches whose owning cave session is gone
if [ $found -eq 0 ]; then
rm -rf $dPid
fi
done
cd ..
fi
done
fi
cd $curDir
}
# takes in a process id
# kills spawned subprocesses of pid
# and then kills the process itself
function killSpawn()
{
    # Fix: use a local so this does not clobber the caller's $pid --
    # logExitStatus uses a global of the same name and installs a trap
    # that calls this function.
    local pid=$1
    pkill -P $pid
    kill $pid
}
# log the exit status and time to a log file, requires 2 args pid and log file
# Forwards termination signals to the watched process tree, renames the log
# (replacing the %PID% placeholder with the spawned JVM's PID), waits for the
# process to exit, appends the exit status, and preserves any core file.
function logExitStatus()
{
pid=$1
logFile=$2
# propagate termination signals to the launcher and its children
trap 'killSpawn $pid' SIGHUP SIGINT SIGQUIT SIGTERM
childPid=$(waitForChildToStart $pid)
if [[ -n $childPid ]]
then
# substitute the real JVM PID into the log file name
newFileName=${logFile/\%PID\%/$childPid}
mv $logFile $newFileName
logFile=$newFileName
fi
wait $pid
exitCode=$?
curTime=`date --rfc-3339=seconds -u`
echo Exited at $curTime with an exit status of $exitCode >> $logFile
# If a core file was generated attempt to save it to a better place
coreFile=core.$pid
if [ -f "$coreFile" ]; then
basePath="/awips2/cave/fxa/cave/"
hostName=`hostname -s`
hostPath="$basePath/$hostName/"
mkdir -p $hostPath
if [ -d "$hostPath" ]; then
mv $coreFile $hostPath
fi
fi
# propagate the watched process's exit code to our caller
return $exitCode
}
# takes in a PID
# waits for PID to spawn child
# outputs the PID of the child or nothing if PID exits first
# Polls once per second; returns as soon as pgrep reports any child.
function waitForChildToStart()
{
pid=$1
# check if PID is still running
while ps -p $pid > /dev/null
do
sleep 1s
if child=$(pgrep -P $pid)
then
echo $child
break
fi
done
}
#Delete old CAVE logs DR 15348
# Purges *.log files older than CAVE_LOG_DAYS_TO_KEEP (default 30) days from
# $HOME/$BASE_LOGDIR, at most once per 24 hours, guarded by a lock file so
# simultaneously launched CAVEs do not purge concurrently.
# NOTE(review): this function ends with `exit 0`, which terminates the
# calling shell rather than returning -- presumably it is only invoked in a
# backgrounded subshell; confirm before calling it inline.
function deleteOldCaveLogs()
{
local logdir=$HOME/$BASE_LOGDIR
local now=$(date +%s)
# Determine the last purge time
local last_purge_f=$logdir/.last-purge
local last_purge_time=$(cat "$last_purge_f" 2>/dev/null)
if ! [[ $last_purge_time -gt 0 ]]; then
last_purge_time=0
fi
# If the last purge time was less than one day ago, return.
if [[ $(( last_purge_time + 86400 )) -gt $now ]]; then
return
fi
# Use a lock file to handle multiple CAVEs started at the same time.
local lock_f=$logdir/.purge-lock
set -o noclobber
if ! : > "$lock_f" && [ -e "$lock_f" ]; then
# If the lock file could not created and it exists and is less than one hour old, return.
local found=$(find "$(dirname "$lock_f")" -maxdepth 1 -mmin -60 -name "$(basename "$lock_f")" | wc -l)
if (( found )); then
set +o noclobber
return
fi
fi
set +o noclobber
# Purge the old logs.
local n_days_to_keep=${CAVE_LOG_DAYS_TO_KEEP:-30}
find "$logdir" -type f -name "*.log" -mtime +"$n_days_to_keep" | xargs -r rm
# Record the last purge time and remove the lock file.
echo $(date +%s) > "$last_purge_f"
rm -f "$lock_f"
exit 0
}
# Delete old Eclipse configuration directories that are no longer in use
# $1 is the parent directory holding the per-session configuration dirs.
# "In use" is determined by asking lsof which of the candidate directories
# have open files; only unused ones older than an hour are removed.
function deleteOldEclipseConfigurationDirs()
{
local tmp_dir=$1
# escape '|' so the path can be embedded in the sed pattern below
local tmp_dir_pat=$(echo "$tmp_dir" | sed -e 's/|/\\|/g')
local save_IFS=$IFS
IFS=$'\n'
# Find directories that are owned by the user and older than one hour
local old_dirs=( $(find "$tmp_dir" -mindepth 1 -maxdepth 1 -type d -user "$(whoami)" -mmin +60) )
IFS=$save_IFS
if (( ${#old_dirs[@]} < 1 )); then
return
fi
# Determine which of those directories are in use.
local lsof_args=()
local d
for d in "${old_dirs[@]}"; do
lsof_args+=('+D')
lsof_args+=("$d")
done
IFS=$'\n'
# Run lsof, producing machine readable output, filter the out process IDs,
# the leading 'n' of any path, and any subpath under a configuration
# directory. Then filter for uniq values.
local in_use_dirs=$(lsof -w -n -l -P -S 10 -F pn "${lsof_args[@]}" | grep -v ^p | \
sed -r -e 's|^n('"$tmp_dir_pat"'/[^/]*).*$|\1|' | uniq)
IFS=$save_IFS
local p
for p in "${old_dirs[@]}"; do
# remove only directories lsof did not report as open
if ! echo "$in_use_dirs" | grep -qxF "$p"; then
rm -rf "$p"
fi
done
}
# EXIT-trap cleanup: removes the per-session Eclipse configuration directory
# created by createEclipseConfigurationDir(), if any.
function deleteEclipseConfigurationDir()
{
if [[ -n $eclipseConfigurationDir ]]; then
rm -rf "$eclipseConfigurationDir"
fi
}
# Creates a unique per-session Eclipse configuration directory under
# ~/caveData/.cave-eclipse/ (pruning stale ones first), exports it as
# $eclipseConfigurationDir, registers cleanup on EXIT, and appends the
# -configuration switch to the caller's SWITCHES array.
# Falls back to the shared default directory (returning 1) on failure.
function createEclipseConfigurationDir()
{
local d dir id=$(hostname)-$(whoami)
for d in "$HOME/caveData/.cave-eclipse/"; do
if [[ $d == $HOME/* ]]; then
mkdir -p "$d" || continue
fi
deleteOldEclipseConfigurationDirs "$d"
if dir=$(mktemp -d --tmpdir="$d" "${id}-XXXX"); then
export eclipseConfigurationDir=$dir
trap deleteEclipseConfigurationDir EXIT
SWITCHES+=(-configuration "$eclipseConfigurationDir")
return 0
fi
done
echo "Unable to create a unique Eclipse configuration directory. Will proceed with default." >&2
export eclipseConfigurationDir=$HOME/caveData/.cave-eclipse
return 1
}
|
<reponame>DeIaube/YiXing
package arouter.dawn.zju.edu.module_goods.ui.detail;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.alibaba.android.arouter.facade.annotation.Autowired;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.youth.banner.Banner;
import java.text.SimpleDateFormat;
import arouter.dawn.zju.edu.lib_net.bean.goods.Goods;
import arouter.dawn.zju.edu.module_goods.util.PicassoUrlImageLeader;
import arouter.dawn.zju.edu.module_nearby.R;
import arouter.dawn.zju.edu.module_pay.callback.PayCallback;
import arouter.dawn.zju.edu.module_pay.ui.container.PayContainerFragment;
import baselib.base.BaseActivity;
import baselib.constants.RouteConstants;
/**
* @Auther: Dawn
* @Date: 2018/11/22 22:01
* @Description:
* 商品详情页
*/
@Route(path = RouteConstants.AROUTER_GOODS_DETAIL)
public class GoodsDetailActivity extends BaseActivity<GoodsDetailContract.Presenter> implements GoodsDetailContract.View, View.OnClickListener {
@Autowired(name = RouteConstants.GOODS_DETAIL_BUNDLE)
Bundle bundle;
Goods goods;
TextView goodsTitleTv;
TextView goodsBuyCounterTv;
TextView goodsLocationTv;
TextView goodsExplainTv;
TextView goodsStartTimeTv;
TextView goodsEndTimeTv;
TextView goodsPriceTv;
Button goodsPayBtn;
Banner goodsDetailBanner;
private String mCollectionMenuContent;
@SuppressLint("DefaultLocale")
@Override
protected void initView() {
goods = bundle.getParcelable(RouteConstants.GOODS_DETAIL_GOODS);
setToolbarTitle(goods.getTitle());
goodsDetailBanner = findViewById(R.id.goods_detail_banner);
goodsTitleTv = findViewById(R.id.goods_detail_title);
goodsBuyCounterTv = findViewById(R.id.goods_detail_buy_counter);
goodsLocationTv = findViewById(R.id.goods_detail_loacation);
goodsExplainTv = findViewById(R.id.goods_detail_explain);
goodsStartTimeTv = findViewById(R.id.goods_detail_start_time);
goodsEndTimeTv = findViewById(R.id.goods_detail_end_time);
goodsPriceTv = findViewById(R.id.goods_detail_price);
goodsPayBtn = findViewById(R.id.goods_detail_pay);
@SuppressLint("SimpleDateFormat")
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
goodsTitleTv.setText(goods.getTitle());
goodsExplainTv.setText(goods.getExplain());
goodsLocationTv.setText(goods.getLocation());
goodsPriceTv.setText(String.format("%.2f", goods.getPrice()));
goodsStartTimeTv.setText(sdf.format(goods.getStartTime()));
goodsEndTimeTv.setText(sdf.format(goods.getEndTime()));
goodsPayBtn.setOnClickListener(this);
goodsDetailBanner.setImageLoader(new PicassoUrlImageLeader());
goodsDetailBanner.setImages(goods.getPreviewList());
goodsDetailBanner.start();
mCollectionMenuContent = getString(R.string.goods_detail_collection);
mPresenter.init(goods);
}
@Override
protected boolean showHomeAsUp() {
return true;
}
@Override
protected int getLayoutId() {
return R.layout.activity_goods_detail;
}
@Override
protected void bindPresenter() {
mPresenter = new GoodsDetailPresenter();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.detail_menu, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
MenuItem collectionItem = menu.findItem(R.id.detail_menu_collection);
collectionItem.setTitle(mCollectionMenuContent);
return super.onPrepareOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.detail_menu_collection) {
// 收藏商品
mPresenter.collection(goods);
} else if (id == R.id.detail_menu_share) {
// 分享文章
Intent shareIntent = new Intent();
shareIntent.setAction(Intent.ACTION_SEND);
shareIntent.putExtra(Intent.EXTRA_TEXT, goods.getTitle());
shareIntent.setType("text/plain");
startActivity(shareIntent.createChooser(shareIntent, goods.getTitle()));
}
return super.onOptionsItemSelected(item);
}
@Override
public void showGoodsAlreadyCollection() {
mCollectionMenuContent = getString(R.string.goods_detail_cancel_collection);
getWindow().invalidatePanelMenu(Window.FEATURE_OPTIONS_PANEL);
}
@Override
public void showGoodsUnCollection() {
mCollectionMenuContent = getString(R.string.goods_detail_collection);
getWindow().invalidatePanelMenu(Window.FEATURE_OPTIONS_PANEL);
}
@Override
public void refreshBuyCounterTextView(String text) {
goodsBuyCounterTv.setVisibility(View.VISIBLE);
goodsBuyCounterTv.setText(text);
}
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.goods_detail_pay) {
if (!mPresenter.checkGoodsData(goods)) {
Toast.makeText(this, "与活动时间不符", Toast.LENGTH_SHORT).show();
return;
}
new PayContainerFragment()
.show(getSupportFragmentManager(), goods.getPrice(),
goods.getTitle(), goods.getExplain()
, new PayCallback() {
@Override
public void paySuccess() {
mPresenter.paySuccess(goods);
}
@Override
public void payFailed(String msg) {
mPresenter.payFailed(goods, msg);
}
});
}
}
}
|
<reponame>ioannis-mylonas/orange-talents-03-template-ecommerce
package bootcamp.mercado.usuario.autenticacao;
import org.springframework.security.core.GrantedAuthority;
import javax.persistence.*;
/**
 * JPA entity representing a user profile/role, exposed to Spring Security
 * as a granted authority whose authority string is the role name.
 */
@Entity
public class Perfil implements GrantedAuthority {
    private static final long serialVersionUID = 1L;

    @Id @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    /** Role name; also returned as the authority string. */
    @Column(nullable = false)
    private String nome;

    /**
     * No-arg constructor required by the JPA specification; persistence
     * providers instantiate entities reflectively. Not for application use.
     */
    @Deprecated
    protected Perfil() {
    }

    public Perfil(String nome) {
        this.nome = nome;
    }

    @Override
    public String getAuthority() {
        return this.nome;
    }
}
|
import { DirectedMessage } from "@atomist/rug/operations/Handlers";
import { EventHandlerScenarioWorld, Given, Then, When } from "@atomist/rug/test/handler/Core";
import { Tag } from "@atomist/cortex/stub/Tag";
// Scenario setup: register the handler under test with the scenario world.
Given("the MyFirstEventHandler is registered", (w: EventHandlerScenarioWorld) => {
w.registerHandler("MyFirstEventHandler");
});
// Trigger: deliver a stub Tag event into the world.
When("a new Tag is received", (w: EventHandlerScenarioWorld) => {
const event = new Tag();
w.sendEvent(event);
});
// Verification: the first message in the handler's plan must match exactly.
Then("the MyFirstEventHandler event handler should respond with the correct message",
(w: EventHandlerScenarioWorld) => {
const expected = `Tag event received`;
const message = (w.plan().messages[0] as DirectedMessage).body;
return message === expected;
});
|
/*
 * create_nestedtable3.sql
 * Chapter 6, Oracle10g PL/SQL Programming
 * by <NAME>, <NAME> and <NAME>
 *
 * Constructs a nested table type in the database, initializes an instance
 * with three null elements, then assigns a value to each element by
 * subscript and prints the contents before and after.
 */
SET ECHO ON
SET SERVEROUTPUT ON SIZE 1000000
-- Define a nested table type of variable length strings.
CREATE OR REPLACE TYPE card_table
AS TABLE OF VARCHAR2(5 CHAR);
/
DECLARE
-- Declare and initialize a nested table with three null rows.
cards CARD_TABLE := card_table(NULL,NULL,NULL);
BEGIN
-- Print title.
dbms_output.put_line(
'Nested table initialized as null values.');
dbms_output.put_line(
'----------------------------------------');
-- Loop through the three records.
-- NOTE(review): the printed label says "Varray" but cards is a nested table.
FOR i IN 1..3 LOOP
-- Print the contents.
dbms_output.put ('Cards Varray ['||i||'] ');
dbms_output.put_line('['||cards(i)||']');
END LOOP;
-- Assign values to subscripted members of the nested table.
cards(1) := 'Ace';
cards(2) := 'Two';
cards(3) := 'Three';
-- Print title.
dbms_output.put (CHR(10)); -- Visual line break.
dbms_output.put_line(
'Nested table initialized as Ace, Two and Three.');
dbms_output.put_line(
'-----------------------------------------------');
-- Loop through the three records to print the nested table contents.
FOR i IN 1..3 LOOP
dbms_output.put_line('Cards ['||i||'] '
|| '['||cards(i)||']');
END LOOP;
END;
/
|
<reponame>mdsrosa/todos-api
from todos.serializers import TodoSerializer
from todos.models import Todo
from rest_framework import generics
class ListCreateTodoView(generics.ListCreateAPIView):
    """API endpoint that lists all todos (GET) and creates new ones (POST)."""

    queryset = Todo.objects.all()
    serializer_class = TodoSerializer
package com.bingor.router.impl;

/**
 * Router node executor that handles "normal" nodes.
 * <p>Created by HXB on 2018/7/25.</p>
 */
public interface RouterNodeExecutorNormal extends RouterNodeExecutor {

    /**
     * Executes this node.
     *
     * @param jsonParams node parameters encoded as a JSON string
     */
    void executeNode(String jsonParams);
}
|
class ToDoList:
    """A minimal in-memory task list with numbered printing."""

    def __init__(self):
        # Tasks are kept in insertion order.
        self.tasks = []

    def print(self):
        """Print every task on its own line, numbered starting at 1."""
        for position, task in enumerate(self.tasks, start=1):
            print(f'{position}. {task}')

    def add(self, task):
        """Append a task to the end of the list."""
        self.tasks.append(task)

    def edit(self, index, new_task):
        """Replace the task at ``index`` (0-based) with ``new_task``."""
        self.tasks[index] = new_task

    def delete(self, index):
        """Remove the task at ``index`` (0-based)."""
        self.tasks.pop(index)
package com.dimafeng.testcontainers
import org.testcontainers.containers.{MongoDBContainer => JavaMongoDBContainer}
import org.testcontainers.utility.DockerImageName
/** Scala wrapper around the testcontainers-java MongoDB container.
  *
  * @param tag optional Docker image name; when None the Java container's
  *            default image is used
  */
case class MongoDBContainer(
tag: Option[DockerImageName] = None
) extends SingleContainer[JavaMongoDBContainer] {
override val container: JavaMongoDBContainer = tag match {
case Some(tag) => new JavaMongoDBContainer(tag)
case None => new JavaMongoDBContainer()
}
/** Connection string for the container's replica set. */
def replicaSetUrl: String = container.getReplicaSetUrl
}
object MongoDBContainer {
// Convenience constructor for a mandatory image tag.
def apply(tag: DockerImageName): MongoDBContainer = new MongoDBContainer(Option(tag))
/** Deferred container definition: creates the wrapper on demand.
  * A null tag becomes None via Option(tag), falling back to the default image.
  */
case class Def(
tag: DockerImageName = null
) extends ContainerDef {
override type Container = MongoDBContainer
override def createContainer(): MongoDBContainer = new MongoDBContainer(Option(tag))
}
}
|
<gh_stars>0
// Shorthand DOM query helpers.
var $e = function (selector) {
    // First element matching the CSS selector, or null.
    return document.querySelector(selector);
};
var $ea = function (selector) {
    // All elements matching the CSS selector (static NodeList).
    return document.querySelectorAll(selector);
};
// Id of the task currently pending delete confirmation
// (shared between init_del and clear_task).
var cdilid = 0;
// Popup bootstrap: wires up the reminder form, time pickers, tabs and task list.
window.onload=function(){
var $elem=$e('#remind');
// Start the header clock immediately and refresh it every second.
curtime();
setInterval(curtime,1000);
// Bail out on pages that do not contain the reminder form.
if(typeof($elem) === 'undefined' || $elem === null){
return ;
}
// "Remind" button: validate the form, build the reminder payload and
// hand it to the background page.
document.getElementById("remind").onclick = function(e){
var data={};
data.title="Task Reminder";
data.message= document.getElementById("message").value;
if(data.message.trim()==""){
document.getElementById("message").focus();
return;
}
// Second radio = "at a specific date/time" mode.
if(document.getElementsByName("type")[1].checked){
data.date=document.getElementById("date").value;
if(data.date.trim()==""){
document.getElementById("date").focus();
return;
}
data.hours=Number(document.getElementById("hours").value);
var apm=document.getElementById("apm").value.trim();
// Convert the 12-hour picker value to 24-hour time.
if(apm=="pm" && data.hours!=12){
data.hours+=12;
}else if(apm=="am" && data.hours==12){
data.hours=0;
}
var time=new Date(data.date);
time.setHours(data.hours);
time.setMinutes(Number(document.getElementById("mins").value));
data.time=time.valueOf();
// Minutes from now until the chosen timestamp.
data.minutes=((data.time-new Date().valueOf())/1000)/60;
}else{
// "In N minutes" mode.
data.minutes=document.getElementById("minutes").value;
if(data.minutes.trim()=="" || isNaN(data.minutes)){
document.getElementById("minutes").focus();
return;
}
var time=new Date();
time.setMinutes(time.getMinutes() + Number(data.minutes));
data.time=time.valueOf();
}
// The id doubles as the notification/alarm key in the background page.
data.id="task_rem"+data.time;
chrome.runtime.sendMessage(data,function(response) {
// Switch to the task-list tab, clear the form and refresh the list
// (again after 1s, once the background page has persisted the item).
$ea('#menus a')[1].click();
reset_fields('#message','#minutes','#date');
store_items();
setTimeout(function(){
store_items();
},1000)
});
};
// Populate the minute (00-59) and hour (01-12) dropdowns.
var mins="";
for(var $i=0;$i<=59;$i++){
var j=$i;
if($i<10){j="0"+j;}
mins+="<option value='"+$i+"'>"+j+"</option>"
}
var hrs="";
for(var $i=1;$i<=12;$i++){
var j=$i;
if($i<10){j="0"+j;}
hrs+="<option value='"+$i+"'>"+j+"</option>"
}
document.getElementById("mins").innerHTML=mins;
document.getElementById("hours").innerHTML=hrs;
// Radio buttons toggle between "in N minutes" and "at date/time" inputs.
var radios=document.getElementsByName("type");
for(var i = 0;i < radios.length;i++){
radios[i].onchange = function(){
if(this.value.trim()==='mins'){
ienable("minutes");
idisable("mins","hours","apm","date");
}else{
idisable("minutes");
ienable("mins","hours","apm","date");
}
};
}
// Pre-fill the pickers with the current time.
// NOTE(review): at 12:xx (noon) nowhr stays 12 and #apm keeps its default,
// and at 00:xx nowhr is 0 which has no matching <option> — confirm intended.
var now=new Date();
var nowhr=now.getHours();
var nowmin=now.getMinutes();
if(nowhr>12){
$e('#apm').value='pm';
nowhr=nowhr-12;
}
$e('#mins').value=nowmin;
$e('#hours').value=nowhr;
$e("#date").value=now.format("%y-%m-%d");
// Simple tab switcher for the #menus links.
var cl=$ea('#menus a');
for(var i=0;i<cl.length;i++){
cl[i].onclick=function(){
for(var i=0;i<cl.length;i++){
cl[i].classList.remove("active");
}
this.classList.add('active');
var acttab=this.getAttribute('href');
var tabi=$ea('.tabi');
for(var i=0;i<tabi.length;i++){
tabi[i].style.display="none";
}
$e(acttab).style.display="block";
};
};
// Initial render of stored tasks and their delete handlers.
store_items();
init_del();
// Promo link to the Jotit extension.
$e("#jotit").onclick=function(){
var url="https://chrome.google.com/webstore/detail/jotit-just-jot-it/aoipkhoiccpbmbgbinbplkgdhgmjeiek";
chrome.tabs.create({ url: url,active:true });
};
};
// Forward a reminder payload to the background page. Fire-and-forget:
// the response callback is intentionally empty.
function remind(payload) {
    chrome.runtime.sendMessage(payload, function (response) {
        // No-op.
    });
}
// Render all stored reminders into the #viewtasks table and start the
// per-row countdown timers shown next to each due time.
function store_items(){
chrome.storage.sync.get('tr_items',function(items){
var td="";
if(Object.keys(items).length>0){
for(var i=0;i<items.tr_items.length;i++){
td+="<tr>";
td+="<td>"+items.tr_items[i].message+"</td>";
td+="<td>"+new Date(items.tr_items[i].time).format("%m/%d/%y %h:%min %APM")+" (<span class='remtime' data-time='"+items.tr_items[i].time+"'></span> )</td>";
td+="<td><a class='btn btn-link del_tasks' data-id='"+items.tr_items[i].id+"'><i title='delete' class='glyphicon glyphicon-trash'></i></a>";
// FIX: was `td+"</tr>";` — a no-op expression (missing `=`), so row tags
// were never closed.
td+="</tr>";
}
}
if(td!=""){
// Put a "delete all" trash icon into the table header.
var $fdel="<a class='btn btn-link del_tasks' data-id='full'><i title='delete all' class='glyphicon glyphicon-trash'></i>";
$e('#viewtasks table tbody').innerHTML=td;
$ea('#viewtasks table thead th')[2].innerHTML=$fdel;
// Re-bind delete handlers for the freshly rendered rows.
init_del();
}else{
$e('#viewtasks table tbody').innerHTML="<tr><td colspan='3'>No Tasks</td></tr>";
}
// Start a 1s countdown ticker for every rendered due time.
$ea(".remtime").forEach(function(e,i){
var tm=Number(e.dataset.time);
if(tm){
setInterval(function(){
remaining(tm,e);
},1000);
}
});
});
}
// Set the `disabled` attribute on every element whose id is passed as an argument.
function idisable(){
    Array.prototype.forEach.call(arguments, function (elemId) {
        document.getElementById(elemId).setAttribute('disabled', 'disabled');
    });
}
// Remove the `disabled` attribute from every element whose id is passed as an argument.
function ienable(){
    Array.prototype.forEach.call(arguments, function (elemId) {
        document.getElementById(elemId).removeAttribute('disabled');
    });
}
// Remove a stored reminder by id; the special id "full" removes everything.
// An optional second argument is a callback invoked after storage is updated.
function clear_task(id){
var fn=arguments.length>1?arguments[1]:"";
if(id.trim()!="full"){
chrome.storage.sync.get('tr_items',function(items){
if(Object.keys(items).length>0){
// Rebuild the list keeping every item except the matching id.
var tmp=[];
for(var i=0;i<items.tr_items.length;i++){
var tr=items.tr_items[i];
if(tr.id.trim()!=id.trim()){
tmp.push(tr);
}
}
chrome.storage.sync.set({'tr_items':tmp},function(){
if(fn!=""){
fn();
}
});
}
});
}else{
// NOTE(review): clear() wipes ALL keys in chrome.storage.sync, not just
// 'tr_items' — confirm nothing else is stored under sync storage.
chrome.storage.sync.clear(function(){
if(fn!=""){
fn();
}
});
}
}
// Attach click handlers to the per-row (and "delete all") trash icons and to
// the delete-confirmation dialog's yes/no buttons.
function init_del(){
var ids=$ea('.del_tasks');
for(var i=0;i<ids.length;i++){
ids[i].onclick=function(){
// Remember which id is pending deletion, then show the confirm dialog.
var did=this.getAttribute('data-id');
cdilid=did
$e('#delete_confirm').style.display="block";
};
}
// "Yes": delete, hide the dialog, re-render, and dismiss any notification.
document.getElementById('confirm_delete').onclick=function(){
clear_task(cdilid,function(){
$e('#delete_confirm').style.display="none";
store_items();
chrome.notifications.clear(cdilid);
});
};
// "No": just hide the dialog.
document.getElementById('confirm_delete_no').onclick=function(){
$e('#delete_confirm').style.display="none";
};
}
// Blank out the value of every element matched by the given CSS selectors.
function reset_fields(){
    Array.prototype.forEach.call(arguments, function (sel) {
        $e(sel).value = "";
    });
}
// Debug helper: dump all notifications, alarms and stored reminders
// to the console. Not called from the UI flow above.
function get_all(){
chrome.notifications.getAll(function(dat){
console.log(dat);
});
chrome.alarms.getAll(function(dat){
console.log(dat);
});
chrome.storage.sync.get('tr_items',function(items){
console.log(items);
});
}
try{
// Tiny strftime-like formatter: replaces the first occurrence of each
// "%token" in `f` with the matching date field, zero-padding numeric
// values below 10. Supports %y %m %d %w %h %min %s %ms %t %apm %APM.
Date.prototype.format=function(f){
var dt=this;
var formats={
"m":dt.getMonth()+1,
"d":dt.getDate(),
"w":dt.getDay(),
"y":dt.getFullYear(),
"h":dt.getHours(),
"min":dt.getMinutes(),
"s":dt.getSeconds(),
"ms":dt.getMilliseconds(),
"t":dt.getTime(),
"apm":dt.getHours() >= 12 ? 'pm' : 'am',
"APM":dt.getHours() >= 12 ? 'PM' : 'AM'
};
// When an am/pm token is requested, present hours on a 12-hour clock.
if(f.search(/apm/i)>-1){
formats['h'] = formats['h'] % 12;
formats['h'] = formats['h'] ? formats['h'] : 12;
}
// FIX: substitute longer tokens first so "%min" and "%ms" are not clobbered
// by the shorter "%m" replacement (e.g. "%h:%min" used to render "14:01in").
// Existing format strings with "%m" before "%min" behave exactly as before.
var tokens=Object.keys(formats).sort(function(a,b){return b.length-a.length;});
for(var k=0;k<tokens.length;k++){
var fm=tokens[k];
var val=formats[fm];
if(typeof val=="number" && val<10){
val="0"+val;
}
f=f.replace("%"+fm,val);
}
return f;
};
}catch(e){
console.log(e);
}
// Refresh the #curtime clock display with the current date and time.
function curtime(){
    var now = new Date();
    $e("#curtime").innerHTML = now.format("%y-%m-%d %h:%min %APM");
}
// Write a human-readable "time left" string (e.g. "1 Hr 29 Min Left") into
// the given element; writes an empty string once the target time has passed.
function remaining(dt,id){
    var nowMs = new Date().getTime();
    var targetMs = new Date(dt).getTime();
    var diff = targetMs - nowMs;
    var label = "";
    if (diff > 0) {
        var dayMs = 1000 * 60 * 60 * 24;
        var hourMs = 1000 * 60 * 60;
        var minuteMs = 1000 * 60;
        var days = Math.floor(diff / dayMs);
        var hours = Math.floor((diff % dayMs) / hourMs);
        var minutes = Math.floor((diff % hourMs) / minuteMs);
        var parts = [];
        if (days > 0) { parts.push(days + " Days"); }
        if (hours > 0) { parts.push(hours + " Hr"); }
        parts.push(minutes + " Min");
        parts.push("Left");
        label = parts.join(" ");
    }
    id.innerHTML = label;
}
import React, { useEffect, useState } from 'react';
import ChapterList from './ChapterList';
// Storybook metadata: groups these stories under "Prasang/ChapterList".
export default {
title: 'Prasang/ChapterList',
component: ChapterList,
};
// Sample chapter data (artwork URL, chapter number, Gurmukhi title and
// English name) used by the stories below.
const chamkaurChapters = [
{
artwork:
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/e70e2478-3314-4a6b-a9c8-7fced70e6f31/dcheda2-d72bc290-f701-4b21-9e48-a34a8d834769.jpg/v1/fill/w_600,h_343,q_75,strp/anandpur_sahib_vector_art_by_damanpreetsinghkhurl_dcheda2-fullview.jpg?token=<KEY>',
number: 31,
gurmukhiScript: 'AnMdpur CoVnw',
name: 'Leaving Anandpur',
},
{
artwork:
'https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTzkD7Fr38S5p9PGO71L0kPojLUHp2HcoPSow&usqp=CAU',
number: 32,
gurmukhiScript: 'ipCoN phwVIAW qy sUibAW ny Aw pYxw [',
name: 'From Behind, The Hill Chiefs and Mughals Attack...',
},
{
artwork:
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/00714226-88e5-48cd-8b5e-736f73e06f76/dcnjzs9-e5c73dae-ce30-4a14-9d7d-ea261ced1f3a.png/v1/fill/w_1024,h_1505,q_80,strp/guru_gobind_singh_ji_l_digital_art_by_ms_saluja_by_ms_saluja_dcnjzs9-fullview.jpg?token=<KEY>',
number: 33,
gurmukhiScript: 'BweI jIvn isMG b`D [ jMg',
name: 'Aftermath of <NAME>. Battle.',
},
];
// Story: renders the ChapterList with the Chamkaur sample data above.
export const ChamkaurChapters = () => {
  return <ChapterList chapters={chamkaurChapters} />;
};
|
<reponame>autochthe/pdf-html
import {createCanvas} from "canvas";
import fs from "fs";
import pdfjsLib, { PDFPageProxy } from "pdfjs-dist";
import NodeCanvasFactory from "./NodeCanvasFactory";
import PdfXml from "./PdfXml";
// Read a PDF from disk and hand its contents to convertPdfBuffer,
// recording the file name for the output metadata. Read errors are
// logged and the conversion is skipped.
export function convertPdfFile(file: string) {
    fs.readFile(file, (err, buffer) => {
        if (err) {
            console.error(err);
            return;
        }
        convertPdfBuffer(buffer, {filename: file});
    });
}
// Options accepted by convertPdfBuffer.
interface pdfParameters {
filename?: string; // original file name, recorded in the output metadata
scale?: number; // render scale forwarded to page conversion
}
// Per-page rendering options (defaults merged in by convertPdfBuffer).
interface pdfPageParameters {
scale: number;
}
// Parse a PDF from an in-memory buffer: capture document metadata into a
// PdfXml instance and convert pages via convertPdfPage.
export function convertPdfBuffer(buffer: Buffer, parameters?: pdfParameters ) {
// Initialize parameters
const defaultPageParams = {
scale: 4,
};
let pageParams: pdfPageParameters;
if(parameters) {
// NOTE(review): this spread also copies `filename` into pageParams —
// harmless for the current code path, but confirm it is intended.
pageParams = {...defaultPageParams, ...parameters};
} else {
pageParams = defaultPageParams;
}
const filename = parameters ? parameters.filename : undefined;
// Parse PDF
const xml = new PdfXml();
pdfjsLib.getDocument({
data: new Uint8Array(buffer),
}).promise.then(pdfDocument =>{
pdfDocument.getMetadata().then(metadata =>{
xml.setMetadata({...metadata, filename});
})
for (let i = 1; i <= pdfDocument.numPages; i++) {
pdfDocument.getPage(i).then(pdfPage=>{
convertPdfPage(pdfPage, pageParams)
});
// NOTE(review): this break exits after scheduling only page 1, despite the
// loop over numPages — confirm whether single-page conversion is intended.
break;
}
})
}
// Render a single PDF page: logs its text content and rasterizes it to a
// node-canvas at the requested scale.
function convertPdfPage(pdfPage: PDFPageProxy, {scale}: pdfPageParameters) {
// Get Viewport
// NOTE(review): newer pdfjs-dist versions take getViewport({ scale });
// confirm the pinned version still accepts a bare number.
const viewport = pdfPage.getViewport(scale);
// NOTE(review): pageInfo is built but never used below — presumably meant
// for the XML output; verify.
const pageInfo = {
num: pdfPage.pageNumber,
scale: viewport.scale,
width: viewport.width,
height: viewport.height,
offsetX: viewport.offsetX,
offsetY: viewport.offsetY,
rotation: viewport.rotation,
};
// Get TextContent
pdfPage.getTextContent().then(content => {
console.log(content);
});
// Get Canvas
const canvas = createCanvas(viewport.width, viewport.height);
const ctx = canvas.getContext("2d");
pdfPage.render({
canvasContext: ctx,
viewport: viewport,
canvasFactory: new NodeCanvasFactory(),
intent: "print",
}).promise.then(()=>{
// NOTE(review): the buffer produced here is returned into the promise
// chain but never consumed — confirm where the rendered image should go.
return canvas.toBuffer();
});
}
|
# Bash prompt (PS1) setup.
# NOTE(review): labels re-matched to the sourced filenames; the original
# comments ("AutoComplete Functions" / "Git Branch") appeared swapped.
source ${MAESCRIPT_HOME}/src/bash/bash-prompt.sh
# AutoComplete functions (including git-branch helpers).
source ${MAESCRIPT_HOME}/src/bash/autocomplete-functions.sh
#!/bin/bash
# Cross-compile OpenSSL for a little-endian MIPS target and install the
# result into the shared rootfs staging tree. Aborts on the first error.
set -e
# Paths are resolved relative to this script's own directory.
ROOT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLCHAIN="${ROOT_DIR}/../mipsel-toolchain"
ROOTFS="${ROOT_DIR}/../rootfs"
OUT_DIR="${ROOTFS}/usr"
# Put the cross toolchain on PATH and point the build at the rootfs
# headers/libraries; optimize for size.
export CROSS_COMPILE=mipsel-linux-
export PATH="${TOOLCHAIN}/bin:${PATH}"
export CFLAGS=" -Os -W -Wall"
export CPPFLAGS=" -I${ROOTFS}/include -I${ROOTFS}/usr/include"
export LDFLAGS=" -L${ROOTFS}/lib -L${ROOTFS}/usr/lib"
./Configure linux-mips32 shared zlib-dynamic --prefix="${OUT_DIR}"
# NOTE(review): rewrites the first '-gcc' per line to 'gcc' in the generated
# Makefile (e.g. '<prefix>-gcc' -> '<prefix>gcc'); confirm this matches the
# toolchain's actual binary names before relying on it.
sed -i'' 's/\-gcc/gcc/' Makefile
make
make install
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.