code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
<?php
/**
 * @link http://www.yiiframework.com/
 * @copyright Copyright (c) 2008 Yii Software LLC
 * @license http://www.yiiframework.com/license/
 */

namespace app\commands;

use yii\console\Controller;
use app\models\Subject;
use app\components\MainTrait;
use yii\web\NotFoundHttpException;

/**
 * This command echoes the first argument that you have entered.
 *
 * This command is provided as an example for you to learn how to create console commands.
 *
 * @author Qiang Xue <qiang.xue@gmail.com>
 * @since 2.0
 */
class HelloController extends Controller
{
    /**
     * This command echoes what you have entered as the message.
     * @param string $message the message to be echoed.
     */
    public function actionIndex($message = 'hello world')
    {
        echo $message . "\n";
    }

    /**
     * Regenerates the URL slug for every Subject record from its name:
     * transliterates to Latin, replaces spaces with underscores, lower-cases,
     * and strips round brackets around word characters.
     *
     * @return bool true on success.
     * @throws NotFoundHttpException if saving a record fails.
     */
    public function actionUrlSubject()
    {
        // Iterate over ActiveRecord models directly instead of the original
        // asArray()->all() + findOne() per row (avoids an extra query per record).
        foreach (Subject::find()->all() as $subject) {
            $str = MainTrait::getTranslit($subject->name); // transliterate to Latin
            $str = str_replace(' ', '_', $str);            // replace spaces
            $str = mb_strtolower($str);                    // lower-case
            // Remove round brackets that directly wrap word characters.
            $url = preg_replace('#\(?(\w)\)?#s', '$1', $str);

            $subject->url = $url;
            // BUG FIX: update() returns the number of affected rows. A record
            // whose url is already current yields 0, and the original loose
            // `== false` check treated that as a failure. Only a strict false
            // (validation or DB error) is a real failure.
            if ($subject->update() === false) {
                throw new NotFoundHttpException('Failed:' . $subject->name . '->' . $url);
            }
        }
        return true;
    }
}
iExellent/gratemake
commands/HelloController.php
PHP
bsd-3-clause
1,585
<?php
/**
 * TbModal class file.
 *
 * @author Christoffer Niska <ChristofferNiska@gmail.com>
 * @copyright Copyright &copy; Christoffer Niska 2011-
 * @license [New BSD License](http://www.opensource.org/licenses/bsd-license.php)
 * @package bootstrap.widgets
 * @since 0.9.3
 */

/**
 * Bootstrap modal widget.
 * @see <http://twitter.github.com/bootstrap/javascript.html#modals>
 */
class TbModal extends CWidget
{
    /**
     * @var boolean indicates whether to automatically open the modal when initialized. Defaults to 'false'.
     */
    public $autoOpen = false;
    /**
     * @var boolean indicates whether the modal should use transitions. Defaults to 'true'.
     */
    public $fade = true;
    /**
     * @var array the options for the Bootstrap Javascript plugin.
     */
    public $options = array('backdrop'=>'static');
    /**
     * @var string[] the Javascript event handlers.
     */
    public $events = array();
    /**
     * @var array the HTML attributes for the widget container.
     */
    public $htmlOptions = array();
    /**
     * @var boolean whether the modal may be dragged by its header.
     */
    public $dragable = false;

    /**
     *### .init()
     *
     * Initializes the widget: ensures a container id, defaults the plugin's
     * 'show' flag from $autoOpen, composes the CSS classes, and opens the
     * container tag.
     */
    public function init()
    {
        if (!isset($this->htmlOptions['id'])) {
            $this->htmlOptions['id'] = $this->getId();
        }

        if ($this->autoOpen === false && !isset($this->options['show'])) {
            $this->options['show'] = false;
        }

        // 'modal hide' is always present; 'fade' is appended when transitions
        // are enabled. Merge with any caller-supplied class attribute.
        $cssClasses = $this->fade === true ? 'modal hide fade' : 'modal hide';
        if (isset($this->htmlOptions['class'])) {
            $this->htmlOptions['class'] .= ' ' . $cssClasses;
        } else {
            $this->htmlOptions['class'] = $cssClasses;
        }

        echo CHtml::openTag('div', $this->htmlOptions);
    }

    /**
     *### .run()
     *
     * Runs the widget: closes the container tag and registers the modal
     * plugin call, optional draggable support, and any event handlers.
     */
    public function run()
    {
        $modalId = $this->htmlOptions['id'];
        echo '</div>';

        /** @var CClientScript $scripts */
        $scripts = Yii::app()->getClientScript();

        $pluginOptions = !empty($this->options) ? CJavaScript::encode($this->options) : '';
        $scripts->registerScript(__CLASS__.'#'.$modalId, "jQuery('#{$modalId}').modal({$pluginOptions});");

        if ($this->dragable == true) {
            $scripts->registerCoreScript("jquery.ui");
            $scripts->registerScript(__CLASS__.'#'.$modalId.'_drag', "jQuery('#{$modalId}').draggable({handle: '.modal-header'});");
        }

        foreach ($this->events as $eventName => $callback) {
            $encodedHandler = CJavaScript::encode($callback);
            $scripts->registerScript(__CLASS__.'#'.$modalId.'_'.$eventName, "jQuery('#{$modalId}').on('{$eventName}', {$encodedHandler});");
        }
    }
}
GsHatRed/Yiitest
protected/extensions/bootstrap/widgets/TbModal.php
PHP
bsd-3-clause
2,602
#include <math.h> #include "utAllocore.h" template <class T> inline bool eq(T x, T y, T eps=0.000001){ return abs(x-y) < eps; } template <class T> inline bool eq(const T* x, const T* y, int n, T eps=0.0000001){ for(int i=0; i<n; ++i){ if(!eq(x[i], y[i], eps)) return false; } return true; } template <class T> inline bool eq(const Quat<T>& a, const Quat<T>& b, T eps=0.000001){ return eq(&a[0], &b[0], 4, eps); } template <int N, class T> inline bool eq(const Vec<N,T>& a, const Vec<N,T>& b, T eps=0.000001){ return eq(&a[0], &b[0], N, eps); } template <int N, class T> inline bool eq(const Mat<N,T>& a, const Mat<N,T>& b, T eps=0.000001){ return eq(&a[0], &b[0], N*N, eps); } int utMath(){ // Vec { const int N = 4; // Should be able to hold objects with constructors { Vec<1, Vec<1, int> > t; } { Vec<4, Complex<float> > t; } { Vec<5, char> t; } { Vec<0,int> t; } //Vec<0,int>().print(); //Vec<1,int>(1).print(); //Vec<2,int>(1,2).print(); //std::cout << Vec<0,int>(); Vec<N,double> a, b, c; assert(a.size() == N); a[0] = 0; assert(a[0] == 0); assert(a.elems()[0] == 0); a.x = 1; assert(a[0] == 1); a.y = 2; assert(a[1] == 2); a.z = 3; assert(a[2] == 3); a.w = 4; assert(a[3] == 4); a = 1; assert(a == 1); b = a; assert(b == 1); assert(b == a); b = 2; assert(b != a); { a = 1; b = 0; double * p = a.elems(); assert(p[0] == a[0]); b.set(p); assert(a == b); char c1[] = {4,4,4,4}; a.set(c1); assert(a == 4); char c2[] = {1,0,1,0,1,0,1,0}; a.set(c2,2); assert(a == 1); a = 0; a.set(Vec<N-1,int>(1,2,3), 4); assert(a[0] == 1 && a[1] == 2 && a[2] == 3 && a[3] == 4); } a = 3; b = 3; a -= b; assert(a == 0); a += b; assert(a == 3); a -= 3; assert(a == 0); a += 3; assert(a == 3); a *= b; assert(a == 9); a /= b; assert(a == 3); a *= 3; assert(a == 9); a /= 3; assert(a == 3); a = b + b; assert(a == 6); a = b - b; assert(a == 0); a = b * b; assert(a == 9); a = b / b; assert(a == 1); a = 2. + a; assert(a == 3); a = 6. - a; assert(a == 3); a = 2. * a; assert(a == 6); // a = 1. 
/ a; assert(a == 1./6); a = +1; b = -1; assert(a == -b); a = -1; b = +1; assert(a.dot(b) ==-N); assert(a.dot(a) == N); assert(a.sum() == -N); assert(a.sumAbs() == N); assert(a.mag() == sqrt(N)); assert(b.mag() == sqrt(N)); assert(a.mag() == abs(a)); assert(a.magSqr() == N); assert(b.magSqr() == N); assert(a.norm1() == N); assert(a.norm2() == sqrt(N)); (a = 1).negate(); assert(a == -1); (a = 1).normalize(); assert(a == 1./sqrt(N)); assert(a == (b = 10).normalized()); b = a = 1; assert(concat(a,b) == 1); // conversion { a = 0; Vec<N+1, double> t = concat(a, Vec<1,char>(1)); assert(t.size() == a.size()+1); } for(int i=0; i<a.size(); ++i) a[i]=i; assert(a.get(0,1) == Vec2d(0,1)); assert(a.get(2,2) == Vec2d(2,2)); assert(a.get(2,1,0) == Vec3d(2,1,0)); { for(int i=0; i<a.size(); ++i) a[i]=i; Vec<2, double> t; t = a.sub<2>(); assert(t[0] == 0 && t[1] == 1); t = a.sub<2>(2); assert(t[0] == 2 && t[1] == 3); // Verify in-place operations a.sub<2>() += 10; assert(a[0] == 10 && a[1] == 11); } assert(eq(angle(Vec3d(1,0,0), Vec3d(1, 0, 0)), 0.)); assert(eq(angle(Vec3d(1,0,0), Vec3d(0, 1, 0)), M_PI_2)); assert(eq(angle(Vec3d(1,0,0), Vec3d(0,-1, 0)), M_PI_2)); { Vec3d r; centroid3(r, Vec3d(1,0,0), Vec3d(0,1,0), Vec3d(0,0,1)); assert(eq(r, Vec3d(1/3.))); normal(r, Vec3d(1,0,0), Vec3d(0,1,0), Vec3d(-1,0,0)); assert(eq(r, Vec3d(0,0,1))); Vec3d pos(1,2,3); Vec3d to(4,5,6); Vec3d rel = to - pos; assert(rel[0]==3 && rel[1]==3 && rel[2]==3); } a = 0; b = 1; assert(min(a,b) == 0); assert(max(a,b) == 1); } // Vec3 { Vec3d a, b, c; a.set(1,0,0); b.set(0,1,0); c.set(0,0,1); assert(c == cross(a,b)); assert(c == a.cross(b)); a = b; assert(a == b); } // Mat { /*#define CHECK(mat, a,b,c, d,e,f, g,h,i){\ const auto& m = mat;\ assert(eq(m(0,0),a)); assert(eq(m(0,1),b)); assert(eq(m(0,2),c));\ assert(eq(m(1,0),d)); assert(eq(m(1,1),e)); assert(eq(m(1,2),f));\ assert(eq(m(2,0),g)); assert(eq(m(2,1),h)); assert(eq(m(2,2),i));\ }*/ #define CHECK(mat, a,b,c, d,e,f, g,h,i)\ assert(eq(mat, Mat3d(a,b,c, 
d,e,f, g,h,i))) // factory functions CHECK(Mat3d::identity(), 1,0,0, 0,1,0, 0,0,1); CHECK(Mat3d::scaling(2), 2,0,0, 0,2,0, 0,0,1); CHECK(Mat3d::scaling(2,3), 2,0,0, 0,3,0, 0,0,1); CHECK(Mat3d::scaling(Vec2d(2,3)), 2,0,0, 0,3,0, 0,0,1); CHECK(Mat3d::translation(2,3), 1,0,2, 0,1,3, 0,0,1); CHECK(Mat3d::translation(Vec2d(2,3)), 1,0,2, 0,1,3, 0,0,1); CHECK(Mat3d::rotation(M_PI/2, 0,1), 0,-1,0,1,0,0, 0,0,1); Mat3d a;//, b; a.setIdentity(); CHECK(a, 1,0,0, 0,1,0, 0,0,1); assert(a.trace() == 3); a += 2; CHECK(a, 3,2,2, 2,3,2, 2,2,3); a -= 1; CHECK(a, 2,1,1, 1,2,1, 1,1,2); a *= 2; CHECK(a, 4,2,2, 2,4,2, 2,2,4); a /= 2; CHECK(a, 2,1,1, 1,2,1, 1,1,2); a.setIdentity(); a = a+2; CHECK(a, 3,2,2, 2,3,2, 2,2,3); a = a-1; CHECK(a, 2,1,1, 1,2,1, 1,1,2); a = a*2; CHECK(a, 4,2,2, 2,4,2, 2,2,4); a = a/2; CHECK(a, 2,1,1, 1,2,1, 1,1,2); a.setIdentity(); a = 2.+a; CHECK(a, 3,2,2, 2,3,2, 2,2,3); a = 4.-a; CHECK(a, 1,2,2, 2,1,2, 2,2,1); a = 2.*a; CHECK(a, 2,4,4, 4,2,4, 4,4,2); a.set( 1,2,3, 4,5,6, 7,8,9 ); assert(a.col(0) == Vec3d(1,4,7)); assert(a.col(1) == Vec3d(2,5,8)); assert(a.col(2) == Vec3d(3,6,9)); assert(a.row(0) == Vec3d(1,2,3)); assert(a.row(1) == Vec3d(4,5,6)); assert(a.row(2) == Vec3d(7,8,9)); a.transpose(); assert(a.col(0) == Vec3d(1,2,3)); assert(a.col(1) == Vec3d(4,5,6)); assert(a.col(2) == Vec3d(7,8,9)); assert(a.row(0) == Vec3d(1,4,7)); assert(a.row(1) == Vec3d(2,5,8)); assert(a.row(2) == Vec3d(3,6,9)); // test special operations { Mat<2,double> m( 2,4, 0,3 ); assert(eq(determinant(m), 6.)); Mat<2,double> inv = m; assert(invert(inv)); assert(eq(m*inv, Mat<2,double>::identity())); } { Mat<3,double> m( 2,5,7, 0,3,6, 0,0,4 ); assert(eq(determinant(m), 24.)); Mat<3,double> inv = m; assert(invert(inv)); assert(eq(m*inv, Mat<3,double>::identity())); } #undef CHECK } { Complexd c(0,0); #define T(x, y) assert(x == y); T(c, Complexd(0,0)) c.fromPolar(1, 0.2); T(c, Polard(0.2)) c.fromPolar(2.3); T(c, Polard(2.3)) assert(c != Complexd(0,0)); T(c.conj(), Complexd(c.r, -c.i)) #undef T 
// #define T(x, y) assert(almostEqual(x,y,2)); // c.normalize(); T(c.norm(), 1) // double p=0.1; c(1,0); c *= Polard(1, p); T(c.arg(), p) // // c.fromPolar(4,0.2); // T(c.sqrt().norm(), 2) // T(c.sqrt().arg(), 0.1) // #undef T } // Quat { struct printQuat{ void operator()(const Quatd& v){ printf("%g %g %g %g\n", v[0], v[1], v[2], v[3]); } }; // Test factory functions assert(Quatd::identity() == Quatd(1,0,0,0)); // Test basic mathematical operations Quatd q(0,0,0,0); assert(q == Quatd(0,0,0,0)); q.setIdentity(); assert(q == Quatd(1,0,0,0)); assert(q != Quatd(1,0,0,1)); q.set(1,2,4,10); assert(-q == Quatd(-1,-2,-4,-10)); assert(q.conj() == Quatd(q.w, -q.x, -q.y, -q.z)); assert(q.dot(q) == 121); assert(q.mag() == 11); assert(q.magSqr() == 121); assert(eq(q.sgn(), Quatd(1./11, 2./11, 4./11, 10./11))); // Test rotation of vectors by quaternion q.fromAxisAngle(M_2PI/4, 1,0,0); assert(eq(q, Quatd(sqrt(2)/2, sqrt(2)/2,0,0))); { Vec3d v(0,1,0); v = q.rotate(v); //printf("%g %g %g\n", v[0], v[1], v[2]); assert(eq(v, Vec3d(0,0,1))); } q.fromAxisAngle(M_2PI/4, 0,1,0); assert(eq(q, Quatd(sqrt(2)/2, 0,sqrt(2)/2,0))); { Vec3d v(0,0,1); v = q.rotate(v); //printf("%g %g %g\n", v[0], v[1], v[2]); assert(eq(v, Vec3d(1,0,0))); } q.fromAxisAngle(M_2PI/4, 0,0,1); { Vec3d v(1,0,0); v = q.rotate(v); //printf("%g %g %g\n", v[0], v[1], v[2]); assert(eq(v, Vec3d(0,1,0))); } // Test fromAxis* consistency assert(q.fromAxisAngle(M_2PI/8, 1,0,0) == Quatd().fromAxisX(M_2PI/8)); assert(q.fromAxisAngle(M_2PI/8, 0,1,0) == Quatd().fromAxisY(M_2PI/8)); assert(q.fromAxisAngle(M_2PI/8, 0,0,1) == Quatd().fromAxisZ(M_2PI/8)); // Test AxisAngle<->Quat conversion { q.fromEuler(M_2PI/7, M_2PI/8, M_2PI/9); // set to something non-trival... 
double angle, ax,ay,az; q.toAxisAngle(angle, ax,ay,az); Quatd b = q.fromAxisAngle(angle, ax,ay,az); assert(q == b || q == b.conj()); } // Test consistency between conversions from Euler angles and axes assert( eq( q.fromEuler(M_2PI/8,M_2PI/8,M_2PI/8), (Quatd().fromAxisY(M_2PI/8) * Quatd().fromAxisX(M_2PI/8)) * Quatd().fromAxisZ(M_2PI/8) ) ); // Test roundtrip Euler/quat conversion { float eps = 0.00001; int N = 16; for(int k=0; k<N; ++k){ for(int j=0; j<N; ++j){ for(int i=0; i<N; ++i){ float kludge = 0.1; float az = (float(k)/N-0.5) * 2*M_PI + kludge; float el = (float(j)/N-0.5) * 2*M_PI + kludge; float ba = (float(i)/N-0.5) * 2*M_PI + kludge; Quatf a,b; a.fromEuler(az, el, ba); a.toEuler(az, el, ba); b.fromEuler(az, el, ba); //a.print(); b.print(); printf("\n"); assert(eq(a, b, eps) || eq(a, -b, eps)); }}} } // Test Quat to matrix { Quatd q; q.set(1,0,0,0); Mat4d m; q.toMatrix(&m[0]); assert(eq(m, Mat4d::identity())); q.fromAxisAngle(M_2PI/4, 0,0,1); q.toMatrix(&m[0]); // For a right-handed coordinate system assert(eq(m, Mat4d(0,-1,0,0, 1,0,0,0, 0,0,1,0, 0,0,0,1))); } // Test Quat to component coordinate frame { Quatd q; Vec3d vx, vy, vz; q.fromAxisAngle(M_2PI/4, 1,0,0); q.toVectorX(vx); q.toVectorY(vy); q.toVectorZ(vz); assert(eq(vx, Vec3d(1, 0,0))); assert(eq(vy, Vec3d(0, 0,1))); assert(eq(vz, Vec3d(0,-1,0))); q.fromAxisAngle(M_2PI/4, 0,1,0); q.toVectorX(vx); q.toVectorY(vy); q.toVectorZ(vz); assert(eq(vx, Vec3d(0,0,-1))); assert(eq(vy, Vec3d(0,1, 0))); assert(eq(vz, Vec3d(1,0, 0))); q.fromAxisAngle(M_2PI/4, 0,0,1); q.toVectorX(vx); q.toVectorY(vy); q.toVectorZ(vz); assert(eq(vx, Vec3d(0,1,0))); assert(eq(vy, Vec3d(-1,0,0))); assert(eq(vz, Vec3d(0,0,1))); } // Test roundtrip matrix/quat conversions { double mat4[16]; Quatd b; q.fromEuler(1.,2.,3.); // a non-trival quat q.toMatrix(mat4); b = q.fromMatrix(mat4); assert( eq(q,b) || eq(q,b.conj()) ); q.toMatrixTransposed(mat4); b = q.fromMatrixTransposed(mat4); assert( eq(q,b) || eq(q,b.conj()) ); } // int smps = 
100; // Quatd q1 = Quatd::fromAxisAngle(10, .707, .707, 0); // Quatd q2 = Quatd::fromAxisAngle(60, .707, 0, .707); // Quatd buf[smps]; // Quatd::slerp_buffer(q1, q2, buf, smps); // for (int i=0; i<smps; i++) { // double t, x, y, z; // buf[i].toAxisAngle(&t, &x, &y, &z); // //printf("%f %f %f %f\n", t, x, y, z); // } } // Simple Functions { const double pinf = INFINITY; // + infinity const double ninf =-INFINITY; // - infinity #define T(x) assert(al::abs(x) == (x<0?-x:x)); T(0.) T(1.) T(-1.) T(0) T(1) T(-1) #undef T #define T(x,y, r) assert(al::atLeast(x,y) == r); T(0.,1., 1.) T(+0.1,1., 1.) T(-0.1,1., -1.) #undef T #define T(x,y) assert(al::abs(al::atan2Fast(x,y) - std::atan2(x,y)) < 1e-5); T(1.,0.) T(1.,1.) T(0.,1.) T(-1.,1.) T(-1.,0.) T(-1.,-1.) T(0.,-1.) T(1.,-1.) #undef T #define T(x, y) assert(al::ceil(x) == y); T(0., 0.) T( 1., 1.) T( 1.2, 2.) T( 1.8, 2.) T( 1000.1, 1001.) T(-1.,-1.) T(-1.2,-1.) T(-1.8,-1.) T(-1000.1,-1000.) #undef T #define T(x, y) assert(al::ceilEven(x) == y); T(0, 0) T(1, 2) T(2, 2) T(3, 4) T(1001, 1002) #undef T #define T(x, y) assert(al::ceilPow2(x) == y); T(0, 0) T(1, 1) T(2, 2) T(3, 4) T(500, 512) T(999, 1024) #undef T #define T(x, y) assert(al::clip(x) == y); T(0., 0.) T(0.5, 0.5) T(1., 1.) T(1.2, 1.) T(-0.5, 0.) T(pinf, 1.) T(ninf, 0.) #undef T #define T(x, y) assert(al::clipS(x) == y); T(0., 0.) T(0.5, 0.5) T(1., 1.) T(1.2, 1.) T(-0.5, -0.5) T(-1., -1) T(-1.2, -1.) #undef T #define T(x,r) assert(al::even(x) == r); T(0,true) T(1,false) T(-2,true) #undef T #define T(x, y) assert(al::factorial(x) == y); T(0, 1) T(1, 1) T(2, 2*1) T(3, 3*2*1) T(4, 4*3*2*1) T(5, 5*4*3*2*1) T(6, 6*5*4*3*2*1) T(7, 7*6*5*4*3*2*1) T(8, 8*7*6*5*4*3*2*1) T(9, 9*8*7*6*5*4*3*2*1) T(10, 10*9*8*7*6*5*4*3*2*1) T(11, 11*10*9*8*7*6*5*4*3*2*1) T(12, 12*11*10*9*8*7*6*5*4*3*2*1) #undef T for(int i=0; i<=12; ++i){ assert( al::aeq(al::factorialSqrt(i), sqrt(al::factorial(i))) ); } #define T(x, y) assert(al::floor(x) == y); T(0., 0.) T( 1., 1.) T( 1.2, 1.) T( 1.8, 1.) 
T( 1000.1, 1000.) T(-1.,-1.) T(-1.2,-2.) T(-1.8,-2.) T(-1000.1,-1001.) #undef T #define T(x, y) assert(al::floorPow2(x) == y); T(0, 1) T(1, 1) T(2, 2) T(3, 2) T(513, 512) T(1090, 1024) #undef T #define T(x, y) assert(eq(al::fold(x), y)); T(0., 0.) T(0.5, 0.5) T(1., 1.) T(1.2, 0.8) T(-0.2, 0.2) T(2.2, 0.2) T(3.2, 0.8) T(4.2, 0.2) T(5.2, 0.8) #undef T #define T(x,y,r) assert(al::gcd(x,y) == r); T(7,7,7) T(7,4,1) T(8,4,4) #undef T #define T(x,y,r) assert(al::lcm(x,y)==r); T(7,3,21) T(8,4,8) T(3,1,3) #undef T #define T(x,y,r) assert(al::lessAbs(x,y)==r); T(0.1,1., true) T(-0.1,1., true) T(1.,1., false) T(-1.,1., false) #undef T #define T(x) assert(al::log2(1<<x) == x); T(0) T(1) T(2) T(3) T(4) T(29) T(30) T(31) #undef T #define T(x,y,r) assert(al::max(x,y)==r); T(0,0,0) T(0,1,1) T(1,0,1) T(-1,1,1) #undef T #define T(x,y,z,r) assert(al::max(x,y,z)==r); T(0,0,0, 0) T(0,1,2, 2) T(1,2,0, 2) T(2,1,0, 2) #undef T assert(al::mean(-1., 1.) == 0.); #define T(x,y,r) assert(al::min(x,y)==r); T(0,0,0) T(0,1,0) T(1,0,0) T(-1,1,-1) #undef T #define T(x,r) assert(al::odd(x) == r); T(0,false) T(1,true) T(-2,false) #undef T #define T(x) assert(al::pow2(x) == x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow2S(x) == x*al::abs(x)); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow3(x) == x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow3Abs(x) == al::abs(x*x*x)); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow4(x) == x*x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow5(x) == x*x*x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow6(x) == x*x*x*x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow8(x) == x*x*x*x*x*x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T #define T(x) assert(al::pow16(x) == x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x); T(0) T(1) T(2) T(3) T(-1) T(-2) T(-3) #undef T 
#define T(x) assert(eq(al::pow64(x), x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x)); T(0.) T(1.) T(1.01) T(1.02) T(-1.) T(-1.01) T(-1.02) #undef T // #define T(x,r) assert(al::powerOf2(x) == r); // T(0, false) T(1, true) T(2, true) T(3, false) T(4, true) // #undef T #define T(x,y,r) assert(al::remainder(x,y) == r); T(7,7,0) T(7,1,0) T(7,4,3) T(7,3,1) T(14,3,2) #undef T #define T(x,y) assert(al::round(x) == y); T(0.f, 0.f) T(0.2f, 0.f) T(0.8f, 1.f) T(-0.2f, 0.f) T(-0.8f,-1.f) T(0.5f, 1.f) T(-0.5f, -1.f) T(0.0, 0.0) T(0.20, 0.0) T(0.80, 1.0) T(-0.20, 0.0) T(-0.80,-1.0) T(0.50, 1.0) T(-0.50, -1.0) #undef T #define T(x,y,r) assert(al::round(x,y) == r); T(0.0,0.1, 0.0) T(0.1,0.1, 0.1) T(0.15,0.1, 0.1) T(-0.15,0.1, -0.1) #undef T #define T(x,y,r) assert(al::roundAway(x,y) == r); T(0.0,0.1, 0.0) T(0.1,0.1, 0.1) T(0.15,0.1, 0.2) T(-0.15,0.1, -0.2) #undef T #define T(x,r) assert(al::sgn(x) == r); T(-0.1, -1.) T(0.1, 1.) T(0., 0.) #undef T #define T(x1,y1,x2,y2, r) assert(al::slope(x1,y1,x2,y2) == r); T(3.,3.,4.,4., 1.) T(3.,-3.,4.,-4., -1.) #undef T { double x=1, y=0; sort(x,y); assert(x==0 && y==1); sort(x,y); assert(x==0 && y==1); } #define T(x,y) assert(al::sumOfSquares(x) == y); T(1., 1.) 
T(2., 1*1+2*2) T(3., 1*1+2*2+3*3) T(4., 1*1+2*2+3*3+4*4) T(5., 1*1+2*2+3*3+4*4+5*5) #undef T #define T(x,r) assert(al::trailingZeroes(x) == r); T(0, 0) T(1, 0) T(2, 1) T(3, 0) T(4, 2) T(8, 3) T(9, 0) #undef T #define T(x,y) assert(al::trunc(x) == y); T(0.f, 0.f) T(0.2f, 0.f) T(0.8f, 0.f) T(-0.2f, 0.f) T(-0.8f, 0.f) T(0.5f, 0.f) T(-0.5f, 0.f) T(0.0, 0.0) T(0.20, 0.0) T(0.80, 0.0) T(-0.20, 0.0) T(-0.80, 0.0) T(0.50, 0.0) T(-0.50, 0.0) #undef T #define T(x,l,h,r) assert(al::within(x,l,h)==r); T(0,0,1, true) T(1,0,1, true) #undef T // printf("%.20g\n", wrap<double>(-32.0, 32.0, 0.)); // should be 0.0 // printf("%.20g\n", wrap<double>(-64.0, 32.0, 0.)); // should be 0.0 // printf("%.20g\n", wrap<double>(-1e-16, 32., 0.)); // should be 31.999999999999996447 #define T(x, y) assert(eq(al::wrap(x, 1., -1.), y)); T(0., 0.) T( 0.5, 0.5) T( 1.,-1.) T( 1.2,-0.8) T( 2.2, 0.2) T(-0.5,-0.5) T(-1.,-1.) T(-1.2, 0.8) T(-2.2,-0.2) #undef T } #define T(x, y) assert(eq(al::wrapPhase(x), y)); T(0., 0.) T( 1., 1.) T( M_PI,-M_PI) T( M_PI+1, -M_PI+1) T( 7*M_PI+1, -M_PI+1) T(-1.,-1.) T(-M_PI,-M_PI) T(-M_PI-1, M_PI-1) T(-7*M_PI+1, -M_PI+1) #undef T // Special Functions { struct F{ // Pl,-m(x) = (-1)^m (l-m)! / (l+m)! Pl,m(x) static double testLegendreP(int l, int m, double x){ switch(l){ case 0: return 1; case 1: switch(m){ case -1: return -1./(2) * testLegendreP(l,-m,x); case 0: return x; case 1: return -sqrt(1 - x*x); } break; case 2: switch(m){ case -2: return +1./(4*3*2*1) * testLegendreP(l,-m,x); case -1: return -1./( 3*2 ) * testLegendreP(l,-m,x); case 0: return 0.5 * (3*x*x - 1); case 1: return -3 * x * sqrt(1 - x*x); case 2: return 3 * (1 - x*x); } break; case 3: switch(m){ case -3: return -1. / (6*5*4*3*2*1) * testLegendreP(l,-m,x); case -2: return +1. / ( 5*4*3*2 ) * testLegendreP(l,-m,x); case -1: return -1. 
/ ( 4*3 ) * testLegendreP(l,-m,x); case 0: return 0.5 * x * (5*x*x - 3); case 1: return 1.5 * (1 - 5*x*x) * sqrt(1-x*x); case 2: return 15 * x * (1 - x*x); case 3: return -15 * al::pow3(sqrt(1-x*x)); } break; } return 0; // undefined } static double testLaguerre(int n, int k, double x){ switch(n){ case 0: return 1; case 1: return -x + k + 1; case 2: return (1./2)*(x*x - 2*(k+2)*x + (k+2)*(k+1)); case 3: return (1./6)*(-x*x*x + 3*(k+3)*x*x - 3*(k+2)*(k+3)*x + (k+1)*(k+2)*(k+3)); default: return 0; } } }; const int M = 2000; // granularity of domain // test associated legendre for(int l=0; l<=3; ++l){ for(int m=0; m<=l; ++m){ for(int i=0; i<M; ++i){ double theta = double(i)*M_PI / M; double a = al::legendreP(l, m, theta); double b = F::testLegendreP(l, m, cos(theta)); // if(!al::aeq(a, b, 1<<16)){ if(!(al::abs(a - b)<1e-10)){ printf("\nP(%d, %d, %g) = %.16g (actual = %.16g)\n", l,m, cos(theta), a,b); assert(false); } }}} // test laguerre for(int n=0; n<=3; ++n){ for(int i=0; i<M; ++i){ double x =double(i)/M * 4; double a = al::laguerreL(n,1, x); double b = F::testLaguerre(n,1, x); if(!(al::abs(a - b)<1e-10)){ printf("\nL(%d, %g) = %.16g (actual = %.16g)\n", n, x, a,b); assert(false); } }} // TODO: spherical harmonics // for(int l=0; l<=SphericalHarmonic::L_MAX; ++l){ // for(int m=-l; m<=l; ++m){ // // double c = SphericalHarmonic::coef(l,m); // // double t = computeCoef(l,m); // // assert(c == t); // }} // // for(int j=0; j<M; ++j){ double ph = double(j)/M * M_PI; // for(int i=0; i<M; ++i){ double th = double(i)/M * M_2PI; // // }} } // Interval { Interval<double> i(0,1); assert(i.min()==0 && i.max()==1); i.min(2); assert(i.min()==1 && i.max()==2); i.max(0); assert(i.min()==0 && i.max()==1); i.endpoints(-1,1); assert(i.min()==-1 && i.max()==1); assert(i.center() ==0); assert(i.diameter() ==2); assert(i.radius() ==1); assert(i.proper()); i.endpoints(0,0); assert(i.degenerate()); i.centerDiameter(1, 4); assert(i.center() ==1); assert(i.diameter() ==4); 
assert(i.min()==-1 && i.max()==3); i.center(2); assert(i.min()==0 && i.max()==4); i.diameter(6); assert(i.min()==-1 && i.max()==5); i.endpoints(-1, 1); assert(i.toUnit(0) == 0.5); assert(Interval<int>(0,1) == Interval<int>(0,1)); assert(Interval<int>(0,2) != Interval<int>(0,1)); assert((Interval<int>(0,2) += Interval<int>(-1,2)) == Interval<int>(-1,4)); assert((Interval<int>(0,2) -= Interval<int>(-1,2)) == Interval<int>(-2,3)); } // Random { using namespace al::rnd; // Ensure uniqueness of sequences assert(seed() != seed()); { LinCon a,b; assert(a() != b()); // sequences are unique assert(a() != a()); // successive values are unique } { MulLinCon a,b; assert(a() != b()); // sequences are unique assert(a() != a()); // successive values are unique } { Tausworthe a,b; assert(a() != b()); // sequences are unique assert(a() != a()); // successive values are unique } Random<> r; int N = 1000000; for(int i=0; i<N; ++i){ float v=r.uniform(); assert( 0 <= v && v < 1); } for(int i=0; i<N; ++i){ int v=r.uniform(20, 0); assert( 0 <= v && v < 20); } for(int i=0; i<N; ++i){ int v=r.uniform(20, 10); assert( 10 <= v && v < 20); } for(int i=0; i<N; ++i){ int v=r.uniform(20,-10); assert(-10 <= v && v < 20); } for(int i=0; i<N; ++i){ float v=r.uniformS(); assert( -1 <= v && v < 1); } for(int i=0; i<N; ++i){ int v=r.uniformS(20); assert( -20 <= v && v < 20); } //for(int i=0; i<32; ++i) printf("% g ", r.uniformS()); //for(int i=0; i<32; ++i) printf("%d ", r.prob(0.1)); //for(int i=0; i<128; ++i) printf("% g\n", r.gaussian()); int arr[] = {0,1,2,3,4,5,6,7}; r.shuffle(arr, 8); //for(int i=0; i<8; ++i) printf("%d\n", arr[i]); //printf("\n"); // Test uniformity of random sequence { Random<> r; const int N=64; const int M=1000; const int eps=M*0.2; int histo[N] = {0}; for(int i=0; i<M*N; ++i){ int idx = r.uniform(N); ++histo[idx]; } for(int i=0; i<N; ++i){ int cnt = histo[i]; //printf("%d\n", cnt); assert(M-eps < cnt && cnt < M+eps); } } } { al::Plane<double> p; 
p.fromNormalAndPoint(Vec3d(1,0,0), Vec3d(1,1,1)); assert(p.distance(Vec3d(1,0,0)) == 0); assert(p.distance(Vec3d(0,1,0)) == -1); assert(p.distance(Vec3d(0,0,1)) == -1); assert(p.distance(Vec3d(2,0,0)) == 1); } { Frustumd f; f.fbl = Vec3d(-1,-1,-1); f.fbr = Vec3d( 1,-1,-1); f.ftl = Vec3d(-1, 1,-1); f.ftr = Vec3d( 1, 1,-1); f.nbl = Vec3d(-1,-1, 1); f.nbr = Vec3d( 1,-1, 1); f.ntl = Vec3d(-1, 1, 1); f.ntr = Vec3d( 1, 1, 1); f.computePlanes(); // Plane<double>& p = f.pl[Frustumd::LEFT]; // printf("%g %g %g\n", p.normal()[0], p.normal()[1], p.normal()[2]); // // Vec3d nrm = cross(f.fbl-f.nbl, f.ntl-f.nbl); // printf("%g %g %g\n", nrm[0], nrm[1], nrm[2]); // // printf("%g %g %g\n", (f.fbl-f.nbl)[0], (f.fbl-f.nbl)[1], (f.fbl-f.nbl)[2]); // printf("%g %g %g\n", (f.ntl-f.nbl)[0], (f.ntl-f.nbl)[1], (f.ntl-f.nbl)[2]); assert(f.testPoint(Vec3d(0,0,0)) == Frustumd::INSIDE); assert(f.testPoint(Vec3d(2,1,1)) == Frustumd::OUTSIDE); assert(f.testSphere(Vec3d(0,0,0), 0.9) == Frustumd::INSIDE); assert(f.testSphere(Vec3d(0,0,0), 1.1) == Frustumd::INTERSECT); assert(f.testSphere(Vec3d(2,2,2), 0.5) == Frustumd::OUTSIDE); } return 0; }
AlloSphere-Research-Group/AlloSystem
allocore/unitTests/utMath.cpp
C++
bsd-3-clause
23,752
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/app_list/views/contents_view.h" #include <algorithm> #include "ui/app_list/app_list_constants.h" #include "ui/app_list/pagination_model.h" #include "ui/app_list/views/app_list_main_view.h" #include "ui/app_list/views/apps_grid_view.h" #include "ui/app_list/views/search_result_list_view.h" #include "ui/base/events/event.h" #include "ui/views/animation/bounds_animator.h" #include "ui/views/view_model.h" #include "ui/views/view_model_utils.h" namespace app_list { namespace { const int kPreferredIconDimension = 48; // Indexes of interesting views in ViewModel of ContentsView. const int kIndexAppsGrid = 0; const int kIndexSearchResults = 1; const int kMinMouseWheelToSwitchPage = 20; const int kMinScrollToSwitchPage = 20; const int kMinHorizVelocityToSwitchPage = 800; const double kFinishTransitionThreshold = 0.33; // Helpers to get certain child view from |model|. 
AppsGridView* GetAppsGridView(views::ViewModel* model) { return static_cast<AppsGridView*>(model->view_at(kIndexAppsGrid)); } SearchResultListView* GetSearchResultListView(views::ViewModel* model) { return static_cast<SearchResultListView*>( model->view_at(kIndexSearchResults)); } } // namespace ContentsView::ContentsView(AppListMainView* app_list_main_view, PaginationModel* pagination_model) : show_state_(SHOW_APPS), pagination_model_(pagination_model), view_model_(new views::ViewModel), bounds_animator_(new views::BoundsAnimator(this)) { pagination_model_->SetTransitionDurations( kPageTransitionDurationInMs, kOverscrollPageTransitionDurationMs); apps_grid_view_ = new AppsGridView(app_list_main_view, pagination_model); apps_grid_view_->SetLayout(kPreferredIconDimension, kPreferredCols, kPreferredRows); AddChildView(apps_grid_view_); view_model_->Add(apps_grid_view_, kIndexAppsGrid); SearchResultListView* search_results_view = new SearchResultListView( app_list_main_view); AddChildView(search_results_view); view_model_->Add(search_results_view, kIndexSearchResults); } ContentsView::~ContentsView() { } void ContentsView::SetModel(AppListModel* model) { if (model) { GetAppsGridView(view_model_.get())->SetModel(model); GetSearchResultListView(view_model_.get())->SetResults(model->results()); } else { GetAppsGridView(view_model_.get())->SetModel(NULL); GetSearchResultListView(view_model_.get())->SetResults(NULL); } } void ContentsView::SetDragAndDropHostOfCurrentAppList( app_list::ApplicationDragAndDropHost* drag_and_drop_host) { apps_grid_view_->SetDragAndDropHostOfCurrentAppList(drag_and_drop_host); } void ContentsView::SetShowState(ShowState show_state) { if (show_state_ == show_state) return; show_state_ = show_state; ShowStateChanged(); } void ContentsView::ShowStateChanged() { if (show_state_ == SHOW_SEARCH_RESULTS) { // TODO(xiyuan): Highlight default match instead of the first. 
SearchResultListView* results_view = GetSearchResultListView(view_model_.get()); if (results_view->visible()) results_view->SetSelectedIndex(0); } AnimateToIdealBounds(); } void ContentsView::CalculateIdealBounds() { gfx::Rect rect(GetContentsBounds()); if (rect.IsEmpty()) return; gfx::Rect grid_frame(rect); gfx::Rect results_frame(rect); // Offsets apps grid and result list based on |show_state_|. // SearchResultListView is on top of apps grid. Visible view is left in // visible area and invisible ones is put out of the visible area. int contents_area_height = rect.height(); switch (show_state_) { case SHOW_APPS: results_frame.Offset(0, -contents_area_height); break; case SHOW_SEARCH_RESULTS: grid_frame.Offset(0, contents_area_height); break; default: NOTREACHED() << "Unknown show_state_ " << show_state_; break; } view_model_->set_ideal_bounds(kIndexAppsGrid, grid_frame); view_model_->set_ideal_bounds(kIndexSearchResults, results_frame); } void ContentsView::AnimateToIdealBounds() { CalculateIdealBounds(); for (int i = 0; i < view_model_->view_size(); ++i) { bounds_animator_->AnimateViewTo(view_model_->view_at(i), view_model_->ideal_bounds(i)); } } void ContentsView::ShowSearchResults(bool show) { SetShowState(show ? 
SHOW_SEARCH_RESULTS : SHOW_APPS); } void ContentsView::Prerender() { const int selected_page = std::max(0, pagination_model_->selected_page()); GetAppsGridView(view_model_.get())->Prerender(selected_page); } gfx::Size ContentsView::GetPreferredSize() { const gfx::Size grid_size = GetAppsGridView(view_model_.get())->GetPreferredSize(); const gfx::Size results_size = GetSearchResultListView(view_model_.get())->GetPreferredSize(); int width = std::max(grid_size.width(), results_size.width()); int height = std::max(grid_size.height(), results_size.height()); return gfx::Size(width, height); } void ContentsView::Layout() { CalculateIdealBounds(); views::ViewModelUtils::SetViewBoundsToIdealBounds(*view_model_); } bool ContentsView::OnKeyPressed(const ui::KeyEvent& event) { switch (show_state_) { case SHOW_APPS: return GetAppsGridView(view_model_.get())->OnKeyPressed(event); case SHOW_SEARCH_RESULTS: return GetSearchResultListView(view_model_.get())->OnKeyPressed(event); default: NOTREACHED() << "Unknown show state " << show_state_; } return false; } bool ContentsView::OnMouseWheel(const ui::MouseWheelEvent& event) { if (show_state_ != SHOW_APPS) return false; if (abs(event.y_offset()) > kMinMouseWheelToSwitchPage) { if (!pagination_model_->has_transition()) { pagination_model_->SelectPageRelative( event.y_offset() > 0 ? -1 : 1, true); } return true; } return false; } void ContentsView::OnGestureEvent(ui::GestureEvent* event) { if (show_state_ != SHOW_APPS) return; switch (event->type()) { case ui::ET_GESTURE_SCROLL_BEGIN: pagination_model_->StartScroll(); event->SetHandled(); return; case ui::ET_GESTURE_SCROLL_UPDATE: // event->details.scroll_x() > 0 means moving contents to right. That is, // transitioning to previous page. 
pagination_model_->UpdateScroll( event->details().scroll_x() / GetContentsBounds().width()); event->SetHandled(); return; case ui::ET_GESTURE_SCROLL_END: pagination_model_->EndScroll(pagination_model_-> transition().progress < kFinishTransitionThreshold); event->SetHandled(); return; case ui::ET_SCROLL_FLING_START: { pagination_model_->EndScroll(true); if (fabs(event->details().velocity_x()) > kMinHorizVelocityToSwitchPage) { pagination_model_->SelectPageRelative( event->details().velocity_x() < 0 ? 1 : -1, true); } event->SetHandled(); return; } default: break; } } void ContentsView::OnScrollEvent(ui::ScrollEvent* event) { if (show_state_ != SHOW_APPS || event->type() == ui::ET_SCROLL_FLING_CANCEL || abs(event->x_offset()) < kMinScrollToSwitchPage) { return; } if (!pagination_model_->has_transition()) { pagination_model_->SelectPageRelative(event->x_offset() > 0 ? -1 : 1, true); } event->SetHandled(); event->StopPropagation(); } } // namespace app_list
loopCM/chromium
ui/app_list/views/contents_view.cc
C++
bsd-3-clause
7,680
/* * Toady * Copyright 2013 Tom Frost */ // Dependencies var fs = require('fs'), config = require('config'), objUtil = require('../util/Object'), env = process.env.NODE_ENV || 'default'; const CONFIG_PATH = __dirname + "/../../config/" + env + "-mod_{mod}.json"; const CONFIG_PREFIX = 'mod_'; /** * Gets a path to save a config file specific to a mod. * * @param {String} modId The mod ID to associate with the file * @returns {String} A path appropriate for a config file for this mod ID */ function getPath(modId) { return CONFIG_PATH.replace('{mod}', modId); } /** * Gets a closure that will JSONify any enumerable properties on 'this' and * save it to a file unique to the given modId when called. * * @param {String} modId The modId for which to generate the closure. This * determines the filename to which the JSON will be saved. * @returns {Function} A closure which, when called, will save the enumerable * local properties of 'this' as JSON to a file. Arguments are: * - {Array} OPTIONAL: An array of top-level properties to save. If * omitted, every enumerable property will be saved. * - {Function} OPTIONAL: A callback function to be executed when * complete. Arguments: * - {Error} If an error occurred while saving the file. */ var getSaveFunc = function(modId) { return function(props, cb) { var serial = this, self = this; if (typeof props == 'function') { cb = props; props = null; } if (props) { serial = {}; props.forEach(function(key) { if (self.hasOwnProperty(key)) serial[key] = self[key]; }); } fs.writeFile(getPath(modId), JSON.stringify(serial, null, '\t'), cb); }; }; /** * Gets an object containing configuration values for a given mod, as well * as a non-enumerable non-writable function called "save" that will persist * any runtime changes to this config to a file. * * The configuration object is created in the following fashion: * 1: Start with any properties passed in with the 'defaults' argument. 
* Note that, when writing a Toady mod, this will be whatever has * been set to module.exports.configDefault (if anything) * 2: Deep-merge that with any values set in the mod_MODID section of the * default.yaml file (or, for multiple server configs, the SERVER.yaml * file). Conflicting properties will be overwritten. * 3: Deep-merge that with any properties that have been set using * config.save() (where 'config' is the object returned in the * callback of this function). Conflicting properties will be * overridden * * Using the returned config object is very straightforward. Just add whatever * you like: * * config.foo = "bar"; * config.hello = {world: "!"}; * * and save it! * * config.save(); * // OR: * config.save(function(err) { * if (err) console.log(err); * else console.log('Saved!'); * } * // OR: * config.save([prop1, prop2, prop4]); * * Anything saved will still exist when the bot is restarted or the module * is reloaded, thanks to step 3 above. * * Note that calling this function consecutive times with the same * modId/defaults will NOT return the same config object, and is not an * appropriate method for changing the config for a mod from a different mod. * If that functionality is necessary, it's strongly recommended to access the * 'config' property of a mod to read its values, but change those values only * with the Config mod's setConfig() function. * * In the Toady framework, the config object returned by this function is * passed directly to each mod when the mod is loaded. * * @param {String} modId The mod ID whose configuration should be loaded * @param {Object|null} defaults An object containing default properties * to be set if neither the bot config or the mod config file has * those properties set. * @param {Function} cb A callback function to be executed on completion. 
* Arguments provided are: * - {Error} An error object, if an error occurred * - {Object} An object containing all this mod's config properties, * as well as a save([cb]) function to save any future changes. */ function getConfig(modId, defaults, cb) { var conf = objUtil.deepMerge(defaults || {}, config[CONFIG_PREFIX + modId] || {}); getModConfigFile(modId, function(err, modFile) { conf = objUtil.deepMerge(conf, modFile); Object.defineProperty(conf, 'save', { value: getSaveFunc(modId).bind(conf) }); cb(null, conf); }); } /** * Gets the contents of the mod's JSON-formatted config file, parses it, and * returns it in a callback. If the config file does not (yet) exist, an * empty object will be passed back instead. * * Note that, unlike {@link #getConfig}, the object produced by this function * will NOT be merged from any other config source and will NOT contain a * save() function to persist changes. The contents of the mod config file * will be only what the mod itself was responsible for saving manually. * * @param {String} modId The ID of the mod whose file should be loaded * @param {Function} cb A callback function to be executed on completion. Args: * - {Error} An error object, if an error occurred. Most likely errors * include issues reading the file (excepting the file not existing) * and inability to parse the file's JSON. * - {Object} The parsed config object stored in the file */ function getModConfigFile(modId, cb) { fs.readFile(getPath(modId), function(err, json) { if (err && err.code != 'ENOENT') cb(err); else { var success = true, savedConf = {}; if (json) { try { savedConf = JSON.parse(json); } catch (e) { success = false; cb(e); } } if (success) cb(null, savedConf || {}); } }); } module.exports = { CONFIG_PATH: CONFIG_PATH, CONFIG_PREFIX: CONFIG_PREFIX, getConfig: getConfig, getModConfigFile: getModConfigFile };
TomFrost/Toady
app/modmanager/ModConfig.js
JavaScript
bsd-3-clause
6,115
using System; using Microsoft.Owin.Security; using Microsoft.Owin.Security.Infrastructure; using Newtonsoft.Json.Linq; using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Security.Claims; using System.Threading.Tasks; namespace Thinktecture.IdentityServer.v3.AccessTokenValidation { public class ReferenceTokenProvider : AuthenticationTokenProvider { private HttpClient _client; private string _tokenValidationEndpoint; private string _authenticationType; public ReferenceTokenProvider(string tokenValidationEndpoint, string authenticationType) { _tokenValidationEndpoint = tokenValidationEndpoint + "?token={0}"; _client = new HttpClient(); _authenticationType = authenticationType; } public override async Task ReceiveAsync(AuthenticationTokenReceiveContext context) { var url = string.Format(_tokenValidationEndpoint, context.Token); var response = await _client.GetAsync(url); if (response.StatusCode != HttpStatusCode.OK) { return; } var json = JArray.Parse(await response.Content.ReadAsStringAsync()); var claims = new List<Claim>(); foreach (var item in json) { claims.Add(new Claim(item["Type"].ToString(), item["Value"].ToString())); } context.SetTicket(new AuthenticationTicket(new ClaimsIdentity(claims, _authenticationType), new AuthenticationProperties())); } } }
fruffin/Thinktecture.IdentityServer.v3.EntityFramework
Source/IdSrvReferenceTokenValidation/ReferenceTokens/ReferenceTokenProvider.cs
C#
bsd-3-clause
1,606
# -*- coding: utf8 -*- """ This is part of shot detector. Produced by w495 at 2017.05.04 04:18:27 """ from __future__ import absolute_import, division, print_function import datetime import logging import av import six # noinspection PyUnresolvedReferences from av.container import InputContainer from shot_detector.objects import ( BaseFrame, FramePosition ) from shot_detector.utils.common import get_objdata_dict from shot_detector.utils.log_meta import ( LogMeta, ignore_log_meta, should_be_overloaded ) class BaseHandler(six.with_metaclass(LogMeta)): """ Finite State Machine for video handling. Works with video at law level. Splits video into frames. You should implement `handle_frame` method. """ __logger = logging.getLogger(__name__) def handle_video(self, input_uri='', format_name=None, **kwargs): """ Runs video handling :param str input_uri: file name of input video or path to resource for example `http://localhost:8090/live.flv` You can use any string, that can be accepted by input ffmpeg-parameter. For example: * 'udp://127.0.0.1:1234'; * 'tcp://localhost:1234?listen'; * 'http://localhost:8090/live.flv'. :param str format_name: name of video format. Use it for hardware devices. 
:param dict kwargs: any options for consecutive methods, ignores it and pass it through :return: """ # noinspection PyUnresolvedReferences video_container = av.open( file=input_uri, format=format_name, ) logger = self.__logger if logger.isEnabledFor(logging.INFO): self.log_tree( logger, get_objdata_dict( video_container, ext_classes_keys=['format', 'layout'] ) ) result = self.handle_video_container(video_container, **kwargs) return result # noinspection PyUnusedLocal @ignore_log_meta def log_tree(self, logger, value, level=1, **_): """ :param logging.Logger logger: :param Any value: :param int level: :param dict _: any options for consecutive methods, ignores it and pass it through :return: """ space = ' ⇾ ' * level for key, value in six.iteritems(value): if isinstance(value, dict): type_ = value.get('type') if type_: key += " [%s]" % str(type_) name = value.get('name') if name: key += " {%s} " % str(name) long_name = value.get('long_name') if long_name: key += " «%s»" % str(long_name) logger.info("%s %s:" % (space, key)) self.log_tree(logger, value, level=level + 1) else: logger.info("%s %s: %s" % (space, key, value)) def handle_video_container(self, video_container, **kwargs): """ :param av.container.InputContainer video_container: input video container, in terms of av open video file or stream. :param kwargs: any options for consecutive methods, ignores it and pass it through. 
:return: """ assert isinstance(video_container, InputContainer) packet_seq = self.packets(video_container, **kwargs) packet_seq = self.filter_packets(packet_seq, **kwargs) frame_seq = self.frames(packet_seq, **kwargs) filtered_seq = self.filter_frames(frame_seq, **kwargs) handled_seq = self.handle_frames(filtered_seq, **kwargs) list(handled_seq) return None @staticmethod def packets(video_container, stream_seq=None, **_): """ :param av.container.InputContainer video_container: :param stream_seq: :param _: :return: """ if stream_seq: stream_seq = tuple(stream_seq) return video_container.demux(streams=stream_seq) @should_be_overloaded def filter_packets(self, packet_seq, **_): """ :param collections.Iterable packet_seq: :param dict _: ignores it. :return: """ return packet_seq @staticmethod def packet_frame_seqs(packet_seq, **_): """ :param collections.Iterable packet_seq: :param dict _: ignores it. :return: """ for packet in packet_seq: decoded = packet.decode() yield iter(decoded) def frames(self, packet_seq, **kwargs): """ :param collections.Iterable packet_seq: :param dict kwargs: any options for consecutive methods, ignores it and pass it through. :return: """ packet_frame_seqs = self.packet_frame_seqs(packet_seq, **kwargs) global_number = 0 for packet_number, frame_seq in enumerate(packet_frame_seqs): for frame_number, source_frame in enumerate(frame_seq): position = FramePosition( global_number=global_number, frame_number=frame_number, packet_number=packet_number, ) frame = self.frame( source=source_frame, position=position, ) yield frame global_number += 1 def frame(self, source=None, position=None): """ :param source: :param position: :return: """ frame = BaseFrame( av_frame=source, position=position, ) return frame @should_be_overloaded def filter_frames(self, frame_seq, **_): """ :param collections.Iterable frame_seq: :param dict _: ignores it. 
:return: """ return frame_seq @should_be_overloaded def handle_frames(self, frame_seq, **_): """ :param collections.Iterable frame_seq: :param dict _: ignores it.. :return: """ return frame_seq @staticmethod def limit_seq(sequence, first=0, last=10, as_stream=False, **_): """ :param sequence: :param float first: :param float last: :param bool as_stream: :param _: :return: """ at_start = None for unit in sequence: BaseHandler.__logger.debug('unit = %s', unit) current = float(unit.time) if as_stream: if at_start is None: at_start = current current = current - at_start if last <= current: sequence.close() if first <= current: yield unit def log_seq(self, sequence, fmt="[{delta_time}] {item}", logger=None, log=None, **kwargs): """ Prints sequence item by item :param sequence: :param fmt: :param logger: :param log: :param kwargs: :return: """ start_time = datetime.datetime.now() if logger is None: logger = logging.getLogger(__name__) if log is None: log = logger.info if fmt is None: fmt = "WRONG FORMAT …" for item in sequence: now_time = datetime.datetime.now() delta_time = now_time - start_time item_dict = kwargs for attr in dir(item): if not attr.startswith('__'): item_dict['item.{}'.format(attr)] \ = getattr(item, attr) log(fmt.format( delta_time=delta_time, self=self, item=item, **item_dict )) yield item
w495/python-video-shot-detector
shot_detector/handlers/base_handler.py
Python
bsd-3-clause
8,236
(function(){ var routineServices = angular.module('routineApp.routineServices', []); var domain = 'http://127.0.0.1:8000/'; routineServices.factory("RoutineService", ['$http', function($http){ var routine = {}; routine.getRoutines = function() { return $http.get(domain + 'routines/routines/'); }; routine.getRoutine = function(routineId) { return $http.get(domain + 'routines/routines/' + routineId + '/'); }; routine.createRoutine = function(routineJson) { return $http.post(domain + 'routines/routines/', routineJson); }; routine.editRoutine = function(routineJson, routineId) { return $http.put(domain + 'routines/routines/' + routineId +'/', routineJson); }; routine.deleteRoutine = function(routineId) { return $http.delete(domain + 'routines/routines/' + routineId + '/'); }; routine.getExercises = function(routineId) { return $http.get(domain + 'routines/exercises/', { params: { routineId: routineId } }); } routine.createExercises = function(exercisesJson) { return $http.post(domain + 'routines/exercises/create-many/', exercisesJson); }; routine.createExercise = function(exerciseJson) { return $http.post(domain + 'routines/exercises/', exerciseJson); }; routine.editExercise = function(exerciseJson, exerciseId) { return $http.put(domain + 'routines/exercises/' + exerciseId + '/', exerciseJson); }; routine.deleteExercise = function(exerciseId) { return $http.delete(domain + 'routines/exercises/' + exerciseId + '/'); }; return routine; }]); })();
Kayra/Thunder
client/js/routines/routineServices.js
JavaScript
bsd-3-clause
1,858
package me.hatter.tools.markdowndocs.template; import java.io.File; import me.hatter.tools.commons.file.FileUtil; import me.hatter.tools.commons.string.StringUtil; import me.hatter.tools.markdowndocs.config.Config; import me.hatter.tools.markdowndocs.config.Configs; import me.hatter.tools.markdowndocs.config.GlobalVars; import com.alibaba.fastjson.JSONObject; public class ConfigParser { private static Config globalConfig; synchronized public static Config getGlobalConfig() { if (globalConfig != null) { return globalConfig; } File config = new File(GlobalVars.getBasePath(), "config.json"); globalConfig = parseConfig(config); return globalConfig; } public static Config readConfig(String dirName) { if (StringUtil.isEmpty(dirName)) { return Configs.mergeConfig(getGlobalConfig(), Configs.DEFAULT); } File config = new File(new File(GlobalVars.getBasePath(), dirName), "config.json"); Config conf = parseConfig(config); return Configs.mergeConfig(conf, getGlobalConfig(), Configs.DEFAULT); } private static Config parseConfig(File config) { try { if ((config != null) && (config.exists())) { return JSONObject.parseObject(FileUtil.readFileToString(config), Config.class); } else { return new Config(); } } catch (Exception e) { throw new IllegalStateException("Parse config failed: " + config, e); } } }
KingBowser/hatter-source-code
markdowndocs/src/main/java/me/hatter/tools/markdowndocs/template/ConfigParser.java
Java
bsd-3-clause
1,553
from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'formsfive.views.example', name='example'), )
iamjstates/django-formsfive
urls.py
Python
bsd-3-clause
268
<? // Note: PHP Examples use the Heello PHP Client library // https://github.com/Heello/heello-php require_once dirname(dirname(dirname(__FILE__))) . '/Heello.php'; // You can get an Application Key and Secret by visiting: // http://developer.heello.com/apps $api_application_key = "APPLICATION_KEY"; $api_application_secret = "APPLICATION_SECRET"; // You can get an access token and refresh token by implementing the auth // flow described at (or use the demo provided): // http://developer.heello.com/guides/authentication $access_token = "ACCESS_TOKEN"; $api = new Heello\Client($api_application_key, $api_application_secret); Heello\Client::config()->set_access_token($access_token); try{ // Special me() API Call to get auth'd user information $user = $api->users->me(); $id = $user->id; $name = $user->name; print_r($user); } catch (Exception $e){ print $e->getMessage(); }
Heello/heello-php
examples/auth/users.me.php
PHP
bsd-3-clause
944
<?php // This module takes over mailer settings; to enable outbound sends, add the following // CaptureMailer::$outbound_send = true // to stop capturing emails, add // CaptureMailer::$capture_emails = false Email::set_mailer(new CaptureMailer());
silverstripe-australia/scavenger-hunt
mailcapture/_config.php
PHP
bsd-3-clause
251
import os from tempfile import NamedTemporaryFile from numpy.testing import assert_array_equal import pandas as pd import oddt import oddt.pandas as opd test_data_dir = os.path.dirname(os.path.abspath(__file__)) input_fname = os.path.join(test_data_dir, 'data/dude/xiap/actives_docked.sdf') def test_classes(): """ Test oddt.pandas classes behavior """ df = opd.read_sdf(input_fname) # Check classes inheritance assert isinstance(df, opd.ChemDataFrame) assert isinstance(df, pd.DataFrame) assert isinstance(df['mol'], opd.ChemSeries) assert isinstance(df['mol'], pd.Series) assert isinstance(df, pd.DataFrame) # Check custom metadata assert hasattr(df, '_molecule_column') assert hasattr(df[['mol']], '_molecule_column') assert df._molecule_column == df[['mol']]._molecule_column # Check if slicing perserve classes assert isinstance(df.head(1), opd.ChemDataFrame) assert isinstance(df['mol'].head(1), opd.ChemSeries) def test_reading(): """ Test reading molecule files to ChemDataFrame """ df = opd.read_sdf(input_fname) # Check dimensions assert len(df) == 100 assert len(df.columns) == 15 df = opd.read_sdf(input_fname, smiles_column='smi_col') assert 'smi_col' in df.columns df = opd.read_sdf(input_fname, molecule_column=None, molecule_name_column=None, usecols=['name']) assert 'mol' not in df.columns assert 'mol_name' not in df.columns assert len(df.columns) == 1 df = opd.read_sdf(input_fname, usecols=['name', 'uniprot_id', 'act']) assert len(df.columns) == 5 # 3 from use_cols + 1 'mol' + 1 'mol_name' assert 'uniprot_id' in df.columns assert 'smi_col' not in df.columns # Chunk reading chunks = [] for chunk in opd.read_sdf(input_fname, chunksize=10): assert len(chunk) == 10 chunks.append(chunk) assert len(chunks) == 10 df = pd.concat(chunks) # Check dimensions assert len(df) == 100 def test_substruct_sim_search(): df = opd.read_sdf(input_fname).head(10) query = oddt.toolkit.readstring('smi', 
'C(=O)(N1C[C@H](C[C@H]1C(=O)N[C@@H]1CCCc2c1cccc2)Oc1ccccc1)[C@@H](NC(=O)[C@H](C)NC)C1CCCCC1') ge_answear = [True, True, True, False, True, False, False, False, False, False] assert (df.mol >= query).tolist() == ge_answear assert (query <= df.mol).tolist() == ge_answear le_answear = [True, True, True, True, True, True, False, False, False, True] assert (df.mol <= query).tolist() == le_answear assert (query >= df.mol).tolist() == le_answear sim = df.mol.calcfp() | query.calcfp() assert sim.dtype == 'float64' def test_mol2(): """Writing and reading of mol2 fils to/from ChemDataFrame""" if oddt.toolkit.backend == 'ob': df = opd.read_sdf(input_fname) with NamedTemporaryFile(suffix='.mol2') as f: df.to_mol2(f.name) df2 = opd.read_mol2(f.name) assert df.shape == df2.shape chunks = [] for chunk in opd.read_mol2(f.name, chunksize=10): assert len(chunk) == 10 chunks.append(chunk) df3 = pd.concat(chunks) assert df.shape == df3.shape with NamedTemporaryFile(suffix='.mol2') as f: df.to_mol2(f.name, columns=['name', 'uniprot_id', 'act']) df2 = opd.read_mol2(f.name) assert len(df2.columns) == 5 def test_sdf(): """Writing ChemDataFrame to SDF molecular files""" df = opd.read_sdf(input_fname) with NamedTemporaryFile(suffix='.sdf') as f: df.to_sdf(f.name) df2 = opd.read_sdf(f.name) assert_array_equal(df.columns.sort_values(), df2.columns.sort_values()) with NamedTemporaryFile(suffix='.sdf') as f: df.to_sdf(f.name, columns=['name', 'uniprot_id', 'act']) df2 = opd.read_sdf(f.name) assert len(df2.columns) == 5 def test_csv(): df = opd.read_sdf(input_fname, columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act']) df['act'] = df['act'].astype(float) df['name'] = df['name'].astype(int) with NamedTemporaryFile(suffix='.csv', mode='w+') as f: for str_buff in (f, f.name): df.to_csv(str_buff, index=False) f.seek(0) df2 = opd.read_csv(f.name, smiles_to_molecule='mol', molecule_column='mol') assert df.shape == df2.shape assert df.columns.tolist() == df2.columns.tolist() assert 
df.dtypes.tolist() == df2.dtypes.tolist() with NamedTemporaryFile(suffix='.csv', mode='w+') as f: for str_buff in (f, f.name): df.to_csv(str_buff, index=False, columns=['name', 'act']) f.seek(0) df2 = pd.read_csv(f.name) assert df[['name', 'act']].shape == df2.shape assert df[['name', 'act']].columns.tolist() == df2.columns.tolist() assert df[['name', 'act']].dtypes.tolist() == df2.dtypes.tolist() def test_excel(): # just check if it doesn't fail df = opd.read_sdf(input_fname, columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act']) df = df.head(10) # it's slow so use first 10 mols df['act'] = df['act'].astype(float) df['name'] = df['name'].astype(int) with NamedTemporaryFile(suffix='.xls', mode='w') as f: df.to_excel(f.name, index=False) writer = pd.ExcelWriter(f.name, engine='xlsxwriter') df.to_excel(writer, index=False) def test_chemseries_writers(): df = opd.read_sdf(input_fname, columns=['mol', 'name', 'chembl_id', 'dude_smiles', 'act']) mols = df['mol'] # SMILES with NamedTemporaryFile(suffix='.ism', mode='w') as f: mols.to_smiles(f) for mol in oddt.toolkit.readfile('smi', f.name): assert isinstance(mol, oddt.toolkit.Molecule) # SDF with NamedTemporaryFile(suffix='.sdf', mode='w') as f: mols.to_sdf(f) for mol in oddt.toolkit.readfile('sdf', f.name): assert isinstance(mol, oddt.toolkit.Molecule) # mol2 if oddt.toolkit.backend == 'ob': with NamedTemporaryFile(suffix='.mol2', mode='w') as f: mols.to_mol2(f) for mol in oddt.toolkit.readfile('mol2', f.name): assert isinstance(mol, oddt.toolkit.Molecule) def test_ipython(): """iPython Notebook molecule rendering in SVG""" df = opd.read_sdf(input_fname) # mock ipython oddt.toolkit.ipython_notebook = True # png oddt.toolkit.image_backend = 'png' html = df.head(1).to_html() assert '<img src="data:image/png;base64,' in html # svg oddt.toolkit.image_backend = 'svg' html = df.head(1).to_html() assert '<svg' in html oddt.toolkit.ipython_notebook = False
oddt/oddt
tests/test_pandas.py
Python
bsd-3-clause
6,915
<?php use frontend\widgets\banner\BannerWidget; use frontend\widgets\post\PostWidget; use frontend\widgets\chat\ChatWidget; use frontend\widgets\hot\HotWidget; use frontend\widgets\tag\TagWidget; $this->title = '博客-首页'; ?> <div class="row"> <div class="col-lg-9"> <!--图片轮播--> <?= BannerWidget::widget() ?> <!--文章列表--> <?= PostWidget::widget() ?> </div> <div class="col-lg-3"> <!-- 留言板组件 --> <?= ChatWidget::widget() ?> <!-- 热门文章组件 --> <?= HotWidget::widget() ?> <!-- 标签云组件 --> <?= TagWidget::widget() ?> </div> </div>
wfnking/yii2_blog
frontend/views/site/index.php
PHP
bsd-3-clause
671
jQuery(document).ready(function(){ module("TiddlyWiki core"); test("RGB tests", function() { expect(4); var actual = new RGB(1,0,1).toString(); var expected = "#ff00ff"; ok(actual==expected,'RGB(1,0,1) is the same as #ff00ff'); actual = new RGB("#f00").toString(); expected = "#ff0000"; ok(actual==expected,'#ff0000 is the same as #f00'); actual = new RGB("#123").toString(); console.log("actual",actual); expected = "#112233"; ok(actual==expected,'#112233 is the same as #123'); actual = new RGB("#abc").toString(); console.log("actual",actual); expected = "#aabbcc"; ok(actual==expected,'#aabbcc is the same as #abc'); actual = new RGB("#123456").toString(); expected = "#123456"; ok(actual==expected,'#123456 is the same as #123456'); actual = new RGB("rgb(0,255,0)").toString(); expected = "#00ff00"; ok(actual==expected,'RGB object created from rgb value > toString method gives hex'); actual = new RGB("rgb(120,0,0)").mix(new RGB("#00ff00"),0.5).toString(); //120 + (0 - 120) *0.5 and 0 + (255-0) * 0.5 expected = new RGB("rgb(60,127,0)").toString(); ok(actual==expected,'RGB mix function proportion 0.5'); }); });
TeravoxelTwoPhotonTomography/nd
doc/node_modules/tiddlywiki/editions/tw2/source/tiddlywiki/test/js/RGB.js
JavaScript
bsd-3-clause
1,187
from django.conf import settings
from django.conf.urls import url
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.wagtailcore import hooks

from .views import get_full_image_url


@hooks.register('register_admin_urls')
def register_admin_urls():
    """Expose the full-size image lookup endpoint inside the Wagtail admin."""
    full_image_route = url(r'^full_image/(\d+)/$', get_full_image_url)
    return [full_image_route]


@hooks.register('insert_editor_css')
def editor_css():
    """Inject the annotated-image stylesheet into the Wagtail page editor."""
    stylesheet_url = static('annotated-image/annotated-image.css')
    return format_html('<link rel="stylesheet" href="{}">', stylesheet_url)


@hooks.register('insert_editor_js')
def editor_js():
    """Inject the annotation scripts (handler + jquery.annotate) into the
    Wagtail page editor, one <script> tag per file."""
    script_paths = (
        'annotated-image/annotated-image-handler.js',
        'annotated-image/jquery.annotate.js',
    )
    return format_html_join(
        '\n',
        '<script src="{0}{1}"></script>',
        ((settings.STATIC_URL, path) for path in script_paths),
    )
takeflight/wagtailannotatedimage
wagtailannotatedimage/wagtail_hooks.py
Python
bsd-3-clause
923
package net.njcull.collections; import org.junit.Assert; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; /** * Tests for ImmutableSortedArrayMap. * * @author run2000 * @version 8/01/2016. */ public final class TestImmutableSortedArrayBiMap { @Test public void testEmptyMap() throws Exception { ImmutableSortedArrayMap<String, String> test = ImmutableSortedArrayMap.<String, String>builder().asBiMap().build(); Assert.assertFalse(test.containsKey("3")); Assert.assertSame(test, ImmutableSortedArrayMap.emptyMap()); Assert.assertTrue(test.isEmpty()); Assert.assertEquals(0, test.size()); Assert.assertEquals("{}", test.toString()); Assert.assertEquals(0, test.hashCode()); ArrayBackedCollection<String> keys = test.keySet(); Assert.assertEquals("[]", keys.toString()); Assert.assertEquals(0, keys.hashCode()); Assert.assertEquals(1, keys.asList().hashCode()); } @Test public void testCollector() throws Exception { HashMap<String, String> map1 = new HashMap<>(); map1.put("a", "ac"); map1.put("b", "bc"); map1.put("c", "cc"); map1.put("d", "dx"); map1.put("e", "ec"); map1.put("f", "fc"); map1.put("g", "gc"); map1.put("m", "0ma"); map1.put("n", "0na"); map1.put("o", "Zoa"); map1.put("p", "Zpa"); ImmutableSortedArrayMap<String, String> result = map1.entrySet().stream() .filter(p -> p.getValue().charAt(1) != 'x') .collect(Collectors.toImmutableSortedArrayBiMap()); Assert.assertEquals(10, result.size()); Assert.assertEquals("{a=ac, b=bc, c=cc, e=ec, f=fc, g=gc, m=0ma, n=0na, o=Zoa, p=Zpa}", result.toString()); Assert.assertEquals(0, result.indexOfKey("a")); Assert.assertEquals(1, result.indexOfKey("b")); Assert.assertEquals(2, result.indexOfKey("c")); Assert.assertEquals(3, 
result.indexOfKey("e")); Assert.assertEquals(4, result.indexOfKey("f")); Assert.assertEquals(5, result.indexOfKey("g")); Assert.assertEquals(6, result.indexOfKey("m")); Assert.assertEquals(7, result.indexOfKey("n")); Assert.assertEquals(8, result.indexOfKey("o")); Assert.assertEquals(9, result.indexOfKey("p")); Assert.assertEquals(-1, result.indexOfKey("0")); Assert.assertTrue(result.containsKey("a")); Assert.assertTrue(result.containsKey("b")); Assert.assertTrue(result.containsKey("c")); Assert.assertFalse(result.containsKey("d")); Assert.assertTrue(result.containsKey("e")); Assert.assertTrue(result.containsKey("f")); Assert.assertTrue(result.containsKey("g")); Assert.assertTrue(result.containsKey("m")); Assert.assertTrue(result.containsKey("n")); Assert.assertTrue(result.containsKey("o")); Assert.assertTrue(result.containsKey("p")); Assert.assertFalse(result.containsKey("0")); Assert.assertEquals("a", result.keyAt(0)); Assert.assertEquals("b", result.keyAt(1)); Assert.assertEquals("c", result.keyAt(2)); Assert.assertEquals("e", result.keyAt(3)); Assert.assertEquals("f", result.keyAt(4)); Assert.assertEquals("g", result.keyAt(5)); Assert.assertEquals("m", result.keyAt(6)); Assert.assertEquals("n", result.keyAt(7)); Assert.assertEquals("o", result.keyAt(8)); Assert.assertEquals("p", result.keyAt(9)); Assert.assertTrue(result.containsValue("ac")); Assert.assertTrue(result.containsValue("bc")); Assert.assertTrue(result.containsValue("cc")); Assert.assertFalse(result.containsValue("dx")); Assert.assertTrue(result.containsValue("ec")); Assert.assertTrue(result.containsValue("fc")); Assert.assertTrue(result.containsValue("gc")); Assert.assertTrue(result.containsValue("0ma")); Assert.assertTrue(result.containsValue("0na")); Assert.assertTrue(result.containsValue("Zoa")); Assert.assertTrue(result.containsValue("Zpa")); Assert.assertFalse(result.containsValue("0")); Assert.assertEquals(0, result.indexOfValue("ac")); Assert.assertEquals(1, result.indexOfValue("bc")); 
Assert.assertEquals(2, result.indexOfValue("cc")); Assert.assertEquals(3, result.indexOfValue("ec")); Assert.assertEquals(4, result.indexOfValue("fc")); Assert.assertEquals(5, result.indexOfValue("gc")); Assert.assertEquals(6, result.indexOfValue("0ma")); Assert.assertEquals(7, result.indexOfValue("0na")); Assert.assertEquals(8, result.indexOfValue("Zoa")); Assert.assertEquals(9, result.indexOfValue("Zpa")); Assert.assertEquals(-1, result.indexOfValue("0")); Assert.assertEquals("ac", result.valueAt(0)); Assert.assertEquals("bc", result.valueAt(1)); Assert.assertEquals("cc", result.valueAt(2)); Assert.assertEquals("ec", result.valueAt(3)); Assert.assertEquals("fc", result.valueAt(4)); Assert.assertEquals("gc", result.valueAt(5)); Assert.assertEquals("0ma", result.valueAt(6)); Assert.assertEquals("0na", result.valueAt(7)); Assert.assertEquals("Zoa", result.valueAt(8)); Assert.assertEquals("Zpa", result.valueAt(9)); Assert.assertEquals(0, result.indexOfValueSorted("0ma")); Assert.assertEquals(1, result.indexOfValueSorted("0na")); Assert.assertEquals(2, result.indexOfValueSorted("Zoa")); Assert.assertEquals(3, result.indexOfValueSorted("Zpa")); Assert.assertEquals(4, result.indexOfValueSorted("ac")); Assert.assertEquals(5, result.indexOfValueSorted("bc")); Assert.assertEquals(6, result.indexOfValueSorted("cc")); Assert.assertEquals(7, result.indexOfValueSorted("ec")); Assert.assertEquals(8, result.indexOfValueSorted("fc")); Assert.assertEquals(9, result.indexOfValueSorted("gc")); Assert.assertEquals(-1, result.indexOfValueSorted("0")); Assert.assertEquals("0ma", result.sortedValueAt(0)); Assert.assertEquals("0na", result.sortedValueAt(1)); Assert.assertEquals("Zoa", result.sortedValueAt(2)); Assert.assertEquals("Zpa", result.sortedValueAt(3)); Assert.assertEquals("ac", result.sortedValueAt(4)); Assert.assertEquals("bc", result.sortedValueAt(5)); Assert.assertEquals("cc", result.sortedValueAt(6)); Assert.assertEquals("ec", result.sortedValueAt(7)); 
Assert.assertEquals("fc", result.sortedValueAt(8)); Assert.assertEquals("gc", result.sortedValueAt(9)); Map.Entry<String, String> entry = result.entryAt(0); Assert.assertEquals("a", entry.getKey()); Assert.assertEquals("ac", entry.getValue()); entry = result.entryAt(1); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); entry = result.entryAt(2); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); entry = result.entryAt(3); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); entry = result.entryAt(4); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); entry = result.entryAt(5); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); entry = result.entryAt(6); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); entry = result.entryAt(7); Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); entry = result.entryAt(8); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); entry = result.entryAt(9); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertEquals("ac", result.get("a")); Assert.assertEquals("bc", result.get("b")); Assert.assertEquals("cc", result.get("c")); Assert.assertEquals("ec", result.get("e")); Assert.assertEquals("fc", result.get("f")); Assert.assertEquals("gc", result.get("g")); Assert.assertEquals("0ma", result.get("m")); Assert.assertEquals("0na", result.get("n")); Assert.assertEquals("Zoa", result.get("o")); Assert.assertEquals("Zpa", result.get("p")); Assert.assertSame(null, result.get("0")); ArrayBackedSet<Map.Entry<String, String>> entrySet = result.entrySet(); Iterator<Map.Entry<String, String>> entryIt = entrySet.iterator(); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("a", 
entry.getKey()); Assert.assertEquals("ac", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertFalse(entryIt.hasNext()); entrySet = result.entrySetByValue(); entryIt = entrySet.iterator(); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); 
Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("a", entry.getKey()); Assert.assertEquals("ac", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); Assert.assertFalse(entryIt.hasNext()); } @Test public void testSplitter() throws Exception { HashMap<String, String> map1 = new HashMap<>(); map1.put("a", "ac"); map1.put("b", "bc"); map1.put("c", "cc"); map1.put("d", "dx"); map1.put("e", "ec"); map1.put("f", "fc"); map1.put("g", "gc"); map1.put("m", "0ma"); map1.put("n", "0na"); map1.put("o", "Zoa"); map1.put("p", "Zpa"); ImmutableSortedArrayMap<String, String> result1 = map1.entrySet().stream() .filter(p -> p.getValue().charAt(1) != 'x') .collect(Collectors.toImmutableSortedArrayBiMap()); // Stream the results from one map, collect back to another ImmutableSortedArrayMap<String, String> result2 = result1.entrySet().parallelStream() .collect(Collectors.toImmutableSortedArrayBiMap()); Assert.assertEquals(10, result2.size()); Assert.assertEquals("{a=ac, b=bc, c=cc, e=ec, f=fc, g=gc, m=0ma, n=0na, o=Zoa, p=Zpa}", result2.toString()); Assert.assertEquals(0, result2.indexOfKey("a")); 
Assert.assertEquals(1, result2.indexOfKey("b")); Assert.assertEquals(2, result2.indexOfKey("c")); Assert.assertEquals(3, result2.indexOfKey("e")); Assert.assertEquals(4, result2.indexOfKey("f")); Assert.assertEquals(5, result2.indexOfKey("g")); Assert.assertEquals(6, result2.indexOfKey("m")); Assert.assertEquals(7, result2.indexOfKey("n")); Assert.assertEquals(8, result2.indexOfKey("o")); Assert.assertEquals(9, result2.indexOfKey("p")); Assert.assertEquals(-1, result2.indexOfKey("0")); Assert.assertTrue(result2.containsKey("a")); Assert.assertTrue(result2.containsKey("b")); Assert.assertTrue(result2.containsKey("c")); Assert.assertFalse(result2.containsKey("d")); Assert.assertTrue(result2.containsKey("e")); Assert.assertTrue(result2.containsKey("f")); Assert.assertTrue(result2.containsKey("g")); Assert.assertTrue(result2.containsKey("m")); Assert.assertTrue(result2.containsKey("n")); Assert.assertTrue(result2.containsKey("o")); Assert.assertTrue(result2.containsKey("p")); Assert.assertFalse(result2.containsKey("0")); Assert.assertEquals("a", result2.keyAt(0)); Assert.assertEquals("b", result2.keyAt(1)); Assert.assertEquals("c", result2.keyAt(2)); Assert.assertEquals("e", result2.keyAt(3)); Assert.assertEquals("f", result2.keyAt(4)); Assert.assertEquals("g", result2.keyAt(5)); Assert.assertEquals("m", result2.keyAt(6)); Assert.assertEquals("n", result2.keyAt(7)); Assert.assertEquals("o", result2.keyAt(8)); Assert.assertEquals("p", result2.keyAt(9)); Assert.assertTrue(result2.containsValue("ac")); Assert.assertTrue(result2.containsValue("bc")); Assert.assertTrue(result2.containsValue("cc")); Assert.assertFalse(result2.containsValue("dx")); Assert.assertTrue(result2.containsValue("ec")); Assert.assertTrue(result2.containsValue("fc")); Assert.assertTrue(result2.containsValue("gc")); Assert.assertTrue(result2.containsValue("0ma")); Assert.assertTrue(result2.containsValue("0na")); Assert.assertTrue(result2.containsValue("Zoa")); 
Assert.assertTrue(result2.containsValue("Zpa")); Assert.assertFalse(result2.containsValue("0")); Assert.assertEquals(0, result2.indexOfValue("ac")); Assert.assertEquals(1, result2.indexOfValue("bc")); Assert.assertEquals(2, result2.indexOfValue("cc")); Assert.assertEquals(3, result2.indexOfValue("ec")); Assert.assertEquals(4, result2.indexOfValue("fc")); Assert.assertEquals(5, result2.indexOfValue("gc")); Assert.assertEquals(6, result2.indexOfValue("0ma")); Assert.assertEquals(7, result2.indexOfValue("0na")); Assert.assertEquals(8, result2.indexOfValue("Zoa")); Assert.assertEquals(9, result2.indexOfValue("Zpa")); Assert.assertEquals(-1, result2.indexOfValue("0")); Assert.assertEquals("ac", result2.valueAt(0)); Assert.assertEquals("bc", result2.valueAt(1)); Assert.assertEquals("cc", result2.valueAt(2)); Assert.assertEquals("ec", result2.valueAt(3)); Assert.assertEquals("fc", result2.valueAt(4)); Assert.assertEquals("gc", result2.valueAt(5)); Assert.assertEquals("0ma", result2.valueAt(6)); Assert.assertEquals("0na", result2.valueAt(7)); Assert.assertEquals("Zoa", result2.valueAt(8)); Assert.assertEquals("Zpa", result2.valueAt(9)); Assert.assertEquals(0, result2.indexOfValueSorted("0ma")); Assert.assertEquals(1, result2.indexOfValueSorted("0na")); Assert.assertEquals(2, result2.indexOfValueSorted("Zoa")); Assert.assertEquals(3, result2.indexOfValueSorted("Zpa")); Assert.assertEquals(4, result2.indexOfValueSorted("ac")); Assert.assertEquals(5, result2.indexOfValueSorted("bc")); Assert.assertEquals(6, result2.indexOfValueSorted("cc")); Assert.assertEquals(7, result2.indexOfValueSorted("ec")); Assert.assertEquals(8, result2.indexOfValueSorted("fc")); Assert.assertEquals(9, result2.indexOfValueSorted("gc")); Assert.assertEquals(-1, result2.indexOfValueSorted("0")); Assert.assertEquals("0ma", result2.sortedValueAt(0)); Assert.assertEquals("0na", result2.sortedValueAt(1)); Assert.assertEquals("Zoa", result2.sortedValueAt(2)); Assert.assertEquals("Zpa", 
result2.sortedValueAt(3)); Assert.assertEquals("ac", result2.sortedValueAt(4)); Assert.assertEquals("bc", result2.sortedValueAt(5)); Assert.assertEquals("cc", result2.sortedValueAt(6)); Assert.assertEquals("ec", result2.sortedValueAt(7)); Assert.assertEquals("fc", result2.sortedValueAt(8)); Assert.assertEquals("gc", result2.sortedValueAt(9)); Map.Entry<String, String> entry = result2.entryAt(0); Assert.assertEquals("a", entry.getKey()); Assert.assertEquals("ac", entry.getValue()); entry = result2.entryAt(1); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); entry = result2.entryAt(2); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); entry = result2.entryAt(3); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); entry = result2.entryAt(4); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); entry = result2.entryAt(5); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); entry = result2.entryAt(6); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); entry = result2.entryAt(7); Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); entry = result2.entryAt(8); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); entry = result2.entryAt(9); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertEquals("ac", result2.get("a")); Assert.assertEquals("bc", result2.get("b")); Assert.assertEquals("cc", result2.get("c")); Assert.assertEquals("ec", result2.get("e")); Assert.assertEquals("fc", result2.get("f")); Assert.assertEquals("gc", result2.get("g")); Assert.assertEquals("0ma", result2.get("m")); Assert.assertEquals("0na", result2.get("n")); Assert.assertEquals("Zoa", result2.get("o")); Assert.assertEquals("Zpa", result2.get("p")); 
Assert.assertSame(null, result2.get("0")); ArrayBackedSet<Map.Entry<String, String>> entrySet = result2.entrySet(); Iterator<Map.Entry<String, String>> entryIt = entrySet.iterator(); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("a", entry.getKey()); Assert.assertEquals("ac", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertFalse(entryIt.hasNext()); entrySet = result2.entrySetByValue(); entryIt = entrySet.iterator(); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("m", entry.getKey()); Assert.assertEquals("0ma", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); 
Assert.assertEquals("n", entry.getKey()); Assert.assertEquals("0na", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("o", entry.getKey()); Assert.assertEquals("Zoa", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("p", entry.getKey()); Assert.assertEquals("Zpa", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("a", entry.getKey()); Assert.assertEquals("ac", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("b", entry.getKey()); Assert.assertEquals("bc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("c", entry.getKey()); Assert.assertEquals("cc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("e", entry.getKey()); Assert.assertEquals("ec", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("f", entry.getKey()); Assert.assertEquals("fc", entry.getValue()); Assert.assertTrue(entryIt.hasNext()); entry = entryIt.next(); Assert.assertEquals("g", entry.getKey()); Assert.assertEquals("gc", entry.getValue()); Assert.assertFalse(entryIt.hasNext()); } @Test public void testKeySplitter() throws Exception { HashMap<String, String> map1 = new HashMap<>(); map1.put("a", "ac"); map1.put("b", "bc"); map1.put("c", "cc"); map1.put("d", "dx"); map1.put("e", "ec"); map1.put("f", "fc"); map1.put("g", "gc"); map1.put("m", "0ma"); map1.put("n", "0na"); map1.put("o", "Zoa"); map1.put("p", "Zpa"); ImmutableSortedArrayMap<String, String> result1 = map1.entrySet().stream() .filter(p -> p.getValue().charAt(1) != 'x') .collect(Collectors.toImmutableSortedArrayBiMap()); // Stream the results from one map, collect back to an arraylist ArrayList<String> result2 = result1.keySet().parallelStream() .collect(ArrayList::new, ArrayList::add, 
ArrayList::addAll); Assert.assertEquals(10, result2.size()); Assert.assertEquals("[a, b, c, e, f, g, m, n, o, p]", result2.toString()); Assert.assertEquals(0, result2.indexOf("a")); Assert.assertEquals(1, result2.indexOf("b")); Assert.assertEquals(2, result2.indexOf("c")); Assert.assertEquals(3, result2.indexOf("e")); Assert.assertEquals(4, result2.indexOf("f")); Assert.assertEquals(5, result2.indexOf("g")); Assert.assertEquals(6, result2.indexOf("m")); Assert.assertEquals(7, result2.indexOf("n")); Assert.assertEquals(8, result2.indexOf("o")); Assert.assertEquals(9, result2.indexOf("p")); Assert.assertEquals(-1, result2.indexOf("0")); Assert.assertTrue(result2.contains("a")); Assert.assertTrue(result2.contains("b")); Assert.assertTrue(result2.contains("c")); Assert.assertFalse(result2.contains("d")); Assert.assertTrue(result2.contains("e")); Assert.assertTrue(result2.contains("f")); Assert.assertTrue(result2.contains("g")); Assert.assertTrue(result2.contains("m")); Assert.assertTrue(result2.contains("n")); Assert.assertTrue(result2.contains("o")); Assert.assertTrue(result2.contains("p")); Assert.assertFalse(result2.contains("0")); Assert.assertEquals("a", result2.get(0)); Assert.assertEquals("b", result2.get(1)); Assert.assertEquals("c", result2.get(2)); Assert.assertEquals("e", result2.get(3)); Assert.assertEquals("f", result2.get(4)); Assert.assertEquals("g", result2.get(5)); Assert.assertEquals("m", result2.get(6)); Assert.assertEquals("n", result2.get(7)); Assert.assertEquals("o", result2.get(8)); Assert.assertEquals("p", result2.get(9)); } @Test public void testValueSplitter() throws Exception { HashMap<String, String> map1 = new HashMap<>(); map1.put("a", "ac"); map1.put("b", "bc"); map1.put("c", "cc"); map1.put("d", "dx"); map1.put("e", "ec"); map1.put("f", "fc"); map1.put("g", "gc"); map1.put("m", "0ma"); map1.put("n", "0na"); map1.put("o", "Zoa"); map1.put("p", "Zpa"); ImmutableSortedArrayMap<String, String> result1 = map1.entrySet().stream() .filter(p -> 
p.getValue().charAt(1) != 'x') .collect(Collectors.toImmutableSortedArrayBiMap()); // Stream the results from one map, collect back to another ArrayList<String> result2 = result1.values().parallelStream() .collect(ArrayList::new, ArrayList::add, ArrayList::addAll); Assert.assertEquals(10, result2.size()); Assert.assertEquals("[0ma, 0na, Zoa, Zpa, ac, bc, cc, ec, fc, gc]", result2.toString()); Assert.assertTrue(result2.contains("ac")); Assert.assertTrue(result2.contains("bc")); Assert.assertTrue(result2.contains("cc")); Assert.assertFalse(result2.contains("dx")); Assert.assertTrue(result2.contains("ec")); Assert.assertTrue(result2.contains("fc")); Assert.assertTrue(result2.contains("gc")); Assert.assertTrue(result2.contains("0ma")); Assert.assertTrue(result2.contains("0na")); Assert.assertTrue(result2.contains("Zoa")); Assert.assertTrue(result2.contains("Zpa")); Assert.assertFalse(result2.contains("0")); Assert.assertEquals(4, result2.indexOf("ac")); Assert.assertEquals(5, result2.indexOf("bc")); Assert.assertEquals(6, result2.indexOf("cc")); Assert.assertEquals(7, result2.indexOf("ec")); Assert.assertEquals(8, result2.indexOf("fc")); Assert.assertEquals(9, result2.indexOf("gc")); Assert.assertEquals(0, result2.indexOf("0ma")); Assert.assertEquals(1, result2.indexOf("0na")); Assert.assertEquals(2, result2.indexOf("Zoa")); Assert.assertEquals(3, result2.indexOf("Zpa")); Assert.assertEquals(-1, result2.indexOf("0")); Assert.assertEquals("ac", result2.get(4)); Assert.assertEquals("bc", result2.get(5)); Assert.assertEquals("cc", result2.get(6)); Assert.assertEquals("ec", result2.get(7)); Assert.assertEquals("fc", result2.get(8)); Assert.assertEquals("gc", result2.get(9)); Assert.assertEquals("0ma", result2.get(0)); Assert.assertEquals("0na", result2.get(1)); Assert.assertEquals("Zoa", result2.get(2)); Assert.assertEquals("Zpa", result2.get(3)); } @Test public void testMerge() throws Exception { HashMap<String, String> map1 = new HashMap<>(); map1.put("a", "ac"); 
map1.put("b", "bc"); map1.put("c", "cc"); map1.put("d", "dx"); map1.put("e", "ec"); map1.put("f", "fc"); map1.put("g", "gc"); HashMap<String, String> map2 = new HashMap<>(); map2.put("m", "ma"); map2.put("n", "na"); map2.put("o", "oa"); map2.put("p", "pa"); ImmutableSortedArrayMapBuilder<String, String> builder1 = new ImmutableSortedArrayMapBuilder<>(); ImmutableSortedArrayMapBuilder<String, String> builder2 = new ImmutableSortedArrayMapBuilder<>(); builder1.with(map1); map2.entrySet().forEach(builder2::with); builder1.merge(builder2); Assert.assertEquals(4, builder2.size()); builder2.clear(); Assert.assertEquals(0, builder2.size()); ArrayBackedMap<String, String> result = builder1.asBiMap().build(); Assert.assertEquals(11, result.size()); } @Test public void testBuilder() throws Exception { ImmutableSortedArrayMap<String, String> map = ImmutableSortedArrayMap.<String, String>builder() .with("c", "5", "d", "4", "e", "3") .with("a", "7", "b", "96") .with("f", "2", "g", "1").asBiMap().build(); Assert.assertEquals(7, map.size()); Assert.assertFalse(map.containsKey("0")); Assert.assertEquals("7", map.get("a")); Assert.assertFalse(map.containsKey("ab")); Assert.assertFalse(map.containsKey("da")); Assert.assertEquals("1", map.get("g")); Assert.assertFalse(map.containsKey("zz")); Assert.assertEquals("5", map.getOrDefault("c", "")); Assert.assertEquals("", map.getOrDefault("z", "")); StringBuilder builder = new StringBuilder(); map.forEach((k,v) -> builder.append(k)); Assert.assertEquals("abcdefg", builder.toString()); ArrayBackedSet<String> keySet = map.keySet(); Assert.assertFalse(keySet.isEmpty()); Assert.assertEquals(7, keySet.size()); Assert.assertEquals("[a, b, c, d, e, f, g]", keySet.toString()); List<String> keyList = keySet.asList(); Assert.assertFalse(keyList.isEmpty()); Assert.assertEquals(7, keyList.size()); Assert.assertEquals("[a, b, c, d, e, f, g]", keyList.toString()); Assert.assertNull(map.comparator()); List<String> subKeys = keyList.subList(2, 5); 
Assert.assertFalse(subKeys.isEmpty()); Assert.assertEquals(3, subKeys.size()); Assert.assertEquals("[c, d, e]", subKeys.toString()); ArrayBackedCollection<String> keyColl = (ArrayBackedCollection<String>)keyList; Assert.assertEquals("a", keyColl.getAtIndex(0)); Assert.assertEquals("b", keyColl.getAtIndex(1)); Assert.assertEquals("c", keyColl.getAtIndex(2)); Assert.assertEquals("d", keyColl.getAtIndex(3)); Assert.assertEquals("e", keyColl.getAtIndex(4)); Assert.assertEquals("f", keyColl.getAtIndex(5)); Assert.assertEquals("g", keyColl.getAtIndex(6)); Assert.assertEquals(3, keyColl.indexOfRange("d", 2, 5)); Assert.assertEquals(4, keyColl.indexOfRange("e", 2, 5)); Assert.assertEquals(2, keyColl.indexOfRange("c", 2, 5)); Assert.assertEquals(-1, keyColl.indexOfRange(null, 2, 5)); ArrayBackedSet<Map.Entry<String, String>> entrySet = map.entrySet(); Assert.assertFalse(entrySet.isEmpty()); Assert.assertEquals(7, entrySet.size()); Assert.assertEquals("[a=7, b=96, c=5, d=4, e=3, f=2, g=1]", entrySet.toString()); List<Map.Entry<String, String>> entryList = entrySet.asList(); Assert.assertFalse(entryList.isEmpty()); Assert.assertEquals(7, entryList.size()); Assert.assertEquals("[a=7, b=96, c=5, d=4, e=3, f=2, g=1]", entryList.toString()); ArrayBackedCollection<String> values = map.values(); Assert.assertFalse(values.isEmpty()); Assert.assertEquals(7, values.size()); Assert.assertEquals("[1, 2, 3, 4, 5, 7, 96]", values.toString()); List<String> valueList = values.asList(); Assert.assertFalse(valueList.isEmpty()); Assert.assertEquals(7, valueList.size()); Assert.assertEquals("[1, 2, 3, 4, 5, 7, 96]", valueList.toString()); ArrayBackedSet<Map.Entry<String, String>> entrySetByValue = map.entrySetByValue(); Assert.assertFalse(entrySetByValue.isEmpty()); Assert.assertEquals(7, entrySetByValue.size()); Assert.assertEquals("[g=1, f=2, e=3, d=4, c=5, a=7, b=96]", entrySetByValue.toString()); List<Map.Entry<String, String>> entryListByValue = entrySetByValue.asList(); 
Assert.assertFalse(entryListByValue.isEmpty()); Assert.assertEquals(7, entryListByValue.size()); Assert.assertEquals("[g=1, f=2, e=3, d=4, c=5, a=7, b=96]", entryListByValue.toString()); Assert.assertEquals(1, map.indexOfValue("96")); } @Test public void testSubMap() throws Exception { ImmutableSortedArrayMapBuilder<String, String> builder = new ImmutableSortedArrayMapBuilder<>(); builder.with("a", "ac"); builder.with("b", "bc"); builder.with("c", "cc"); builder.with("d", "dx"); builder.with("e", "ec"); builder.with("f", "fc"); builder.with("g", "gc"); builder.byNaturalKeyOrder().byNaturalValueOrder(); ImmutableSortedArrayMap<String, String> map = builder.asBiMap().build(); ImmutableSortedArrayMap<String, String> headMap = map.headMap("d"); ImmutableSortedArrayMap<String, String> tailMap = map.tailMap("e"); ImmutableSortedArrayMap<String, String> subMap = map.subMap("d", "e"); ImmutableSortedArrayMap<String, String> emptySubMap = map.subMap("ab", "ac"); ImmutableSortedArrayMap<String, String> fullSubMap = map.subMap("a", "h"); Assert.assertEquals(3, headMap.size()); Assert.assertEquals(3, tailMap.size()); Assert.assertEquals(1, subMap.size()); Assert.assertTrue(emptySubMap.isEmpty()); Assert.assertEquals(7, fullSubMap.size()); Assert.assertEquals("a", map.firstKey()); Assert.assertEquals("g", map.lastKey()); } @Test public void testExceptions() throws Exception { ImmutableSortedArrayMap<String, String> map = ImmutableSortedArrayMap.<String, String>builder() .asBiMap() .with("c", "5", "d", "4", "e", "3", "a", "7") .with("b", "96") .with("f", "2", "g", "1").build(); Assert.assertEquals(7, map.size()); try { Assert.assertEquals("5", map.remove("c")); Assert.fail("Remove of existing item should fail"); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { Assert.assertNull(map.put("q", "26")); Assert.fail("Put operation for new item should fail"); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { 
Assert.assertEquals("4", map.put("d", "27")); Assert.fail("Put operation for existing item should fail"); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { map.clear(); Assert.fail("Clear operation for non-empty set should fail"); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { Map<String, String> s = Collections.singletonMap("j", "j"); map.putAll(s); Assert.fail("putAll operation should fail"); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { List<String> s = Collections.singletonList("g"); map.keySet().removeAll(s); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } try { List<String> s = Collections.singletonList("e"); map.keySet().retainAll(s); } catch (UnsupportedOperationException e) { Assert.assertNotNull(e); } // No exception, since no elements removed List<String> s = Collections.singletonList("k"); Assert.assertFalse(map.keySet().removeAll(s)); // No exception, since all elements retained s = Arrays.<String>asList("a", "b", "c", "d", "e", "f", "g"); Assert.assertFalse(map.keySet().retainAll(s)); // No exception, since clearing an empty collection does nothing ImmutableSortedArrayMap.emptyMap().clear(); // The list returned from keySet().asList() is itself an ArrayBackedCollection ArrayBackedCollection<String> keys1 = (ArrayBackedCollection<String>)map.keySet().asList(); List<String> keys2 = keys1.asList(); Assert.assertSame(keys1, keys2); } @SuppressWarnings("unchecked") @Test public void testSerialization() throws Exception { ImmutableSortedArrayMapBuilder<String, Integer> builder = new ImmutableSortedArrayMapBuilder<>(); builder.asBiMap(); builder.with("a", 13); builder.with("b", 23); builder.with("c", 33); builder.with("d", 48); builder.with("e", 53); builder.with("f", 63); builder.with("g", 73); ImmutableSortedArrayMap<String, Integer> map = builder.build(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new 
ObjectOutputStream(baos); oos.writeObject(map); ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); ObjectInputStream ois = new ObjectInputStream(bais); ImmutableSortedArrayMap<String, Integer> map2 = (ImmutableSortedArrayMap<String, Integer>) ois.readObject(); Assert.assertEquals(7, map2.size()); Assert.assertEquals("{a=13, b=23, c=33, d=48, e=53, f=63, g=73}", map2.toString()); Assert.assertEquals(Integer.valueOf(23), map2.get("b")); Assert.assertEquals(Integer.valueOf(73), map2.get("g")); // Keysets baos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(baos); ArrayBackedSet<String> keySet = map.keySet(); oos.writeObject(keySet); bais = new ByteArrayInputStream(baos.toByteArray()); ois = new ObjectInputStream(bais); ArrayBackedSet<String> keySet2 = (ArrayBackedSet<String>) ois.readObject(); Assert.assertEquals(keySet, keySet2); Assert.assertEquals(7, keySet2.size()); Assert.assertEquals("[a, b, c, d, e, f, g]", keySet2.toString()); // Values baos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(baos); ArrayBackedCollection<Integer> values = map.values(); oos.writeObject(values); bais = new ByteArrayInputStream(baos.toByteArray()); ois = new ObjectInputStream(bais); ArrayBackedCollection<Integer> values2 = (ArrayBackedCollection<Integer>) ois.readObject(); Assert.assertEquals(values, values2); Assert.assertEquals(7, values2.size()); Assert.assertEquals("[13, 23, 33, 48, 53, 63, 73]", values2.toString()); // Entrysets baos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(baos); ArrayBackedSet<Map.Entry<String,Integer>> entrySet = map.entrySet(); oos.writeObject(entrySet); bais = new ByteArrayInputStream(baos.toByteArray()); ois = new ObjectInputStream(bais); ArrayBackedSet<Map.Entry<String,Integer>> entrySet2 = (ArrayBackedSet<Map.Entry<String,Integer>>) ois.readObject(); Assert.assertEquals(entrySet, entrySet2); Assert.assertEquals(7, entrySet2.size()); Assert.assertEquals("[a=13, b=23, c=33, d=48, e=53, f=63, 
g=73]", entrySet2.toString()); // Entrysets by value baos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(baos); entrySet = map.entrySetByValue(); oos.writeObject(entrySet); bais = new ByteArrayInputStream(baos.toByteArray()); ois = new ObjectInputStream(bais); entrySet2 = (ArrayBackedSet<Map.Entry<String,Integer>>) ois.readObject(); Assert.assertEquals(entrySet, entrySet2); Assert.assertEquals(7, entrySet2.size()); Assert.assertEquals("[a=13, b=23, c=33, d=48, e=53, f=63, g=73]", entrySet2.toString()); // Test empty map baos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(baos); oos.writeObject(ImmutableSortedArrayMap.emptyMap()); bais = new ByteArrayInputStream(baos.toByteArray()); ois = new ObjectInputStream(bais); map2 = (ImmutableSortedArrayMap<String, Integer>) ois.readObject(); Assert.assertEquals("{}", map2.toString()); Assert.assertEquals(0, map2.size()); Assert.assertSame(ImmutableSortedArrayMap.emptyMap(), map2); } }
run2000/java-immutable-collections
testsrc/net/njcull/collections/TestImmutableSortedArrayBiMap.java
Java
bsd-3-clause
45,385
$(document).ready(function() {
    getChapterData();
});

/**
 * Fetch the chapter list for the current book (ids read from the hidden
 * #idBook / #bookName inputs) and render it as a table into #allChapter.
 * On any non-"success" status the server message is shown instead.
 *
 * Fixes applied:
 *  - the `else if (resp.status == "fail")` and final `else` branches were
 *    byte-identical, so they are merged into one branch;
 *  - `lineNumber` was computed but never used, so it is removed;
 *  - locals are declared with `var` (they previously leaked as globals —
 *    NOTE(review): assumed nothing else read those globals; verify).
 */
function getChapterData() {
    var idBook = $("#idBook").val();
    var bookName = $("#bookName").val();

    $.ajax({
        url: '/chapter/getChapter/',
        type: 'GET',
        dataType: 'json',
        data: {"idBook": idBook}
    })
    .done(function(resp) {
        if (resp.status == "success") {
            var allChapter = resp.message;
            // Build the replacement markup: wrapper div + table header.
            var obj = "<div id='allChapter' class='col-md-12 col-sm-12 alert alert-success'>";
            var tableObj = "<div class='table-responsive'><table class='table'>";
            var thObj = "<tr> <th>章節序號</th> <th>章節名稱</th> <th>書名</th> </tr>";
            obj = obj + tableObj + thObj;
            // One row per chapter, linking to the chapter-content page.
            for (var i in allChapter) {
                obj = obj + "<tr>"
                    + "<td>" + allChapter[i].chapterOrder + "</td>"
                    + "<td><a class='alink' href='/content/chapterContent/?idBook=" + idBook
                    + "&chapterOrder=" + allChapter[i].chapterOrder
                    + "&chapterName=" + allChapter[i].name
                    + "&bookName=" + bookName + "'>" + allChapter[i].name
                    + "</a></td><td>" + allChapter[i].book_name + "</td></tr>";
            }
            obj = obj + "</table></div></div>";
            $("#allChapter").replaceWith(obj);
        } else {
            // "fail" and any unknown status: show the server-provided message.
            var obj = "<div class='box'><div class='box-body' id='allChapter'><div class='col-md-12 col-sm-12'><h4>" + resp.message + "</h4></div></div></div>";
            $("#allChapter").replaceWith(obj);
        }
    })
    .fail(function(resp) {
        // Transport-level failure: log only; the page keeps its current content.
        console.log(resp);
    });
}
0lidaxiang/WeArt
static/templatesJs/chapter/bookChapter.js
JavaScript
bsd-3-clause
1,721
<?php

namespace BackOffice\Form;

use DoctrineORMModuleTest\Assets\Entity\Category;
use Zend\Form\Form;
use BackOffice\Filter\CategoryFilter;

/**
 * Category create/edit form: a single "name" text input plus a submit
 * button, validated by CategoryFilter.
 */
class CategoryForm extends Form
{
    public function __construct()
    {
        parent::__construct();

        // "Libellé" (label) text input.
        $nameElement = [
            'type'       => 'text',
            'name'       => 'name',
            'options'    => ['label' => 'Libellé :'],
            'attributes' => ['class' => 'form-control'],
        ];
        $this->add($nameElement);

        // Submit button.
        $submitElement = [
            'type'       => 'submit',
            'name'       => 'submit',
            'attributes' => ['value' => 'Submit', 'class' => 'btn btn-default'],
        ];
        $this->add($submitElement);

        // Attach the input filter that validates submitted data.
        $this->setInputFilter(new CategoryFilter());
    }
}
Mattheousse/ProjetZend
module/BackOffice/src/BackOffice/Form/CategoryForm.php
PHP
bsd-3-clause
809
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <!-- The 3 meta tags above MUST come first in <head>; anything else must follow them. -->
    <meta name="description" content="吕坤洁前端开发工程师,前端工程师简历,互联网招聘,优秀前端工程师">
    <!-- FIX: attribute name was misspelled "kewwords"; search engines only read "keywords". -->
    <meta name="keywords" content="吕坤洁,前端,前端开发工程师,fe,web research">
    <meta name="author" content="">
    <title>吕坤洁_前端开发工程师_简历</title>
    <link rel="shortcut icon" href="../../../../favicon.ico"/>
    <link rel="bookmark" href="../../../../favicon.ico"/>
    <!-- Bootstrap core CSS -->
    <link href="http://cdn.bootcss.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
    <!-- Custom styles for this template -->
    <!-- Just for debugging purposes. Don't actually copy these 2 lines! -->
    <!--[if lt IE 9]><script src="../../assets/js/ie8-responsive-file-warning.js"></script><![endif]-->
    <!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
    <!--[if lt IE 9]>
    <script src="http://cdn.bootcss.com/html5shiv/3.7.2/html5shiv.min.js"></script>
    <script src="http://cdn.bootcss.com/respond.js/1.4.2/respond.min.js"></script>
    <![endif]-->
    <script src="../../../../bootstrap/js/jquery.js"></script>
    <script src="../../../../bootstrap/js/bootstrap.min.js"></script>
</head>
<!-- NOTE(review): XHTML 1.0 doctype mixed with HTML5-style meta tags; left as-is
     since changing the doctype could alter rendering. The <body> is expected to
     be provided by the view rendered into $content (Yii layout convention). -->
<?php echo $content; ?>
wangchunwei/resume
resume/protected/views/layouts/main.php
PHP
bsd-3-clause
1,709
# Root URLconf for the price-tracker project.
# NOTE(review): django.conf.urls.defaults and the patterns() helper were
# removed in later Django versions; this file targets legacy Django (<= 1.5).
from django.conf.urls.defaults import *
import price_tracker.views
from django.contrib import admin

# Discover and register all ModelAdmin classes in installed apps.
admin.autodiscover()

# Custom 500 handler from djangotoolbox (suited to non-relational backends).
handler500 = 'djangotoolbox.errorviews.server_error'

urlpatterns = patterns(
    '',
    (r'^admin/', include(admin.site.urls)),
    # NOTE(review): unanchored pattern — any URL containing "results" matches.
    (r'results', price_tracker.views.results),
    # Site index.
    (r'^$', price_tracker.views.index),
    # Static "about us" page rendered by the generic template view
    # (string view path — legacy Django style).
    ('aboutus', 'django.views.generic.simple.direct_to_template',
     {'template': 'price_tracker/aboutus.html'}),
)
vinayan3/clpricehistory
urls.py
Python
bsd-3-clause
458
<?php

use yii\helpers\Html;
use yii\grid\GridView;
use yii\bootstrap\ActiveForm;
use wbraganca\dynamicform\DynamicFormWidget;
use backend\modules\org\models\OrgSite;

/* @var $this yii\web\View */
/* @var $searchModel backend\modules\qtn\Models\SurveyTabSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */

// View for creating/updating a survey together with a dynamic list of tabs.
// Expects $modelsSurvey (the survey), $modelsTab (array of tab models) and
// $modelsTitle (per-tab title models) to be passed in by the controller.
$this->title = 'Survey Tabs';
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="person-form">

    <?php $form = ActiveForm::begin(['id' => 'dynamic-form']); ?>

    <div class="row">
        <?php
        // Site selector for the survey (prompt text is Thai: "choose a survey").
        echo $form->field($modelsSurvey, 'site_id')
            ->dropDownList(OrgSite::getArraySite(),
                [
                    'id'=>'ddl-survey',
                    'prompt'=>'เลือกเลือกแบบสำรวจ'
                ]
            );
        ?>
    </div>
    <div class="row">
        <div class="col-sm-6">
            <?= $form->field($modelsSurvey, 'name')->textInput(['maxlength' => true]) ?>
        </div>
        <div class="col-sm-6">
            <?= $form->field($modelsSurvey, 'title')->textInput(['maxlength' => true]) ?>
        </div>
    </div>

    <div class="padding-v-md">
        <div class="line line-dashed"></div>
    </div>

    <?php
    // Dynamic-form widget: lets the user add/remove up to 5 tab rows
    // (".house-item") inside ".container-tab" without a page reload.
    DynamicFormWidget::begin([
        'widgetContainer' => 'dynamicform_wrapper',
        'widgetBody' => '.container-tab',
        'widgetItem' => '.house-item',
        'limit' => 5,
        'min' => 1,
        'insertButton' => '.add-house',
        'deleteButton' => '.remove-house',
        'model' => $modelsTab[0],
        'formId' => 'dynamic-form',
        'formFields' => [
            'description',
        ],
    ]); ?>

    <table class="table table-bordered table-striped">
        <thead>
            <tr>
                <th>Tab</th>
                <th style="width: 450px;">หัวข้อ</th>
                <th class="text-center" style="width: 90px;">
                    <button type="button" class="add-house btn btn-success btn-xs"><span class="fa fa-plus"></span>+</button>
                </th>
            </tr>
        </thead>
        <tbody class="container-tab">
        <?php foreach ($modelsTab as $indexTab => $modelTab): ?>
            <tr class="house-item">
                <td class="vcenter">
                    <?php
                        // necessary for update action.
                        if (! $modelTab->isNewRecord) {
                            echo Html::activeHiddenInput($modelTab, "[{$indexTab}]id");
                        }
                    ?>
                    <?= $form->field($modelTab, "[{$indexTab}]name")->label(false)->textInput(['maxlength' => true]) ?>
                </td>
                <td>
                    <?php // Nested sub-form for this tab's titles. ?>
                    <?= $this->render('title/_form-title', [
                        'form' => $form,
                        'indexHouse' => $indexTab,
                        'modelsRoom' => $modelsTitle[$indexTab],
                    ]) ?>
                </td>
                <?php // NOTE(review): inline style ends in a truncated "verti" —
                      // probably meant "vertical-align: …"; confirm intent. ?>
                <td class="text-center vcenter" style="width: 90px; verti">
                    <button type="button" class="remove-house btn btn-danger btn-xs"><span class="fa fa-minus">-</span></button>
                </td>
            </tr>
        <?php endforeach; ?>
        </tbody>
    </table>
    <?php DynamicFormWidget::end(); ?>

    <div class="form-group">
        <?= Html::submitButton($modelsSurvey->isNewRecord ? 'Create' : 'Update', ['class' => 'btn btn-primary']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
yuttapong/webapp
backend/modules/qtn/views/survey/_form.php
PHP
bsd-3-clause
3,552
using System;
using NAME.SelfHost.Kestrel;
using System.Reflection;
using System.Threading;

namespace NAME.DummyConsole
{
    /// <summary>
    /// Minimal console host: enables the NAME self-hosted Kestrel endpoint,
    /// prints a greeting, then blocks until Ctrl+C is pressed.
    /// </summary>
    class Program
    {
        // Signalled by the Ctrl+C handler so Main can shut down and dispose the host.
        private static readonly AutoResetEvent _closing = new AutoResetEvent(false);

        static void Main(string[] args)
        {
            Action<NAMEKestrelConfiguration> configure = config =>
            {
                // Advertise this assembly's name/version as the API identity.
                AssemblyName entryName = typeof(Program).GetTypeInfo().Assembly.GetName();
                config.APIName = entryName.Name;
                config.APIVersion = entryName.Version.ToString();
                config.LogHealthCheckToConsole = true;
                config.ThrowOnDependenciesFail = false;
            };

            using (var selfHost = NAMEServer.EnableName(configure))
            {
                Console.WriteLine("Hello World!");
                Console.CancelKeyPress += new ConsoleCancelEventHandler(OnExit);
                // Park the main thread until OnExit releases it.
                _closing.WaitOne();
            }
        }

        /// <summary>Ctrl+C handler: logs and releases the main thread.</summary>
        protected static void OnExit(object sender, ConsoleCancelEventArgs args)
        {
            Console.WriteLine("Exiting");
            _closing.Set();
        }
    }
}
nosinovacao/name-sdk
integration-tests/NAME.DummyConsole.Kestrel/Program.cs
C#
bsd-3-clause
1,142
<?php

namespace Calendar\Model;

/**
 * Plain data container for a calendar event row.
 */
class Calendar
{
    public $event_id;
    public $created_at;
    public $event_title;
    public $event_body;
    public $user_id;

    /**
     * Populate this object from an associative array (e.g. a DB row).
     * Missing keys fall back to null, except created_at which defaults
     * to today's date in Y-m-d format.
     *
     * @param array $data source values keyed by property name
     */
    public function exchangeArray($data)
    {
        $defaults = array(
            'event_id'    => null,
            'created_at'  => date('Y-m-d'),
            'event_title' => null,
            'event_body'  => null,
            'user_id'     => null,
        );
        foreach ($defaults as $property => $fallback) {
            $this->$property = isset($data[$property]) ? $data[$property] : $fallback;
        }
    }

    /**
     * @return array all public properties as name => value pairs
     */
    public function getArrayCopy()
    {
        return get_object_vars($this);
    }
}
marziolek/zend-app
module/Calendar/src/Calendar/Model/Calendar.php
PHP
bsd-3-clause
715
using System;
using System.Web.Http;
using Thinktecture.Relay.Server.Config;

namespace Thinktecture.Relay.Server.Helper
{
	/// <summary>
	/// Test double for <see cref="IConfiguration"/>: every setting is a plain
	/// read/write auto-property so tests can assign exactly the values they need.
	/// Only <see cref="TemporaryRequestStoragePeriod"/> gets a default (1 minute).
	/// </summary>
	public class ConfigurationDummy : IConfiguration
	{
		// RabbitMQ Settings
		public string RabbitMqConnectionString { get; set; }
		public string RabbitMqClusterHosts { get; set; }
		public bool RabbitMqAutomaticRecoveryEnabled { get; set; }
		public TimeSpan QueueExpiration { get; set; }
		public TimeSpan RequestExpiration { get; set; }

		// App Settings
		public TimeSpan OnPremiseConnectorCallbackTimeout { get; set; }
		public string TraceFileDirectory { get; set; }
		public int LinkPasswordLength { get; set; }
		public int DisconnectTimeout { get; set; }
		public int ConnectionTimeout { get; set; }
		public int KeepAliveInterval { get; set; }
		public bool UseInsecureHttp { get; set; }
		public ModuleBinding EnableManagementWeb { get; set; }
		public ModuleBinding EnableRelaying { get; set; }
		public ModuleBinding EnableOnPremiseConnections { get; set; }
		public string HostName { get; set; }
		public int Port { get; set; }
		public string ManagementWebLocation { get; set; }
		public TimeSpan TemporaryRequestStoragePeriod { get; set; }
		public string TemporaryRequestStoragePath { get; set; }
		public TimeSpan ActiveConnectionTimeout { get; set; }
		public string CustomCodeAssemblyPath { get; set; }
		public string SharedSecret { get; set; }
		public string OAuthCertificate { get; set; }
		public TimeSpan HstsHeaderMaxAge { get; set; }
		public bool HstsIncludeSubdomains { get; set; }
		public IncludeErrorDetailPolicy IncludeErrorDetailPolicy { get; set; }
		public int MaxFailedLoginAttempts { get; set; }
		public TimeSpan FailedLoginLockoutPeriod { get; set; }
		public bool SecureClientController { get; set; }
		public TimeSpan AccessTokenLifetime { get; set; }
		public bool LogSensitiveData { get; set; }
		// Consistency fix: was declared as "Boolean" while every other flag in
		// this class uses the C# keyword "bool" (an alias for System.Boolean,
		// so the property's type is unchanged).
		public bool RequireLinkAvailability { get; set; }

		// Default settings for links
		public TimeSpan LinkTokenRefreshWindow { get; set; }
		public TimeSpan LinkReconnectMinWaitTime { get; set; }
		public TimeSpan LinkReconnectMaxWaitTime { get; set; }
		public TimeSpan? LinkAbsoluteConnectionLifetime { get; set; }
		public TimeSpan? LinkSlidingConnectionLifetime { get; set; }

		public ConfigurationDummy()
		{
			// The only default a fresh dummy provides.
			TemporaryRequestStoragePeriod = TimeSpan.FromMinutes(1);
		}
	}
}
thinktecture/relayserver
Thinktecture.Relay.Server.Test/Helper/ConfigurationDummy.cs
C#
bsd-3-clause
2,353
using System;
using System.Collections.Generic;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using entCMS.Models;
using entCMS.Services;

namespace entCMS.Manage
{
    /// <summary>
    /// Admin page listing cmsLanguage records in a paged GridView.
    /// Requires the PPT_SYSTEM purview (enforced by the BasePage constructor).
    /// </summary>
    public partial class LanguageList : BasePage
    {
        // Shared singleton service used to query language records.
        LanguageService lgs = LanguageService.GetInstance();

        public LanguageList() : base(PagePurviewType.PPT_SYSTEM) { }

        protected void Page_Load(object sender, EventArgs e)
        {
            // Initialize the pager control and the data-bound control.
            base.InitializePageControls(pager, gv);
            if (!IsPostBack)
            {
                BindGrid();
            }
        }

        /// <summary>
        /// Loads the current page of languages and binds them to the grid.
        /// recordCount is filled by GetList (ref) for the pager's total.
        /// </summary>
        private void BindGrid()
        {
            int recordCount = 0;
            List<cmsLanguage> ls = lgs.GetList(pager.CurrentPageIndex, pager.PageSize, ref recordCount);
            // Bind the data to the GridView (via the BasePage helper).
            base.BindGrid<cmsLanguage>(recordCount, ls);
        }

        // Intentionally empty — hook kept so the markup's OnRowDataBound wiring resolves.
        protected void gv_RowDataBound(object sender, GridViewRowEventArgs e)
        {
        }

        // Re-bind when the pager moves to another page.
        protected void pager_PageChanged(object src, EventArgs e)
        {
            BindGrid();
        }
    }
}
plz821/entCMS
entCMS.Manage/Manage/System/LanguageList.aspx.cs
C#
bsd-3-clause
1,243
/* *****************************************************************************
 * Caleydo - Visualization for Molecular Biology - http://caleydo.org
 * Copyright (c) The Caleydo Team. All rights reserved.
 * Licensed under the new BSD license, available at http://caleydo.org/license
 **************************************************************************** */
/**
 * Created by Samuel Gratzl on 04.08.2014.
 */
import {mixin} from '../index';
import {parse, RangeLike, Range, CompositeRange1D, all} from '../range';
import {resolve as resolveIDType, createLocalAssigner} from '../idtype';
import {ADataType, IDataType, VALUE_TYPE_CATEGORICAL, ICategory} from '../datatype';
import {getFirstByFQName} from '../data';
import {ICategoricalVector} from '../vector';
import {rangeHist, IHistogram} from '../math';
import {IStratification, IStratificationDataDescription, createDefaultStratificationDesc} from './IStratification';
import StratificationGroup from './StratificationGroup';
import {IStratificationLoader, viaAPILoader, viaDataLoader} from './loader';
import StratificationVector from './StratificationVector';

/**
 * root matrix implementation holding the data
 * @internal
 */
export default class Stratification extends ADataType<IStratificationDataDescription> implements IStratification {
  // Lazily-created, cached categorical-vector view of this stratification.
  private _v: Promise<ICategoricalVector>;

  constructor(desc: IStratificationDataDescription, private loader: IStratificationLoader) {
    super(desc);
  }

  get idtype() {
    return resolveIDType(this.desc.idtype);
  }

  get groups() {
    return this.desc.groups;
  }

  /**
   * Returns a view on a single group (by index) of this stratification.
   */
  group(group: number): IStratification {
    return new StratificationGroup(this, group, this.groups[group]);
  }

  // NOTE(review): `bins`/`range` parameters are currently ignored (see TODO).
  async hist(bins?: number, range?: Range): Promise<IHistogram> {
    //TODO
    return rangeHist(await this.range());
  }

  vector() {
    return this.asVector();
  }

  /**
   * Loads (once) and caches the categorical-vector representation.
   */
  asVector(): Promise<ICategoricalVector> {
    if (!this._v) {
      this._v = this.loader(this.desc).then((data) => new StratificationVector(this, data.range));
    }
    return this._v;
  }

  /**
   * Resolves the dataset this stratification was derived from, if the
   * description declares an `origin`; otherwise a rejected promise.
   */
  origin(): Promise<IDataType> {
    if ('origin' in this.desc) {
      return getFirstByFQName(this.desc.origin);
    }
    return Promise.reject('no origin specified');
  }

  async range() {
    return (await this.loader(this.desc)).range;
  }

  /**
   * Range in terms of global row ids (rowIds pre-multiplied by the grouping range).
   */
  async idRange() {
    const data = await this.loader(this.desc);
    const ids = data.rowIds.dim(0);
    const range = data.range;
    return ids.preMultiply(range, this.dim[0]);
  }

  async names(range: RangeLike = all()) {
    return parse(range).filter((await this.loader(this.desc)).rows, this.dim);
  }

  async ids(range: RangeLike = all()): Promise<Range> {
    return (await this.loader(this.desc)).rowIds.preMultiply(parse(range), this.dim);
  }

  get idtypes() {
    return [this.idtype];
  }

  size() {
    return this.desc.size;
  }

  get length() {
    return this.dim[0];
  }

  get ngroups() {
    return this.desc.ngroups;
  }

  get dim() {
    return [this.size()];
  }

  // Persisted form is just the dataset id.
  persist() {
    return this.desc.id;
  }
}

/**
 * module entry point for creating a datatype
 * @param desc
 * @returns {IVector}
 */
export function create(desc: IStratificationDataDescription): Stratification {
  return new Stratification(desc, viaAPILoader());
}

/**
 * Wraps already-loaded rows/ids/range as a Stratification (no API round-trip).
 */
export function wrap(desc: IStratificationDataDescription, rows: string[], rowIds: number[], range: CompositeRange1D) {
  return new Stratification(desc, viaDataLoader(rows, rowIds, range));
}

// NOTE(review): interface name has a pre-existing typo ("Stratifcation");
// kept because it is exported and may be referenced by callers.
export interface IAsStratifcationOptions {
  name?: string;
  idtype?: string;
  rowassigner?(ids: string[]): Range;
}

/**
 * Builds a Stratification from row names and a composite range, deriving the
 * group descriptions from the range's groups. Row ids come from the supplied
 * rowassigner or a fresh local assigner.
 */
export function asStratification(rows: string[], range: CompositeRange1D, options: IAsStratifcationOptions = {}) {
  const desc = mixin(createDefaultStratificationDesc(), {
    size: 0,
    groups: range.groups.map((r) => ({name: r.name, color: r.color, size: r.length})),
    ngroups: range.groups.length
  }, options);

  const rowAssigner = options.rowassigner || createLocalAssigner();
  return new Stratification(desc, viaDataLoader(rows, rowAssigner(rows), range));
}

/**
 * Derives a Stratification from a categorical vector: one group per category.
 * Group sizes start as NaN and are filled in when the loader first resolves.
 * @throws Error if the vector's value type is not categorical
 */
export function wrapCategoricalVector(v: ICategoricalVector) {
  if (v.valuetype.type !== VALUE_TYPE_CATEGORICAL) {
    throw new Error('invalid vector value type: ' + v.valuetype.type);
  }
  // Normalize string categories and ICategory objects into group descriptors.
  const toGroup = (g: string|ICategory) => {
    if (typeof g === 'string') {
      return {name: <string>g, color: 'gray', size: NaN};
    }
    const cat = <ICategory>g;
    return {name: cat.name, color: cat.color || 'gray', size: NaN};
  };
  const cats = v.desc.value.categories.map(toGroup);
  const desc: IStratificationDataDescription = {
    id: v.desc.id + '-s',
    type: 'stratification',
    name: v.desc.name + '-s',
    fqname: v.desc.fqname + '-s',
    description: v.desc.description,
    idtype: v.idtype,
    ngroups: cats.length,
    groups: cats,
    size: v.length,
    creator: v.desc.creator,
    ts: v.desc.ts
  };
  function loader() {
    return Promise.all<any>([v.groups(), v.ids(), v.names()]).then((args) => {
      const range = <CompositeRange1D>args[0];
      // Back-fill the NaN group sizes from the resolved grouping.
      range.groups.forEach((g, i) => cats[i].size = g.length);
      return {
        range: args[0],
        rowIds: args[1],
        rows: args[2]
      };
    });
  }
  return new Stratification(desc, loader);
}
MengjiaoH/UpSet-Phovea
node_modules/phovea_core/src/stratification/Stratification.ts
TypeScript
bsd-3-clause
5,263
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace ArduinoMonitor
{
    /// <summary>
    /// Kinds of events the monitor records (names describe the trigger;
    /// exact semantics are defined by the code that raises each one).
    /// </summary>
    public enum EventType
    {
        /// <summary>Application started.</summary>
        ApplicationStart,
        /// <summary>Initialization completed.</summary>
        Initialized,
        /// <summary>Application stopped.</summary>
        ApplicationStop,
        /// <summary>Application paused.</summary>
        ApplicationPause,
        /// <summary>Application resumed after a pause.</summary>
        ApplicationContinue,
        /// <summary>Configuration changed.</summary>
        ConfigurationChanged,
        /// <summary>A reading dropped below the low threshold.</summary>
        LowThresholdCrossed,
        /// <summary>A reading rose above the high threshold.</summary>
        HighThresholdCrossed,
        /// <summary>Readings returned to the normal range.</summary>
        NormalityRestored,
        /// <summary>A notification email was sent.</summary>
        EmailSent,
        /// <summary>Sending a notification email failed.</summary>
        EmailFailure,
        /// <summary>Reading from the device failed.</summary>
        ReadFailure,
        /// <summary>Generic error event.</summary>
        Error
    }
}
Ehryk/ArduinoMonitor
ArduinoMonitor/Common/Enums/EventType.cs
C#
bsd-3-clause
480
import os
import StringIO
import subprocess
import tempfile
from base64 import b64encode

from django.conf import settings
from django.core.files.storage import default_storage as storage

from PIL import Image

import olympia.core.logger


log = olympia.core.logger.getLogger('z.versions.utils')


def write_svg_to_png(svg_content, out):
    """Render `svg_content` (bytes) to a PNG file at path `out` using
    rsvg-convert. Returns True on success, False on any I/O or conversion
    failure (the error is logged at debug level)."""
    # when settings.DEBUG is on (i.e. locally) don't delete the svgs.
    tmp_args = {
        'dir': settings.TMP_PATH,
        'mode': 'wb',
        'suffix': '.svg',
        'delete': not settings.DEBUG}
    with tempfile.NamedTemporaryFile(**tmp_args) as temporary_svg:
        temporary_svg.write(svg_content)
        temporary_svg.flush()

        try:
            if not os.path.exists(os.path.dirname(out)):
                # BUG FIX: previously this called os.makedirs(out), creating a
                # directory at the output *file* path, which then prevented
                # rsvg-convert from writing the PNG there. Create the parent
                # directory instead.
                os.makedirs(os.path.dirname(out))
            command = [
                settings.RSVG_CONVERT_BIN,
                '--output', out,
                temporary_svg.name,
            ]
            subprocess.check_call(command)
        except IOError as io_error:
            log.debug(io_error)
            return False
        except subprocess.CalledProcessError as process_error:
            log.debug(process_error)
            return False
    return True


def encode_header_image(path):
    """Read the image stored at `path` and return a (data-uri, width, height)
    tuple; on failure returns (None, 0, 0)."""
    try:
        with storage.open(path, 'rb') as image:
            header_blob = image.read()
            with Image.open(StringIO.StringIO(header_blob)) as header_image:
                (width, height) = header_image.size
                src = 'data:image/%s;base64,%s' % (
                    header_image.format.lower(), b64encode(header_blob))
    except IOError as io_error:
        log.debug(io_error)
        return (None, 0, 0)
    return (src, width, height)


class AdditionalBackground(object):
    """One extra theme background image plus its CSS alignment/tiling,
    with helpers to compute SVG <pattern> placement."""

    @classmethod
    def split_alignment(cls, alignment):
        """Split a CSS background-position keyword string into an
        (horizontal, vertical) pair, filling in 'center' defaults."""
        alignments = alignment.split()
        # e.g. "center top"
        if len(alignments) >= 2:
            return (alignments[0], alignments[1])
        elif len(alignments) == 1:
            # e.g. "left", which is the same as 'left center'
            if alignments[0] in ['left', 'right']:
                return (alignments[0], 'center')
            # e.g. "top", which is the same as 'center top'
            else:
                return ('center', alignments[0])
        else:
            return ('', '')

    def __init__(self, path, alignment, tiling, header_root):
        # If there an unequal number of alignments or tiling to srcs the value
        # will be None so use defaults.
        self.alignment = (alignment or 'right top').lower()
        self.tiling = (tiling or 'no-repeat').lower()
        self.src, self.width, self.height = encode_header_image(
            os.path.join(header_root, path))

    def calculate_pattern_offsets(self, svg_width, svg_height):
        """Compute pattern_x/y and pattern_width/height for placing this
        image inside an SVG of the given dimensions, honoring the CSS
        alignment and repeat mode."""
        align_x, align_y = self.split_alignment(self.alignment)

        if align_x == 'right':
            self.pattern_x = svg_width - self.width
        elif align_x == 'center':
            self.pattern_x = (svg_width - self.width) / 2
        else:
            self.pattern_x = 0
        if align_y == 'bottom':
            self.pattern_y = svg_height - self.height
        elif align_y == 'center':
            self.pattern_y = (svg_height - self.height) / 2
        else:
            self.pattern_y = 0

        # Repeating (or oversized) images use their own size as the tile;
        # otherwise the tile spans the whole SVG so the image appears once.
        if self.tiling in ['repeat', 'repeat-x'] or self.width > svg_width:
            self.pattern_width = self.width
        else:
            self.pattern_width = svg_width
        if self.tiling in ['repeat', 'repeat-y'] or self.height > svg_height:
            self.pattern_height = self.height
        else:
            self.pattern_height = svg_height


# Map Chrome manifest color keys to their CSS/AMO property names.
CHROME_COLOR_TO_CSS = {
    'bookmark_text': 'toolbar_text',
    'frame': 'accentcolor',
    'frame_inactive': 'accentcolor',
    'tab_background_text': 'textcolor',
}


def process_color_value(prop, value):
    """Normalize a (property, value) color pair: translate Chrome key names
    and render [r, g, b] lists as an rgb() string; other values are
    stringified with spaces stripped."""
    prop = CHROME_COLOR_TO_CSS.get(prop, prop)
    if isinstance(value, list) and len(value) == 3:
        return prop, u'rgb(%s,%s,%s)' % tuple(value)
    # strip out spaces because jquery.minicolors chokes on them
    return prop, unicode(value).replace(' ', '')
atiqueahmedziad/addons-server
src/olympia/versions/utils.py
Python
bsd-3-clause
4,124
import json import re import pytest from django import forms from django.core import exceptions, serializers from django.core.management import call_command from django.db import connection, models from django.db.migrations.writer import MigrationWriter from django.db.models import Q from django.test import SimpleTestCase, TestCase, TransactionTestCase, override_settings from django_mysql.forms import SimpleListField from django_mysql.models import ListCharField, ListF from django_mysql.test.utils import override_mysql_variables from tests.testapp.models import ( CharListDefaultModel, CharListModel, IntListModel, TemporaryModel, ) class TestSaveLoad(TestCase): def test_char_easy(self): s = CharListModel.objects.create(field=["comfy", "big"]) assert s.field == ["comfy", "big"] s = CharListModel.objects.get(id=s.id) assert s.field == ["comfy", "big"] s.field.append("round") s.save() assert s.field == ["comfy", "big", "round"] s = CharListModel.objects.get(id=s.id) assert s.field == ["comfy", "big", "round"] def test_char_string_direct(self): s = CharListModel.objects.create(field="big,bad") s = CharListModel.objects.get(id=s.id) assert s.field == ["big", "bad"] def test_is_a_list_immediately(self): s = CharListModel() assert s.field == [] s.field.append("bold") s.field.append("brave") s.save() assert s.field == ["bold", "brave"] s = CharListModel.objects.get(id=s.id) assert s.field == ["bold", "brave"] def test_empty(self): s = CharListModel.objects.create() assert s.field == [] s = CharListModel.objects.get(id=s.id) assert s.field == [] def test_char_cant_create_lists_with_empty_string(self): with pytest.raises(ValueError): CharListModel.objects.create(field=[""]) def test_char_cant_create_sets_with_commas(self): with pytest.raises(ValueError): CharListModel.objects.create(field=["co,mma", "contained"]) def test_char_basic_lookup(self): mymodel = CharListModel.objects.create() empty = CharListModel.objects.filter(field="") assert empty.count() == 1 assert empty[0] 
== mymodel mymodel.delete() assert empty.count() == 0 def test_char_lookup_contains(self): self.check_char_lookup("contains") def test_char_lookup_icontains(self): self.check_char_lookup("icontains") def check_char_lookup(self, lookup): lname = "field__" + lookup mymodel = CharListModel.objects.create(field=["mouldy", "rotten"]) mouldy = CharListModel.objects.filter(**{lname: "mouldy"}) assert mouldy.count() == 1 assert mouldy[0] == mymodel rotten = CharListModel.objects.filter(**{lname: "rotten"}) assert rotten.count() == 1 assert rotten[0] == mymodel clean = CharListModel.objects.filter(**{lname: "clean"}) assert clean.count() == 0 with pytest.raises(ValueError): list(CharListModel.objects.filter(**{lname: ["a", "b"]})) both = CharListModel.objects.filter( Q(**{lname: "mouldy"}) & Q(**{lname: "rotten"}) ) assert both.count() == 1 assert both[0] == mymodel either = CharListModel.objects.filter( Q(**{lname: "mouldy"}) | Q(**{lname: "clean"}) ) assert either.count() == 1 not_clean = CharListModel.objects.exclude(**{lname: "clean"}) assert not_clean.count() == 1 not_mouldy = CharListModel.objects.exclude(**{lname: "mouldy"}) assert not_mouldy.count() == 0 def test_char_len_lookup_empty(self): mymodel = CharListModel.objects.create(field=[]) empty = CharListModel.objects.filter(field__len=0) assert empty.count() == 1 assert empty[0] == mymodel one = CharListModel.objects.filter(field__len=1) assert one.count() == 0 one_or_more = CharListModel.objects.filter(field__len__gte=0) assert one_or_more.count() == 1 def test_char_len_lookup(self): mymodel = CharListModel.objects.create(field=["red", "expensive"]) empty = CharListModel.objects.filter(field__len=0) assert empty.count() == 0 one_or_more = CharListModel.objects.filter(field__len__gte=1) assert one_or_more.count() == 1 assert one_or_more[0] == mymodel two = CharListModel.objects.filter(field__len=2) assert two.count() == 1 assert two[0] == mymodel three = CharListModel.objects.filter(field__len=3) assert 
three.count() == 0 def test_char_default(self): mymodel = CharListDefaultModel.objects.create() assert mymodel.field == ["a", "d"] mymodel = CharListDefaultModel.objects.get(id=mymodel.id) assert mymodel.field == ["a", "d"] def test_char_position_lookup(self): mymodel = CharListModel.objects.create(field=["red", "blue"]) blue0 = CharListModel.objects.filter(field__0="blue") assert blue0.count() == 0 red0 = CharListModel.objects.filter(field__0="red") assert list(red0) == [mymodel] red0_red1 = CharListModel.objects.filter(field__0="red", field__1="red") assert red0_red1.count() == 0 red0_blue1 = CharListModel.objects.filter(field__0="red", field__1="blue") assert list(red0_blue1) == [mymodel] red0_or_blue0 = CharListModel.objects.filter( Q(field__0="red") | Q(field__0="blue") ) assert list(red0_or_blue0) == [mymodel] def test_char_position_lookup_repeat_fails(self): """ FIND_IN_SET returns the *first* position so repeats are not dealt with """ CharListModel.objects.create(field=["red", "red", "blue"]) red1 = CharListModel.objects.filter(field__1="red") assert list(red1) == [] # should be 'red' def test_char_position_lookup_too_long(self): CharListModel.objects.create(field=["red", "blue"]) red1 = CharListModel.objects.filter(field__2="blue") assert list(red1) == [] def test_int_easy(self): mymodel = IntListModel.objects.create(field=[1, 2]) assert mymodel.field == [1, 2] mymodel = IntListModel.objects.get(id=mymodel.id) assert mymodel.field == [1, 2] def test_int_contains_lookup(self): onetwo = IntListModel.objects.create(field=[1, 2]) ones = IntListModel.objects.filter(field__contains=1) assert ones.count() == 1 assert ones[0] == onetwo twos = IntListModel.objects.filter(field__contains=2) assert twos.count() == 1 assert twos[0] == onetwo threes = IntListModel.objects.filter(field__contains=3) assert threes.count() == 0 with pytest.raises(ValueError): list(IntListModel.objects.filter(field__contains=[1, 2])) ones_and_twos = IntListModel.objects.filter( 
Q(field__contains=1) & Q(field__contains=2) ) assert ones_and_twos.count() == 1 assert ones_and_twos[0] == onetwo ones_and_threes = IntListModel.objects.filter( Q(field__contains=1) & Q(field__contains=3) ) assert ones_and_threes.count() == 0 ones_or_threes = IntListModel.objects.filter( Q(field__contains=1) | Q(field__contains=3) ) assert ones_or_threes.count() == 1 no_three = IntListModel.objects.exclude(field__contains=3) assert no_three.count() == 1 no_one = IntListModel.objects.exclude(field__contains=1) assert no_one.count() == 0 def test_int_position_lookup(self): onetwo = IntListModel.objects.create(field=[1, 2]) one0 = IntListModel.objects.filter(field__0=1) assert list(one0) == [onetwo] two0 = IntListModel.objects.filter(field__0=2) assert two0.count() == 0 one0two1 = IntListModel.objects.filter(field__0=1, field__1=2) assert list(one0two1) == [onetwo] class TestListF(TestCase): def test_append_to_none(self): CharListModel.objects.create(field=[]) CharListModel.objects.update(field=ListF("field").append("first")) model = CharListModel.objects.get() assert model.field == ["first"] def test_append_to_one(self): CharListModel.objects.create(field=["big"]) CharListModel.objects.update(field=ListF("field").append("bad")) model = CharListModel.objects.get() assert model.field == ["big", "bad"] def test_append_to_some(self): CharListModel.objects.create(field=["big", "blue"]) CharListModel.objects.update(field=ListF("field").append("round")) model = CharListModel.objects.get() assert model.field == ["big", "blue", "round"] def test_append_to_multiple_objects(self): CharListModel.objects.create(field=["mouse"]) CharListModel.objects.create(field=["keyboard"]) CharListModel.objects.update(field=ListF("field").append("screen")) first, second = tuple(CharListModel.objects.all()) assert first.field == ["mouse", "screen"] assert second.field == ["keyboard", "screen"] def test_append_exists(self): CharListModel.objects.create(field=["nice"]) 
CharListModel.objects.update(field=ListF("field").append("nice")) model = CharListModel.objects.get() assert model.field == ["nice", "nice"] @override_mysql_variables(SQL_MODE="ANSI") def test_append_works_in_ansi_mode(self): CharListModel.objects.create() CharListModel.objects.update(field=ListF("field").append("big")) CharListModel.objects.update(field=ListF("field").append("bad")) model = CharListModel.objects.get() assert model.field == ["big", "bad"] def test_append_assignment(self): model = CharListModel.objects.create(field=["red"]) model.field = ListF("field").append("blue") model.save() model = CharListModel.objects.get() assert model.field == ["red", "blue"] def test_appendleft_to_none(self): CharListModel.objects.create(field=[]) CharListModel.objects.update(field=ListF("field").appendleft("first")) model = CharListModel.objects.get() assert model.field == ["first"] def test_appendleft_to_one(self): CharListModel.objects.create(field=["big"]) CharListModel.objects.update(field=ListF("field").appendleft("bad")) model = CharListModel.objects.get() assert model.field == ["bad", "big"] def test_appendleft_to_some(self): CharListModel.objects.create(field=["big", "blue"]) CharListModel.objects.update(field=ListF("field").appendleft("round")) model = CharListModel.objects.get() assert model.field == ["round", "big", "blue"] def test_appendleft_to_multiple_objects(self): CharListModel.objects.create(field=["mouse"]) CharListModel.objects.create(field=["keyboard"]) CharListModel.objects.update(field=ListF("field").appendleft("screen")) first, second = tuple(CharListModel.objects.all()) assert first.field == ["screen", "mouse"] assert second.field == ["screen", "keyboard"] def test_appendleft_exists(self): CharListModel.objects.create(field=["nice"]) CharListModel.objects.update(field=ListF("field").appendleft("nice")) model = CharListModel.objects.get() assert model.field == ["nice", "nice"] @override_mysql_variables(SQL_MODE="ANSI") def 
test_appendleft_works_in_ansi_mode(self): CharListModel.objects.create() CharListModel.objects.update(field=ListF("field").appendleft("big")) CharListModel.objects.update(field=ListF("field").appendleft("bad")) model = CharListModel.objects.get() assert model.field == ["bad", "big"] def test_appendleft_assignment(self): model = CharListModel.objects.create(field=["red"]) model.field = ListF("field").appendleft("blue") model.save() model = CharListModel.objects.get() assert model.field == ["blue", "red"] def test_pop_none(self): CharListModel.objects.create(field=[]) CharListModel.objects.update(field=ListF("field").pop()) model = CharListModel.objects.get() assert model.field == [] def test_pop_one(self): CharListModel.objects.create(field=["red"]) CharListModel.objects.update(field=ListF("field").pop()) model = CharListModel.objects.get() assert model.field == [] def test_pop_two(self): CharListModel.objects.create(field=["red", "blue"]) CharListModel.objects.update(field=ListF("field").pop()) model = CharListModel.objects.get() assert model.field == ["red"] def test_pop_three(self): CharListModel.objects.create(field=["green", "yellow", "p"]) CharListModel.objects.update(field=ListF("field").pop()) model = CharListModel.objects.get() assert model.field == ["green", "yellow"] def test_popleft_none(self): CharListModel.objects.create(field=[]) CharListModel.objects.update(field=ListF("field").popleft()) model = CharListModel.objects.get() assert model.field == [] def test_popleft_one(self): CharListModel.objects.create(field=["red"]) CharListModel.objects.update(field=ListF("field").popleft()) model = CharListModel.objects.get() assert model.field == [] def test_popleft_two(self): CharListModel.objects.create(field=["red", "blue"]) CharListModel.objects.update(field=ListF("field").popleft()) model = CharListModel.objects.get() assert model.field == ["blue"] def test_popleft_three(self): CharListModel.objects.create(field=["green", "yellow", "p"]) 
CharListModel.objects.update(field=ListF("field").popleft()) model = CharListModel.objects.get() assert model.field == ["yellow", "p"] class TestValidation(SimpleTestCase): def test_max_length(self): field = ListCharField(models.CharField(max_length=32), size=3, max_length=32) field.clean({"a", "b", "c"}, None) with pytest.raises(exceptions.ValidationError) as excinfo: field.clean({"a", "b", "c", "d"}, None) assert ( excinfo.value.messages[0] == "List contains 4 items, it should contain no more than 3." ) class TestCheck(SimpleTestCase): def test_field_checks(self): class InvalidListCharModel1(TemporaryModel): field = ListCharField(models.CharField(), max_length=32) errors = InvalidListCharModel1.check(actually_check=True) assert len(errors) == 1 assert errors[0].id == "django_mysql.E004" assert "Base field for list has errors" in errors[0].msg assert "max_length" in errors[0].msg def test_invalid_base_fields(self): class InvalidListCharModel2(TemporaryModel): field = ListCharField( models.ForeignKey("testapp.Author", on_delete=models.CASCADE), max_length=32, ) errors = InvalidListCharModel2.check(actually_check=True) assert len(errors) == 1 assert errors[0].id == "django_mysql.E005" assert "Base field for list must be" in errors[0].msg def test_max_length_including_base(self): class InvalidListCharModel3(TemporaryModel): field = ListCharField( models.CharField(max_length=32), size=2, max_length=32 ) errors = InvalidListCharModel3.check(actually_check=True) assert len(errors) == 1 assert errors[0].id == "django_mysql.E006" assert "Field can overrun" in errors[0].msg def test_max_length_missing_doesnt_crash(self): class InvalidListCharModel4(TemporaryModel): field = ListCharField(models.CharField(max_length=2), size=2) errors = InvalidListCharModel4.check(actually_check=True) assert len(errors) == 1 assert errors[0].id == "fields.E120" assert errors[0].msg == "CharFields must define a 'max_length' attribute." 
class TestDeconstruct(TestCase): def test_deconstruct(self): field = ListCharField(models.IntegerField(), max_length=32) name, path, args, kwargs = field.deconstruct() new = ListCharField(*args, **kwargs) assert new.base_field.__class__ == field.base_field.__class__ def test_deconstruct_with_size(self): field = ListCharField(models.IntegerField(), size=3, max_length=32) name, path, args, kwargs = field.deconstruct() new = ListCharField(*args, **kwargs) assert new.size == field.size def test_deconstruct_args(self): field = ListCharField(models.CharField(max_length=5), max_length=32) name, path, args, kwargs = field.deconstruct() new = ListCharField(*args, **kwargs) assert new.base_field.max_length == field.base_field.max_length class TestMigrationWriter(TestCase): def test_makemigrations(self): field = ListCharField(models.CharField(max_length=5), max_length=32) statement, imports = MigrationWriter.serialize(field) # The order of the output max_length/size statements varies by # python version, hence a little regexp to match them assert re.compile( r"""^django_mysql\.models\.ListCharField\( models\.CharField\(max_length=5\),\ # space here ( max_length=32,\ size=None| size=None,\ max_length=32 ) \)$ """, re.VERBOSE, ).match(statement) def test_makemigrations_with_size(self): field = ListCharField(models.CharField(max_length=5), max_length=32, size=5) statement, imports = MigrationWriter.serialize(field) # The order of the output max_length/size statements varies by # python version, hence a little regexp to match them assert re.compile( r"""^django_mysql\.models\.ListCharField\( models\.CharField\(max_length=5\),\ # space here ( max_length=32,\ size=5| size=5,\ max_length=32 ) \)$ """, re.VERBOSE, ).match(statement) class TestMigrations(TransactionTestCase): @override_settings( MIGRATION_MODULES={"testapp": "tests.testapp.list_default_migrations"} ) def test_adding_field_with_default(self): table_name = "testapp_intlistdefaultmodel" table_names = 
connection.introspection.table_names with connection.cursor() as cursor: assert table_name not in table_names(cursor) call_command( "migrate", "testapp", verbosity=0, skip_checks=True, interactive=False ) with connection.cursor() as cursor: assert table_name in table_names(cursor) call_command( "migrate", "testapp", "zero", verbosity=0, skip_checks=True, interactive=False, ) with connection.cursor() as cursor: assert table_name not in table_names(cursor) class TestSerialization(SimpleTestCase): def test_dumping(self): instance = CharListModel(field=["big", "comfy"]) data = json.loads(serializers.serialize("json", [instance]))[0] field = data["fields"]["field"] assert sorted(field.split(",")) == ["big", "comfy"] def test_loading(self): test_data = """ [{"fields": {"field": "big,leather,comfy"}, "model": "testapp.CharListModel", "pk": null}] """ objs = list(serializers.deserialize("json", test_data)) instance = objs[0].object assert instance.field == ["big", "leather", "comfy"] class TestDescription(SimpleTestCase): def test_char(self): field = ListCharField(models.CharField(max_length=5), max_length=32) assert field.description == "List of String (up to %(max_length)s)" def test_int(self): field = ListCharField(models.IntegerField(), max_length=32) assert field.description == "List of Integer" class TestFormField(SimpleTestCase): def test_model_field_formfield(self): model_field = ListCharField(models.CharField(max_length=27)) form_field = model_field.formfield() assert isinstance(form_field, SimpleListField) assert isinstance(form_field.base_field, forms.CharField) assert form_field.base_field.max_length == 27 def test_model_field_formfield_size(self): model_field = ListCharField(models.IntegerField(), size=4) form_field = model_field.formfield() assert isinstance(form_field, SimpleListField) assert form_field.max_length == 4
arnau126/django-mysql
tests/testapp/test_listcharfield.py
Python
bsd-3-clause
21,270
/*
 * MSR Tools - tools for mining software repositories
 *
 * Copyright (C) 2010-2011  Semyon Kirnosenko
 */

using System;
using System.Linq;

namespace MSR.Data.Entities.DSL.Selection
{
	/// <summary>
	/// Fluent-DSL extension methods that narrow commit/file selections by their
	/// relationship to <see cref="Modification"/> entities.
	/// </summary>
	public static class ModificationSelectionExtensions
	{
		/// <summary>
		/// Enters the modification-selection sub-expression of the DSL chain.
		/// </summary>
		public static ModificationSelectionExpression Modifications(this IRepositorySelectionExpression parentExp)
		{
			return new ModificationSelectionExpression(parentExp);
		}
		/// <summary>
		/// Keeps only commits that own at least one modification in the current
		/// modification selection. Distinct() collapses the one-to-many join.
		/// </summary>
		public static CommitSelectionExpression ContainModifications(this CommitSelectionExpression parentExp)
		{
			return parentExp.Reselect(s =>
				(
					from c in s
					join m in parentExp.Selection<Modification>() on c.ID equals m.CommitID
					select c
				).Distinct()
			);
		}
		/// <summary>
		/// Keeps only files that are referenced by at least one modification in the
		/// current modification selection.
		/// </summary>
		public static ProjectFileSelectionExpression ContainModifications(this ProjectFileSelectionExpression parentExp)
		{
			return parentExp.Reselect(s =>
				(
					from f in s
					join m in parentExp.Selection<Modification>() on f.ID equals m.FileID
					select f
				).Distinct()
			);
		}
		/// <summary>
		/// Keeps only commits that touch (via any modification in the full
		/// Queryable set, not just the selection) a file in the current file selection.
		/// </summary>
		public static CommitSelectionExpression TouchFiles(this CommitSelectionExpression parentExp)
		{
			return parentExp.Reselect(s =>
				(
					from c in s
					join m in parentExp.Queryable<Modification>() on c.ID equals m.CommitID
					join f in parentExp.Selection<ProjectFile>() on m.FileID equals f.ID
					select c
				).Distinct()
			);
		}
		/// <summary>
		/// Keeps only files that were touched (via any modification) by a commit in
		/// the current commit selection.
		/// </summary>
		public static ProjectFileSelectionExpression TouchedInCommits(this ProjectFileSelectionExpression parentExp)
		{
			return parentExp.Reselect(s =>
				(
					from f in s
					join m in parentExp.Queryable<Modification>() on f.ID equals m.FileID
					join c in parentExp.Selection<Commit>() on m.CommitID equals c.ID
					select f
				).Distinct()
			);
		}
	}

	/// <summary>
	/// Selection expression over <see cref="Modification"/> entities, supporting
	/// narrowing by the current commit and file selections.
	/// </summary>
	public class ModificationSelectionExpression : EntitySelectionExpression<Modification,ModificationSelectionExpression>
	{
		public ModificationSelectionExpression(IRepositorySelectionExpression parentExp)
			: base(parentExp)
		{
		}
		/// <summary>
		/// Keeps only modifications belonging to commits in the current commit selection.
		/// NOTE(review): unlike the extension methods above no Distinct() is applied —
		/// presumably a modification joins at most one commit, so duplicates cannot arise.
		/// </summary>
		public ModificationSelectionExpression InCommits()
		{
			return Reselect((s) =>
				from m in s
				join c in Selection<Commit>() on m.CommitID equals c.ID
				select m
			);
		}
		/// <summary>
		/// Keeps only modifications that touch files in the current file selection.
		/// </summary>
		public ModificationSelectionExpression InFiles()
		{
			return Reselect((s) =>
				from m in s
				join f in Selection<ProjectFile>() on m.FileID equals f.ID
				select m
			);
		}
		// Factory hook used by the base class to continue the fluent chain.
		protected override ModificationSelectionExpression Recreate()
		{
			return new ModificationSelectionExpression(this);
		}
	}
}
kirnosenko/msr-tools
src/MSR/Data/Entities/DSL/Selection/ModificationSelectionExpression.cs
C#
bsd-3-clause
2,527
/*
 * Copyright 2002-2014 Drew Noakes
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 *
 * More information about this project is available at:
 *
 *    https://drewnoakes.com/code/exif/
 *    https://github.com/drewnoakes/metadata-extractor
 */
package com.drew.metadata.exif.makernotes;

import com.drew.lang.annotations.NotNull;
import com.drew.lang.annotations.Nullable;
import com.drew.metadata.TagDescriptor;

import static com.drew.metadata.exif.makernotes.CasioType2MakernoteDirectory.*;

/**
 * Provides human-readable string representations of tag values stored in a {@link CasioType2MakernoteDirectory}.
 *
 * Each getter returns {@code null} when the underlying tag is absent, and an
 * {@code "Unknown (n)"} string for raw values not covered by the mapping.
 *
 * @author Drew Noakes https://drewnoakes.com
 */
public class CasioType2MakernoteDescriptor extends TagDescriptor<CasioType2MakernoteDirectory>
{
    public CasioType2MakernoteDescriptor(@NotNull CasioType2MakernoteDirectory directory)
    {
        super(directory);
    }

    /**
     * Routes a tag type to its dedicated description method; any tag not listed
     * here falls back to the generic rendering in {@link TagDescriptor}.
     */
    @Override
    @Nullable
    public String getDescription(int tagType)
    {
        switch (tagType) {
            case TAG_THUMBNAIL_DIMENSIONS:
                return getThumbnailDimensionsDescription();
            case TAG_THUMBNAIL_SIZE:
                return getThumbnailSizeDescription();
            case TAG_THUMBNAIL_OFFSET:
                return getThumbnailOffsetDescription();
            case TAG_QUALITY_MODE:
                return getQualityModeDescription();
            case TAG_IMAGE_SIZE:
                return getImageSizeDescription();
            case TAG_FOCUS_MODE_1:
                return getFocusMode1Description();
            case TAG_ISO_SENSITIVITY:
                return getIsoSensitivityDescription();
            case TAG_WHITE_BALANCE_1:
                return getWhiteBalance1Description();
            case TAG_FOCAL_LENGTH:
                return getFocalLengthDescription();
            case TAG_SATURATION:
                return getSaturationDescription();
            case TAG_CONTRAST:
                return getContrastDescription();
            case TAG_SHARPNESS:
                return getSharpnessDescription();
            case TAG_PRINT_IMAGE_MATCHING_INFO:
                return getPrintImageMatchingInfoDescription();
            case TAG_PREVIEW_THUMBNAIL:
                return getCasioPreviewThumbnailDescription();
            case TAG_WHITE_BALANCE_BIAS:
                return getWhiteBalanceBiasDescription();
            case TAG_WHITE_BALANCE_2:
                return getWhiteBalance2Description();
            case TAG_OBJECT_DISTANCE:
                return getObjectDistanceDescription();
            case TAG_FLASH_DISTANCE:
                return getFlashDistanceDescription();
            case TAG_RECORD_MODE:
                return getRecordModeDescription();
            case TAG_SELF_TIMER:
                return getSelfTimerDescription();
            case TAG_QUALITY:
                return getQualityDescription();
            case TAG_FOCUS_MODE_2:
                return getFocusMode2Description();
            case TAG_TIME_ZONE:
                return getTimeZoneDescription();
            case TAG_CCD_ISO_SENSITIVITY:
                return getCcdIsoSensitivityDescription();
            case TAG_COLOUR_MODE:
                return getColourModeDescription();
            case TAG_ENHANCEMENT:
                return getEnhancementDescription();
            case TAG_FILTER:
                return getFilterDescription();
            default:
                return super.getDescription(tagType);
        }
    }

    // --- Simple indexed lookups: value 0 maps to the first label, 1 to the next, etc. ---

    @Nullable
    public String getFilterDescription()
    {
        return getIndexedDescription(TAG_FILTER, "Off");
    }

    @Nullable
    public String getEnhancementDescription()
    {
        return getIndexedDescription(TAG_ENHANCEMENT, "Off");
    }

    @Nullable
    public String getColourModeDescription()
    {
        return getIndexedDescription(TAG_COLOUR_MODE, "Off");
    }

    @Nullable
    public String getCcdIsoSensitivityDescription()
    {
        return getIndexedDescription(TAG_CCD_ISO_SENSITIVITY, "Off", "On");
    }

    // Raw string pass-through: the tag value is already human readable.
    @Nullable
    public String getTimeZoneDescription()
    {
        return _directory.getString(TAG_TIME_ZONE);
    }

    @Nullable
    public String getFocusMode2Description()
    {
        Integer value = _directory.getInteger(TAG_FOCUS_MODE_2);
        if (value == null)
            return null;
        switch (value) {
            case 1:
                return "Fixation";
            case 6:
                return "Multi-Area Focus";
            default:
                return "Unknown (" + value + ")";
        }
    }

    // Indexed lookups below pass a base offset: the first label corresponds to
    // that raw value rather than zero.

    @Nullable
    public String getQualityDescription()
    {
        return getIndexedDescription(TAG_QUALITY, 3, "Fine");
    }

    @Nullable
    public String getSelfTimerDescription()
    {
        return getIndexedDescription(TAG_SELF_TIMER, 1, "Off");
    }

    @Nullable
    public String getRecordModeDescription()
    {
        return getIndexedDescription(TAG_RECORD_MODE, 2, "Normal");
    }

    @Nullable
    public String getFlashDistanceDescription()
    {
        return getIndexedDescription(TAG_FLASH_DISTANCE, "Off");
    }

    @Nullable
    public String getObjectDistanceDescription()
    {
        Integer value = _directory.getInteger(TAG_OBJECT_DISTANCE);
        if (value == null)
            return null;
        return Integer.toString(value) + " mm";
    }

    @Nullable
    public String getWhiteBalance2Description()
    {
        Integer value = _directory.getInteger(TAG_WHITE_BALANCE_2);
        if (value == null)
            return null;
        switch (value) {
            case 0:
                return "Manual";
            case 1:
                return "Auto"; // unsure about this
            case 4:
                return "Flash"; // unsure about this
            case 12:
                return "Flash";
            default:
                return "Unknown (" + value + ")";
        }
    }

    @Nullable
    public String getWhiteBalanceBiasDescription()
    {
        return _directory.getString(TAG_WHITE_BALANCE_BIAS);
    }

    // Summarises the embedded preview image as a byte count rather than dumping it.
    @Nullable
    public String getCasioPreviewThumbnailDescription()
    {
        final byte[] bytes = _directory.getByteArray(TAG_PREVIEW_THUMBNAIL);
        if (bytes == null)
            return null;
        return "<" + bytes.length + " bytes of image data>";
    }

    @Nullable
    public String getPrintImageMatchingInfoDescription()
    {
        // TODO research PIM specification http://www.ozhiker.com/electronics/pjmt/jpeg_info/pim.html
        return _directory.getString(TAG_PRINT_IMAGE_MATCHING_INFO);
    }

    @Nullable
    public String getSharpnessDescription()
    {
        return getIndexedDescription(TAG_SHARPNESS, "-1", "Normal", "+1");
    }

    @Nullable
    public String getContrastDescription()
    {
        return getIndexedDescription(TAG_CONTRAST, "-1", "Normal", "+1");
    }

    @Nullable
    public String getSaturationDescription()
    {
        return getIndexedDescription(TAG_SATURATION, "-1", "Normal", "+1");
    }

    // Raw value is stored in tenths of a millimetre.
    @Nullable
    public String getFocalLengthDescription()
    {
        Double value = _directory.getDoubleObject(TAG_FOCAL_LENGTH);
        if (value == null)
            return null;
        return Double.toString(value / 10d) + " mm";
    }

    @Nullable
    public String getWhiteBalance1Description()
    {
        return getIndexedDescription(
            TAG_WHITE_BALANCE_1,
            "Auto",
            "Daylight",
            "Shade",
            "Tungsten",
            "Florescent",
            "Manual"
        );
    }

    @Nullable
    public String getIsoSensitivityDescription()
    {
        Integer value = _directory.getInteger(TAG_ISO_SENSITIVITY);
        if (value == null)
            return null;
        switch (value) {
            case 3:
                return "50";
            case 4:
                return "64";
            case 6:
                return "100";
            case 9:
                return "200";
            default:
                return "Unknown (" + value + ")";
        }
    }

    @Nullable
    public String getFocusMode1Description()
    {
        return getIndexedDescription(TAG_FOCUS_MODE_1, "Normal", "Macro");
    }

    @Nullable
    public String getImageSizeDescription()
    {
        Integer value = _directory.getInteger(TAG_IMAGE_SIZE);
        if (value == null)
            return null;
        switch (value) {
            case 0:
                return "640 x 480 pixels";
            case 4:
                return "1600 x 1200 pixels";
            case 5:
                return "2048 x 1536 pixels";
            case 20:
                return "2288 x 1712 pixels";
            case 21:
                return "2592 x 1944 pixels";
            case 22:
                return "2304 x 1728 pixels";
            case 36:
                return "3008 x 2008 pixels";
            default:
                return "Unknown (" + value + ")";
        }
    }

    @Nullable
    public String getQualityModeDescription()
    {
        return getIndexedDescription(TAG_QUALITY_MODE, 1, "Fine", "Super Fine");
    }

    @Nullable
    public String getThumbnailOffsetDescription()
    {
        return _directory.getString(TAG_THUMBNAIL_OFFSET);
    }

    @Nullable
    public String getThumbnailSizeDescription()
    {
        Integer value = _directory.getInteger(TAG_THUMBNAIL_SIZE);
        if (value == null)
            return null;
        return Integer.toString(value) + " bytes";
    }

    @Nullable
    public String getThumbnailDimensionsDescription()
    {
        int[] dimensions = _directory.getIntArray(TAG_THUMBNAIL_DIMENSIONS);
        // Fall back to the raw string if the tag is missing or not a width/height pair.
        if (dimensions == null || dimensions.length != 2)
            return _directory.getString(TAG_THUMBNAIL_DIMENSIONS);
        return dimensions[0] + " x " + dimensions[1] + " pixels";
    }
}
IfflakeDeveloper/Mystic
src/com/drew/metadata/exif/makernotes/CasioType2MakernoteDescriptor.java
Java
bsd-3-clause
10,104
<?php

// Partial view rendering the create/update form for a DraftPlanGroup record.
// Rendered from both the `create` and `update` views; the submit-button label
// and CSS class switch on whether $model is a new record.

use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* @var $this yii\web\View */
/* @var $model lukisongroup\master\models\DraftPlanGroup */
/* @var $form yii\widgets\ActiveForm */
?>

<div class="draft-plan-group-form">

    <?php $form = ActiveForm::begin(); ?>

    <?php // One text input per model attribute; validation rules come from the model. ?>
    <?= $form->field($model, 'TGL_START')->textInput() ?>

    <?= $form->field($model, 'SCL_NM')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'GEO_ID')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'LAYER_ID')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'DAY_ID')->textInput() ?>

    <?= $form->field($model, 'DAY_VALUE')->textInput() ?>

    <?= $form->field($model, 'USER_ID')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'STATUS')->textInput() ?>

    <?= $form->field($model, 'CREATED_BY')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'CREATED_AT')->textInput() ?>

    <?= $form->field($model, 'UPDATED_BY')->textInput(['maxlength' => true]) ?>

    <?= $form->field($model, 'UPDATED_AT')->textInput() ?>

    <div class="form-group">
        <?= Html::submitButton($model->isNewRecord ? 'Create' : 'Update', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
adem-team/advanced
lukisongroup/master/views/draft-plan-group/_form.php
PHP
bsd-3-clause
1,327
package main import ( "github.com/GwentAPI/gwentapi/app" "github.com/GwentAPI/gwentapi/dataLayer/dal" "github.com/GwentAPI/gwentapi/dataLayer/factory" "github.com/GwentAPI/gwentapi/dataLayer/models" "github.com/GwentAPI/gwentapi/helpers" "github.com/goadesign/goa" "github.com/goadesign/goa/middleware" ) // CardController implements the card resource. type CardController struct { *goa.Controller } // NewCardController creates a card controller. func NewCardController(service *goa.Service) *CardController { return &CardController{Controller: service.NewController("CardController")} } // CardFaction runs the cardFaction action. func (c *CardController) CardFaction(ctx *app.CardFactionCardContext) error { // CardController_CardFaction: start_implement dataStore := &dal.DataStore{} dataStore.GetSession() // Close the session defer dataStore.Close() dc := dal.NewDalCard(dataStore) df := dal.NewDalFaction(dataStore) factionUUID, errFactionUUID := helpers.DecodeUUID(ctx.FactionID) if errFactionUUID != nil { return ctx.NotFound() } faction, errFaction := df.Fetch(factionUUID) collectionCount, err := dc.CountFromFaction(faction.ID) if helpers.IsNotFoundError(errFaction) || helpers.IsNotFoundError(err) { return ctx.NotFound() } limit, offset := helpers.ValidateLimitOffset(collectionCount, ctx.Limit, ctx.Offset) cards, err := dc.FetchFromFactionPaging(faction.ID, limit, offset) if err != nil || errFaction != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "CardFaction", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", err.Error()) return ctx.InternalServerError() } // CardController_CardFaction: end_implement res, lastModified, _ := factory.CreatePageCard(cards, "factions/"+ctx.FactionID, collectionCount, limit, offset, ctx.Lang) helpers.LastModified(ctx.ResponseData, lastModified) if ctx.IfModifiedSince != nil { if !helpers.IsModified(*ctx.IfModifiedSince, 
lastModified) { return ctx.NotModified() } } return ctx.OK(res) } // CardFaction runs the cardFaction action. func (c *CardController) CardRarity(ctx *app.CardRarityCardContext) error { // CardController_CardRarity: start_implement dataStore := &dal.DataStore{} dataStore.GetSession() // Close the session defer dataStore.Close() dv := dal.NewDalVariation(dataStore) dr := dal.NewDalRarity(dataStore) rarityUUID, errRarityUUID := helpers.DecodeUUID(ctx.RarityID) if errRarityUUID != nil { return ctx.NotFound() } rarity, errRarity := dr.Fetch(rarityUUID) collectionCount, err := dv.CountFromRarity(rarity.ID) if helpers.IsNotFoundError(errRarity) || helpers.IsNotFoundError(err) { return ctx.NotFound() } limit, offset := helpers.ValidateLimitOffset(collectionCount, ctx.Limit, ctx.Offset) cardIDs, err := dv.FetchCardIDFromRarityPaging(rarity.ID, limit, offset) dc := dal.NewDalCard(dataStore) cards, errCard := dc.FetchFromArray(*cardIDs) if err != nil || errRarity != nil || errCard != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "CardRarity", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", err.Error()) return ctx.InternalServerError() } // CardController_CardRarity: end_implement res, lastModified, _ := factory.CreatePageCard(cards, "rarities/"+ctx.RarityID, collectionCount, limit, offset, ctx.Lang) helpers.LastModified(ctx.ResponseData, lastModified) if ctx.IfModifiedSince != nil { if !helpers.IsModified(*ctx.IfModifiedSince, lastModified) { return ctx.NotModified() } } return ctx.OK(res) } // CardLeader runs the cardLeader action. 
func (c *CardController) CardLeader(ctx *app.CardLeaderCardContext) error { // CardController_CardLeader: start_implement dataStore := &dal.DataStore{} dataStore.GetSession() // Close the session defer dataStore.Close() dc := dal.NewDalCard(dataStore) dg := dal.NewDalGroup(dataStore) group, errGroup := dg.FetchWithName("Leader") collectionCount, err := dc.CountLeader(group.ID) if helpers.IsNotFoundError(err) { return ctx.NotFound() } limit, offset := helpers.ValidateLimitOffset(collectionCount, ctx.Limit, ctx.Offset) cards, err := dc.FetchLeaderPaging(group.ID, limit, offset) if helpers.IsNotFoundError(err) { return ctx.NotFound() } else if err != nil || errGroup != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "CardLeader", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", err.Error()) return ctx.InternalServerError() } // CardController_CardLeader: end_implement res, lastModified, _ := factory.CreatePageCard(cards, "leaders", collectionCount, limit, offset, ctx.Lang) helpers.LastModified(ctx.ResponseData, lastModified) if ctx.IfModifiedSince != nil { if !helpers.IsModified(*ctx.IfModifiedSince, lastModified) { return ctx.NotModified() } } return ctx.OK(res) } // CardVariation runs the cardVariation action. 
// Returns a single variation (art/availability record) of a card, addressed by
// both the card UUID and the variation UUID from the URL.
// Responses: 200, 304 on If-Modified-Since hit, 404 for malformed or unknown
// ids, 500 on database/factory failure.
func (c *CardController) CardVariation(ctx *app.CardVariationCardContext) error {
	// CardController_CardVariation: start_implement
	dataStore := &dal.DataStore{}
	dataStore.GetSession()
	// Close the session
	defer dataStore.Close()
	// NOTE(review): despite the name, dc is the *variation* DAL here.
	dc := dal.NewDalVariation(dataStore)
	uuid, err := helpers.DecodeUUID(ctx.CardID)
	variationUUID, errVariation := helpers.DecodeUUID(ctx.VariationID)
	if err != nil || errVariation != nil {
		// Either UUID malformed: treat as unknown resource.
		return ctx.NotFound()
	}
	// Fetch by variation UUID only; the card UUID is passed to the factory
	// below for link construction — it is not re-validated against the DB here.
	variation, err := dc.Fetch(variationUUID)
	if helpers.IsNotFoundError(err) {
		return ctx.NotFound()
	} else if err != nil {
		ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card",
			"action", "CardVariation", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL,
			"databaseError", err.Error())
		return ctx.InternalServerError()
	}
	// CardController_CardVariation: end_implement
	res, err := factory.CreateVariation(variation, uuid, dataStore)
	if err != nil {
		ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card",
			"action", "CardVariation", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL,
			"databaseError", err.Error())
		return ctx.InternalServerError()
	}
	// Set Last-Modified on every response, then honour conditional GETs.
	helpers.LastModified(ctx.ResponseData, variation.Last_Modified)
	if ctx.IfModifiedSince != nil {
		if !helpers.IsModified(*ctx.IfModifiedSince, variation.Last_Modified) {
			return ctx.NotModified()
		}
	}
	return ctx.OK(res)
}

// CardVariations runs the cardVariations action.
func (c *CardController) CardVariations(ctx *app.CardVariationsCardContext) error { // CardController_CardVariations: start_implement dataStore := &dal.DataStore{} dataStore.GetSession() // Close the session defer dataStore.Close() dc := dal.NewDalCard(dataStore) dv := dal.NewDalVariation(dataStore) uuid, err := helpers.DecodeUUID(ctx.CardID) if err != nil { return ctx.NotFound() } card, err := dc.Fetch(uuid) variations, errVariation := dv.FetchFromCardID(card.ID) if helpers.IsNotFoundError(err) || helpers.IsNotFoundError(errVariation) { return ctx.NotFound() } else if err != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "CardVariations", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", err.Error()) return ctx.InternalServerError() } // CardController_CardVariations: end_implement res, lastModified, errVariation := factory.CreateVariationCollection(variations, card.UUID, dataStore) if errVariation != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "CardVariations", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", errVariation.Error()) return ctx.InternalServerError() } helpers.LastModified(ctx.ResponseData, lastModified) if ctx.IfModifiedSince != nil { if !helpers.IsModified(*ctx.IfModifiedSince, lastModified) { return ctx.NotModified() } } return ctx.OK(res) } // List runs the list action. 
// Lists cards, paginated via ctx.Limit/ctx.Offset. When a ?name= filter of at
// least 3 characters is supplied, results are restricted to matching names in
// the requested language; otherwise all cards are returned.
// Responses: 200, 304 on If-Modified-Since hit, 500 on database failure.
func (c *CardController) List(ctx *app.ListCardContext) error {
	// CardController_List: start_implement
	dataStore := &dal.DataStore{}
	dataStore.GetSession()
	// Close the session
	defer dataStore.Close()
	dc := dal.NewDalCard(dataStore)
	var cards *[]models.Card
	var serviceError error
	var resultCount int
	// Name filters shorter than 3 characters are ignored (full listing instead).
	if ctx.Name != nil && len(*ctx.Name) >= 3 {
		query := dal.CardQuery{Name: *ctx.Name, Lang: ctx.Lang}
		cards, resultCount, serviceError = dc.FetchQueryPaging(ctx.Limit, ctx.Offset, query)
	} else {
		cards, resultCount, serviceError = dc.FetchAllPaging(ctx.Limit, ctx.Offset)
	}
	if serviceError != nil {
		ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card",
			"action", "List", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL,
			"databaseError", serviceError.Error())
		return ctx.InternalServerError()
	}
	// CardController_List: end_implement
	res, lastModified, _ := factory.CreatePageCard(cards, "", resultCount, ctx.Limit, ctx.Offset, ctx.Lang)
	// Set Last-Modified on every response, then honour conditional GETs.
	helpers.LastModified(ctx.ResponseData, lastModified)
	if ctx.IfModifiedSince != nil {
		if !helpers.IsModified(*ctx.IfModifiedSince, lastModified) {
			return ctx.NotModified()
		}
	}
	return ctx.OK(res)
}

// Show runs the show action.
func (c *CardController) Show(ctx *app.ShowCardContext) error { // CardController_Show: start_implement dataStore := &dal.DataStore{} dataStore.GetSession() // Close the session defer dataStore.Close() dc := dal.NewDalCard(dataStore) uuid, err := helpers.DecodeUUID(ctx.CardID) if err != nil { return ctx.NotFound() } card, err := dc.Fetch(uuid) if helpers.IsNotFoundError(err) { return ctx.NotFound() } else if err != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "Show", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", err.Error()) return ctx.InternalServerError() } // CardController_Show: end_implement res, errFactory := factory.CreateCard(card, dataStore, ctx.Lang) if errFactory != nil { ctx.ResponseData.Service.LogError("InternalServerError", "req_id", middleware.ContextRequestID(ctx), "ctrl", "Card", "action", "Show", ctx.RequestData.Request.Method, ctx.RequestData.Request.URL, "databaseError", errFactory.Error()) return ctx.InternalServerError() } helpers.LastModified(ctx.ResponseData, card.Last_Modified) if ctx.IfModifiedSince != nil { if !helpers.IsModified(*ctx.IfModifiedSince, card.Last_Modified) { return ctx.NotModified() } } return ctx.OK(res) }
GwentAPI/gwentapi
card.go
GO
bsd-3-clause
10,891
# Copyright (c) 2011 Tencent Inc. # All rights reserved. # # Author: Michaelpeng <michaelpeng@tencent.com> # Date: October 20, 2011 """ This is the cc_target module which is the super class of all of the scons cc targets, like cc_library, cc_binary. """ import os import subprocess import Queue import blade import configparse import console import build_rules from blade_util import var_to_list, stable_unique from target import Target class CcTarget(Target): """A scons cc target subclass. This class is derived from SconsTarget and it is the base class of cc_library, cc_binary etc. """ def __init__(self, name, target_type, srcs, deps, visibility, warning, defs, incs, export_incs, optimize, extra_cppflags, extra_linkflags, blade, kwargs): """Init method. Init the cc target. """ srcs = var_to_list(srcs) deps = var_to_list(deps) defs = var_to_list(defs) incs = var_to_list(incs) export_incs = var_to_list(export_incs) opt = var_to_list(optimize) extra_cppflags = var_to_list(extra_cppflags) extra_linkflags = var_to_list(extra_linkflags) Target.__init__(self, name, target_type, srcs, deps, visibility, blade, kwargs) self.data['warning'] = warning self.data['defs'] = defs self.data['incs'] = incs self.data['export_incs'] = export_incs self.data['optimize'] = opt self.data['extra_cppflags'] = extra_cppflags self.data['extra_linkflags'] = extra_linkflags self._check_defs() self._check_incorrect_no_warning() def _check_deprecated_deps(self): """Check whether it depends upon a deprecated library. """ for key in self.deps: dep = self.target_database.get(key) if dep and dep.data.get('deprecated'): replaced_deps = dep.deps if replaced_deps: console.warning('%s: //%s has been deprecated, ' 'please depends on //%s:%s' % ( self.fullname, dep.fullname, replaced_deps[0][0], replaced_deps[0][1])) def _prepare_to_generate_rule(self): """Should be overridden. """ self._check_deprecated_deps() self._clone_env() def _clone_env(self): """Select env. 
""" env_name = self._env_name() warning = self.data.get('warning', '') if warning == 'yes': self._write_rule('%s = env_with_error.Clone()' % env_name) else: self._write_rule('%s = env_no_warning.Clone()' % env_name) __cxx_keyword_list = frozenset([ 'and', 'and_eq', 'alignas', 'alignof', 'asm', 'auto', 'bitand', 'bitor', 'bool', 'break', 'case', 'catch', 'char', 'char16_t', 'char32_t', 'class', 'compl', 'const', 'constexpr', 'const_cast', 'continue', 'decltype', 'default', 'delete', 'double', 'dynamic_cast', 'else', 'enum', 'explicit', 'export', 'extern', 'false', 'float', 'for', 'friend', 'goto', 'if', 'inline', 'int', 'long', 'mutable', 'namespace', 'new', 'noexcept', 'not', 'not_eq', 'nullptr', 'operator', 'or', 'or_eq', 'private', 'protected', 'public', 'register', 'reinterpret_cast', 'return', 'short', 'signed', 'sizeof', 'static', 'static_assert', 'static_cast', 'struct', 'switch', 'template', 'this', 'thread_local', 'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'union', 'unsigned', 'using', 'virtual', 'void', 'volatile', 'wchar_t', 'while', 'xor', 'xor_eq']) def _check_defs(self): """_check_defs. It will warn if user defines cpp keyword in defs list. """ defs_list = self.data.get('defs', []) for macro in defs_list: pos = macro.find('=') if pos != -1: macro = macro[0:pos] if macro in CcTarget.__cxx_keyword_list: console.warning('DO NOT define c++ keyword %s as macro' % macro) def _check_incorrect_no_warning(self): """check if warning=no is correctly used or not. """ warning = self.data.get('warning', 'yes') srcs = self.srcs if not srcs or warning != 'no': return keywords_list = self.blade.get_sources_keyword_list() for keyword in keywords_list: if keyword in self.path: return illegal_path_list = [] for keyword in keywords_list: illegal_path_list += [s for s in srcs if not keyword in s] if illegal_path_list: console.warning("//%s:%s : warning='no' is only allowed " "for code in thirdparty." 
% ( self.key[0], self.key[1])) def _objs_name(self): """_objs_name. Concatenating target path, target name to be objs var and returns. """ return 'objs_%s' % self._generate_variable_name(self.path, self.name) def _prebuilt_cc_library_path(self, prefer_dynamic=False): """ Return source and target path of the prebuilt cc library. When both .so and .a exist, return .so if prefer_dynamic is True. Otherwise return the existing one. """ a_src_path = self._prebuilt_cc_library_pathname(dynamic=False) so_src_path = self._prebuilt_cc_library_pathname(dynamic=True) libs = (a_src_path, so_src_path) # Ordered by priority if prefer_dynamic: libs = (so_src_path, a_src_path) source = '' for lib in libs: if os.path.exists(lib): source = lib break if not source: console.error_exit('%s: Can not find either %s or %s' % ( self.fullname, libs[0], libs[1])) target = self._target_file_path(os.path.basename(source)) return source, target def _prebuilt_cc_library_pathname(self, dynamic=False): options = self.blade.get_options() suffix = 'a' if dynamic: suffix = 'so' return os.path.join(self.path, 'lib%s_%s' % (options.m, options.profile), 'lib%s.%s' % (self.name, suffix)) def _prebuilt_cc_library_dynamic_soname(self, so): """Get the soname of prebuilt shared library. """ soname = None output = subprocess.check_output('objdump -p %s' % so, shell=True) for line in output.splitlines(): parts = line.split() if len(parts) == 2 and parts[0] == 'SONAME': soname = parts[1] break return soname def _setup_cc_flags(self): """_setup_cc_flags. """ env_name = self._env_name() flags_from_option, incs_list = self._get_cc_flags() if flags_from_option: self._write_rule('%s.Append(CPPFLAGS=%s)' % (env_name, flags_from_option)) if incs_list: self._write_rule('%s.Append(CPPPATH=%s)' % (env_name, incs_list)) def _setup_as_flags(self): """_setup_as_flags. 
""" env_name = self._env_name() as_flags, aspp_flags = self._get_as_flags() if as_flags: self._write_rule('%s.Append(ASFLAGS=%s)' % (env_name, as_flags)) if aspp_flags: self._write_rule('%s.Append(ASPPFLAGS=%s)' % (env_name, aspp_flags)) def _setup_link_flags(self): """linkflags. """ extra_linkflags = self.data.get('extra_linkflags') if extra_linkflags: self._write_rule('%s.Append(LINKFLAGS=%s)' % (self._env_name(), extra_linkflags)) def _get_optimize_flags(self): """get optimize flags such as -O2""" oflags = [] opt_list = self.data.get('optimize') if not opt_list: cc_config = configparse.blade_config.get_config('cc_config') opt_list = cc_config['optimize'] if opt_list: for flag in opt_list: if flag.startswith('-'): oflags.append(flag) else: oflags.append('-' + flag) else: oflags = ['-O2'] return oflags def _get_cc_flags(self): """_get_cc_flags. Return the cpp flags according to the BUILD file and other configs. """ cpp_flags = [] # Warnings if self.data.get('warning', '') == 'no': cpp_flags.append('-w') # Defs defs = self.data.get('defs', []) cpp_flags += [('-D' + macro) for macro in defs] # Optimize flags if (self.blade.get_options().profile == 'release' or self.data.get('always_optimize')): cpp_flags += self._get_optimize_flags() # Add -fno-omit-frame-pointer to optimize mode for easy debugging. cpp_flags += ['-fno-omit-frame-pointer'] cpp_flags += self.data.get('extra_cppflags', []) # Incs incs = self.data.get('incs', []) + self.data.get('export_incs', []) incs = [os.path.normpath(os.path.join(self.path, inc)) for inc in incs] incs += self._export_incs_list() # Remove duplicate items in incs list and keep the order incs = stable_unique(incs) return (cpp_flags, incs) def _get_as_flags(self): """_get_as_flags. Return the as flags according to the build architecture. """ options = self.blade.get_options() as_flags = ['-g', '--' + options.m] aspp_flags = ['-Wa,--' + options.m] return as_flags, aspp_flags def _export_incs_list(self): """_export_incs_list. 
TODO """ deps = self.expanded_deps inc_list = [] for lib in deps: # system lib if lib[0] == '#': continue target = self.target_database[lib] for inc in target.data.get('export_incs', []): path = os.path.normpath(os.path.join(target.path, inc)) inc_list.append(path) return inc_list def _static_deps_list(self): """_static_deps_list. Returns ----------- link_all_symbols_lib_list: the libs to link all its symbols into target lib_list: the libs list to be statically linked into static library Description ----------- It will find the libs needed to be linked into the target statically. """ build_targets = self.blade.get_build_targets() lib_list = [] link_all_symbols_lib_list = [] for dep in self.expanded_deps: dep_target = build_targets[dep] if dep_target.type == 'cc_library' and not dep_target.srcs: continue # system lib if dep_target.type == 'system_library': lib_name = "'%s'" % dep_target.name else: lib_name = dep_target.data.get('static_cc_library_var') if lib_name: if dep_target.data.get('link_all_symbols'): link_all_symbols_lib_list.append(lib_name) else: lib_list.append(lib_name) return (link_all_symbols_lib_list, lib_list) def _dynamic_deps_list(self): """_dynamic_deps_list. Returns ----------- lib_list: the libs list to be dynamically linked into dynamic library Description ----------- It will find the libs needed to be linked into the target dynamically. """ build_targets = self.blade.get_build_targets() lib_list = [] for lib in self.expanded_deps: dep_target = build_targets[lib] if (dep_target.type == 'cc_library' and not dep_target.srcs): continue # system lib if lib[0] == '#': lib_name = "'%s'" % lib[1] else: lib_name = dep_target.data.get('dynamic_cc_library_var') if lib_name: lib_list.append(lib_name) return lib_list def _get_static_deps_lib_list(self): """Returns a tuple that needed to write static deps rules. 
""" (link_all_symbols_lib_list, lib_list) = self._static_deps_list() lib_str = 'LIBS=[%s]' % ','.join(lib_list) whole_link_flags = [] if link_all_symbols_lib_list: whole_link_flags = ['"-Wl,--whole-archive"'] for i in link_all_symbols_lib_list: whole_link_flags.append(i) whole_link_flags.append('"-Wl,--no-whole-archive"') return (link_all_symbols_lib_list, lib_str, ', '.join(whole_link_flags)) def _get_dynamic_deps_lib_list(self): """Returns the libs string. """ lib_list = self._dynamic_deps_list() return 'LIBS=[%s]' % ','.join(lib_list) def _prebuilt_cc_library_is_depended(self): build_targets = self.blade.get_build_targets() for key in build_targets: target = build_targets[key] if (self.key in target.expanded_deps and target.type != 'prebuilt_cc_library'): return True return False def _prebuilt_cc_library_rules(self, var_name, target, source): """Generate scons rules for prebuilt cc library. """ if source.endswith('.a'): self._write_rule('%s = top_env.File("%s")' % (var_name, source)) else: self._write_rule('%s = top_env.Command("%s", "%s", ' 'Copy("$TARGET", "$SOURCE"))' % ( var_name, target, source)) def _prebuilt_cc_library(self): """Prebuilt cc library rules. """ # We allow a prebuilt cc_library doesn't exist if it is not used. # So if this library is not depended by any target, don't generate any # rule to avoid runtime error and also avoid unnecessary runtime cost. if not self._prebuilt_cc_library_is_depended(): return # Paths for static linking, may be a dynamic library! 
static_src_path, static_target_path = self._prebuilt_cc_library_path() var_name = self._var_name() self._prebuilt_cc_library_rules(var_name, static_target_path, static_src_path) self.data['static_cc_library_var'] = var_name dynamic_src_path, dynamic_target_path = '', '' if self._need_dynamic_library(): dynamic_src_path, dynamic_target_path = self._prebuilt_cc_library_path( prefer_dynamic=True) # Avoid copy twice if has only one kind of library if dynamic_target_path != static_target_path: var_name = self._var_name('dynamic') self._prebuilt_cc_library_rules(var_name, dynamic_target_path, dynamic_src_path) self.data['dynamic_cc_library_var'] = var_name # Make a symbol link if either lib is a so self.file_and_link = None so_src, so_target = '', '' if static_target_path.endswith('.so'): so_src = static_src_path so_target = static_target_path elif dynamic_target_path.endswith('.so'): so_src = dynamic_src_path so_target = dynamic_target_path if so_src: soname = self._prebuilt_cc_library_dynamic_soname(so_src) if soname: self.file_and_link = (so_target, soname) def _static_cc_library(self): """_cc_library. It will output the cc_library rule into the buffer. """ env_name = self._env_name() var_name = self._var_name() self._write_rule('%s = %s.Library("%s", %s)' % ( var_name, env_name, self._target_file_path(), self._objs_name())) self.data['static_cc_library_var'] = var_name self._add_default_target_var('a', var_name) def _dynamic_cc_library(self): """_dynamic_cc_library. It will output the dynamic_cc_library rule into the buffer. 
""" self._setup_link_flags() var_name = self._var_name('dynamic') env_name = self._env_name() lib_str = self._get_dynamic_deps_lib_list() if self.srcs or self.expanded_deps: if not self.data.get('allow_undefined'): self._write_rule('%s.Append(LINKFLAGS=["-Xlinker", "--no-undefined"])' % env_name) self._write_rule('%s = %s.SharedLibrary("%s", %s, %s)' % ( var_name, env_name, self._target_file_path(), self._objs_name(), lib_str)) self.data['dynamic_cc_library_var'] = var_name self._add_target_var('so', var_name) def _need_dynamic_library(self): options = self.blade.get_options() config = configparse.blade_config.get_config('cc_library_config') return (getattr(options, 'generate_dynamic') or self.data.get('build_dynamic') or config.get('generate_dynamic')) def _cc_library(self): self._static_cc_library() if self._need_dynamic_library(): self._dynamic_cc_library() def _generate_generated_header_files_depends(self, var_name): """Generate dependencies to targets that generate header files. """ env_name = self._env_name() q = Queue.Queue(0) for key in self.deps: q.put(key) keys = set() while not q.empty(): key = q.get() if key not in keys: keys.add(key) dep = self.target_database[key] if dep._generate_header_files(): if dep.srcs: self._write_rule('%s.Depends(%s, %s)' % ( env_name, var_name, dep._var_name())) else: for k in dep.deps: q.put(k) def _cc_objects_rules(self): """_cc_objects_rules. Generate the cc objects rules for the srcs in srcs list. 
""" target_types = ['cc_library', 'cc_binary', 'cc_test', 'cc_plugin'] if not self.type in target_types: console.error_exit('logic error, type %s err in object rule' % self.type) objs_name = self._objs_name() env_name = self._env_name() self._setup_cc_flags() objs = [] for src in self.srcs: obj = '%s_%s_object' % (self._var_name_of(src), self._regular_variable_name(self.name)) target_path = self._target_file_path() + '.objs/%s' % src source_path = self._target_file_path(src) # Also find generated files rule_args = ('target = "%s" + top_env["OBJSUFFIX"], source = "%s"' % (target_path, source_path)) if self.data.get('secure'): rule_args += ', CXX = "$SECURECXX"' self._write_rule('%s = %s.SharedObject(%s)' % (obj, env_name, rule_args)) if self.data.get('secure'): self._securecc_object_rules(obj, source_path) objs.append(obj) self._write_rule('%s = [%s]' % (objs_name, ','.join(objs))) self._generate_generated_header_files_depends(objs_name) if objs: objs_dirname = self._target_file_path() + '.objs' self._write_rule('%s.Clean([%s], "%s")' % (env_name, objs_name, objs_dirname)) def _securecc_object_rules(self, obj, src): """Touch the source file if needed and generate specific object rules for securecc. """ env_name = self._env_name() self._write_rule('%s.AlwaysBuild(%s)' % (env_name, obj)) if not os.path.exists(src): dir = os.path.dirname(src) if not os.path.isdir(dir): os.makedirs(dir) open(src, 'w').close() class CcLibrary(CcTarget): """A cc target subclass. This class is derived from SconsTarget and it generates the library rules including dynamic library rules according to user option. """ def __init__(self, name, srcs, deps, visibility, warning, defs, incs, export_incs, optimize, always_optimize, prebuilt, link_all_symbols, deprecated, extra_cppflags, extra_linkflags, allow_undefined, secure, blade, kwargs): """Init method. Init the cc library. 
""" CcTarget.__init__(self, name, 'cc_library', srcs, deps, visibility, warning, defs, incs, export_incs, optimize, extra_cppflags, extra_linkflags, blade, kwargs) if prebuilt: self.type = 'prebuilt_cc_library' self.srcs = [] self.data['link_all_symbols'] = link_all_symbols self.data['always_optimize'] = always_optimize self.data['deprecated'] = deprecated self.data['allow_undefined'] = allow_undefined self.data['secure'] = secure def _rpath_link(self, dynamic): path = self._prebuilt_cc_library_path(dynamic)[1] if path.endswith('.so'): return os.path.dirname(path) return None def scons_rules(self): """scons_rules. It outputs the scons rules according to user options. """ if self.type == 'prebuilt_cc_library': self._check_deprecated_deps() self._prebuilt_cc_library() elif self.srcs: self._prepare_to_generate_rule() self._cc_objects_rules() self._cc_library() def cc_library(name, srcs=[], deps=[], visibility=None, warning='yes', defs=[], incs=[], export_incs=[], optimize=[], always_optimize=False, pre_build=False, prebuilt=False, link_all_symbols=False, deprecated=False, extra_cppflags=[], extra_linkflags=[], allow_undefined=False, secure=False, **kwargs): """cc_library target. """ target = CcLibrary(name, srcs, deps, visibility, warning, defs, incs, export_incs, optimize, always_optimize, prebuilt or pre_build, link_all_symbols, deprecated, extra_cppflags, extra_linkflags, allow_undefined, secure, blade.blade, kwargs) if pre_build: console.warning("//%s:%s: 'pre_build' has been deprecated, " "please use 'prebuilt'" % (target.path, target.name)) blade.blade.register_target(target) build_rules.register_function(cc_library) class CcBinary(CcTarget): """A scons cc target subclass. This class is derived from SconsCCTarget and it generates the cc_binary rules according to user options. """ def __init__(self, name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, extra_cppflags, extra_linkflags, export_dynamic, blade, kwargs): """Init method. 
Init the cc binary. """ CcTarget.__init__(self, name, 'cc_binary', srcs, deps, None, warning, defs, incs, [], optimize, extra_cppflags, extra_linkflags, blade, kwargs) self.data['embed_version'] = embed_version self.data['dynamic_link'] = dynamic_link self.data['export_dynamic'] = export_dynamic cc_binary_config = configparse.blade_config.get_config('cc_binary_config') # add extra link library link_libs = var_to_list(cc_binary_config['extra_libs']) self._add_hardcode_library(link_libs) def _allow_duplicate_source(self): return True def _get_rpath_links(self): """Get rpath_links from dependencies""" dynamic_link = self.data['dynamic_link'] build_targets = self.blade.get_build_targets() rpath_links = [] for lib in self.expanded_deps: if build_targets[lib].type == 'prebuilt_cc_library': path = build_targets[lib]._rpath_link(dynamic_link) if path and path not in rpath_links: rpath_links.append(path) return rpath_links def _write_rpath_links(self): rpath_links = self._get_rpath_links() if rpath_links: for rpath_link in rpath_links: self._write_rule('%s.Append(LINKFLAGS="-Wl,--rpath-link=%s")' % (self._env_name(), rpath_link)) def _cc_binary(self): """_cc_binary rules. 
""" env_name = self._env_name() var_name = self._var_name() platform = self.blade.get_scons_platform() if platform.get_gcc_version() > '4.5': link_flag_list = ['-static-libgcc', '-static-libstdc++'] self._write_rule('%s.Append(LINKFLAGS=%s)' % (env_name, link_flag_list)) (link_all_symbols_lib_list, lib_str, whole_link_flags) = self._get_static_deps_lib_list() if whole_link_flags: self._write_rule( '%s.Append(LINKFLAGS=[%s])' % (env_name, whole_link_flags)) if self.data.get('export_dynamic'): self._write_rule( '%s.Append(LINKFLAGS="-rdynamic")' % env_name) self._setup_link_flags() self._write_rule('%s = %s.Program("%s", %s, %s)' % ( var_name, env_name, self._target_file_path(), self._objs_name(), lib_str)) self._add_default_target_var('bin', var_name) if link_all_symbols_lib_list: self._write_rule('%s.Depends(%s, [%s])' % ( env_name, var_name, ', '.join(link_all_symbols_lib_list))) self._write_rpath_links() if self.data['embed_version']: self._write_rule('%s.Append(LINKFLAGS=str(version_obj[0]))' % env_name) self._write_rule('%s.Requires(%s, version_obj)' % (env_name, var_name)) def _dynamic_cc_binary(self): """_dynamic_cc_binary. """ env_name = self._env_name() var_name = self._var_name() if self.data.get('export_dynamic'): self._write_rule('%s.Append(LINKFLAGS="-rdynamic")' % env_name) self._setup_link_flags() lib_str = self._get_dynamic_deps_lib_list() self._write_rule('%s = %s.Program("%s", %s, %s)' % ( var_name, env_name, self._target_file_path(), self._objs_name(), lib_str)) self._add_default_target_var('bin', var_name) if self.data['embed_version']: self._write_rule('%s.Append(LINKFLAGS=str(version_obj[0]))' % env_name) self._write_rule('%s.Requires(%s, version_obj)' % (env_name, var_name)) self._write_rpath_links() def scons_rules(self): """scons_rules. It outputs the scons rules according to user options. 
""" self._prepare_to_generate_rule() self._cc_objects_rules() if self.data['dynamic_link']: self._dynamic_cc_binary() else: self._cc_binary() def cc_binary(name, srcs=[], deps=[], warning='yes', defs=[], incs=[], embed_version=True, optimize=[], dynamic_link=False, extra_cppflags=[], extra_linkflags=[], export_dynamic=False, **kwargs): """cc_binary target. """ cc_binary_target = CcBinary(name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, extra_cppflags, extra_linkflags, export_dynamic, blade.blade, kwargs) blade.blade.register_target(cc_binary_target) build_rules.register_function(cc_binary) def cc_benchmark(name, deps=[], **kwargs): """cc_benchmark target. """ cc_config = configparse.blade_config.get_config('cc_config') benchmark_libs = cc_config['benchmark_libs'] benchmark_main_libs = cc_config['benchmark_main_libs'] deps = var_to_list(deps) + benchmark_libs + benchmark_main_libs cc_binary(name=name, deps=deps, **kwargs) build_rules.register_function(cc_benchmark) class CcPlugin(CcTarget): """A scons cc target subclass. This class is derived from SconsCCTarget and it generates the cc_plugin rules according to user options. """ def __init__(self, name, srcs, deps, warning, defs, incs, optimize, prefix, suffix, extra_cppflags, extra_linkflags, allow_undefined, blade, kwargs): """Init method. Init the cc plugin target. """ CcTarget.__init__(self, name, 'cc_plugin', srcs, deps, None, warning, defs, incs, [], optimize, extra_cppflags, extra_linkflags, blade, kwargs) self.prefix = prefix self.suffix = suffix self.data['allow_undefined'] = allow_undefined def scons_rules(self): """scons_rules. It outputs the scons rules according to user options. 
""" self._prepare_to_generate_rule() env_name = self._env_name() var_name = self._var_name() self._cc_objects_rules() self._setup_link_flags() (link_all_symbols_lib_list, lib_str, whole_link_flags) = self._get_static_deps_lib_list() if whole_link_flags: self._write_rule( '%s.Append(LINKFLAGS=[%s])' % (env_name, whole_link_flags)) if self.prefix is not None: self._write_rule( '%s.Replace(SHLIBPREFIX="%s")' % (env_name, self.prefix)) if self.suffix is not None: self._write_rule( '%s.Replace(SHLIBSUFFIX="%s")' % (env_name, self.suffix)) if not self.data['allow_undefined']: self._write_rule('%s.Append(LINKFLAGS=["-Xlinker", "--no-undefined"])' % env_name) if self.srcs or self.expanded_deps: self._write_rule('%s = %s.SharedLibrary("%s", %s, %s)' % ( var_name, env_name, self._target_file_path(), self._objs_name(), lib_str)) self._add_default_target_var('so', var_name) if link_all_symbols_lib_list: self._write_rule('%s.Depends(%s, [%s])' % ( env_name, var_name, ', '.join(link_all_symbols_lib_list))) def cc_plugin(name, srcs=[], deps=[], warning='yes', defs=[], incs=[], optimize=[], prefix=None, suffix=None, extra_cppflags=[], extra_linkflags=[], allow_undefined=True, **kwargs): """cc_plugin target. """ target = CcPlugin(name, srcs, deps, warning, defs, incs, optimize, prefix, suffix, extra_cppflags, extra_linkflags, allow_undefined, blade.blade, kwargs) blade.blade.register_target(target) build_rules.register_function(cc_plugin) # See http://google-perftools.googlecode.com/svn/trunk/doc/heap_checker.html HEAP_CHECK_VALUES = set([ '', 'minimal', 'normal', 'strict', 'draconian', 'as-is', 'local', ]) class CcTest(CcBinary): """A scons cc target subclass. This class is derived from SconsCCTarget and it generates the cc_test rules according to user options. 
""" def __init__(self, name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, testdata, extra_cppflags, extra_linkflags, export_dynamic, always_run, exclusive, heap_check, heap_check_debug, blade, kwargs): """Init method. Init the cc test. """ cc_test_config = configparse.blade_config.get_config('cc_test_config') if dynamic_link is None: dynamic_link = cc_test_config['dynamic_link'] CcBinary.__init__(self, name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, extra_cppflags, extra_linkflags, export_dynamic, blade, kwargs) self.type = 'cc_test' self.data['testdata'] = var_to_list(testdata) self.data['always_run'] = always_run self.data['exclusive'] = exclusive gtest_lib = var_to_list(cc_test_config['gtest_libs']) gtest_main_lib = var_to_list(cc_test_config['gtest_main_libs']) # Hardcode deps rule to thirdparty gtest main lib. self._add_hardcode_library(gtest_lib) self._add_hardcode_library(gtest_main_lib) if heap_check is None: heap_check = cc_test_config.get('heap_check', '') else: if heap_check not in HEAP_CHECK_VALUES: console.error_exit('//%s:%s: heap_check can only be in %s' % ( self.path, self.name, HEAP_CHECK_VALUES)) perftools_lib = var_to_list(cc_test_config['gperftools_libs']) perftools_debug_lib = var_to_list(cc_test_config['gperftools_debug_libs']) if heap_check: self.data['heap_check'] = heap_check if heap_check_debug: perftools_lib_list = perftools_debug_lib else: perftools_lib_list = perftools_lib self._add_hardcode_library(perftools_lib_list) def cc_test(name, srcs=[], deps=[], warning='yes', defs=[], incs=[], embed_version=False, optimize=[], dynamic_link=None, testdata=[], extra_cppflags=[], extra_linkflags=[], export_dynamic=False, always_run=False, exclusive=False, heap_check=None, heap_check_debug=False, **kwargs): """cc_test target. 
""" cc_test_target = CcTest(name, srcs, deps, warning, defs, incs, embed_version, optimize, dynamic_link, testdata, extra_cppflags, extra_linkflags, export_dynamic, always_run, exclusive, heap_check, heap_check_debug, blade.blade, kwargs) blade.blade.register_target(cc_test_target) build_rules.register_function(cc_test)
project-zerus/blade
src/blade/cc_targets.py
Python
bsd-3-clause
39,092
# -*- coding: utf-8 -*- # # Copyright (C) 2006-2008 Edgewall Software # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://genshi.edgewall.org/wiki/License. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://genshi.edgewall.org/log/. import doctest import unittest import sys from genshi.core import Attrs, Stream, QName from genshi.input import HTML, XML from genshi.output import DocType, XMLSerializer, XHTMLSerializer, \ HTMLSerializer, EmptyTagFilter class XMLSerializerTestCase(unittest.TestCase): def test_with_xml_decl(self): stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))]) output = stream.render(XMLSerializer, doctype='xhtml', encoding=None) self.assertEqual('<?xml version="1.0"?>\n' '<!DOCTYPE html PUBLIC ' '"-//W3C//DTD XHTML 1.0 Strict//EN" ' '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n', output) def test_doctype_in_stream(self): stream = Stream([(Stream.DOCTYPE, DocType.HTML_STRICT, (None, -1, -1))]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<!DOCTYPE html PUBLIC ' '"-//W3C//DTD HTML 4.01//EN" ' '"http://www.w3.org/TR/html4/strict.dtd">\n', output) def test_doctype_in_stream_no_sysid(self): stream = Stream([(Stream.DOCTYPE, ('html', '-//W3C//DTD HTML 4.01//EN', None), (None, -1, -1))]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">\n', output) def test_doctype_in_stream_no_pubid(self): stream = Stream([ (Stream.DOCTYPE, ('html', None, 'http://www.w3.org/TR/html4/strict.dtd'), (None, -1, -1)) ]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<!DOCTYPE html SYSTEM ' '"http://www.w3.org/TR/html4/strict.dtd">\n', output) def 
test_doctype_in_stream_no_pubid_or_sysid(self): stream = Stream([(Stream.DOCTYPE, ('html', None, None), (None, -1, -1))]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<!DOCTYPE html>\n', output) def test_serializer_doctype(self): stream = Stream([]) output = stream.render(XMLSerializer, doctype=DocType.HTML_STRICT, encoding=None) self.assertEqual('<!DOCTYPE html PUBLIC ' '"-//W3C//DTD HTML 4.01//EN" ' '"http://www.w3.org/TR/html4/strict.dtd">\n', output) def test_doctype_one_and_only(self): stream = Stream([ (Stream.DOCTYPE, ('html', None, None), (None, -1, -1)) ]) output = stream.render(XMLSerializer, doctype=DocType.HTML_STRICT, encoding=None) self.assertEqual('<!DOCTYPE html PUBLIC ' '"-//W3C//DTD HTML 4.01//EN" ' '"http://www.w3.org/TR/html4/strict.dtd">\n', output) def test_comment(self): stream = Stream([(Stream.COMMENT, 'foo bar', (None, -1, -1))]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<!--foo bar-->', output) def test_processing_instruction(self): stream = Stream([(Stream.PI, ('python', 'x = 2'), (None, -1, -1))]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual('<?python x = 2?>', output) def test_nested_default_namespaces(self): stream = Stream([ (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, 
QName('http://example.org/}div'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)) ]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual("""<div xmlns="http://example.org/"> <p/> <p/> </div>""", output) def test_nested_bound_namespaces(self): stream = Stream([ (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, QName('http://example.org/}div'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)) ]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual("""<x:div xmlns:x="http://example.org/"> <x:p/> <x:p/> </x:div>""", output) def test_multiple_default_namespaces(self): stream = Stream([ (Stream.START, (QName('div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, 
QName('div'), (None, -1, -1)), ]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual("""<div> <p xmlns="http://example.org/"/> <p xmlns="http://example.org/"/> </div>""", output) def test_multiple_bound_namespaces(self): stream = Stream([ (Stream.START, (QName('div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('http://example.org/}p'), Attrs()), (None, -1, -1)), (Stream.END, QName('http://example.org/}p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, QName('div'), (None, -1, -1)), ]) output = stream.render(XMLSerializer, encoding=None) self.assertEqual("""<div> <x:p xmlns:x="http://example.org/"/> <x:p xmlns:x="http://example.org/"/> </div>""", output) def test_atom_with_xhtml(self): text = """<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en"> <id>urn:uuid:c60843aa-0da8-4fa6-bbe5-98007bc6774e</id> <updated>2007-01-28T11:36:02.807108-06:00</updated> <title type="xhtml"> <div xmlns="http://www.w3.org/1999/xhtml">Example</div> </title> <subtitle type="xhtml"> <div xmlns="http://www.w3.org/1999/xhtml">Bla bla bla</div> </subtitle> <icon/> </feed>""" output = XML(text).render(XMLSerializer, encoding=None) self.assertEqual(text, output) class XHTMLSerializerTestCase(unittest.TestCase): def test_xml_decl_dropped(self): stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))]) output = stream.render(XHTMLSerializer, doctype='xhtml', encoding=None) self.assertEqual('<!DOCTYPE html PUBLIC ' '"-//W3C//DTD XHTML 1.0 Strict//EN" ' '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n', output) def 
test_xml_decl_included(self): stream = Stream([(Stream.XML_DECL, ('1.0', None, -1), (None, -1, -1))]) output = stream.render(XHTMLSerializer, doctype='xhtml', drop_xml_decl=False, encoding=None) self.assertEqual('<?xml version="1.0"?>\n' '<!DOCTYPE html PUBLIC ' '"-//W3C//DTD XHTML 1.0 Strict//EN" ' '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n', output) def test_xml_lang(self): text = '<p xml:lang="en">English text</p>' output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual('<p lang="en" xml:lang="en">English text</p>', output) def test_xml_lang_nodup(self): text = '<p xml:lang="en" lang="en">English text</p>' output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual('<p xml:lang="en" lang="en">English text</p>', output) def test_textarea_whitespace(self): content = '\nHey there. \n\n I am indented.\n' stream = XML('<textarea name="foo">%s</textarea>' % content) output = stream.render(XHTMLSerializer, encoding=None) self.assertEqual('<textarea name="foo">%s</textarea>' % content, output) def test_pre_whitespace(self): content = '\nHey <em>there</em>. 
\n\n I am indented.\n' stream = XML('<pre>%s</pre>' % content) output = stream.render(XHTMLSerializer, encoding=None) self.assertEqual('<pre>%s</pre>' % content, output) def test_xml_space(self): text = '<foo xml:space="preserve"> Do not mess \n\n with me </foo>' output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual('<foo> Do not mess \n\n with me </foo>', output) def test_empty_script(self): text = """<html xmlns="http://www.w3.org/1999/xhtml"> <script src="foo.js" /> </html>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual("""<html xmlns="http://www.w3.org/1999/xhtml"> <script src="foo.js"></script> </html>""", output) def test_script_escaping(self): text = """<script>/*<![CDATA[*/ if (1 < 2) { alert("Doh"); } /*]]>*/</script>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def test_script_escaping_with_namespace(self): text = """<script xmlns="http://www.w3.org/1999/xhtml">/*<![CDATA[*/ if (1 < 2) { alert("Doh"); } /*]]>*/</script>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def test_style_escaping(self): text = """<style>/*<![CDATA[*/ html > body { display: none; } /*]]>*/</style>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def test_style_escaping_with_namespace(self): text = """<style xmlns="http://www.w3.org/1999/xhtml">/*<![CDATA[*/ html > body { display: none; } /*]]>*/</style>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def test_embedded_svg(self): text = """<html xmlns="http://www.w3.org/1999/xhtml" xmlns:svg="http://www.w3.org/2000/svg"> <body> <button> <svg:svg width="600px" height="400px"> <svg:polygon id="triangle" points="50,50 50,300 300,300"></svg:polygon> </svg:svg> </button> </body> </html>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def 
test_xhtml_namespace_prefix(self): text = """<div xmlns="http://www.w3.org/1999/xhtml"> <strong>Hello</strong> </div>""" output = XML(text).render(XHTMLSerializer, encoding=None) self.assertEqual(text, output) def test_nested_default_namespaces(self): stream = Stream([ (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('p'), Attrs()), (None, -1, -1)), (Stream.END, QName('p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('p'), Attrs()), (None, -1, -1)), (Stream.END, QName('p'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, QName('div'), (None, -1, -1)), (Stream.END_NS, '', (None, -1, -1)) ]) output = stream.render(XHTMLSerializer, encoding=None) self.assertEqual("""<div xmlns="http://example.org/"> <p></p> <p></p> </div>""", output) def test_nested_bound_namespaces(self): stream = Stream([ (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('div'), Attrs()), (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('p'), Attrs()), (None, -1, -1)), (Stream.END, QName('p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.START_NS, ('x', 'http://example.org/'), (None, -1, -1)), (Stream.START, (QName('p'), Attrs()), (None, -1, -1)), (Stream.END, QName('p'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)), (Stream.TEXT, '\n ', (None, -1, -1)), (Stream.END, QName('div'), (None, -1, -1)), (Stream.END_NS, 'x', (None, -1, -1)) ]) output = stream.render(XHTMLSerializer, encoding=None) self.assertEqual("""<div 
xmlns:x="http://example.org/"> <p></p> <p></p> </div>""", output) def test_html5_doctype(self): stream = HTML(u'<html></html>') output = stream.render(XHTMLSerializer, doctype=DocType.HTML5, encoding=None) self.assertEqual('<!DOCTYPE html>\n<html></html>', output) class HTMLSerializerTestCase(unittest.TestCase): def test_xml_lang(self): text = '<p xml:lang="en">English text</p>' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<p lang="en">English text</p>', output) def test_xml_lang_nodup(self): text = '<p lang="en" xml:lang="en">English text</p>' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<p lang="en">English text</p>', output) def test_textarea_whitespace(self): content = '\nHey there. \n\n I am indented.\n' stream = XML('<textarea name="foo">%s</textarea>' % content) output = stream.render(HTMLSerializer, encoding=None) self.assertEqual('<textarea name="foo">%s</textarea>' % content, output) def test_pre_whitespace(self): content = '\nHey <em>there</em>. 
\n\n I am indented.\n' stream = XML('<pre>%s</pre>' % content) output = stream.render(HTMLSerializer, encoding=None) self.assertEqual('<pre>%s</pre>' % content, output) def test_xml_space(self): text = '<foo xml:space="preserve"> Do not mess \n\n with me </foo>' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<foo> Do not mess \n\n with me </foo>', output) def test_empty_script(self): text = '<script src="foo.js" />' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<script src="foo.js"></script>', output) def test_script_escaping(self): text = '<script>if (1 &lt; 2) { alert("Doh"); }</script>' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<script>if (1 < 2) { alert("Doh"); }</script>', output) def test_script_escaping_with_namespace(self): text = """<script xmlns="http://www.w3.org/1999/xhtml"> if (1 &lt; 2) { alert("Doh"); } </script>""" output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual("""<script> if (1 < 2) { alert("Doh"); } </script>""", output) def test_style_escaping(self): text = '<style>html &gt; body { display: none; }</style>' output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual('<style>html > body { display: none; }</style>', output) def test_style_escaping_with_namespace(self): text = """<style xmlns="http://www.w3.org/1999/xhtml"> html &gt; body { display: none; } </style>""" output = XML(text).render(HTMLSerializer, encoding=None) self.assertEqual("""<style> html > body { display: none; } </style>""", output) def test_html5_doctype(self): stream = HTML(u'<html></html>') output = stream.render(HTMLSerializer, doctype=DocType.HTML5, encoding=None) self.assertEqual('<!DOCTYPE html>\n<html></html>', output) class EmptyTagFilterTestCase(unittest.TestCase): def test_empty(self): stream = XML('<elem></elem>') | EmptyTagFilter() self.assertEqual([EmptyTagFilter.EMPTY], [ev[0] for ev in stream]) def test_text_content(self): stream = 
XML('<elem>foo</elem>') | EmptyTagFilter() self.assertEqual([Stream.START, Stream.TEXT, Stream.END], [ev[0] for ev in stream]) def test_elem_content(self): stream = XML('<elem><sub /><sub /></elem>') | EmptyTagFilter() self.assertEqual([Stream.START, EmptyTagFilter.EMPTY, EmptyTagFilter.EMPTY, Stream.END], [ev[0] for ev in stream]) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(XMLSerializerTestCase, 'test')) suite.addTest(unittest.makeSuite(XHTMLSerializerTestCase, 'test')) suite.addTest(unittest.makeSuite(HTMLSerializerTestCase, 'test')) suite.addTest(unittest.makeSuite(EmptyTagFilterTestCase, 'test')) suite.addTest(doctest.DocTestSuite(XMLSerializer.__module__)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
dag/genshi
genshi/tests/output.py
Python
bsd-3-clause
21,084
<?php class KT_Switch_Field extends KT_Field { const FIELD_TYPE = "switch switch-toggle"; const YES = "1"; const NO = "0"; /** * Založení objektu typu Switch * * @param string $name - hash v poli * @param string $label - popisek v html * @return self */ public function __construct($name, $label) { parent::__construct($name, $label); $this->addAttrClass(self::FIELD_TYPE); $this->setValue(self::NO); return $this; } /** * Provede výpis fieldu pomocí echo $this->getField() * * @author Tomáš Kocifaj * @link http://www.ktstudio.cz * */ public function renderField() { echo $this->getField(); } /** * Vrátí HTML strukturu pro zobrazní fieldu * * @author Tomáš Kocifaj * @link http://www.ktstudio.cz * * @return string */ public function getField() { $html = ""; $html .= "<div {$this->getAttrClassString()}>"; $html .= "<span for=\"{$this->getAttrValueByName("id")}\" {$this->getAttrClassString()} title=\"{$this->getAfterNameValue("title")}\"></span>"; $html .= "<input type=\"hidden\" "; $html .= $this->getBasicHtml(); $html .= " value=\"{$this->getValue()}\" "; $html .= "/>"; $html .= "</div>"; if ($this->hasErrorMsg()) { $html .= parent::getHtmlErrorMsg(); } return $html; } /** * Vrátí hodnotu ve fieldu * * @author Tomáš Kocifaj * @link http://www.ktstudio.cz * * @param bolean $original - má vrátít originální hodnotu v DB nebo hodnotou pro zobrazení * @return null */ public function getConvertedValue() { $fieldValue = parent::getValue(); return self::getSwitchConvertedValue($fieldValue); } /** * Vrátí typ fieldu * * @author Tomáš Kocifaj * @link http://www.ktstudio.cz * * @return string */ public function getFieldType() { return self::FIELD_TYPE; } // --- statické funkce ----------------- /** * Převod logického hodnoty na hodnotu pro KT_Switch_Field * * @author Martin Hlaváč * @link http://www.ktstudio.cz * * @param boolean $value * @return string * @throws InvalidArgumentException */ public static function convertBooleanToSwitch($value) { if (KT::issetAndNotEmpty($value)) { 
if ($value == true) { return KT_Switch_Field::YES; } elseif ($value == false) { return KT_Switch_Field::NO; } throw new InvalidArgumentException(__("Hodnota \"$value\" není logického typu", KT_DOMAIN)); } return null; } /** * Převod KT_Switch_Field hodnoty na logickou hodnotu * * @author Martin Hlaváč * @link http://www.ktstudio.cz * * @param string $value * @return boolean * @throws InvalidArgumentException */ public static function convertSwitchToBoolean($value) { if (KT::issetAndNotEmpty($value)) { if ($value == KT_Switch_Field::YES) { return true; } elseif ($value == KT_Switch_Field::NO) { return false; } throw new InvalidArgumentException(__("Hodnota \"$value\" není typu KT Switch pole", KT_DOMAIN)); } return null; } /** * Vypíše hodnotu KT_Switch_Field, či boolean jako text, tedy Ano/Ne * * @author Martin Hlaváč * @link http://www.ktstudio.cz * * @param string|boolean $value * @throws InvalidArgumentException */ public static function getSwitchConvertedValue($value) { if ($value == KT_Switch_Field::YES || $value === true || $value === 1) { return __("Ano", KT_DOMAIN); } elseif ($value == KT_Switch_Field::NO || $value === false || $value === 0) { return __("Ne", KT_DOMAIN); } else { echo KT_EMPTY_SYMBOL; } } }
ktstudio/WPFW-Theme
wpfw/kt/core/classes/fields/kt_switch_field.inc.php
PHP
bsd-3-clause
4,148
var FormFileUpload = function () { return { //main function to initiate the module init: function () { // Initialize the jQuery File Upload widget: $('#fileupload').fileupload({ // Uncomment the following to send cross-domain cookies: //xhrFields: {withCredentials: true}, url: '/upload/index' }); // Enable iframe cross-domain access via redirect option: $('#fileupload').fileupload( 'option', 'redirect', window.location.href.replace( /\/[^\/]*$/, '/cors/result.html?%s' ) ); // Demo settings: $('#fileupload').fileupload('option', { url: $('#fileupload').fileupload('option', 'url'), // Enable image resizing, except for Android and Opera, // which actually support image resizing, but fail to // send Blob objects via XHR requests: disableImageResize: /Android(?!.*Chrome)|Opera/ .test(window.navigator.userAgent), maxFileSize: 5000000, acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i }); // Upload server status check for browsers with CORS support: if ($.support.cors) { $.ajax({ url: '/upload/index', type: 'POST' }).fail(function () { $('<div class="alert alert-danger"/>') .text('Upload server currently unavailable - ' + new Date()) .appendTo('#fileupload'); }); } //////////////////// // Initialize the jQuery File Upload widget: $('#fileupload').fileupload({ // Uncomment the following to send cross-domain cookies: //xhrFields: {withCredentials: true}, autoUpload: false, url: '/upload/index' }); // initialize uniform checkboxes App.initUniform('.fileupload-toggle-checkbox'); } }; }();
duanduan2288/vr
web/js/form-fileupload.js
JavaScript
bsd-3-clause
2,326
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/google_apis/drive_api_operations.h" #include "base/json/json_writer.h" #include "base/values.h" namespace google_apis { namespace { const char kContentTypeApplicationJson[] = "application/json"; const char kDirectoryMimeType[] = "application/vnd.google-apps.folder"; // etag matching header. const char kIfMatchAllHeader[] = "If-Match: *"; } // namespace //============================== GetAboutOperation ============================= GetAboutOperation::GetAboutOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const GetDataCallback& callback) : GetDataOperation(registry, url_request_context_getter, callback), url_generator_(url_generator) { DCHECK(!callback.is_null()); } GetAboutOperation::~GetAboutOperation() {} GURL GetAboutOperation::GetURL() const { return url_generator_.GetAboutUrl(); } //============================== GetApplistOperation =========================== GetApplistOperation::GetApplistOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const GetDataCallback& callback) : GetDataOperation(registry, url_request_context_getter, callback), url_generator_(url_generator) { DCHECK(!callback.is_null()); } GetApplistOperation::~GetApplistOperation() {} GURL GetApplistOperation::GetURL() const { return url_generator_.GetApplistUrl(); } //============================ GetChangelistOperation ========================== GetChangelistOperation::GetChangelistOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const GURL& url, int64 start_changestamp, const GetDataCallback& callback) : GetDataOperation(registry, 
url_request_context_getter, callback), url_generator_(url_generator), url_(url), start_changestamp_(start_changestamp) { DCHECK(!callback.is_null()); } GetChangelistOperation::~GetChangelistOperation() {} GURL GetChangelistOperation::GetURL() const { return url_generator_.GetChangelistUrl(url_, start_changestamp_); } //============================= GetFlielistOperation =========================== GetFilelistOperation::GetFilelistOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const GURL& url, const std::string& search_string, const GetDataCallback& callback) : GetDataOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), url_(url), search_string_(search_string) { DCHECK(!callback.is_null()); } GetFilelistOperation::~GetFilelistOperation() {} GURL GetFilelistOperation::GetURL() const { return url_generator_.GetFilelistUrl(url_, search_string_); } //=============================== GetFlieOperation ============================= GetFileOperation::GetFileOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& file_id, const GetDataCallback& callback) : GetDataOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), file_id_(file_id) { DCHECK(!callback.is_null()); } GetFileOperation::~GetFileOperation() {} GURL GetFileOperation::GetURL() const { return url_generator_.GetFileUrl(file_id_); } namespace drive { //========================== CreateDirectoryOperation ========================== CreateDirectoryOperation::CreateDirectoryOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& parent_resource_id, const std::string& directory_name, const GetDataCallback& callback) : GetDataOperation(registry, 
url_request_context_getter, callback), url_generator_(url_generator), parent_resource_id_(parent_resource_id), directory_name_(directory_name) { DCHECK(!callback.is_null()); } CreateDirectoryOperation::~CreateDirectoryOperation() {} GURL CreateDirectoryOperation::GetURL() const { if (parent_resource_id_.empty() || directory_name_.empty()) { return GURL(); } return url_generator_.GetFilelistUrl(GURL(), ""); } net::URLFetcher::RequestType CreateDirectoryOperation::GetRequestType() const { return net::URLFetcher::POST; } bool CreateDirectoryOperation::GetContentData(std::string* upload_content_type, std::string* upload_content) { *upload_content_type = kContentTypeApplicationJson; base::DictionaryValue root; root.SetString("title", directory_name_); { base::DictionaryValue* parent_value = new base::DictionaryValue; parent_value->SetString("id", parent_resource_id_); base::ListValue* parent_list_value = new base::ListValue; parent_list_value->Append(parent_value); root.Set("parents", parent_list_value); } root.SetString("mimeType", kDirectoryMimeType); base::JSONWriter::Write(&root, upload_content); DVLOG(1) << "CreateDirectory data: " << *upload_content_type << ", [" << *upload_content << "]"; return true; } //=========================== RenameResourceOperation ========================== RenameResourceOperation::RenameResourceOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& resource_id, const std::string& new_name, const EntryActionCallback& callback) : EntryActionOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), resource_id_(resource_id), new_name_(new_name) { DCHECK(!callback.is_null()); } RenameResourceOperation::~RenameResourceOperation() {} net::URLFetcher::RequestType RenameResourceOperation::GetRequestType() const { return net::URLFetcher::PATCH; } std::vector<std::string> 
RenameResourceOperation::GetExtraRequestHeaders() const { std::vector<std::string> headers; headers.push_back(kIfMatchAllHeader); return headers; } GURL RenameResourceOperation::GetURL() const { return url_generator_.GetFileUrl(resource_id_); } bool RenameResourceOperation::GetContentData(std::string* upload_content_type, std::string* upload_content) { *upload_content_type = kContentTypeApplicationJson; base::DictionaryValue root; root.SetString("title", new_name_); base::JSONWriter::Write(&root, upload_content); DVLOG(1) << "RenameResource data: " << *upload_content_type << ", [" << *upload_content << "]"; return true; } //=========================== TrashResourceOperation =========================== TrashResourceOperation::TrashResourceOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& resource_id, const EntryActionCallback& callback) : EntryActionOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), resource_id_(resource_id) { DCHECK(!callback.is_null()); } TrashResourceOperation::~TrashResourceOperation() {} GURL TrashResourceOperation::GetURL() const { return url_generator_.GetFileTrashUrl(resource_id_); } net::URLFetcher::RequestType TrashResourceOperation::GetRequestType() const { return net::URLFetcher::POST; } //========================== InsertResourceOperation =========================== InsertResourceOperation::InsertResourceOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& parent_resource_id, const std::string& resource_id, const EntryActionCallback& callback) : EntryActionOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), parent_resource_id_(parent_resource_id), resource_id_(resource_id) { DCHECK(!callback.is_null()); } 
InsertResourceOperation::~InsertResourceOperation() {} GURL InsertResourceOperation::GetURL() const { return url_generator_.GetChildrenUrl(parent_resource_id_); } net::URLFetcher::RequestType InsertResourceOperation::GetRequestType() const { return net::URLFetcher::POST; } bool InsertResourceOperation::GetContentData(std::string* upload_content_type, std::string* upload_content) { *upload_content_type = kContentTypeApplicationJson; base::DictionaryValue root; root.SetString("id", resource_id_); base::JSONWriter::Write(&root, upload_content); DVLOG(1) << "InsertResource data: " << *upload_content_type << ", [" << *upload_content << "]"; return true; } //========================== DeleteResourceOperation =========================== DeleteResourceOperation::DeleteResourceOperation( OperationRegistry* registry, net::URLRequestContextGetter* url_request_context_getter, const DriveApiUrlGenerator& url_generator, const std::string& parent_resource_id, const std::string& resource_id, const EntryActionCallback& callback) : EntryActionOperation(registry, url_request_context_getter, callback), url_generator_(url_generator), parent_resource_id_(parent_resource_id), resource_id_(resource_id) { DCHECK(!callback.is_null()); } DeleteResourceOperation::~DeleteResourceOperation() {} GURL DeleteResourceOperation::GetURL() const { return url_generator_.GetChildrenUrlForRemoval( parent_resource_id_, resource_id_); } net::URLFetcher::RequestType DeleteResourceOperation::GetRequestType() const { return net::URLFetcher::DELETE_REQUEST; } } // namespace drive } // namespace google_apis
zcbenz/cefode-chromium
chrome/browser/google_apis/drive_api_operations.cc
C++
bsd-3-clause
10,254
<head> <link rel="stylesheet" type="text/css" href="/Proyecto2015/modules/admwiki/views/css/styles.css"> </head> <?php use yii\helpers\Html; use yii\widgets\ListView; use app\models\Empleados; /** * @var $this yii\web\View * @var $dataProvider yii\data\ActiveDataProvider * @var $searchModel app\modules\admwiki\models\ArticulosSearch * @var $model Articulos */ $this->title = 'Articulos'; $this->params['breadcrumbs'][] = ['label' => 'Articulos', 'url' => ['index']]; $this->params['breadcrumbs'][] = "Listado de Historicos"; ?> <a href='/Proyecto2015/web/index.php/admwiki/'><< VOLVER ADM-WIKI</a> <br><br> <div id="indexhistorico-encabezado"> <div id="indexhistorico-encabezado-volver"> <a href="javascript:history.back()"><< Volver al Articulo Vigente</a> </div> <div id="indexhistorico-encabezado-botonera"> <!-- <?= Html::a('Crear Articulo', ['create'], ['class' => 'btn btn-success']) ?> --> </div> </div> <div id="indexhistorico-contenedor"> <div id="indexhistorico-titulo"> LISTADO DE LOS HISTORICOS DEL ARTICULO SELECCIONADO </div> <?php //echo $this->render('_search', ['model' => $searchModel]); ?> <div id="indexhistorico-contenido"> <?= ListView::widget([ 'dataProvider' => $dataProvider, 'itemView'=>'_view_historico', ]); ?> </div> </div>
programacionav/Proyecto2015
modules/admwiki/views/articulos/index_historico.php
PHP
bsd-3-clause
1,294
/*
 *  Copyright (c) 2016, Facebook, Inc.
 *  All rights reserved.
 *
 *  This source code is licensed under the BSD-style license found in the
 *  LICENSE file in the root directory of this source tree. An additional grant
 *  of patent rights can be found in the PATENTS file in the same directory.
 *
 */
#include "ServiceInfo.h"

#include <functional>
#include <string>
#include <unordered_map>
#include <vector>

#include <folly/Format.h>
#include <folly/json.h>
#include <folly/Memory.h>
#include <folly/Range.h>

#include "mcrouter/config-impl.h"
#include "mcrouter/config.h"
#include "mcrouter/lib/fbi/cpp/globals.h"
#include "mcrouter/lib/fbi/cpp/util.h"
#include "mcrouter/lib/McRequestList.h"
#include "mcrouter/lib/network/ThriftMessageList.h"
#include "mcrouter/lib/RouteHandleTraverser.h"
#include "mcrouter/McrouterFiberContext.h"
#include "mcrouter/McrouterInstance.h"
#include "mcrouter/options.h"
#include "mcrouter/proxy.h"
#include "mcrouter/ProxyConfigBuilder.h"
#include "mcrouter/routes/ProxyRoute.h"
#include "mcrouter/standalone_options.h"

namespace facebook { namespace memcache { namespace mcrouter {

/**
 * Private implementation of ServiceInfo (pimpl).
 *
 * Maps textual admin command names (e.g. "version", "options",
 * "route_handles") to handler callables; the special "route" command is
 * dispatched separately because it performs asynchronous background requests.
 */
struct ServiceInfo::ServiceInfoImpl {
  proxy_t* proxy_;
  ProxyRoute& proxyRoute_;
  // Command name -> handler. Each handler receives the already-split
  // argument list and returns the reply payload as a string.
  std::unordered_map<
    std::string,
    std::function<std::string(const std::vector<folly::StringPiece>& args)>>
  commands_;

  ServiceInfoImpl(proxy_t* proxy, const ProxyConfig& config);

  // Parses "cmd(arg1,arg2)" out of the request key and dispatches it.
  template <class Request>
  void handleRequest(
      folly::StringPiece req,
      const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx) const;

  // Entry point for the "route" command: expects (op, key) arguments.
  template <class Request>
  void handleRouteCommand(
      const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
      const std::vector<folly::StringPiece>& args) const;

  // Runs a recording request of the given Operation for keyStr and replies
  // with the list of destination hosts the request would be routed to.
  template <class Request, class Operation>
  void handleRouteCommandForOp(
      const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
      std::string keyStr,
      Operation) const;

  // Recursion base case for op-name lookup over McOpList (unknown op).
  template <class Request>
  void routeCommandHelper(
      folly::StringPiece op,
      folly::StringPiece key,
      const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
      McOpList::Item<0>) const;

  // Recursive case: match op name against McOpList::Item<op_id>, else recurse.
  template <class Request, int op_id>
  void routeCommandHelper(
      folly::StringPiece op,
      folly::StringPiece key,
      const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
      McOpList::Item<op_id>) const;
};

template <class Request, class Operation>
void ServiceInfo::ServiceInfoImpl::handleRouteCommandForOp(
    const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
    std::string keyStr,
    Operation) const {
  // Run the recording request on a fiber; the finally-callback assembles the
  // collected destinations into a CRLF-separated reply.
  proxy_->fiberManager.addTaskFinally(
    [this, keyStr, proxy = proxy_]() {
      auto destinations = folly::make_unique<std::vector<std::string>>();
      folly::fibers::Baton baton;
      // Recording context: instead of sending over the network, each
      // destination the routing tree selects is appended to `destinations`.
      auto rctx = ProxyRequestContext::createRecordingNotify(
        *proxy,
        baton,
        [&destinations](folly::StringPiece, size_t, const AccessPoint& dest) {
          destinations->push_back(dest.toHostPortString());
        }
      );
      TypedThriftRequest<typename TypeFromOp<Operation::mc_op,
                                             RequestOpMapping>::type>
        recordingReq(keyStr);
      fiber_local::runWithLocals([ctx = std::move(rctx),
                                  &recordingReq,
                                  &proxyRoute = proxyRoute_]() mutable {
        fiber_local::setSharedCtx(std::move(ctx));
        /* ignore the reply */
        proxyRoute.route(recordingReq);
      });
      // Wait until the recording context notifies completion.
      baton.wait();
      return destinations;
    },
    [ctx](folly::Try<std::unique_ptr<std::vector<std::string>>>&& data) {
      // Join destinations with "\r\n" (no trailing terminator).
      std::string str;
      const auto& destinations = *data;
      for (const auto& d : *destinations) {
        if (!str.empty()) {
          str.push_back('\r');
          str.push_back('\n');
        }
        str.append(d);
      }
      ReplyT<Request> reply(mc_res_found);
      reply.setValue(str);
      ctx->sendReply(std::move(reply));
    }
  );
}

/**
 * Renders the route-handle tree for the given op/key as an indented,
 * one-name-per-line string. Recurses over McOpList until the op name matches.
 */
template <int op_id>
inline std::string routeHandlesCommandHelper(folly::StringPiece op,
                                             folly::StringPiece key,
                                             const ProxyRoute& proxyRoute,
                                             McOpList::Item<op_id>) {
  if (op == mc_op_to_string(McOpList::Item<op_id>::op::mc_op)) {
    std::string tree;
    int level = 0;
    // Depth-first traversal: indentation depth mirrors tree depth.
    RouteHandleTraverser<McrouterRouteHandleIf> t(
      [&tree, &level](const McrouterRouteHandleIf& rh) {
        tree.append(std::string(level, ' ') + rh.routeName() + '\n');
        ++level;
      },
      [&level]() {
        --level;
      }
    );
    proxyRoute.traverse(
        TypedThriftRequest<typename TypeFromOp<McOpList::Item<op_id>::op::mc_op,
                                               RequestOpMapping>::type>(key),
        t);
    return tree;
  }

  return routeHandlesCommandHelper(
    op, key, proxyRoute, McOpList::Item<op_id-1>());
}

// Base case of the op-list recursion: no op matched.
inline std::string routeHandlesCommandHelper(
    folly::StringPiece op,
    folly::StringPiece key,
    const ProxyRoute& proxyRoute,
    McOpList::Item<0>) {
  throw std::runtime_error("route_handles: unknown op " + op.str());
}

// Base case: the requested op is not in McOpList.
template <class Request>
void ServiceInfo::ServiceInfoImpl::routeCommandHelper(
    folly::StringPiece op,
    folly::StringPiece,
    const std::shared_ptr<ProxyRequestContextTyped<Request>>&,
    McOpList::Item<0>) const {
  throw std::runtime_error("route: unknown op " + op.str());
}

template <class Request, int op_id>
void ServiceInfo::ServiceInfoImpl::routeCommandHelper(
    folly::StringPiece op,
    folly::StringPiece key,
    const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
    McOpList::Item<op_id>) const {
  if (op == mc_op_to_string(McOpList::Item<op_id>::op::mc_op)) {
    handleRouteCommandForOp(ctx, key.str(), typename McOpList::Item<op_id>::op());
    return;
  }

  routeCommandHelper(op, key, ctx, McOpList::Item<op_id-1>());
}

/* Must be here since unique_ptr destructor needs to know complete
   ServiceInfoImpl type */
ServiceInfo::~ServiceInfo() {
}

ServiceInfo::ServiceInfo(proxy_t* proxy, const ProxyConfig& config)
    : impl_(folly::make_unique<ServiceInfoImpl>(proxy, config)) {
}

ServiceInfo::ServiceInfoImpl::ServiceInfoImpl(proxy_t* proxy,
                                              const ProxyConfig& config)
    : proxy_(proxy),
      proxyRoute_(config.proxyRoute()) {
  // "version": static package version string.
  commands_.emplace("version",
    [] (const std::vector<folly::StringPiece>& args) {
      return MCROUTER_PACKAGE_STRING;
    }
  );

  // "config_age": seconds since the current config was loaded.
  commands_.emplace("config_age",
    [proxy] (const std::vector<folly::StringPiece>& args) {
      /* capturing this and accessing proxy_ crashes gcc-4.7 */
      return std::to_string(stat_get_config_age(proxy->stats, time(nullptr)));
    }
  );

  // "config_file": path of the active config (from --config=file:... or
  // the legacy config_file option); throws if neither is set.
  commands_.emplace("config_file",
    [this] (const std::vector<folly::StringPiece>& args) {
      folly::StringPiece configStr = proxy_->router().opts().config;
      if (configStr.startsWith(ConfigApi::kFilePrefix)) {
        configStr.removePrefix(ConfigApi::kFilePrefix);
        return configStr.str();
      }

      if (proxy_->router().opts().config_file.empty()) {
        throw std::runtime_error("no config file found!");
      }

      return proxy_->router().opts().config_file;
    }
  );

  // "options" / "options(name)": dump all startup options, or one by name.
  commands_.emplace("options",
    [this] (const std::vector<folly::StringPiece>& args) {
      if (args.size() > 1) {
        throw std::runtime_error("options: 0 or 1 args expected");
      }

      auto optDict = proxy_->router().getStartupOpts();

      if (args.size() == 1) {
        auto it = optDict.find(args[0].str());
        if (it == optDict.end()) {
          throw std::runtime_error("options: option " + args[0].str() +
                                   " not found");
        }
        return it->second;
      }

      // Print all options in order listed in the file
      auto optData = McrouterOptions::getOptionData();
      auto startupOpts = McrouterStandaloneOptions::getOptionData();
      optData.insert(optData.end(), startupOpts.begin(), startupOpts.end());
      std::string str;
      for (auto& opt : optData) {
        if (optDict.find(opt.name) != optDict.end()) {
          str.append(opt.name + " " + optDict[opt.name] + "\n");
        }
      }
      return str;
    }
  );

  /*
    This is a special case and handled separately below

    {"route", ...
    },
  */

  // "route_handles(op,key)": print the routing tree for op/key.
  commands_.emplace("route_handles",
    [this] (const std::vector<folly::StringPiece>& args) {
      if (args.size() != 2) {
        throw std::runtime_error("route_handles: 2 args expected");
      }
      auto op = args[0];
      auto key = args[1];

      return routeHandlesCommandHelper(op, key, proxyRoute_,
                                       McOpList::LastItem());
    }
  );

  // "config_md5_digest": md5 of the loaded config, if computed.
  commands_.emplace("config_md5_digest",
    [&config] (const std::vector<folly::StringPiece>& args) {
      if (config.getConfigMd5Digest().empty()) {
        throw std::runtime_error("no config md5 digest found!");
      }
      return config.getConfigMd5Digest();
    }
  );

  // "config_sources_info": pretty JSON describing where config came from.
  commands_.emplace("config_sources_info",
    [this] (const std::vector<folly::StringPiece>& args) {
      auto configInfo = proxy_->router().configApi().getConfigSourcesInfo();
      return toPrettySortedJson(configInfo);
    }
  );

  // "preprocessed_config": re-load and preprocess the config, return as JSON.
  commands_.emplace("preprocessed_config",
    [this] (const std::vector<folly::StringPiece>& args) {
      std::string confFile;
      std::string path;
      if (!proxy_->router().configApi().getConfigFile(confFile, path)) {
        throw std::runtime_error("Can not load config from " + path);
      }
      ProxyConfigBuilder builder(proxy_->router().opts(),
                                 proxy_->router().configApi(),
                                 confFile);
      return toPrettySortedJson(builder.preprocessedConfig());
    }
  );

  // "hostid": numeric host id of this machine.
  commands_.emplace("hostid",
    [] (const std::vector<folly::StringPiece>& args) {
      return folly::to<std::string>(globals::hostid());
    }
  );

  // "verbosity" / "verbosity(n)": get or set glog FLAGS_v at runtime.
  commands_.emplace("verbosity",
    [] (const std::vector<folly::StringPiece>& args) {
      if (args.size() == 1) {
        auto before = FLAGS_v;
        FLAGS_v = folly::to<int>(args[0]);
        return folly::sformat("{} -> {}", before, FLAGS_v);
      } else if (args.empty()) {
        return folly::to<std::string>(FLAGS_v);
      }
      throw std::runtime_error("expected at most 1 argument, got "
                               + folly::to<std::string>(args.size()));
    }
  );
}

template <class Request>
void ServiceInfo::ServiceInfoImpl::handleRequest(
    folly::StringPiece key,
    const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx) const {
  // Split "cmd(arg1,arg2,...)" into cmd and its comma-separated args.
  // A bare "cmd" (no parens) yields an empty args list.
  auto p = key.find('(');
  auto cmd = key;
  folly::StringPiece argsStr(key.end(), key.end());
  if (p != folly::StringPiece::npos &&
      key.back() == ')') {
    assert(key.size() - p >= 2);
    cmd = folly::StringPiece(key.begin(), key.begin() + p);
    argsStr = folly::StringPiece(key.begin() + p + 1,
                                 key.begin() + key.size() - 1);
  }
  std::vector<folly::StringPiece> args;
  if (!argsStr.empty()) {
    folly::split(',', argsStr, args);
  }

  std::string replyStr;
  try {
    if (cmd == "route") {
      /* Route is a special case since it involves background requests */
      handleRouteCommand(ctx, args);
      return;
    }
    auto it = commands_.find(cmd.str());
    if (it == commands_.end()) {
      throw std::runtime_error("unknown command: " + cmd.str());
    }
    replyStr = it->second(args);
    // Strip a single trailing newline so replies are uniform.
    if (!replyStr.empty() && replyStr.back() == '\n') {
      replyStr = replyStr.substr(0, replyStr.size() - 1);
    }
  } catch (const std::exception& e) {
    // All handler failures are reported in-band as "ERROR: ..." replies.
    replyStr = std::string("ERROR: ") + e.what();
  }
  ReplyT<Request> reply(mc_res_found);
  reply.setValue(std::move(replyStr));
  ctx->sendReply(std::move(reply));
}

template <class Request>
void ServiceInfo::ServiceInfoImpl::handleRouteCommand(
    const std::shared_ptr<ProxyRequestContextTyped<Request>>& ctx,
    const std::vector<folly::StringPiece>& args) const {
  if (args.size() != 2) {
    throw std::runtime_error("route: 2 args expected");
  }
  auto op = args[0];
  auto key = args[1];

  routeCommandHelper(op, key, ctx, McOpList::LastItem());
}

// Public entry point: only get-requests are supported for service info.
void ServiceInfo::handleRequest(
  folly::StringPiece key,
  const std::shared_ptr<ProxyRequestContextTyped<
    TypedThriftRequest<cpp2::McGetRequest>>>& ctx) const {
  impl_->handleRequest(key, ctx);
}

}}}  // facebook::memcache::mcrouter
yqzhang/mcrouter
mcrouter/ServiceInfo.cpp
C++
bsd-3-clause
12,548
<?php namespace app\modules\users\models\search; use app\modules\core\data\ActiveDataProvider; use Yii; use yii\base\Model; use app\modules\users\models\ar\User; class UserSearch extends User { /** * @inheritdoc */ public function rules() { return [ [['id', 'superadmin', 'status', 'created_at', 'updated_at', 'email_confirmed'], 'integer'], [['registration_ip', 'email'], 'string'], [['is_from_service'], 'safe'] ]; } /** * @param array $params * @return ActiveDataProvider */ public function search($params = []) { $query = self::find(); $query->with(['profile.userAuthClient']); if (!Yii::$app->user->isSuperadmin) { $query->where(['superadmin' => 0]); } $dataProvider = new ActiveDataProvider([ 'query' => $query, 'pagination' => [ //'pageSize' => \roman444uk\yii\widgets\WidgetPageSize::getPageSize(), //'defaultPageLast' => true ], 'sort' => [ 'defaultOrder' => [ 'id' => SORT_ASC, ], ], ]); if (!($this->load($params) && $this->validate())) { return $dataProvider; } $query->andFilterWhere([ 'id' => $this->id, 'superadmin' => $this->superadmin, 'status' => $this->status, 'registration_ip' => $this->registration_ip, 'created_at' => $this->created_at, 'updated_at' => $this->updated_at, 'email_confirmed' => $this->email_confirmed, 'is_from_service' => $this->is_from_service, ]); $query->andFilterWhere(['like', 'email', $this->email]); return $dataProvider; } }
roman444uk/adverts
modules/users/models/search/UserSearch.php
PHP
bsd-3-clause
1,852
from mrjob.job import MRJob from itertools import combinations class MRStatistics(MRJob): def mapper(self, key, line): account_id, user_id, purchased, session_start_time, session_end_time = line.split() purchased = int(purchased) session_duration = int(session_end_time) - int(session_start_time) # y^0, y^1, y^2 - session count, purchased, purchased yield (account_id, 'conversion rate'), (1, purchased, purchased) # purchased ^ 2 = purchased # y^0, y^1, y^2 - session count, sum session times, sum of squares of session times yield (account_id, 'average session length'), (1, session_duration, session_duration ** 2) def reducer(self, metric, metric_values): # for metric, yield [sum(y^0), sum(y^1), sum(y^2)] yield metric, reduce(lambda x, y: map(sum, zip(x, y)), metric_values) if __name__ == '__main__': MRStatistics.run()
jepatti/mrjob_recipes
statistic_rollup/statistic_summarize.py
Python
bsd-3-clause
920
/**
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met :
 *
 * . Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * . Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * . The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

package cppstyle.checks;

import static org.easymock.EasyMock.expect;
import java.io.StringReader;
import java.util.Properties;
import cppast.ParseException;
import cppast.Parser;
import cpptools.EasyMockTestCase;

/**
 * Unit tests for TypeNameCheck: verifies class names are validated against
 * the regular expression configured via the "format" property.
 *
 * @author Mathieu Champlon
 */
public final class TypeNameCheckTest extends EasyMockTestCase
{
    // Expected type-name pattern used by most tests: UpperCamelCase.
    private static final String FORMAT = "^[A-Z][a-zA-Z]*$";
    /**
     * Mock objects.
     */
    private CheckListener listener;
    private Properties properties;

    protected void setUp() throws Exception
    {
        // Fresh mocks for every test.
        listener = createMock( CheckListener.class );
        properties = createMock( Properties.class );
    }

    /**
     * Parses the given C++ snippet and runs TypeNameCheck over its AST with
     * the given name format. Failures are reported through the mocked
     * listener, so a test that expects a failure records it before calling.
     */
    private void check( final String data, final String regex ) throws ParseException
    {
        expect( properties.getProperty( "format" ) ).andReturn( regex );
        replay();
        final TypeNameCheck check = new TypeNameCheck( listener, properties );
        check.visit( new Parser( new StringReader( data ) ).translation_unit(), null );
    }

    // The check must refuse to construct without a "format" property.
    public void testMissingFormatPropertyThrows()
    {
        expect( properties.getProperty( "format" ) ).andReturn( null );
        replay();
        try
        {
            new TypeNameCheck( listener, properties );
        }
        catch( final Exception e )
        {
            return;
        }
        fail( "should have thrown" );
    }

    // Matching name: no listener callbacks expected.
    public void testClassNameMatchingFormatIsValid() throws ParseException
    {
        check( "class ValidClassName {};", FORMAT );
    }

    // Non-matching name: a failure must be reported on line 1.
    public void testClassNameNonMachingFormatIsFailure() throws ParseException
    {
        listener.fail( "invalid type name", 1 );
        check( "class invalidClassName {};", FORMAT );
    }

    // Anonymous classes have no type name to validate and are skipped.
    public void testAnonymousClassIsSkipped() throws ParseException
    {
        check( "class {} myClass;", FORMAT );
    }
}
harveyt/cppncss
src/test/java/cppstyle/checks/TypeNameCheckTest.java
Java
bsd-3-clause
3,405
using System;
using System.Web;
using System.Web.Routing;
using MbUnit.Framework;
using Moq;
using Ninject;
using Subtext.Framework.Routing;
using Subtext.Framework.Web.HttpModules;
using Subtext.Infrastructure;

namespace UnitTests.Subtext.Framework.Routing
{
    /// <summary>
    /// Tests for <c>DirectoryRoute</c>: URL generation with and without a
    /// blog subfolder, and the structure of the route it registers.
    /// </summary>
    [TestFixture]
    public class DirectoryRouteTests
    {
        [Test]
        public void GetVirtualPath_WithoutSubolder_ReturnsUrlWithoutSubfolder()
        {
            //arrange
            var route = new DirectoryRoute("admin", new Mock<IServiceLocator>().Object);
            var httpContext = new Mock<HttpContextBase>();
            httpContext.FakeRequest("~/admin/posts/foo.aspx");
            // NOTE(review): blogRequest is constructed but never used in this
            // test — presumably a leftover; confirm whether it can be removed.
            var blogRequest = new BlogRequest("localhost", null, new Uri("http://localhost"), true);
            var routeData = new RouteData();
            var requestContext = new RequestContext(httpContext.Object, routeData);

            //act
            VirtualPathData virtualPath = route.GetVirtualPath(requestContext
                , new RouteValueDictionary(
                      new {pathInfo = "posts/foo.aspx"}));

            //assert
            Assert.IsNotNull(virtualPath);
            Assert.AreEqual("admin/posts/foo.aspx", virtualPath.VirtualPath);
        }

        [Test]
        public void GetVirtualPath_WithSubolder_ReturnsUrlWithSubfolder()
        {
            //arrange
            var route = new DirectoryRoute("admin", new Mock<IServiceLocator>().Object);
            var httpContext = new Mock<HttpContextBase>();
            httpContext.FakeRequest("~/subfolder/admin/");
            // NOTE(review): blogRequest is constructed but never used here either.
            var blogRequest = new BlogRequest("localhost", "subfolder", new Uri("http://localhost"), false);
            var routeData = new RouteData();
            // The subfolder route value drives subfolder-prefixed URL generation.
            routeData.Values.Add("subfolder", "subfolder");
            var requestContext = new RequestContext(httpContext.Object, routeData);

            //act
            VirtualPathData virtualPath = route.GetVirtualPath(requestContext
                , new RouteValueDictionary(
                      new {pathInfo = "posts/foo.aspx"}));

            //assert
            Assert.IsNotNull(virtualPath);
            Assert.AreEqual("subfolder/admin/posts/foo.aspx", virtualPath.VirtualPath);
        }

        [Test]
        public void Ctor_WithDirectoryNameArg_AppendsPathInfoCatchAll()
        {
            //arrange, act
            // The route URL should be the directory name plus a catch-all
            // {*pathInfo} segment.
            var route = new DirectoryRoute("dir", new Mock<IServiceLocator>().Object);
            ;

            //assert
            Assert.AreEqual("dir/{*pathInfo}", route.Url);
        }

        [Test]
        public void Ctor_WithDirectoryNameArg_SetsDirectoryName()
        {
            //arrange, act
            var route = new DirectoryRoute("dir", new Mock<IServiceLocator>().Object);
            ;

            //assert
            Assert.AreEqual("dir", route.DirectoryName);
        }

        [Test]
        public void GetRouteData_MatchingTheImplicitSubfolderRoute_ReturnsParentDirectoryRoute()
        {
            //arrange
            var route = new DirectoryRoute("dir", new Mock<IServiceLocator>().Object);
            ;
            var httpContext = new Mock<HttpContextBase>();
            httpContext.FakeRequest("~/subfolder/dir/foo.aspx");
            var blogRequest = new BlogRequest("localhost", "subfolder", new Uri("http://localhost"), false);

            //act
            RouteData routeData = route.GetRouteData(httpContext.Object, blogRequest);

            //assert
            // A subfolder request should still resolve to the parent
            // DirectoryRoute instance.
            Assert.AreEqual(route, routeData.Route);
        }
    }
}
Dashboard-X/SubText-2.5.2.0.src
UnitTests.Subtext/Framework/Routing/DirectoryRouteTests.cs
C#
bsd-3-clause
3,846
/* -*- Mode: C++; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
 *     Copyright 2017 Couchbase, Inc.
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 */

#include "doc_pre_expiry.h"
#include <memcached/protocol_binary.h>
#include <platform/sized_buffer.h>
#include <xattr/blob.h>
#include <xattr/utils.h>

/**
 * Prepare a document's value for expiry by stripping the body and all
 * user extended attributes, keeping only system xattrs.
 *
 * Works in place on itm_info.value[0] and updates itm_info.nbytes /
 * iov_len to the pruned length.
 *
 * @return false if nothing survives pruning (no xattrs at all, or only
 *         user xattrs) — i.e. the entire content should be removed;
 *         true if system xattrs remain and itm_info was updated.
 */
bool document_pre_expiry(item_info& itm_info) {
    if (!mcbp::datatype::is_xattr(itm_info.datatype)) {
        // The object does not contain any XATTRs so we should remove
        // the entire content
        return false;
    }

    // The xattr section sits at the front of the value; everything after
    // get_body_offset() is the document body, which is dropped here by
    // limiting the working buffer to the xattr section only.
    const auto xattr_size = cb::xattr::get_body_offset(
        {static_cast<const char*>(itm_info.value[0].iov_base),
         itm_info.value[0].iov_len});

    cb::byte_buffer payload{static_cast<uint8_t*>(itm_info.value[0].iov_base),
                            xattr_size};
    cb::xattr::Blob blob(payload);

    // Drop all user keys; only system xattrs survive.
    blob.prune_user_keys();
    auto pruned = blob.finalize();
    if (pruned.len == 0) {
        // The old payload only contained user xattrs and
        // we removed everything
        return false;
    }

    // Pruning the user keys should just repack the data internally
    // without any allocations, but to be on the safe side we should
    // just check to verify, and if it happened to have done any
    // reallocations we could copy the data into our buffer if it fits.
    // (or we could throw an exception here, but I'd hate to get that
    // 2AM call if we can avoid that with a simple fallback check)
    if (pruned.buf != payload.buf) {
        if (pruned.len > payload.len) {
            throw std::logic_error("pre_expiry_document: the pruned object "
                                       "won't fit!");
        }
        std::copy(pruned.buf, pruned.buf + pruned.len, payload.buf);
    }

    // Update the length field of the item
    itm_info.nbytes = pruned.len;
    itm_info.value[0].iov_len = pruned.len;

    return true;
}
daverigby/memcached
daemon/doc_pre_expiry.cc
C++
bsd-3-clause
2,452
import Button from "@material-ui/core/Button";
import ClickAwayListener from "@material-ui/core/ClickAwayListener";
import Grow from "@material-ui/core/Grow";
import IconButton from "@material-ui/core/IconButton";
import Paper from "@material-ui/core/Paper";
import Popper from "@material-ui/core/Popper";
import {
  createStyles,
  Theme,
  withStyles,
  WithStyles
} from "@material-ui/core/styles";
import DeleteIcon from "@material-ui/icons/Delete";
import { ContentState } from "draft-js";
import * as React from "react";

import i18n from "../../i18n";
import Anchor from "../Anchor";
import Toggle from "../Toggle";

/**
 * Props for a Draft.js image entity renderer.
 * The image URL is read from the entity's `href` data field; clicking the
 * image toggles a popper with "Replace" and delete actions that call back
 * into the editor via onEdit/onRemove with the entity key.
 */
interface ImageEntityProps {
  children: React.ReactNode;
  contentState: ContentState;
  entityKey: string;
  onEdit: (entityKey: string) => void;
  onRemove: (entityKey: string) => void;
}

const styles = (theme: Theme) =>
  createStyles({
    anchor: {
      display: "inline-block"
    },
    container: {
      alignItems: "center",
      display: "flex"
    },
    image: {
      maxWidth: "100%"
    },
    inline: {
      display: "inline-block"
    },
    root: {
      alignItems: "center",
      display: "flex",
      minHeight: 72,
      padding: theme.spacing.unit * 1.5
    }
  });

// Renders the image plus an action popper. Toggle supplies the open state;
// Anchor supplies the DOM ref the Popper attaches to. Clicking away (or
// choosing "Replace") closes the popper via `disable`.
const ImageEntity = withStyles(styles, { name: "ImageEntity" })(
  ({
    classes,
    contentState,
    entityKey,
    onEdit,
    onRemove
  }: ImageEntityProps & WithStyles<typeof styles>) => (
    <Toggle>
      {(isOpened, { disable, toggle }) => (
        <>
          <Anchor>
            {anchor => (
              <div className={classes.anchor} ref={anchor}>
                <Popper
                  open={isOpened}
                  anchorEl={anchor.current}
                  transition
                  disablePortal
                  placement="bottom"
                >
                  {({ TransitionProps, placement }) => (
                    <Grow
                      {...TransitionProps}
                      style={{ transformOrigin: placement }}
                    >
                      <Paper className={classes.root}>
                        <ClickAwayListener
                          onClickAway={disable}
                          mouseEvent="onClick"
                        >
                          <div className={classes.container}>
                            <Button
                              onClick={() => {
                                disable();
                                onEdit(entityKey);
                              }}
                              color="primary"
                              variant="flat"
                            >
                              {i18n.t("Replace")}
                            </Button>
                            <IconButton onClick={() => onRemove(entityKey)}>
                              <DeleteIcon color="primary" />
                            </IconButton>
                          </div>
                        </ClickAwayListener>
                      </Paper>
                    </Grow>
                  )}
                </Popper>
              </div>
            )}
          </Anchor>
          <img
            className={classes.image}
            src={contentState.getEntity(entityKey).getData().href}
            onClick={toggle}
          />
        </>
      )}
    </Toggle>
  )
);
export default ImageEntity;
UITools/saleor
saleor/static/dashboard-next/components/RichTextEditor/ImageEntity.tsx
TypeScript
bsd-3-clause
3,408
package com.nonagon.modular.dynamicform.client; import com.google.gwt.core.client.JavaScriptObject; public class FormInstanceValue extends JavaScriptObject { public static FormInstanceValue create() { FormInstanceValue instance = ((FormInstanceValue)JavaScriptObject.createObject()); instance.init(); return instance; } protected final native void init()/*-{ this.__type = "Nonagon.Modular.DynamicForm.FormInstanceValue, Nonagon.Modular.DynamicForm"; }-*/; protected FormInstanceValue() {} public final native int getId()/*-{ return this.Id; }-*/; public final native int getFormInstanceId()/*-{ return this.FormInstanceId; }-*/; public final native void setFormInstanceId(int value)/*-{ this.FormInstanceId = value; }-*/; public final native int getFieldId()/*-{ return this.FieldId; }-*/; public final native void setFieldId(int value)/*-{ this.FieldId = value; }-*/; public final native String getFieldCode()/*-{ return this.FieldCode; }-*/; public final native void setFieldCode(String value)/*-{ this.FieldCode = value; }-*/; public final native String getFieldValue()/*-{ return this.Value; }-*/; public final native void setFieldValue(String value)/*-{ this.Value = value; }-*/; }
Nonagon-x/Nonagon.Modular.DynamicForm
Frontend/GWT/gwt-modular-dynamicform/src/com/nonagon/modular/dynamicform/client/FormInstanceValue.java
Java
bsd-3-clause
1,233
<?php

namespace common\components;

use Yii;
use yii\base\Object;
use common\models\Subject;
use common\models\ScreeningForm;
use common\models\ScreeningQuestion;
use common\models\ScreeningEntry;
use common\models\ScreeningResponse;
use common\models\Resource;
use common\components\Types;

/**
 * Generates and digitally signs participant screening PDFs using
 * FPDF/FPDI for layout and SetaPDF-Signer for the signature.
 *
 * NOTE(review): several filesystem paths below are hard-coded to a
 * developer machine (/Users/Spiro, /Users/spiro/...) — this component will
 * fail anywhere else; confirm and move these to configuration.
 */
class PdfComponent extends Object
{
    // Font family used throughout the generated document.
    private $_font = 'Helvetica';

    /**
     * Loads the FPDF / FPDI / SetaPDF libraries via require_once.
     * These vendors are not autoloaded, hence the explicit requires.
     */
    public function init()
    {
        require_once(\yii::$app->basePath . "/../vendor/setasign/fpdf/fpdf.php");
        require_once(\yii::$app->basePath . "/../vendor/setasign/fpdi/fpdi.php");
        require_once(\yii::$app->basePath . "/../vendor/setasign/fpdi_pdf-parser/pdf_parser.php");
        require_once(\yii::$app->basePath . "/../vendor/setasign/setapdf-signer/library/SetaPDF/Autoload.php");
        return parent::init();
    }

    /**
     * Builds the signed screening PDF for the entry identified by $hash.
     *
     * Layout: letterhead template, entry/participant/researcher header,
     * numbered question/response list, then participant and researcher
     * signature images loaded from /tmp/{subject,researcher}-{hash}.png.
     * The result is digitally signed with an OpenSSL certificate and
     * written to disk.
     *
     * @param string $hash ScreeningEntry hash identifying the entry.
     * @return bool true on completion.
     */
    public function createScreeningPdf($hash)
    {
        $screening_entry_model = ScreeningEntry::findOne(['hash'=>$hash]);
        // NOTE(review): $screening_form_model and $subject_model are loaded
        // but never used below — presumably leftovers; confirm removal.
        $screening_form_model = ScreeningForm::findOne(['id'=>$screening_entry_model->screening_form_id]);
        $subject_model = Subject::findOne(['id'=>$screening_entry_model->subject_id]);
        $count =1;
        //$permissions = \SetaPDF_Core_SecHandler::PERM_DIGITAL_PRINT ;
        $this->_font= 'Helvetica';
        //class_exists('TCPDF', true); // trigger Composers autoloader to load the TCPDF class
        $pdf = new \FPDI();
        $pdf->SetAutoPageBreak(true);
        // add a page
        $pdf->SetTopMargin(30);
        $pdf->AddPage();
        // Use the letterhead PDF as the page background template.
        $pdf->setSourceFile(\yii::$app->basePath . "/../letterhead-mini-header.pdf");
        $tplIdx = $pdf->importPage(1);
        // use the imported page and place it at point 10,10 with a width of 100 mm
        $pdf->useTemplate($tplIdx, 0, 0);

        // Confidentiality banner + entry date and form title.
        $pdf->SetFont($this->_font, '',9);
        $pdf->SetXY(10,6);
        $pdf->Cell(0,3 ,'Confidential - Participant screening form' );

        $pdf->SetFont($this->_font, '',12);
        $pdf->SetXY(10,14);
        $pdf->MultiCell(150,3 ,yii::$app->DateComponent->timestampToUkDate($screening_entry_model->created_at), 0, 'R');
        $pdf->MultiCell(100,3 ,$screening_entry_model->screening_form_title, 0,'' );
        $pdf->Ln();

        // Participant / researcher header lines.
        $pdf->SetFont($this->_font, '',9);
        $pdf->Cell(100,4,sprintf('Participant: %s %s (dob %s)',
                                    $screening_entry_model->subject->first_name,
                                    $screening_entry_model->subject->last_name,
                                    yii::$app->DateComponent->isoToUkDate($screening_entry_model->subject->dob)
                                ) );
        $pdf->Cell(50,4,sprintf('Identifier: %s', $screening_entry_model->subject->cubric_id),0,'','R' );
        $pdf->Ln();
        $pdf->Cell(100,4,sprintf('Researcher: %s %s (project %s)',
                                    $screening_entry_model->researcher->first_name,
                                    $screening_entry_model->researcher->last_name,
                                    $screening_entry_model->project->code
                                ) );
        $pdf->Cell(50,4,sprintf('Resource: %s', $screening_entry_model->resource_title) , 0, '', 'R' );

        // Responses section: a caption resets the per-section numbering.
        $pdf->SetXY(10,38);
        $pdf->SetFont($this->_font, '',12);
        $pdf->Cell(150,4,sprintf('Responses') );
        $pdf->SetFont($this->_font, '',9);
        $pdf->Ln();
        foreach (yii::$app->screeningresponse->getResponses($hash) as $response)
        {
            if (strlen($response['caption']) > 0)
            {
                $pdf->Ln();
                $pdf->MultiCell( 180, 4 ,sprintf('%s ' , $response['caption']) , 0 , 'U');
                $count = 1;
            }
            $pdf->MultiCell( 180, 4 ,sprintf('%s. %s ', $count , $response['content']));
            $pdf->SetFont($this->_font, 'B',9);
            // Missing responses are rendered with an explicit placeholder.
            if ($response['response'] === null)
                $response['response'] = 'Not specified / Unknown.';
            $pdf->MultiCell( 180, 4 ,sprintf('%s ', $response['response']));
            $pdf->SetFont($this->_font, '',9);
            $count++;
            $pdf->Ln();
        }

        // Signatures section: signature images are expected to have been
        // written to /tmp by the signing UI beforehand.
        $pdf->Ln();
        $pdf->SetFont($this->_font, '',12);
        $pdf->Cell(180,4,sprintf('Signatures') );
        $pdf->Ln();
        $pdf->Ln();
        $pdf->SetFont($this->_font, '',9);
        $pdf->Cell( 100, 4 ,'Participant ');
        $pdf->Cell( 100, 4 ,'Researcher ');
        $pdf->Ln();
        $pdf->Ln();
        $currentX = $pdf->GetX();
        $currentY = $pdf->GetY();
        $pdf->Image( sprintf('/tmp/subject-%s.png' , $hash) , $currentX , $currentY );
        $currentX = $pdf->GetX();
        $pdf->Image( sprintf('/tmp/researcher-%s.png' , $hash) , $currentX +100 , $currentY );

        // now write some text above the imported page

        // NOW SET ScreeningEntry::progress_id = PUBLISHED so it cannot be edited again.
        // $pdfData = $pdf->Output('S');
        // create a writer

        // create a Http writer
        //$writer = new \SetaPDF_Core_Writer_Http("fpdf-sign-demo.pdf", true);
        // load document by filename
        //$document = new \SetaPDF_Core_Document::loadByString($pdfData, $writer);
        //$document = new \SetaPDF_Core_Reader_File($pdf->Output(), $writer);

        // NOTE(review): hard-coded developer output path — see class note.
        $writer = new \SetaPDF_Core_Writer_File("/Users/Spiro/tmp/myPDF.pdf");
        $document = \SetaPDF_Core_Document::loadByString($pdf->Output("S"), $writer);

        // let's prepare the temporary file writer:
        \SetaPDF_Core_Writer_TempFile::setTempDir("/tmp/");

        // create a signer instance for the document
        $signer = new \SetaPDF_Signer($document);

        // add a field with the name "Signature" to the top left of page 1
        $signer->addSignatureField(
            \SetaPDF_Signer_SignatureField::DEFAULT_FIELD_NAME, // Name of the signature field
            1, // put appearance on page 1
            \SetaPDF_Signer_SignatureField::POSITION_LEFT_BOTTOM,
            array('x'=>10, 'y'=>10), // Translate the position (x 50, y -80 -> 50 points right, 80 points down)
            180, // Width - 180 points
            50 // Height - 50 points
        );

        // set some signature properties
        $signer->setReason('Integrity');
        $signer->setLocation('CUBRIC');
        $signer->setContactInfo('+44 2920 703859');

        // ccreate an OpenSSL module instance
        $module = new \SetaPDF_Signer_Signature_Module_OpenSsl();

        // set the sign certificate
        // NOTE(review): certificate/key paths are hard-coded and the key
        // passphrase is empty — move to secure configuration.
        $module->setCertificate(file_get_contents("/Users/spiro/Sites/projects/certs/certificate.pem"));
        // set the private key for the sign certificate
        $module->setPrivateKey(array(file_get_contents("/Users/spiro/Sites/projects/certs/key.pem"),""));

        // create a Signature appearance
        $visibleAppearance = new \SetaPDF_Signer_Signature_Appearance_Dynamic($module);

        // choose a document to get the background from and convert the art box to an xObject
        $backgroundDocument = \SetaPDF_Core_Document::loadByFilename(Yii::getAlias('@frontend/web/img/cubric-logo.pdf') );
        $backgroundXObject = $backgroundDocument->getCatalog()->getPages()->getPage(1)->toXObject($document);

        // format the date
        $visibleAppearance->setShowFormat(
            \SetaPDF_Signer_Signature_Appearance_Dynamic::CONFIG_DATE, 'd-m-Y H:i:s'
        );
        // disable the distinguished name
        $visibleAppearance->setShow(
            \SetaPDF_Signer_Signature_Appearance_Dynamic::CONFIG_DISTINGUISHED_NAME, false
        );
        // set the background with 50% opacity
        $visibleAppearance->setGraphic($backgroundXObject, .5);
        //$visibleAppearance->setBackgroundLogo($backgroundXObject, .5);

        // sign/certify the document
        // define the appearance
        $signer->setAppearance($visibleAppearance);
        $signer->sign($module);

        // NOTE(review): these are no-op statements — the unlink() calls are
        // commented out, so the `if (...) ;` bodies do nothing. Confirm
        // whether cleanup of the /tmp signature images should be re-enabled.
        if (file_exists(sprintf('/tmp/subject-%s.png', $hash))) ;
           // unlink(sprintf('/tmp/subject-%s.png' , $hash));
        if (file_exists(sprintf('/tmp/researcher-%s.png',$hash))) ;
           // unlink(sprintf('/tmp/researcher-%s.png' , $hash));

        return true;
    }
}
spiro-stathakis/projects
common/components/PdfComponent.php
PHP
bsd-3-clause
10,305
/* Copyright (c) 2011, The Mineserver Project All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the The Mineserver Project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ //#include <iostream> //#include <iomanip> #include <limits> #include <ctime> #ifdef WIN32 #define _WINSOCKAPI_ #define NOMINMAX #include <windows.h> #include <wincrypt.h> #else #include <fstream> #endif #include "mineserver.h" #include "random.h" seed_type prng_seed = 0; MyRNG prng; MyUniform m_uniformUINT(0, std::numeric_limits<MyUniform::result_type>::max()); void initPRNG() { bool seedsuccess = false; #ifdef WIN32 HCRYPTPROV hCryptProv; BYTE* pbData = reinterpret_cast<BYTE*>(&prng_seed); if (CryptAcquireContext(&hCryptProv, NULL, NULL, PROV_RSA_FULL, 0)) { if (CryptGenRandom(hCryptProv, sizeof(prng_seed), pbData)) { seedsuccess = true; } CryptReleaseContext(hCryptProv, 0); } #else std::ifstream urandom("/dev/urandom"); if (urandom) { urandom.read(reinterpret_cast<char*>(&prng_seed), sizeof(prng_seed)); seedsuccess = true; } #endif if (!seedsuccess) { prng_seed = static_cast<seed_type>(std::time(NULL)); } //std::cout << "Seeding the PRNG with: 0x" << std::hex << std::uppercase << std::setfill('0') // << std::setw(2 * sizeof(MyRNG::result_type)) << prng_seed << std::endl; prng.seed(prng_seed); } double uniform01() { return double(m_uniformUINT(prng)) / double(std::numeric_limits<MyUniform::result_type>::max()); }
fador/mineserver
src/random.cpp
C++
bsd-3-clause
2,862
import numpy as np print '*******numpy array***********' randArray = np.random.rand(4,4) randMat = np.mat(randArray) irandMat = randMat.I a1=np.array(range(10,30,5)) a11=a1.reshape((2,2)) a111 = np.arange(12).reshape(3,4) a2=np.linspace(0,2,10) b=np.zeros((3,4)) c=np.ones((2,3,4),dtype='int16') d=np.empty((2,3)) print a1,a11,a2,b,c,d A1=np.arange(12) print A1 A1.shape=(3,4) #A.reshape(3,4) M=np.mat(A1.copy()) #Vector Stacking x = np.arange(0,10,2) # x=([0,2,4,6,8]) y = np.arange(5) # y=([0,1,2,3,4]) m = np.vstack([x,y]) # m=([[0,2,4,6,8], # [0,1,2,3,4]]) xy = np.hstack([x,y]) # xy =([0,2,4,6,8,0,1,2,3,4]) A=np.array([[1,1],[0,1]]) B=np.array([[2,0],[3,4]]) C=A*B # elementwise product D=np.dot(A,B) # matrix product a = np.random.random((2,3)) asum=a.sum() amin=a.min() amax=a.max() print a,asum,amin,amax print '*******numpy matrix***********' A=np.matrix('1.0,2.0;3.0,4.0') AT=A.T B=A*AT print A,AT,B
likeucode/PythonLearning
sciComputing/matrixComputing.py
Python
bsd-3-clause
1,067
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/Users for the canonical source repository * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace Users; use Zend\ModuleManager\Feature\AutoloaderProviderInterface; use Zend\Mvc\ModuleRouteListener; use Users\Model\userstable; use Zend\Db\ResultSet\ResultSet; use Zend\Db\TableGateway\TableGateway; use Zend\ModuleManager\Feature\ServiceProviderInterface; class Module implements AutoloaderProviderInterface { public function getAutoloaderConfig() { return array( 'Zend\Loader\ClassMapAutoloader' => array( __DIR__ . '/autoload_classmap.php', ), 'Zend\Loader\StandardAutoloader' => array( 'namespaces' => array( // if we're in a namespace deeper than one level we need to fix the \ in the path __NAMESPACE__ => __DIR__ . '/src/' . str_replace('\\', '/' , __NAMESPACE__), ), ), ); } public function getConfig() { return include __DIR__ . '/config/module.config.php'; } public function onBootstrap($e) { // You may not need to do this if you're doing it elsewhere in your // application $eventManager = $e->getApplication()->getEventManager(); $moduleRouteListener = new ModuleRouteListener(); $moduleRouteListener->attach($eventManager); } // Add this method: public function getServiceConfig() { return array( 'factories' => array( 'Users\Model\userstable' => function ($sm) { $dbAdapter = $sm->get('Zend\Db\Adapter\Adapter'); $table = new userstable($dbAdapter); return $table; } ) ); } }
sriducati/download
module/Users/Module.php
PHP
bsd-3-clause
1,980
using System;
using System.ComponentModel.DataAnnotations;
using System.Data.Common;
using System.Data.Entity;
using System.Linq;
using System.Threading.Tasks;
using NUnit.Framework;
using PeanutButter.RandomGenerators;
using PeanutButter.TestUtils.Entity;
// ReSharper disable AccessToDisposedClosure

namespace PeanutButter.Utils.Entity.Tests
{
    /// <summary>
    /// Exercises SaveChangesWithErrorReporting / SaveChangesWithErrorReportingAsync
    /// against a minimal EF context: the happy path persists, validation errors and
    /// SQL-level errors are rethrown as exceptions with descriptive messages.
    /// The Things table limits Name to nvarchar(50) and Notes to nvarchar(100),
    /// which the error tests deliberately overflow.
    /// </summary>
    [TestFixture]
    public class TestDbContextExtensions: EntityPersistenceTestFixtureBase<TestDbContextExtensions.ThingContext>
    {
        /// <summary>Simple entity; Name is capped at 50 chars by validation.</summary>
        public class Thing
        {
            public int Id { get; set; }
            [MaxLength(50)]
            public string Name { get; set; }
            // No [MaxLength] here: only the DB column (nvarchar(100)) limits Notes,
            // which is what lets the "SQL error" tests bypass EF validation.
            public string Notes { get; set; }
        }

        /// <summary>Minimal DbContext exposing the Things set.</summary>
        public class ThingContext: DbContext
        {
            public IDbSet<Thing> Things { get; set; }
            public ThingContext(DbConnection connection): base(connection, true)
            {
            }
        }

        /// <summary>Creates the schema the fixture runs against.</summary>
        [OneTimeSetUp]
        public void OneTimeSetup()
        {
            Configure(false, s => new DbSchemaImporter(s, "create table Things(id int primary key identity, Name nvarchar(50), Notes nvarchar(100));"));
        }

        /// <summary>Valid entity + sync save: the row round-trips intact.</summary>
        [Test]
        public void SaveChangesWithErrorReporting_WhenNoError_ShouldSave()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(2, 10);
            var expectedNotes = RandomValueGen.GetRandomString(2, 20);
            int id;
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                ctx.SaveChangesWithErrorReporting();
                id = newThing.Id;
            }
            using (var ctx = GetContext())
            {
                //---------------Test Result -----------------------
                var persisted = ctx.Things.FirstOrDefault(o => o.Id == id);
                Assert.IsNotNull(persisted);
                Assert.AreEqual(expectedName, persisted.Name);
                Assert.AreEqual(expectedNotes, persisted.Notes);
            }
        }

        /// <summary>Async variant of the happy-path save.</summary>
        [Test]
        public async Task SaveChangesWithErrorReportingAsync_WhenNoError_ShouldSave()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(2, 10);
            var expectedNotes = RandomValueGen.GetRandomString(2, 20);
            int id;
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                await ctx.SaveChangesWithErrorReportingAsync();
                id = newThing.Id;
            }
            using (var ctx = GetContext())
            {
                //---------------Test Result -----------------------
                var persisted = ctx.Things.FirstOrDefault(o => o.Id == id);
                Assert.IsNotNull(persisted);
                Assert.AreEqual(expectedName, persisted.Name);
                Assert.AreEqual(expectedNotes, persisted.Notes);
            }
        }

        /// <summary>
        /// Name longer than [MaxLength(50)] triggers EF validation; the sync save
        /// must surface it as an Exception mentioning the validation failure.
        /// </summary>
        [Test]
        public void SaveChangesWithErrorReporting_WhenValidationError_ShouldThrowAndLogToOutput()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(60, 100);
            var expectedNotes = RandomValueGen.GetRandomString(2, 20);
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                var ex = Assert.Throws<Exception>(() => ctx.SaveChangesWithErrorReporting());
                StringAssert.Contains("Error whilst trying to persist to the database:", ex.Message);
                StringAssert.Contains("maximum length", ex.Message);
            }
        }

        /// <summary>Async variant of the validation-error test.</summary>
        [Test]
        public void SaveChangesWithErrorReportingAsync_WhenValidationError_ShouldThrowAndLogToOutput()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(60, 100);
            var expectedNotes = RandomValueGen.GetRandomString(2, 20);
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                var ex = Assert.ThrowsAsync<Exception>(() => ctx.SaveChangesWithErrorReportingAsync());
                StringAssert.Contains("Error whilst trying to persist to the database:", ex.Message);
                StringAssert.Contains("maximum length", ex.Message);
            }
        }

        /// <summary>
        /// Notes longer than the nvarchar(100) column (but with no EF validation
        /// attribute) fails at the database, surfacing a "DBUpdate Error".
        /// </summary>
        [Test]
        public void SaveChangesWithErrorReporting_WhenSqlError_ShouldThrowAndLogToOutput()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(10, 15);
            var expectedNotes = RandomValueGen.GetRandomString(128, 150);
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                var ex = Assert.Throws<Exception>(() => ctx.SaveChangesWithErrorReporting());
                StringAssert.Contains("DBUpdate Error:", ex.Message);
                StringAssert.Contains("truncated", ex.Message);
            }
        }

        /// <summary>Async variant of the SQL-error test.</summary>
        [Test]
        public void SaveChangesWithErrorReportingAsync_WhenSqlError_ShouldThrowAndLogToOutput()
        {
            //---------------Set up test pack-------------------
            var expectedName = RandomValueGen.GetRandomString(10, 15);
            var expectedNotes = RandomValueGen.GetRandomString(128, 150);
            using (var ctx = GetContext())
            {
                ctx.Things.Clear();
                var newThing = new Thing()
                {
                    Name = expectedName,
                    Notes = expectedNotes
                };
                ctx.Things.Add(newThing);
                //---------------Assert Precondition----------------

                //---------------Execute Test ----------------------
                var ex = Assert.ThrowsAsync<Exception>(() => ctx.SaveChangesWithErrorReportingAsync());
                StringAssert.Contains("DBUpdate Error:", ex.Message);
                StringAssert.Contains("truncated", ex.Message);
            }
        }
    }
}
fluffynuts/PeanutButter
source/Utils/PeanutButter.Utils.Entity.Tests/TestDbContextExtensions.cs
C#
bsd-3-clause
7,946
/* global Buffer, exports, require */ /* jshint -W097 */ 'use strict'; exports['null'] = null; exports.instantToString = function(i) { return new Date(i).toUTCString(); }; exports.instantFromString = function(Left) { return function(Right) { return function(s) { try { return Right(Date.parse(s)); } catch(e) { return Left("Date string parsing failed: \"" + s + "\", with: " + e); } }; }; }; exports.unsafeIsBuffer = function(x) { return x instanceof Buffer; };
rightfold/purescript-postgresql-client
src/Database/PostgreSQL/Value.js
JavaScript
bsd-3-clause
521
# coding: utf-8
"""
Utilities for dealing with text encodings
"""
#-----------------------------------------------------------------------------
#  Copyright (C) 2008-2012  The IPython Development Team
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import locale
import sys
import warnings


def get_stream_enc(stream, default=None):
    """Return ``stream.encoding`` when present and truthy, else ``default``.

    ``sys.std*`` may be replaced by objects that are not real streams, so the
    ``encoding`` attribute can be missing entirely, or present but empty/None.
    All of those cases fall back to ``default`` (which is None when omitted).
    """
    enc = getattr(stream, 'encoding', None)
    if enc:
        return enc
    return default


def getdefaultencoding():
    """IPython's best guess for the default text encoding of bytes.

    Prefers stdin's encoding (matching the calling terminal), which is often
    None for subprocesses; then ``locale.getpreferredencoding()`` (a sensible
    platform default respecting LANG); and finally the very conservative
    ``sys.getdefaultencoding()``.
    """
    enc = get_stream_enc(sys.stdin)
    if not enc or enc == 'ascii':
        try:
            # locale.getpreferredencoding() has been reported to raise in some
            # environments, so stay defensive and simply fall through.
            enc = locale.getpreferredencoding()
        except Exception:
            pass
    if not enc:
        enc = sys.getdefaultencoding()
    # On Windows, `cp0` means "no code page"; since cp0 is not a valid Python
    # encoding, substitute cp1252 (the Western European default) and warn.
    if enc == 'cp0':
        warnings.warn(
            "Invalid code page cp0 detected - using cp1252 instead."
            "If cp1252 is incorrect please ensure a valid code page "
            "is defined for the process.", RuntimeWarning)
        return 'cp1252'
    return enc


# Computed once at import time; callers treat this as a constant.
DEFAULT_ENCODING = getdefaultencoding()
marcoantoniooliveira/labweb
oscar/lib/python2.7/site-packages/IPython/utils/encoding.py
Python
bsd-3-clause
2,724
#! /usr/bin/env python # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2014, Nicolas P. Rougier. All Rights Reserved. # Distributed under the (new) BSD License. # ----------------------------------------------------------------------------- import numpy as np from glumpy import app, gl, gloo from glumpy.transforms import Position, OrthographicProjection, PanZoom # Create window window = app.Window(width=1024, height=512) quad_vertex = """ attribute vec2 position; void main (void) { gl_Position = vec4(position,0,1); } """ quad_fragment = """ void main(void) { gl_FragColor = vec4(1,1,1,1.0/128.0); } """ line_vertex = """ attribute vec2 position; void main (void) { gl_Position = vec4(position,0,1); } """ line_fragment = """ void main(void) { gl_FragColor = vec4(0,0,0,1); } """ @window.event def on_draw(dt): global time time += np.random.uniform(0,dt) quad.draw(gl.GL_TRIANGLE_STRIP) line.draw(gl.GL_LINE_STRIP) window.swap() quad.draw(gl.GL_TRIANGLE_STRIP) line.draw(gl.GL_LINE_STRIP) X = line["position"][:,0] scale = np.random.uniform(0.1,0.5) frequency = np.random.uniform(3,5) noise = 0.01*np.random.uniform(-1,+1,n) line["position"][:,1] = scale*np.cos(frequency*X + time) + noise @window.event def on_init(): gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_DST_ALPHA) @window.event def on_resize(width, height): window.clear() window.swap() window.clear() n = 512 line = gloo.Program(line_vertex, line_fragment, count=n) line["position"][:,0] = np.linspace(-1,1,n) line["position"][:,1] = np.random.uniform(-0.5,0.5,n) quad = gloo.Program(quad_vertex, quad_fragment, count=4) quad['position'] = [(-1,-1), (-1,+1), (+1,-1), (+1,+1)] time = 0 app.run()
duyuan11/glumpy
examples/gloo-trace.py
Python
bsd-3-clause
1,814
<?php
/**
 * Zend Framework (http://framework.zend.com/)
 *
 * @link      http://github.com/zendframework/ZendSkeletonApplication for the canonical source repository
 * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
 * @license   http://framework.zend.com/license/new-bsd New BSD License
 */

namespace Application;

// Module configuration for the Application module: routing, services
// (including a filesystem cache), translation, controllers, the view
// manager, console routes and Doctrine ORM entity mapping.
return array(
    // HTTP routes.
    'router' => array(
        'routes' => array(
            // "/" -> Application\Controller\Index::indexAction.
            'home' => array(
                'type' => 'Zend\Mvc\Router\Http\Literal',
                'options' => array(
                    'route'    => '/',
                    'defaults' => array(
                        'controller' => 'Application\Controller\Index',
                        'action'     => 'index',
                    ),
                ),
            ),
            // The following is a route to simplify getting started creating
            // new controllers and actions without needing to create a new
            // module. Simply drop new controllers in, and you can access them
            // using the path /application/:controller/:action
            'application' => array(
                'type'    => 'Literal',
                'options' => array(
                    'route'    => '/application',
                    'defaults' => array(
                        '__NAMESPACE__' => 'Application\Controller',
                        'controller'    => 'Index',
                        'action'        => 'index',
                    ),
                ),
                'may_terminate' => true,
                'child_routes' => array(
                    'default' => array(
                        'type'    => 'Segment',
                        'options' => array(
                            'route'    => '/[:controller[/:action]]',
                            'constraints' => array(
                                'controller' => '[a-zA-Z][a-zA-Z0-9_-]*',
                                'action'     => '[a-zA-Z][a-zA-Z0-9_-]*',
                            ),
                            'defaults' => array(
                            ),
                        ),
                    ),
                ),
            ),
        ),
    ),
    // Service manager: abstract factories, concrete factories and aliases.
    'service_manager' => array(
        'abstract_factories' => array(
            'Zend\Cache\Service\StorageCacheAbstractServiceFactory',
            'Zend\Log\LoggerAbstractServiceFactory',
        ),
        'factories' => array(
            'translator' => 'Zend\Mvc\Service\TranslatorServiceFactory',
            // Filesystem-backed cache: 1h TTL, 2 directory levels under
            // data/cache, serialized values, keys namespaced with '-db-'.
            'ZendCacheStorageFactory' => function() {
                return \Zend\Cache\StorageFactory::factory(
                    array(
                        'adapter' => array(
                            'name' => 'filesystem',
                            'options' => array(
                                'ttl' => 3600,
                                'dirLevel' => 2,
                                'cacheDir' => 'data/cache',
                                'dirPermission' => 0755,
                                'filePermission' => 0666,
                                'namespaceSeparator' => '-db-',
                                'key_pattern' => '/.*/',
                            ),
                        ),
                        'plugins' => array('serializer'),
                    )
                );
            },
        ),
        'aliases' => array(
            // Short handle for the cache storage defined above.
            'cache' => 'ZendCacheStorageFactory',
        ),
    ),
    // Gettext translations loaded from the module's language/ directory.
    'translator' => array(
        'locale' => 'en_US',
        'translation_file_patterns' => array(
            array(
                'type'     => 'gettext',
                'base_dir' => __DIR__ . '/../language',
                'pattern'  => '%s.mo',
            ),
        ),
    ),
    'controllers' => array(
        'invokables' => array(
            'Application\Controller\Index' => Controller\IndexController::class
        ),
    ),
    // View layer: templates, error pages, doctype.
    'view_manager' => array(
        'display_not_found_reason' => true,
        'display_exceptions'       => true,
        'doctype'                  => 'HTML5',
        'not_found_template'       => 'error/404',
        'exception_template'       => 'error/index',
        'template_map' => array(
            'layout/layout'           => __DIR__ . '/../view/layout/layout.phtml',
            'application/index/index' => __DIR__ . '/../view/application/index/index.phtml',
            'error/404'               => __DIR__ . '/../view/error/404.phtml',
            'error/index'             => __DIR__ . '/../view/error/index.phtml',
        ),
        'template_path_stack' => array(
            __DIR__ . '/../view',
        ),
    ),
    // Placeholder for console routes
    'console' => array(
        'router' => array(
            'routes' => array(
            ),
        ),
    ),
    // Doctrine ORM: annotation-driven mapping for Application\Entity classes.
    'doctrine' => array(
        'driver' => array(
            'applicationEnt' => array(
                'class' => 'Doctrine\ORM\Mapping\Driver\AnnotationDriver',
                'paths' => __DIR__ . '/../src/Application/Entity',
            ),
            'orm_default' => array(
                'drivers' => array(
                    'Application\Entity' => 'applicationEnt',
                ),
            ),
        ),
    ),
);
usitnikov/testproject
module/Application/config/module.config.php
PHP
bsd-3-clause
5,248
#!/usr/bin/env python
# D-Bus test stub for the android-accessory-protocol-bridge unit tests.
# Registers objects exposing echo-style methods (covering most D-Bus type
# signatures), signal emitters and FIFO-based bulk-transfer echo endpoints
# on the session bus under the bus name nl.ict.AABUnitTest, then runs a
# GLib main loop forever.
import os
import time
from threading import Timer
from pprint import pprint
from datetime import datetime

import gobject
import glob
import dbus
import dbus.service
from dbus.mainloop.glib import DBusGMainLoop

# Install the GLib main loop as the default D-Bus main loop before any
# bus connections are made.
DBusGMainLoop(set_as_default=True)
gobject.threads_init() # Multithreaded python programs must call this before using threads.

bus = dbus.SessionBus()
loop = gobject.MainLoop()

"""
This is a shared interface between two objects.
"""
InterfaceA = "nl.ict.AABUnitTest.A"

"""
Objects who have the InterfaceOnBulkTransfer interface must implement onBulkRequest(String fifoToPayload, String fifoToAndroid, String requestedBulkData)
"""
InterfaceOnBulkTransfer = "nl.ict.aapbridge.bulk"

"""
bus-name : nl.ict.AABUnitTest
objectpaths:
 /nl/ict/AABUnitTestB
 /nl/ict/AABUnitTestC
 /nl/ict/AABUnitTest/bulk/echo1
 /nl/ict/AABUnitTest/bulk/echo2
interfaces :
 nl.ict.AABUnitTest.B
 nl.ict.AABUnitTest.Methods
 nl.ict.AABUnitTest.Signals
 nl.ict.aapbridge.bulk
"""

class AABUnitTestB(dbus.service.Object):
    # Echo service: one method per D-Bus type signature. Each method logs
    # the received value with a timestamp and returns it unchanged, so the
    # Android side can verify round-trip marshalling of every type.

    InterfaceB = "nl.ict.AABUnitTest.Methods"

    def __init__(self, object_path, bus_name):
        dbus.service.Object.__init__(self, bus_name, object_path)

    @dbus.service.method(InterfaceA, in_signature='', out_signature='')
    def LocalEcho(self):
        # No-op method used to verify the object is reachable at all.
        print(str(datetime.now()) + " Local echo from AABUnitTestB")

    @dbus.service.method(InterfaceB, in_signature='y', out_signature='y')
    def ExpectingByte(self, val):
        print(str(datetime.now()) + " Expecting: y Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='b', out_signature='b')
    def ExpectingBoolean(self, val):
        print(str(datetime.now()) + " Expecting: b Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='n', out_signature='n')
    def ExpectingInt16(self, val):
        print(str(datetime.now()) + " Expecting: n Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='q', out_signature='q')
    def ExpectingUint16(self, val):
        print(str(datetime.now()) + " Expecting: q Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='i', out_signature='i')
    def ExpectingInt32(self, val):
        print(str(datetime.now()) + " Expecting: i Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='u', out_signature='u')
    def ExpectingUint32(self, val):
        print(str(datetime.now()) + " Expecting: u Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='x', out_signature='x')
    def ExpectingInt64(self, val):
        print(str(datetime.now()) + " Expecting: x Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='t', out_signature='t')
    def ExpectingUint64(self, val):
        print(str(datetime.now()) + " Expecting: t Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='d', out_signature='d')
    def ExpectingDouble(self, val):
        print(str(datetime.now()) + " Expecting: d Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='s', out_signature='s')
    def ExpectingString(self, val):
        print(str(datetime.now()) + " Expecting: s Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='o', out_signature='o')
    def ExpectingObjectPath(self, val):
        print(str(datetime.now()) + " Expecting: o Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='g', out_signature='g')
    def ExpectingSignature(self, val):
        print(str(datetime.now()) + " Expecting: g Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='ai', out_signature='ai')
    def ExpectingArrayInt32(self, val):
        print(str(datetime.now()) + " Expecting: ai Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='(isi)', out_signature='(isi)')
    def ExpectingStruct1(self, val):
        print(str(datetime.now()) + " Expecting: (isi) Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='a{si}', out_signature='a{si}')
    def ExpectingDict(self, val):
        print(str(datetime.now()) + " Expecting: a{si} Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='h', out_signature='h')
    def ExpectingFd(self, val):
        # 'h' is a unix file descriptor handle.
        print(str(datetime.now()) + " Expecting: h Got: "+repr(val) )
        return val;

    @dbus.service.method(InterfaceB, in_signature='ssss', out_signature='ssss')
    def ExpectingMultiString(self, uno, duo, tres, dos):
        # Multiple arguments in one call, echoed back as a 4-tuple.
        print(str(datetime.now()) + " Expecting: ssss Got: "+repr( (uno, duo, tres, dos) ))
        return (uno, duo, tres, dos);

    @dbus.service.method(InterfaceB, in_signature='yyiyx', out_signature='yyiyx')
    def ExpectingComplex1(self, byte1, byte2, i, byte3, x):
        # Mixed-width integers to exercise alignment in the marshaller.
        print(str(datetime.now()) + " Expecting: yyiyx Got: "+repr( (byte1,byte2,i,byte3,x) ))
        return (byte1,byte2,i,byte3,x);

    @dbus.service.method(InterfaceB, in_signature='', out_signature='')
    def ExceptionThrower1(self):
        # Deliberately raises so clients can test D-Bus error propagation.
        raise NotImplementedError, "I diddent had time to implement this function"

    @dbus.service.method(InterfaceB, in_signature='', out_signature='')
    def ExceptionThrower2(self):
        # Second error type, to test mapping of different exception classes.
        raise TypeError, "Yeah well... no."

    @dbus.service.method(InterfaceB, in_signature='', out_signature='')
    def ReturnsNothing(self):
        pass

class AABUnitTestC(dbus.service.Object):
    # Signal service: on request, emits a fixed sequence of signals (one per
    # basic type, plus two composite ones) spaced 5 seconds apart.

    InterfaceC = "nl.ict.AABUnitTest.Signals"

    def __init__(self, object_path, bus_name):
        dbus.service.Object.__init__(self, bus_name, object_path)

    @dbus.service.method(InterfaceA, in_signature='', out_signature='')
    def LocalEcho(self):
        print(str(datetime.now()) + " Local echo from AABUnitTestC")

    @dbus.service.signal(InterfaceC, signature='y')
    def Byte(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='b')
    def Boolean(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='n')
    def Int16(self, val):
        pass

    # NOTE(review): named Uint32 but the signature 'q' is a 16-bit unsigned
    # integer on D-Bus -- looks like a naming slip; confirm against clients.
    @dbus.service.signal(InterfaceC, signature='q')
    def Uint32(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='i')
    def Int32(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='d')
    def Double(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='s')
    def String(self, val):
        pass

    @dbus.service.signal(InterfaceC, signature='sd')
    def Sensor(self, name, value):
        pass

    @dbus.service.signal(InterfaceC, signature='ysdyi')
    def Complex1(self, var1, var2, var3, var4, var5):
        pass

    def Emit(self):
        # Fires the full signal sequence; runs on a Timer thread (see
        # StartEmittingSignals) so the 5 s sleeps don't block the main loop.
        self.Byte(2)
        time.sleep(5)
        self.Boolean(True)
        time.sleep(5)
        self.Int32(3)
        time.sleep(5)
        self.String("The only real advantage to punk music is that nobody can whistle it.")
        time.sleep(5)
        self.Double(5.5)
        time.sleep(5)
        self.Sensor("humidity1",9.923)
        time.sleep(5)
        self.Complex1(8,"Never do today what you can put off until tomorrow.",45.00000003,9,9084)

    @dbus.service.method(InterfaceC, in_signature='', out_signature='')
    def StartEmittingSignals(self):
        # Returns immediately; the signal burst starts after a 5 s delay.
        print("Starting to emit signals")
        emitter = Timer(5, AABUnitTestC.Emit, [self])
        emitter.start()

def onEchoIOReady(source, cb_condition, fifoToPayload, fifoToAndroid):
    # GLib IO watch callback: copies whatever arrives on the payload FIFO back
    # onto the Android FIFO. Returning False removes the watch.
    # NOTE(review): `cb_condition is gobject.IO_HUP` is an identity test; if
    # HUP ever arrives OR-ed together with IO_IN this branch will not match --
    # verify against the gobject flag semantics.
    if(cb_condition is gobject.IO_HUP):
        fifoToAndroid.close()
        return False
    try:
        # Read up to 5000 bytes and echo them back verbatim.
        fifoToAndroid.write(os.read(fifoToPayload, 5000))
        fifoToAndroid.flush()
    except:
        # Any write/read failure ends this echo session.
        fifoToAndroid.close()
        return False
    return True

def onBulkEchoRequest(fifoToPayloadPath, fifoToAndroidPath, requestedBulkData):
    # Opens both FIFO endpoints and installs the echo watch. Runs from the
    # main loop (scheduled via idle_add) because opening a FIFO blocks until
    # the peer opens the other end.
    print("Opening fifo's")
    fifoToPayload = os.open(fifoToPayloadPath, os.O_RDONLY )
    fifoToAndroid = open(fifoToAndroidPath, 'w')
    print("Fifo's are open")
    gobject.io_add_watch(fifoToPayload, gobject.IO_IN | gobject.IO_HUP, onEchoIOReady, fifoToPayload, fifoToAndroid)

class BulkTransferEcho(dbus.service.Object):
    # Bulk-transfer endpoint: accepts a transfer request and echoes all data
    # it receives, using a pair of named pipes for the byte stream.

    def __init__(self, object_path, bus_name):
        dbus.service.Object.__init__(self, bus_name, object_path)

    @dbus.service.method(InterfaceOnBulkTransfer, in_signature='sss', out_signature='')
    def onBulkRequest(self, fifoToPayloadPath, fifoToAndroidPath, requestedBulkData):
        print("onBulkRequest: "+fifoToPayloadPath+" "+fifoToAndroidPath+" "+requestedBulkData)
        # Defer the (blocking) FIFO opens to the main loop.
        gobject.idle_add(onBulkEchoRequest, fifoToPayloadPath, fifoToAndroidPath, requestedBulkData)

# Claim the well-known bus name and export all test objects, then serve.
bus_name = dbus.service.BusName('nl.ict.AABUnitTest', bus)
serviceB = AABUnitTestB('/nl/ict/AABUnitTest/B',bus_name)
serviceC = AABUnitTestC('/nl/ict/AABUnitTest/C',bus_name)
bulkEcho1 = BulkTransferEcho('/nl/ict/AABUnitTest/bulk/echo1',bus_name)
bulkEcho2 = BulkTransferEcho('/nl/ict/AABUnitTest/bulk/echo2',bus_name)

print("Starting event loop")
loop.run()
Benny-/android-accessory-protocol-bridge
Accessory/Payloads/testStub.py
Python
bsd-3-clause
9,363
/* * Copyright (c) 2012 University of Tartu */ package org.qsardb.model; public enum ByteOrderMask { UTF_8(new byte[]{(byte)0xEF, (byte)0xBB, (byte)0xBF}, "UTF-8"), UTF_16LE(new byte[]{(byte)0xFF, (byte)0xFE}, "UTF-16LE"), UTF_16BE(new byte[]{(byte)0xFE, (byte)0xFF}, "UTF-16BE"), ; private byte[] bytes = null; private String encoding = null; ByteOrderMask(byte[] bytes, String encoding){ setBytes(bytes); setEncoding(encoding); } public byte[] getBytes(){ return this.bytes; } private void setBytes(byte[] bytes){ this.bytes = bytes; } public String getEncoding(){ return this.encoding; } private void setEncoding(String encoding){ this.encoding = encoding; } public byte[] prependTo(byte[] bytes){ byte[] result = new byte[this.bytes.length + bytes.length]; System.arraycopy(this.bytes, 0, result, 0, this.bytes.length); System.arraycopy(bytes, 0, result, this.bytes.length, bytes.length); return result; } static public ByteOrderMask valueOf(byte[] bytes){ return valueOf(bytes, 0, bytes.length); } static public ByteOrderMask valueOf(byte[] bytes, int offset, int length){ ByteOrderMask[] values = ByteOrderMask.values(); values: for(ByteOrderMask value : values){ if(value.bytes.length <= length){ for(int i = 0; i < value.bytes.length; i++){ if(value.bytes[i] != bytes[offset + i]){ continue values; } } return value; } } return null; } }
qsardb/qsardb
model/src/main/java/org/qsardb/model/ByteOrderMask.java
Java
bsd-3-clause
1,457
/**
 * @file dtb.hpp
 * @author Bill March (march@gatech.edu)
 *
 * DTBStat is the StatisticType used by trees when performing EMST
 * (Euclidean Minimum Spanning Tree / dual-tree Boruvka).
 */
#ifndef __MLPACK_METHODS_EMST_DTB_STAT_HPP
#define __MLPACK_METHODS_EMST_DTB_STAT_HPP

#include <mlpack/core.hpp>

namespace mlpack {
namespace emst {

/**
 * A statistic for use with MLPACK trees, which stores the upper bound on
 * distance to nearest neighbors and the component which this node belongs to.
 * All distance bounds start at DBL_MAX (i.e. unbounded) and are tightened
 * during the dual-tree traversal.
 */
class DTBStat
{
 private:
  //! Upper bound on the distance to the nearest neighbor of any point in this
  //! node.
  double maxNeighborDistance;

  //! Lower bound on the distance to the nearest neighbor of any point in this
  //! node.
  double minNeighborDistance;

  //! Total bound for pruning.
  double bound;

  //! The index of the component that all points in this node belong to.  This
  //! is the same index returned by UnionFind for all points in this node.  If
  //! points in this node are in different components, this value will be
  //! negative.
  int componentMembership;

 public:
  /**
   * A generic initializer.  Sets the maximum neighbor distance to its default,
   * and the component membership to -1 (no component).
   */
  DTBStat() :
      maxNeighborDistance(DBL_MAX),
      minNeighborDistance(DBL_MAX),
      bound(DBL_MAX),
      componentMembership(-1) { }

  /**
   * This is called when a node is finished initializing.  We set the maximum
   * neighbor distance to its default, and if possible, we set the component
   * membership of the node (if it has only one point and no children --
   * a singleton leaf is trivially its own component, identified by the
   * point's index).
   *
   * @param node Node that has been finished.
   */
  template<typename TreeType>
  DTBStat(const TreeType& node) :
      maxNeighborDistance(DBL_MAX),
      minNeighborDistance(DBL_MAX),
      bound(DBL_MAX),
      componentMembership(
          ((node.NumPoints() == 1) && (node.NumChildren() == 0)) ?
            node.Point(0) : -1) { }

  //! Get the maximum neighbor distance.
  double MaxNeighborDistance() const { return maxNeighborDistance; }
  //! Modify the maximum neighbor distance.
  double& MaxNeighborDistance() { return maxNeighborDistance; }

  //! Get the minimum neighbor distance.
  double MinNeighborDistance() const { return minNeighborDistance; }
  //! Modify the minimum neighbor distance.
  double& MinNeighborDistance() { return minNeighborDistance; }

  //! Get the total bound for pruning.
  double Bound() const { return bound; }
  //! Modify the total bound for pruning.
  double& Bound() { return bound; }

  //! Get the component membership of this node (-1 when points span multiple
  //! components).
  int ComponentMembership() const { return componentMembership; }
  //! Modify the component membership of this node.
  int& ComponentMembership() { return componentMembership; }
}; // class DTBStat

} // namespace emst
} // namespace mlpack

#endif // __MLPACK_METHODS_EMST_DTB_STAT_HPP
stereomatchingkiss/mlpack
src/mlpack/methods/emst/dtb_stat.hpp
C++
bsd-3-clause
2,858
<?php namespace LizardsAndPumpkins\Import; use LizardsAndPumpkins\Messaging\Event\DomainEventHandler; use LizardsAndPumpkins\Messaging\Queue\Message; class CatalogImportWasTriggeredDomainEventHandler implements DomainEventHandler { /** * @var CatalogImport */ private $catalogImport; public function __construct(CatalogImport $catalogImport) { $this->catalogImport = $catalogImport; } public function process(Message $message): void { $catalogImportWasTriggeredEvent = CatalogImportWasTriggeredDomainEvent::fromMessage($message); $this->catalogImport->importFile( $catalogImportWasTriggeredEvent->getCatalogImportFilePath(), $catalogImportWasTriggeredEvent->getDataVersion() ); } }
lizards-and-pumpkins/catalog
src/Import/CatalogImportWasTriggeredDomainEventHandler.php
PHP
bsd-3-clause
785
<?php use yii\helpers\Html; /* @var $this yii\web\View */ /* @var $model backend\models\FootballTeam */ $this->title = 'Create Football Team'; $this->params['breadcrumbs'][] = ['label' => 'Football Teams', 'url' => ['index']]; $this->params['breadcrumbs'][] = $this->title; ?> <div class="football-team-create"> <h1><?= Html::encode($this->title) ?></h1> <?= $this->render('_form', [ 'model' => $model, ]) ?> </div>
Unlife86/telecom-fc
backend/views/football-team/create.php
PHP
bsd-3-clause
443
#!/usr/bin/env php <?php /** * This file is the main script which should be run on the command line in order to perform database migrations. * If you want to use this script like so: ./migrate.php -- you will need to give it executable permissions (chmod +x migrate.php) and ensure the top line of this script points to the actual location of your PHP binary. * * @package mysql_php_migrations * @subpackage Globals * @license http://www.opensource.org/licenses/bsd-license.php The New BSD License * @link http://code.google.com/p/mysql-php-migrations/ */ // we want to see any errors ini_set('display_errors', 1); error_reporting(E_ALL); /** * Define the full path to this file. */ define('MPM_PATH', dirname(__FILE__)); /** * Version Number - for reference */ define('MPM_VERSION', '2.0.1'); /** * Include the init script. */ require_once(MPM_PATH . '/lib/init.php'); // get the proper controller, do the action, and exit the script $obj = MpmControllerFactory::getInstance($argv); $obj->doAction(); exit; ?>
Billiam/MySQL-PHP-AutoMigrations
migrate.php
PHP
bsd-3-clause
1,046
""" Model definition for weakly supervised object localization with pytorch ===================================================================== *Author*: Yu Zhang, Northwestern Polytechnical University """ import torch import torch.nn as nn import numpy as np import os # import shutil import torchvision.models as models from spn.modules import SoftProposal import spn_codes.spatialpooling as spatialpooling class WSL(nn.Module): def __init__(self, num_classes=20, num_maps=1024): super(WSL, self).__init__() model = models.vgg16(pretrained=True) num_features = model.features[28].out_channels self.features = nn.Sequential(*list(model.features.children())[:-1]) # self.spatial_pooling = pooling self.addconv = nn.Conv2d(num_features, num_maps, kernel_size=3, stride=1, padding=1, groups=2, bias=True) self.maps = nn.ReLU() self.sp = SoftProposal() self.sum = spatialpooling.SpatialSumOverMap() # classification layer self.classifier = nn.Sequential( nn.Dropout(0.5), nn.Linear(num_maps, num_classes) ) def forward(self, x): x = self.features(x) x = self.addconv(x) x = self.maps(x) sp = self.sp(x) x = self.sum(sp) x = x.view(x.size(0), -1) x = self.classifier(x) return x def get_att_map(self, x): x = self.features(x) x = self.addconv(x) x = self.maps(x) sp = self.sp(x) x = self.sum(sp) x = x.view(x.size(0), -1) x = self.classifier(x) return x, sp # def load_pretrained_vgg(self, fname): # vgg_param = np.load(fname, encoding='latin1').item() # vgg16 # net_param = self.state_dict() # para_keys = list(net_param.keys()) # for idx in range(26): # name = para_keys[idx] # val = net_param[name] # i, j = int(name[4]), int(name[6]) + 1 # ptype = 'weights' if name[-1] == 't' else 'biases' # key = 'conv{}_{}'.format(i, j) # param = torch.from_numpy(vgg_param[key][ptype]) # if ptype == 'weights': # param = param.permute(3, 2, 0, 1) # val.copy_(param) def load_checkpoint(self, fname): if os.path.isfile(fname): print('loading checkpoint {}'.format(fname)) checkpt = torch.load(fname) 
self.load_state_dict(checkpt['state_dict']) else: print('{} not found'.format(fname)) class ConvReLU(nn.Module): def __init__(self, in_ch, out_ch, kernel_sz, stride=1, relu=True, pd=True, bn=False): super(ConvReLU, self).__init__() padding = int((kernel_sz - 1) / 2) if pd else 0 # same spatial size by default self.conv = nn.Conv2d(in_ch, out_ch, kernel_sz, stride, padding=padding) self.bn = nn.BatchNorm2d(out_ch, eps=0.001, momentum=0, affine=True) if bn else None self.relu = nn.ReLU(inplace=True) if relu else None def forward(self, x): x = self.conv(x) if self.bn is not None: x = self.bn(x) if self.relu is not None: x = self.relu(x) return x class ClsConv(nn.Module): """docstring for ClsConv""" def __init__(self, in_ch=512, bn=False): super(ClsConv, self).__init__() self.conv_layers = nn.Sequential(ConvReLU(in_ch, 256, 3, pd=True, bn=bn), ConvReLU(256, 128, 3, pd=True, bn=bn), ConvReLU(128, 64, 3, pd=True, bn=bn), nn.Conv2d(64, 1, kernel_size=3, stride=1, padding=1)) def forward(self, feature): return self.conv_layers(feature) def save_checkpoint(state, filename='checkpoint.pth.tar'): torch.save(state, filename) def load_pretrained(model, fname, optimizer=None): """ resume training from previous checkpoint :param fname: filename(with path) of checkpoint file :return: model, optimizer, checkpoint epoch for train or only model for test """ if os.path.isfile(fname): print("=> loading checkpoint '{}'".format(fname)) checkpoint = torch.load(fname) model.load_state_dict(checkpoint['state_dict']) if optimizer is not None: optimizer.load_state_dict(checkpoint['optimizer']) return model, optimizer, checkpoint['epoch'] else: return model else: raise Exception("=> no checkpoint found at '{}'".format(fname))
zhangyuygss/WSL
model/model.py
Python
bsd-3-clause
4,628
from django.conf import settings
from django.core.urlresolvers import get_mod_func

# Maps each backend's slug to its singleton instance; populated once at
# import time from the SMSGATEWAY_BACKENDS setting.
REGISTRY = {}

backends = getattr(settings, 'SMSGATEWAY_BACKENDS', ())

for entry in backends:
    module_name, class_name = get_mod_func(entry)
    module = __import__(module_name, {}, {}, [''])
    instance = getattr(module, class_name)()
    REGISTRY[instance.get_slug()] = instance


def get_backend(slug):
    """Return the backend instance registered under ``slug``, or None."""
    return REGISTRY.get(slug, None)
peterayeni/django-smsgateway
smsgateway/backends/__init__.py
Python
bsd-3-clause
444
<?php
namespace app\models;

use Yii;
use yii\base\Model;
use common\models\User;

/**
 * @todo Rename notifications user setting.
 *
 * User notifications settings form model.
 *
 * @author Gani Georgiev <gani.georgiev@gmail.com>
 */
class UserNotificationsForm extends Model
{
    /**
     * Whether the user receives an email for new screen comments.
     * @var boolean
     */
    public $notifications = true;

    /**
     * Whether the user receives an email when someone mentions them.
     * @var boolean
     */
    public $mentions = true;

    /**
     * The user whose settings are being edited.
     * @var User
     */
    private $user;

    /**
     * Model constructor.
     * @param User  $user
     * @param array $config
     */
    public function __construct(User $user, $config = [])
    {
        $this->loadUser($user);

        parent::__construct($config);
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'notifications' => Yii::t('app', 'Receive an email when a new screen comment is added'),
            'mentions' => Yii::t('app', 'Receive an email when someone mentions you'),
        ];
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['notifications', 'mentions'], 'boolean'],
        ];
    }

    /**
     * Helper to load some of the form model parameters from a User model.
     * @param User $user
     */
    public function loadUser(User $user)
    {
        $this->user = $user;
        $this->notifications = $this->user->getSetting(User::NOTIFICATIONS_SETTING_KEY, true);
        $this->mentions = $this->user->getSetting(User::MENTIONS_SETTING_KEY, true);
    }

    /**
     * Saves model settings to the user model.
     * @return boolean Whether validation and persisting succeeded.
     */
    public function save()
    {
        if (!$this->validate()) {
            return false;
        }

        $user = $this->user;

        // Cast explicitly instead of the old redundant `? true : false`
        // ternaries; the bound values may arrive as "0"/"1" request strings.
        $user->setSetting(User::NOTIFICATIONS_SETTING_KEY, (bool) $this->notifications);
        $user->setSetting(User::MENTIONS_SETTING_KEY, (bool) $this->mentions);

        return $user->save();
    }
}
ganigeorgiev/presentator
app/models/UserNotificationsForm.php
PHP
bsd-3-clause
2,021
<?php

use yii\helpers\Html;

/* @var $this yii\web\View */
/* @var $model backend\models\customer\ThemeRecord */

// Page title and breadcrumb trail for the "create" screen; the actual
// form markup is delegated to the shared _form partial below.
$this->title = 'Create Theme Record';
$this->params['breadcrumbs'][] = ['label' => 'Theme Records', 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="theme-record-create">

    <h1><?= Html::encode($this->title) ?></h1>

    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
croxsanchez/yii2build
backend/views/themes/create.php
PHP
bsd-3-clause
448
/*L
 * Copyright Northwestern University.
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.io/psc/LICENSE.txt for details.
 */

package edu.northwestern.bioinformatics.studycalendar.web;

import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;

import java.util.List;
import java.util.Map;

/**
 * Base class for controller tests, adding binding-result assertions
 * on top of {@link WebTestCase}.
 *
 * @author Rhett Sutphin
 */
public abstract class ControllerTestCase extends WebTestCase {
    /**
     * Asserts that the model's {@link BindingResult} (stored under the
     * conventional "command" key) contains no field errors for
     * {@code fieldName}; fails listing the offending errors otherwise.
     */
    protected static void assertNoBindingErrorsFor(String fieldName, Map<String, Object> model) {
        BindingResult result = (BindingResult) model.get(BindingResult.MODEL_KEY_PREFIX + "command");
        List<FieldError> fieldErrors = result.getFieldErrors(fieldName);
        assertEquals("There were errors for field " + fieldName + ": " + fieldErrors, 0, fieldErrors.size());
    }
}
NCIP/psc
web/src/test/java/edu/northwestern/bioinformatics/studycalendar/web/ControllerTestCase.java
Java
bsd-3-clause
865
/**
 * BSD 3-Clause License
 *
 * Copyright (C) 2018 Steven Atkinson <steven@nowucca.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 *  * Neither the name of the copyright holder nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package business.product;

import java.util.Date;

/**
 * Immutable value object describing a product row.
 *
 * {@code java.util.Date} is mutable, so the constructor and
 * {@link #getLastUpdate()} make defensive copies to keep this object's
 * state from being aliased by callers.
 */
public class Product {

    private final long productId;
    private final String name;
    private final int price;
    private final Date lastUpdate;

    /**
     * @param productId  identifier of the product row
     * @param name       display name of the product
     * @param price      product price (unit/currency convention is defined by
     *                   callers, not by this class -- TODO confirm)
     * @param lastUpdate timestamp of the last modification; copied defensively
     */
    public Product(long productId, String name, int price, Date lastUpdate) {
        this.productId = productId;
        this.name = name;
        this.price = price;
        // Defensive copy: never alias a mutable Date supplied by the caller.
        this.lastUpdate = (lastUpdate == null) ? null : new Date(lastUpdate.getTime());
    }

    public long getProductId() {
        return productId;
    }

    public String getName() {
        return name;
    }

    public int getPrice() {
        return price;
    }

    /**
     * @return a copy of the last-update timestamp (or null), so callers
     *         cannot mutate this product's internal state
     */
    public Date getLastUpdate() {
        return (lastUpdate == null) ? null : new Date(lastUpdate.getTime());
    }

    @Override
    public String toString() {
        return "business.product.Product[product_id=" + productId + "]";
    }
}
nowucca/SimpleAffableBean
src/main/java/business/product/Product.java
Java
bsd-3-clause
2,404
// **************************************************************************** // // Copyright (c) 2014, Seth Billings, Russell Taylor, Johns Hopkins University // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// // **************************************************************************** #include "cisstTriangleClosestPointSolver.h" #include <fstream> vct3 cisstTriangleClosestPointSolver::ProjectOnSegment(const vct3& c,const vct3& p, const vct3& r) { vct3 pc=c-p; vct3 pr=r-p; double lam = (pc*pr)/(pr*pr); if (lam<=0.0) { return p;}; if (lam>1.0) {return r;}; return p+pr*lam; }; cisstTriangleClosestPointSolver::cisstTriangleClosestPointSolver() : A(3,2,VCT_COL_MAJOR), h(3), P(3), g(3), b(3), x(3), LeastSquaresSolver(3,2,1,VCT_COL_MAJOR), B(3,1,VCT_COL_MAJOR) {}; void cisstTriangleClosestPointSolver::SolveLamMuNu(const vct3& a, const vct3& p, const vct3& q, const vct3& r, double &lambda, double &mu, double &nu) { vct3 pa=a-p; // b(0)=pa.x; b(1)=pa.y; b(2)=pa.z; vct3 pq=q-p; // A(0,0)=pq.x; A(1,0)=pq.y; A(2,0)=pq.z; vct3 pr=r-p; // A(0,1)=pr.x; A(1,1)=pr.y; A(2,1)=pr.z; for (int i=0;i<3;i++) { b(i)=pa(i); A(i,0)=pq(i); A(i,1)=pr(i); B(i,0)=b(i); // because using dumb interface to solver }; LeastSquaresSolver.Solve(A,B); // replace with HFTI call when can do so lambda=B(0,0); mu=B(1,0); // ditto // nmrAlgorithmHFTI(A,h,P,Tau,g,b,x,0); // x := lambda and mu // lambda = x(0); // mu = x(1); nu = 1.-lambda-mu; }; // TODO: can this algorithm fail for obtuse triangle if both mu & lambda < 0? 
// // rht this function was inlined int cisstTriangleClosestPointSolver::FindClosestPointOnTriangle( const vct3& a, const vct3& p, const vct3& q, const vct3& r, double distBound, // -1 if ignore vct3& ret) { vct3 pa=a-p; // b(0)=pa.x; b(1)=pa.y; b(2)=pa.z; vct3 pq=q-p; // A(0,0)=pq.x; A(1,0)=pq.y; A(2,0)=pq.z; vct3 pr=r-p; // A(0,1)=pr.x; A(1,1)=pr.y; A(2,1)=pr.z; for (int i=0;i<3;i++) { b(i)=pa(i); A(i,0)=pq(i); A(i,1)=pr(i); B(i,0)=b(i); // because using dumb interface to solver }; LeastSquaresSolver.Solve(A,B); // replace with HFTI call when can do so double lambda=B(0,0); double mu=B(1,0); // ditto // nmrAlgorithmHFTI(A,h,P,Tau,g,b,x,0); // x := lambda and mu double lambda = x(0); // double lambda=x(0); // double mu = x(1); vct3 c = p+pq*lambda+pr*mu; if (distBound >= 0.0) { double dist = (c-a).Norm(); if (dist>distBound) { ret=c; return 0; }; }; if (lambda<=0.0) { ret = ProjectOnSegment(c,r,p); return 1; }; if (mu <=0.0) { ret = ProjectOnSegment(c,p,q); return 2; }; if (lambda+mu> 1.0 ) { ret = ProjectOnSegment(c,q,r); return 3; }; ret= c; return 4; };
sbillin/IMLP
cisstICP/cisstTriangleClosestPointSolver.cpp
C++
bsd-3-clause
4,449
/* * SOLID - Software Library for Interference Detection * * Copyright (C) 2001-2003 Dtecta. All rights reserved. * * This library may be distributed under the terms of the Q Public License * (QPL) as defined by Trolltech AS of Norway and appearing in the file * LICENSE.QPL included in the packaging of this file. * * This library may be distributed and/or modified under the terms of the * GNU General Public License (GPL) version 2 as published by the Free Software * Foundation and appearing in the file LICENSE.GPL included in the * packaging of this file. * * This library is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. * * Commercial use or any other use of this library not covered by either * the QPL or the GPL requires an additional license from Dtecta. * Please contact info@dtecta.com for enquiries about the terms of commercial * use of this library. */ #include "DT_Convex.h" #include "GEN_MinMax.h" //#define DEBUG #define SAFE_EXIT #include "DT_GJK.h" #include "DT_PenDepth.h" #include <algorithm> #include <new> #include "MT_BBox.h" #include "DT_Sphere.h" #include "DT_Minkowski.h" #include "DT_Accuracy.h" #ifdef STATISTICS int num_iterations = 0; int num_irregularities = 0; #endif MT_BBox DT_Convex::bbox() const { MT_Point3 min(-supportH(MT_Vector3(-1.0f, 0.0f, 0.0f)), -supportH(MT_Vector3(0.0f, -1.0f, 0.0f)), -supportH(MT_Vector3(0.0f, 0.0f, -1.0f))); MT_Point3 max( supportH(MT_Vector3(1.0f, 0.0f, 0.0f)), supportH(MT_Vector3(0.0f, 1.0f, 0.0f)), supportH(MT_Vector3(0.0f, 0.0f, 1.0f))); return MT_BBox(min, max); } MT_BBox DT_Convex::bbox(const MT_Matrix3x3& basis) const { MT_Point3 min(-supportH(-basis[0]), -supportH(-basis[1]), -supportH(-basis[2])); MT_Point3 max( supportH( basis[0]), supportH( basis[1]), supportH( basis[2])); return MT_BBox(min, max); } MT_BBox DT_Convex::bbox(const MT_Transform& t, MT_Scalar margin) const { MT_Point3 min(t.getOrigin()[0] - 
supportH(-t.getBasis()[0]) - margin, t.getOrigin()[1] - supportH(-t.getBasis()[1]) - margin, t.getOrigin()[2] - supportH(-t.getBasis()[2]) - margin); MT_Point3 max(t.getOrigin()[0] + supportH( t.getBasis()[0]) + margin, t.getOrigin()[1] + supportH( t.getBasis()[1]) + margin, t.getOrigin()[2] + supportH( t.getBasis()[2]) + margin); return MT_BBox(min, max); } bool DT_Convex::ray_cast(const MT_Point3& source, const MT_Point3& target, MT_Scalar& lambda, MT_Vector3& normal) const { // Still working on this one... return false; } bool intersect(const DT_Convex& a, const DT_Convex& b, MT_Vector3& v) { DT_GJK gjk; #ifdef STATISTICS num_iterations = 0; #endif MT_Scalar dist2 = MT_INFINITY; do { MT_Point3 p = a.support(-v); MT_Point3 q = b.support(v); MT_Vector3 w = p - q; if (v.dot(w) > MT_Scalar(0.0)) { return false; } gjk.addVertex(w); if (gjk.isAffinelyDependent()) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef STATISTICS ++num_iterations; #endif if (!gjk.closest(v)) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef SAFE_EXIT MT_Scalar prev_dist2 = dist2; #endif dist2 = v.length2(); #ifdef SAFE_EXIT if (prev_dist2 - dist2 <= MT_EPSILON * prev_dist2) { return false; } #endif } while (!gjk.fullSimplex() && dist2 > DT_Accuracy::tol_error * gjk.maxVertex()); v.setValue(MT_Scalar(0.0), MT_Scalar(0.0), MT_Scalar(0.0)); return true; } bool common_point(const DT_Convex& a, const DT_Convex& b, MT_Vector3& v, MT_Point3& pa, MT_Point3& pb) { DT_GJK gjk; #ifdef STATISTICS num_iterations = 0; #endif MT_Scalar dist2 = MT_INFINITY; do { MT_Point3 p = a.support(-v); MT_Point3 q = b.support(v); MT_Vector3 w = p - q; if (v.dot(w) > MT_Scalar(0.0)) { return false; } gjk.addVertex(w, p, q); if (gjk.isAffinelyDependent()) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef STATISTICS ++num_iterations; #endif if (!gjk.closest(v)) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef SAFE_EXIT MT_Scalar 
prev_dist2 = dist2; #endif dist2 = v.length2(); #ifdef SAFE_EXIT if (prev_dist2 - dist2 <= MT_EPSILON * prev_dist2) { return false; } #endif } while (!gjk.fullSimplex() && dist2 > DT_Accuracy::tol_error * gjk.maxVertex()); gjk.compute_points(pa, pb); v.setValue(MT_Scalar(0.0), MT_Scalar(0.0), MT_Scalar(0.0)); return true; } bool penetration_depth(const DT_Convex& a, const DT_Convex& b, MT_Vector3& v, MT_Point3& pa, MT_Point3& pb) { DT_GJK gjk; #ifdef STATISTICS num_iterations = 0; #endif MT_Scalar dist2 = MT_INFINITY; do { MT_Point3 p = a.support(-v); MT_Point3 q = b.support(v); MT_Vector3 w = p - q; if (v.dot(w) > MT_Scalar(0.0)) { return false; } gjk.addVertex(w, p, q); if (gjk.isAffinelyDependent()) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef STATISTICS ++num_iterations; #endif if (!gjk.closest(v)) { #ifdef STATISTICS ++num_irregularities; #endif return false; } #ifdef SAFE_EXIT MT_Scalar prev_dist2 = dist2; #endif dist2 = v.length2(); #ifdef SAFE_EXIT if (prev_dist2 - dist2 <= MT_EPSILON * prev_dist2) { return false; } #endif } while (!gjk.fullSimplex() && dist2 > DT_Accuracy::tol_error * gjk.maxVertex()); return penDepth(gjk, a, b, v, pa, pb); } bool hybrid_penetration_depth(const DT_Convex& a, MT_Scalar a_margin, const DT_Convex& b, MT_Scalar b_margin, MT_Vector3& v, MT_Point3& pa, MT_Point3& pb) { MT_Scalar margin = a_margin + b_margin; if (margin > MT_Scalar(0.0)) { MT_Scalar margin2 = margin * margin; DT_GJK gjk; #ifdef STATISTICS num_iterations = 0; #endif MT_Scalar dist2 = MT_INFINITY; do { MT_Point3 p = a.support(-v); MT_Point3 q = b.support(v); MT_Vector3 w = p - q; MT_Scalar delta = v.dot(w); if (delta > MT_Scalar(0.0) && delta * delta > dist2 * margin2) { return false; } if (gjk.inSimplex(w) || dist2 - delta <= dist2 * DT_Accuracy::rel_error2) { gjk.compute_points(pa, pb); MT_Scalar s = MT_sqrt(dist2); assert(s > MT_Scalar(0.0)); pa -= v * (a_margin / s); pb += v * (b_margin / s); return true; } gjk.addVertex(w, p, q); if 
(gjk.isAffinelyDependent()) { #ifdef STATISTICS ++num_irregularities; #endif gjk.compute_points(pa, pb); MT_Scalar s = MT_sqrt(dist2); assert(s > MT_Scalar(0.0)); pa -= v * (a_margin / s); pb += v * (b_margin / s); return true; } #ifdef STATISTICS ++num_iterations; #endif if (!gjk.closest(v)) { #ifdef STATISTICS ++num_irregularities; #endif gjk.compute_points(pa, pb); MT_Scalar s = MT_sqrt(dist2); assert(s > MT_Scalar(0.0)); pa -= v * (a_margin / s); pb += v * (b_margin / s); return true; } #ifdef SAFE_EXIT MT_Scalar prev_dist2 = dist2; #endif dist2 = v.length2(); #ifdef SAFE_EXIT if (prev_dist2 - dist2 <= MT_EPSILON * prev_dist2) { gjk.backup_closest(v); dist2 = v.length2(); gjk.compute_points(pa, pb); MT_Scalar s = MT_sqrt(dist2); assert(s > MT_Scalar(0.0)); pa -= v * (a_margin / s); pb += v * (b_margin / s); return true; } #endif } while (!gjk.fullSimplex() && dist2 > DT_Accuracy::tol_error * gjk.maxVertex()); } // Second GJK phase. compute points on the boundary of the offset object return penetration_depth((a_margin > MT_Scalar(0.0) ? static_cast<const DT_Convex&>(DT_Minkowski(a, DT_Sphere(a_margin))) : static_cast<const DT_Convex&>(a)), (b_margin > MT_Scalar(0.0) ? 
static_cast<const DT_Convex&>(DT_Minkowski(b, DT_Sphere(b_margin))) : static_cast<const DT_Convex&>(b)), v, pa, pb); } MT_Scalar closest_points(const DT_Convex& a, const DT_Convex& b, MT_Scalar max_dist2, MT_Point3& pa, MT_Point3& pb) { MT_Vector3 v(MT_Scalar(0.0), MT_Scalar(0.), MT_Scalar(-0.1)); DT_GJK gjk; #ifdef STATISTICS num_iterations = 0; #endif MT_Scalar dist2 = MT_INFINITY; do { MT_Point3 p = a.support(-v); MT_Point3 q = b.support(v); MT_Vector3 w = p - q; MT_Scalar delta = v.dot(w); if (delta > MT_Scalar(0.0) && delta * delta > dist2 * max_dist2) { return MT_INFINITY; } if (gjk.inSimplex(w) || dist2 - delta <= dist2 * DT_Accuracy::rel_error2) { break; } gjk.addVertex(w, p, q); if (gjk.isAffinelyDependent()) { #ifdef STATISTICS ++num_irregularities; #endif break; } #ifdef STATISTICS ++num_iterations; if (num_iterations > 1000) { std::cout << "v: " << v << " w: " << w << std::endl; } #endif if (!gjk.closest(v)) { #ifdef STATISTICS ++num_irregularities; #endif break; } #ifdef SAFE_EXIT MT_Scalar prev_dist2 = dist2; #endif dist2 = v.length2(); #ifdef SAFE_EXIT if (prev_dist2 - dist2 <= MT_EPSILON * prev_dist2) { gjk.backup_closest(v); dist2 = v.length2(); break; } #endif } while (!gjk.fullSimplex() && dist2 > DT_Accuracy::tol_error * gjk.maxVertex()); assert(!gjk.emptySimplex()); if (dist2 <= max_dist2) { gjk.compute_points(pa, pb); } return dist2; }
krishauser/KrisLibrary
geometry/solid3/src/convex/DT_Convex.cpp
C++
bsd-3-clause
10,132
/* Gearman server and library * Copyright (C) 2008 Brian Aker, Eric Day * All rights reserved. * * Use and distribution licensed under the BSD license. See * the COPYING file in the parent directory for full text. */ #include <config.h> #include <libtest/test.hpp> using namespace libtest; #include <assert.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <libgearman/gearman.h> #include <tests/basic.h> #include <tests/context.h> #ifndef __INTEL_COMPILER #pragma GCC diagnostic ignored "-Wold-style-cast" #endif static test_return_t gearmand_basic_option_test(void *) { const char *args[]= { "--check-args", "--queue-type=libtokyocabinet", "--libtokyocabinet-file=var/tmp/gearman_basic.tcb", "--libtokyocabinet-optimize", 0 }; unlink("var/tmp/gearman.tcb"); test_compare(EXIT_SUCCESS, exec_cmdline(gearmand_binary(), args, true)); return TEST_SUCCESS; } static test_return_t collection_init(void *object) { const char *argv[]= { "--libtokyocabinet-file=var/tmp/gearman.tcb", "--queue-type=libtokyocabinet", 0 }; unlink("var/tmp/gearman.tcb"); Context *test= (Context *)object; assert(test); test_truth(test->initialize(2, argv)); return TEST_SUCCESS; } static test_return_t collection_cleanup(void *object) { Context *test= (Context *)object; test->reset(); unlink("var/tmp/gearman.tcb"); return TEST_SUCCESS; } static void *world_create(server_startup_st& servers, test_return_t& error) { if (HAVE_LIBTOKYOCABINET == 0) { error= TEST_SKIPPED; return NULL; } unlink("var/tmp/gearman.tcb"); return new Context(libtest::default_port(), servers); } static bool world_destroy(void *object) { Context *test= (Context *)object; unlink("var/tmp/gearman.tcb"); delete test; return TEST_SUCCESS; } test_st gearmand_basic_option_tests[] ={ {"--libtokyocabinet-file=var/tmp/gearman_basic.tcb --libtokyocabinet-optimize", 0, gearmand_basic_option_test }, {0, 0, 0} }; test_st tests[] ={ {"gearman_client_echo()", 0, client_echo_test }, {"gearman_client_echo() fail", 
0, client_echo_fail_test }, {"gearman_worker_echo()", 0, worker_echo_test }, {"clean", 0, queue_clean }, {"add", 0, queue_add }, {"worker", 0, queue_worker }, {0, 0, 0} }; collection_st collection[] ={ {"gearmand options", 0, 0, gearmand_basic_option_tests}, {"tokyocabinet queue", collection_init, collection_cleanup, tests}, {0, 0, 0, 0} }; void get_world(libtest::Framework *world) { world->collections(collection); world->create(world_create); world->destroy(world_destroy); }
kitech/gearmand
tests/tokyocabinet_test.cc
C++
bsd-3-clause
2,626
<?php
namespace Vh\Controller;

use Zend\Mvc\Controller\AbstractActionController;
use Zend\View\Model\ViewModel;

/**
 * Controller exposing the Vh listing.
 */
class VhController extends AbstractActionController
{
    /** Lazily resolved table gateway; see getVhTable(). */
    protected $vhTable;

    /**
     * Default action: renders every Vh record as the "albums" view variable.
     */
    public function indexAction()
    {
        $viewData = array('albums' => $this->getVhTable()->fetchAll());

        return new ViewModel($viewData);
    }

    /**
     * Returns the Vh table gateway, pulling it from the service locator on
     * first use and caching it on the controller for subsequent calls.
     */
    public function getVhTable()
    {
        if (!$this->vhTable) {
            $this->vhTable = $this->getServiceLocator()->get('Vh\Model\VhTable');
        }

        return $this->vhTable;
    }
}
seventy6ix/ZF2
module/Vh/src/Vh/Controller/VhController.php
PHP
bsd-3-clause
568
<?php namespace nglasl\mediawesome; use SilverStripe\Assets\File; use SilverStripe\Assets\Image; use SilverStripe\Control\HTTPResponse_Exception; use SilverStripe\Core\Injector\Injector; use SilverStripe\Forms\DateField; use SilverStripe\Forms\FileHandleField; use SilverStripe\Forms\GridField\GridField; use SilverStripe\Forms\GridField\GridFieldAddNewButton; use SilverStripe\Forms\GridField\GridFieldConfig_RecordEditor; use SilverStripe\Forms\ListboxField; use SilverStripe\Forms\LiteralField; use SilverStripe\Forms\ReadonlyField; use SilverStripe\Forms\TextareaField; use SilverStripe\Forms\TextField; use SilverStripe\ORM\DB; use SilverStripe\ORM\Queries\SQLDelete; use SilverStripe\ORM\Queries\SQLSelect; use SilverStripe\ORM\Queries\SQLUpdate; use SilverStripe\Versioned\Versioned; use SilverStripe\View\Requirements; /** * Displays customised media content relating to the respective media type. * @author Nathan Glasl <nathan@symbiote.com.au> */ class MediaPage extends \Page { private static $table_name = 'MediaPage'; private static $db = array( 'ExternalLink' => 'Varchar(255)', 'Abstract' => 'Text', 'Date' => 'Date' ); private static $has_one = array( 'MediaType' => MediaType::class ); private static $many_many = array( 'MediaAttributes' => array( 'through' => MediaPageAttribute::class, // This is essentially the versioned join. 
'from' => 'MediaPage', 'to' => 'MediaAttribute' ), 'Images' => Image::class, 'Attachments' => File::class, 'Categories' => MediaTag::class, 'Tags' => MediaTag::class ); private static $owns = array( 'MediaPageAttributes', 'Images', 'Attachments' ); private static $defaults = array( 'ShowInMenus' => 0 ); private static $searchable_fields = array( 'Title', 'ExternalLink', 'Abstract', 'Tagging' ); private static $can_be_root = false; private static $allowed_children = 'none'; private static $default_parent = MediaHolder::class; private static $description = 'Blog, Event, News, Publication <strong>or Custom Media</strong>'; private static $icon = 'nglasl/silverstripe-mediawesome: client/images/page.png'; /** * The default media types and their respective attributes. */ private static $type_defaults = array(); public function requireDefaultRecords() { parent::requireDefaultRecords(); // Determine whether this requires an SS3 to SS4 migration. if(MediaAttribute::get()->filter('MediaTypeID', 0)->exists()) { // The problem is that class name mapping happens after this, but we need it right now to query pages. foreach(array( 'SiteTree', 'SiteTree_Live', 'SiteTree_Versions' ) as $table) { $update = new SQLUpdate( $table, array( 'ClassName' => MediaPage::class ), array( 'ClassName' => 'MediaPage' ) ); $update->execute(); } // Retrieve the existing media attributes. $attributes = new SQLSelect( '*', 'MediaAttribute', 'LinkID <> 0 AND MediaPageID <> 0', 'LinkID ASC' ); $attributes = $attributes->execute(); if(count($attributes)) { // With the results from above, delete these to prevent data integrity issues. $delete = new SQLDelete( 'MediaAttribute', 'LinkID <> 0 AND MediaPageID <> 0' ); $delete->execute(); // Migrate the existing media attributes. foreach($attributes as $existing) { $page = MediaPage::get()->byID($existing['MediaPageID']); if(!$page) { // This page may no longer exist. 
continue; } if($existing['LinkID'] == -1) { // Instantiate a new attribute for each "master" attribute. $attribute = MediaAttribute::create(); $attribute->ID = $existing['ID']; $attribute->Created = $existing['Created']; $attribute->Title = $existing['Title']; $attribute->OriginalTitle = $existing['OriginalTitle']; $attribute->MediaTypeID = $page->MediaTypeID; $attribute->write(); } else { $attribute = MediaAttribute::get()->byID($existing['LinkID']); } // Each page will have different content for a media attribute. $content = isset($existing['Content']) ? $existing['Content'] : null; $page->MediaAttributes()->add($attribute, array( 'Content' => $content )); // The attributes are versioned, but should only be published when it's considered safe to do so. if($page->isPublished() && !$page->isModifiedOnDraft()) { $page->publishRecursive(); } } } } // Retrieve existing "start time" attributes. $attributes = MediaAttribute::get()->filter(array( 'MediaType.Title' => 'Event', 'OriginalTitle' => 'Start Time' )); foreach($attributes as $attribute) { // These should now be "time" attributes. $attribute->Title = 'Time'; $attribute->OriginalTitle = 'Time'; $attribute->write(); } // Instantiate the default media types and their respective attributes. foreach($this->config()->type_defaults as $name => $attributes) { // Confirm that the media type doesn't already exist before creating it. $type = MediaType::get()->filter(array( 'Title' => $name ))->first(); if(!$type) { $type = MediaType::create(); $type->Title = $name; $type->write(); DB::alteration_message("\"{$name}\" Media Type", 'created'); } if(is_array($attributes)) { foreach($attributes as $attribute) { // Confirm that the media attributes don't already exist before creating them. 
if(!MediaAttribute::get()->filter(array( 'MediaTypeID' => $type->ID, 'OriginalTitle' => $attribute ))->first()) { $new = MediaAttribute::create(); $new->Title = $attribute; $new->MediaTypeID = $type->ID; $new->write(); DB::alteration_message("\"{$name}\" > \"{$attribute}\" Media Attribute", 'created'); } } } } } public function getCMSFields() { $fields = parent::getCMSFields(); // Display the media type as read only. $fields->addFieldToTab('Root.Main', ReadonlyField::create( 'Type', 'Type', $this->MediaType()->Title ), 'Title'); // Display a notification that the parent holder contains mixed children. $parent = $this->getParent(); if($parent && $parent->getMediaHolderChildren()->exists()) { Requirements::css('nglasl/silverstripe-mediawesome: client/css/mediawesome.css'); $fields->addFieldToTab('Root.Main', LiteralField::create( 'MediaNotification', "<p class='mediawesome notification'><strong>Mixed {$this->MediaType()->Title} Holder</strong></p>" ), 'Type'); } // Display the remaining media page fields. $fields->addFieldToTab('Root.Main', TextField::create( 'ExternalLink' )->setDescription('An <strong>optional</strong> redirect URL to the media source'), 'URLSegment'); $fields->addFieldToTab('Root.Main', DateField::create( 'Date' ), 'Content'); // Allow customisation of categories and tags respective to the current page. $tags = MediaTag::get()->map()->toArray(); $fields->findOrMakeTab('Root.CategoriesTags', 'Categories and Tags'); $fields->addFieldToTab('Root.CategoriesTags', $categoriesList = ListboxField::create( 'Categories', 'Categories', $tags )); $fields->addFieldToTab('Root.CategoriesTags', $tagsList = ListboxField::create( 'Tags', 'Tags', $tags )); if(!$tags) { $categoriesList->setAttribute('disabled', 'true'); $tagsList->setAttribute('disabled', 'true'); } // Display an abstract field for content summarisation. 
$fields->addfieldToTab('Root.Main', $abstract = TextareaField::create( 'Abstract' ), 'Content'); $abstract->setDescription('A concise summary of the content'); // Allow customisation of the media type attributes. $fields->addFieldToTab('Root.Main', GridField::create( 'MediaPageAttributes', "{$this->MediaType()->Title} Attributes", $this->MediaPageAttributes(), GridFieldConfig_RecordEditor::create()->removeComponentsByType(GridFieldAddNewButton::class) )->addExtraClass('pb-2'), 'Content'); // Allow customisation of images and attachments. $type = strtolower($this->MediaType()->Title); $fields->findOrMakeTab('Root.ImagesAttachments', 'Images and Attachments'); $fields->addFieldToTab('Root.ImagesAttachments', $images = Injector::inst()->create( FileHandleField::class, 'Images' )); $images->setAllowedFileCategories('image/supported'); $images->setFolderName("media-{$type}/{$this->ID}/images"); $fields->addFieldToTab('Root.ImagesAttachments', $attachments = Injector::inst()->create( FileHandleField::class, 'Attachments' )); $attachments->setFolderName("media-{$type}/{$this->ID}/attachments"); // Allow extension customisation. $this->extend('updateMediaPageCMSFields', $fields); return $fields; } /** * Confirm that the current page is valid. */ public function validate() { $parent = $this->getParent(); // The URL segment will conflict with a year/month/day/media format when numeric. if(is_numeric($this->URLSegment) || !($parent instanceof MediaHolder) || ($this->MediaTypeID && ($parent->MediaTypeID != $this->MediaTypeID))) { // Customise a validation error message. 
if(is_numeric($this->URLSegment)) { $message = '"URL Segment" must not be numeric!'; } else if(!($parent instanceof MediaHolder)) { $message = 'The parent needs to be a published media holder!'; } else { $message = "The media holder type doesn't match this!"; } $error = new HTTPResponse_Exception($message, 403); $error->getResponse()->addHeader('X-Status', rawurlencode($message)); // Allow extension customisation. $this->extend('validateMediaPage', $error); throw $error; } return parent::validate(); } public function onBeforeWrite() { parent::onBeforeWrite(); // Set the default media page date. if(!$this->Date) { $this->Date = date('Y-m-d'); } // Confirm that the external link exists. if($this->ExternalLink) { if(stripos($this->ExternalLink, 'http') === false) { $this->ExternalLink = 'http://' . $this->ExternalLink; } $file_headers = @get_headers($this->ExternalLink); if(!$file_headers || strripos($file_headers[0], '404 Not Found')) { $this->ExternalLink = null; } } // Apply the parent holder media type. $parent = $this->getParent(); if($parent) { $type = $parent->MediaType(); if($type->exists()) { $this->MediaTypeID = $type->ID; } } } public function onAfterWrite() { parent::onAfterWrite(); // This triggers for both a save and publish, causing duplicate attributes to appear. if(Versioned::get_stage() === 'Stage') { // The attributes of the respective type need to appear on this page. foreach($this->MediaType()->MediaAttributes() as $attribute) { $this->MediaAttributes()->add($attribute); } } } /** * Determine the URL by using the media holder's defined URL format. */ public function Link($action = null) { $parent = $this->getParent(); if(!$parent) { return null; } $date = ($parent->URLFormatting !== '-') ? $this->dbObject('Date')->Format($parent->URLFormatting ?: 'y/MM/dd/') : ''; $link = $parent->Link() . 
"{$date}{$this->URLSegment}/"; if($action) { $link .= "{$action}/"; } return $link; } /** * Determine the absolute URL by using the media holder's defined URL format. */ public function AbsoluteLink($action = null) { $parent = $this->getParent(); if(!$parent) { return null; } $date = ($parent->URLFormatting !== '-') ? $this->dbObject('Date')->Format($parent->URLFormatting ?: 'y/MM/dd/') : ''; $link = $parent->AbsoluteLink() . "{$date}{$this->URLSegment}/"; if($action) { $link .= "{$action}/"; } return $link; } /** * Retrieve the versioned attribute join records, since these are what we're editing. * * @return media page attribute */ public function MediaPageAttributes() { return MediaPageAttribute::get()->filter('MediaPageID', $this->ID); } /** * Retrieve a specific attribute for use in templates. * * @parameter <{ATTRIBUTE}> string * @return media attribute */ public function getAttribute($title) { return $this->MediaAttributes()->filter('OriginalTitle', $title)->first(); } /** * Retrieve a specific attribute for use in templates. * * @parameter <{ATTRIBUTE}> string * @return media attribute */ public function Attribute($title) { // This provides consistency when it comes to defining parameters from the template. return $this->getAttribute($title); } }
nyeholt/silverstripe-mediawesome
src/pages/MediaPage.php
PHP
bsd-3-clause
12,614
import os, sys; sys.path.insert(0, os.path.join("..", "..")) from pattern.web import Google, plaintext from pattern.web import SEARCH # The web module has a SearchEngine class with a search() method # that yields a list of Result objects. # Each Result has url, title, description, language, author and date and properties. # Subclasses of SearchEngine include: # Google, Yahoo, Bing, Twitter, Facebook, Wikipedia, Flickr. # This example retrieves results from Google based on a given query. # The Google search engine can handle SEARCH type searches. # Google's "Custom Search API" is a paid service. # The web module uses a test account with a 100 free queries per day, shared with all users. # If the limit is exceeded, SearchEngineLimitError is raised. # You can obtain your own license key at: https://code.google.com/apis/console/ # Activate "Custom Search API" under "Services" and get the key under "API Access". # Then use Google(license=[YOUR_KEY]).search(). # This will give you 100 personal free queries, or 5$ per 1000 queries. engine = Google(license=None, language="en") # Veale & Hao's method for finding simile using Google's wildcard (*) support. # http://afflatus.ucd.ie/Papers/LearningFigurative_CogSci07.pdf) # This will match results such as "as light as a feather", "as cute as a cupcake", etc. q = "as * as a *" # Google is very fast but you can only get up to 100 (10x10) results per query. for i in range(1,2): for result in engine.search(q, start=i, count=10, type=SEARCH): print plaintext(result.description) # plaintext() removes HTML formatting. print result.url print result.date print
piskvorky/pattern
examples/01-web/01-google.py
Python
bsd-3-clause
1,660
<?php return [ 'administrator' => 'Admin', 'agent' => 'Przedstawiciel', 'archive' => 'Archiwum', 'book_keeper' => 'Księgowy', 'book_keeper_delayed' => 'Księgowy - przeterminowane', 'calculation.to-create' => 'Rozliczenia - dodawanie', 'calculation.update' => 'Rozliczenia - edycja', 'calculation.problems' => 'Rozliczenia nieściągalne', 'calculation.pays' => 'Rozliczenia raty', 'client' => 'Klient', 'co-agent' => 'Przedstawiciel (wspólnik)', 'cost' => 'Koszty', 'cost.debt' => 'Koszty - Wierzytelność', 'czater' => 'Czater', 'customer_service' => 'Obsługa klienta', 'export' => 'Eksport', 'issue' => 'Sprawy', 'issue.delete' => 'Sprawy - usuwanie', 'issue.link-user' => 'Sprawy - przypisywanie użytkowników', 'issue.stage.change' => 'Sprawy - zmiana etapów', 'handicapped' => 'Niepełnosprawny', 'hint' => 'Wskazówki', 'lawyer' => 'Prawnik', 'lawyer_assistant' => 'Asystent Prawnika', 'lead' => 'Leads', 'lead.dialer' => 'Leads - Dialer', 'lead.duplicate' => 'Leads - Duplikaty', 'lead.import' => 'Leads - Import', 'lead.status' => 'Leads - Statusy', 'logs' => 'Logi', 'manager' => 'Manager (Panel Admina)', 'message.template' => 'Szablony Wiadomości', 'minor' => 'Nieletni', 'news' => 'Newsy', 'note' => 'Notatki', 'note.template' => 'Notatki - szablony', 'note.delete' => 'Notatki - usuwanie', 'pay' => 'Wpłaty', 'pay.all-paid' => 'Wszystkie opłacone (sprawy)', 'pay.part-payed' => 'Częściowo opłacone (sprawy)', 'pays.delayed' => 'Wpłaty - przeterminowane', 'pay.update' => 'Wpłaty - edycja', 'pay.paid' => 'Wpłaty - opłacanie', 'pay.received' => 'Wpłaty - odbieranie', 'provision' => 'Prowizje', 'provision.children.visible' => 'Prowizje - raporty podwładnych', 'recommending' => 'Polecający', 'shareholder' => 'Wspólnik', 'summon' => 'Zlecenia', 'summon.create' => 'Zlecenia - Dodawanie', 'summon.manager' => 'Zlecenia - manager', 'telemarketer' => 'Konsultant', 'user' => 'Użytkownik', 'workers' => 'Lista pracowników', 'victim' => 'Poszkodowany', ];
edzima/VestraTele
common/messages/pl/rbac.php
PHP
bsd-3-clause
2,044
<?php /** * Created by PhpStorm. * User: Administrator * Date: 2016/11/24 * Time: 15:56 */ namespace app\controllers; use yii; use app\models\Account; use app\models\Emailtype; class AccountController extends BaseController { /** * 主页面展示 */ public function actionIndex() { return $this->renderPartial("index"); } /** * 添加 * @return mixed */ public function actionAdd() { $model=new Account(); if(Yii::$app->request->isPost){ if($model->load(Yii::$app->request->post()) && $model->save()){ exit(json_encode(["status",10])); } $model->addError("status",20); exit(json_encode($model->getErrors())); } return $this->renderPartial("add",["model"=>$model,"types"=>(new Emailtype())->get_all()]); } /** * 获取列表数据 */ public function actionList() { $model = new Account(); exit(json_encode($model->get_list($this->get_page_info()))); } /** * 编辑页面 * @return string */ public function actionEdit() { $id=Yii::$app->request->get("id"); $model=new Account(); $data=$model->find()->where(["id"=>$id])->asArray()->one(); $model->email_type=$data["email_type"]; return $this->renderPartial("edit",["data"=>$data,"model"=>$model,"types"=>(new Emailtype())->get_all()]); } /** * 修改数据 */ public function actionEditdata() { if(Yii::$app->request->isPost){ $id=Yii::$app->request->post("id"); $model=Account::findOne($id); if($model->load(Yii::$app->request->post()) && $model->save()){ exit(json_encode(["status"=>10])); } $model->addError("status",20); exit(json_encode($model->getErrors())); } } /** * 根据id删除 * @param $id */ public function actionDel($id) { $model=new Account(); if($model->del_all([$id])){ exit(json_encode(["status"=>10])); } exit(json_encode(["status"=>20])); } }
timelesszhuang/edm
controllers/AccountController.php
PHP
bsd-3-clause
2,203
""" Copyright (c) 2019 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import unicode_literals, absolute_import from atomic_reactor.constants import PLUGIN_PUSH_FLOATING_TAGS_KEY, PLUGIN_GROUP_MANIFESTS_KEY from atomic_reactor.utils.manifest import ManifestUtil from atomic_reactor.plugin import ExitPlugin from atomic_reactor.util import get_floating_images, get_unique_images class PushFloatingTagsPlugin(ExitPlugin): """ Push floating tags to registry """ key = PLUGIN_PUSH_FLOATING_TAGS_KEY is_allowed_to_fail = False def __init__(self, tasker, workflow): """ constructor :param tasker: DockerTasker instance :param workflow: DockerBuildWorkflow instance """ super(PushFloatingTagsPlugin, self).__init__(tasker, workflow) self.manifest_util = ManifestUtil(workflow, None, self.log) def add_floating_tags(self, session, manifest_list_data, floating_images): list_type = manifest_list_data.get("media_type") manifest = manifest_list_data.get("manifest") manifest_digest = manifest_list_data.get("manifest_digest") for image in floating_images: target_repo = image.to_str(registry=False, tag=False) # We have to call store_manifest_in_repository directly for each # referenced manifest, since each one should be a new tag that requires uploading # the manifest again self.log.debug("storing %s as %s", target_repo, image.tag) self.manifest_util.store_manifest_in_repository(session, manifest, list_type, target_repo, target_repo, ref=image.tag) # And store the manifest list in the push_conf push_conf_registry = self.workflow.push_conf.add_docker_registry(session.registry, insecure=session.insecure) for image in floating_images: push_conf_registry.digests[image.tag] = manifest_digest registry_image = get_unique_images(self.workflow)[0] return registry_image.get_repo(explicit_namespace=False), manifest_digest def run(self): """ Run the plugin. 
""" if self.workflow.build_process_failed: self.log.info('Build failed, skipping %s', PLUGIN_PUSH_FLOATING_TAGS_KEY) return floating_tags = get_floating_images(self.workflow) if not floating_tags: self.log.info('No floating images to tag, skipping %s', PLUGIN_PUSH_FLOATING_TAGS_KEY) return # can't run in the worker build if not self.workflow.is_orchestrator_build(): self.log.warning('%s cannot be used by a worker builder', PLUGIN_PUSH_FLOATING_TAGS_KEY) return manifest_data = self.workflow.postbuild_results.get(PLUGIN_GROUP_MANIFESTS_KEY) if not manifest_data or not manifest_data.get("manifest_digest"): self.log.info('No manifest digest available, skipping %s', PLUGIN_PUSH_FLOATING_TAGS_KEY) return digests = dict() for registry in self.manifest_util.registries: session = self.manifest_util.get_registry_session(registry) repo, digest = self.add_floating_tags(session, manifest_data, floating_tags) digests[repo] = digest return digests
projectatomic/atomic-reactor
atomic_reactor/plugins/exit_push_floating_tags.py
Python
bsd-3-clause
3,577
""" Provider that returns vector representation of features in a data source. This is a provider that does not return an image, but rather queries a data source for raw features and replies with a vector representation such as GeoJSON. For example, it's possible to retrieve data for locations of OpenStreetMap points of interest or street centerlines contained within a tile's boundary. Many Polymaps (http://polymaps.org) examples use GeoJSON vector data tiles, which can be effectively created using this provider. Vector functionality is provided by OGR (http://www.gdal.org/ogr/). Thank you, Frank Warmerdam. Currently two serializations and three encodings are supported for a total of six possible kinds of output with these tile name extensions: GeoJSON (.geojson): See http://geojson.org/geojson-spec.html Arc GeoServices JSON (.arcjson): See http://www.esri.com/library/whitepapers/pdfs/geoservices-rest-spec.pdf GeoBSON (.geobson) and Arc GeoServices BSON (.arcbson): BSON-encoded GeoJSON and Arc JSON, see http://bsonspec.org/#/specification GeoAMF (.geoamf) and Arc GeoServices AMF (.arcamf): AMF0-encoded GeoJSON and Arc JSON, see: http://opensource.adobe.com/wiki/download/attachments/1114283/amf0_spec_121207.pdf Possible future supported formats might include KML and others. Get in touch via Github to suggest other formats: http://github.com/migurski/TileStache. Common parameters: driver: String used to identify an OGR driver. Currently, "ESRI Shapefile", "PostgreSQL", "MySQL", Oracle, Spatialite and "GeoJSON" are supported as data source drivers, with "postgis" and "shapefile" accepted as synonyms. Not case-sensitive. OGR's complete list of potential formats can be found here: http://www.gdal.org/ogr/ogr_formats.html. Feel free to get in touch via Github to suggest new formats: http://github.com/migurski/TileStache. parameters: Dictionary of parameters for each driver. PostgreSQL: "dbname" parameter is required, with name of database. 
"host", "user", and "password" are optional connection parameters. One of "table" or "query" is required, with a table name in the first case and a complete SQL query in the second. Shapefile and GeoJSON: "file" parameter is required, with filesystem path to data file. properties: Optional list or dictionary of case-sensitive output property names. If omitted, all fields from the data source will be included in response. If a list, treated as a whitelist of field names to include in response. If a dictionary, treated as a whitelist and re-mapping of field names. clipped: Default is true. Boolean flag for optionally clipping the output geometries to the bounds of the enclosing tile, or the string value "padded" for clipping to the bounds of the tile plus 5%. This results in incomplete geometries, dramatically smaller file sizes, and improves performance and compatibility with Polymaps (http://polymaps.org). projected: Default is false. Boolean flag for optionally returning geometries in projected rather than geographic coordinates. Typically this means EPSG:900913 a.k.a. spherical mercator projection. Stylistically a poor fit for GeoJSON, but useful when returning Arc GeoServices responses. precision: Default is 6. Optional number of decimal places to use for floating point values. spacing: Optional number of tile pixels for spacing geometries in responses. Used to cut down on the number of returned features by ensuring that only those features at least this many pixels apart are returned. Order of features in the data source matters: early features beat out later features. verbose: Default is false. Boolean flag for optionally expanding output with additional whitespace for readability. Results in larger but more readable GeoJSON responses. id_property: Default is None. Sets the id of the geojson feature to the specified field of the data source. This can be used, for example, to identify a unique key field for the feature. 
Example TileStache provider configuration: "vector-postgis-points": { "provider": {"name": "vector", "driver": "PostgreSQL", "parameters": {"dbname": "geodata", "user": "geodata", "table": "planet_osm_point"}} } "vector-postgis-lines": { "provider": {"name": "vector", "driver": "postgis", "parameters": {"dbname": "geodata", "user": "geodata", "table": "planet_osm_line"}} } "vector-shapefile-points": { "provider": {"name": "vector", "driver": "ESRI Shapefile", "parameters": {"file": "oakland-uptown-point.shp"}, "properties": ["NAME", "HIGHWAY"]} } "vector-shapefile-lines": { "provider": {"name": "vector", "driver": "shapefile", "parameters": {"file": "oakland-uptown-line.shp"}, "properties": {"NAME": "name", "HIGHWAY": "highway"}} } "vector-postgis-query": { "provider": {"name": "vector", "driver": "PostgreSQL", "parameters": {"dbname": "geodata", "user": "geodata", "query": "SELECT osm_id, name, highway, way FROM planet_osm_line WHERE SUBSTR(name, 1, 1) = '1'"}} } "vector-sf-streets": { "provider": {"name": "vector", "driver": "GeoJSON", "parameters": {"file": "stclines.json"}, "properties": ["STREETNAME"]} } Caveats: Your data source must have a valid defined projection, or OGR will not know how to correctly filter and reproject it. Although response tiles are typically in web (spherical) mercator projection, the actual vector content of responses is unprojected back to plain WGS84 latitude and longitude. If you are using PostGIS and spherical mercator a.k.a. SRID 900913, you can save yourself a world of trouble by using this definition: http://github.com/straup/postgis-tools/raw/master/spatial_ref_900913-8.3.sql """ from re import compile from urlparse import urlparse, urljoin try: from json import JSONEncoder, loads as json_loads except ImportError: from simplejson import JSONEncoder, loads as json_loads try: from osgeo import ogr, osr except ImportError: # At least we'll be able to build the documentation. 
pass from TileStache.Core import KnownUnknown from TileStache.Geography import getProjectionByName from Arc import reserialize_to_arc, pyamf_classes class VectorResponse: """ Wrapper class for Vector response that makes it behave like a PIL.Image object. TileStache.getTile() expects to be able to save one of these to a buffer. Constructor arguments: - content: Vector data to be serialized, typically a dictionary. - verbose: Boolean flag to expand response for better legibility. """ def __init__(self, content, verbose, precision=6): self.content = content self.verbose = verbose self.precision = precision def save(self, out, format): """ """ # # Serialize # if format == 'WKT': if 'wkt' in self.content['crs']: out.write(self.content['crs']['wkt']) else: out.write(_sref_4326().ExportToWkt()) return if format in ('GeoJSON', 'GeoBSON', 'GeoAMF'): content = self.content if 'wkt' in content['crs']: content['crs'] = {'type': 'link', 'properties': {'href': '0.wkt', 'type': 'ogcwkt'}} else: del content['crs'] elif format in ('ArcJSON', 'ArcBSON', 'ArcAMF'): content = reserialize_to_arc(self.content, format == 'ArcAMF') else: raise KnownUnknown('Vector response only saves .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % format) # # Encode # if format in ('GeoJSON', 'ArcJSON'): indent = self.verbose and 2 or None encoded = JSONEncoder(indent=indent).iterencode(content) float_pat = compile(r'^-?\d+\.\d+$') for atom in encoded: if float_pat.match(atom): out.write(('%%.%if' % self.precision) % float(atom)) else: out.write(atom) elif format in ('GeoBSON', 'ArcBSON'): import bson encoded = bson.dumps(content) out.write(encoded) elif format in ('GeoAMF', 'ArcAMF'): import pyamf for class_name in pyamf_classes.items(): pyamf.register_class(*class_name) encoded = pyamf.encode(content, 0).read() out.write(encoded) def _sref_4326(): """ """ sref = osr.SpatialReference() proj = getProjectionByName('WGS84') sref.ImportFromProj4(proj.srs) return sref def 
_tile_perimeter(coord, projection, padded): """ Get a tile's outer edge for a coordinate and a projection. Returns a list of 17 (x, y) coordinates corresponding to a clockwise circumambulation of a tile boundary in a given projection. Projection is like those found in TileStache.Geography, used for tile output. If padded argument is True, pad bbox by 5% on all sides. """ if padded: ul = projection.coordinateProj(coord.left(0.05).up(0.05)) lr = projection.coordinateProj(coord.down(1.05).right(1.05)) else: ul = projection.coordinateProj(coord) lr = projection.coordinateProj(coord.right().down()) xmin, ymin, xmax, ymax = ul.x, ul.y, lr.x, lr.y xspan, yspan = xmax - xmin, ymax - ymin perimeter = [ (xmin, ymin), (xmin + 1 * xspan/4, ymin), (xmin + 2 * xspan/4, ymin), (xmin + 3 * xspan/4, ymin), (xmax, ymin), (xmax, ymin + 1 * yspan/4), (xmax, ymin + 2 * yspan/4), (xmax, ymin + 3 * yspan/4), (xmax, ymax), (xmax - 1 * xspan/4, ymax), (xmax - 2 * xspan/4, ymax), (xmax - 3 * xspan/4, ymax), (xmin, ymax), (xmin, ymax - 1 * yspan/4), (xmin, ymax - 2 * yspan/4), (xmin, ymax - 3 * yspan/4), (xmin, ymin) ] return perimeter def _tile_perimeter_width(coord, projection): """ Get the width in projected coordinates of the coordinate tile polygon. Uses _tile_perimeter(). """ perimeter = _tile_perimeter(coord, projection, False) return perimeter[8][0] - perimeter[0][0] def _tile_perimeter_geom(coord, projection, padded): """ Get an OGR Geometry object for a coordinate tile polygon. Uses _tile_perimeter(). """ perimeter = _tile_perimeter(coord, projection, padded) wkt = 'POLYGON((%s))' % ', '.join(['%.3f %.3f' % xy for xy in perimeter]) geom = ogr.CreateGeometryFromWkt(wkt) ref = osr.SpatialReference() ref.ImportFromProj4(projection.srs) geom.AssignSpatialReference(ref) return geom def _feature_properties(feature, layer_definition, whitelist=None): """ Returns a dictionary of feature properties for a feature in a layer. 
Third argument is an optional list or dictionary of properties to whitelist by case-sensitive name - leave it None to include everything. A dictionary will cause property names to be re-mapped. OGR property types: OFTInteger (0), OFTIntegerList (1), OFTReal (2), OFTRealList (3), OFTString (4), OFTStringList (5), OFTWideString (6), OFTWideStringList (7), OFTBinary (8), OFTDate (9), OFTTime (10), OFTDateTime (11). """ properties = {} okay_types = ogr.OFTInteger, ogr.OFTReal, ogr.OFTString, ogr.OFTWideString for index in range(layer_definition.GetFieldCount()): field_definition = layer_definition.GetFieldDefn(index) field_type = field_definition.GetType() if field_type not in okay_types: try: name = [oft for oft in dir(ogr) if oft.startswith('OFT') and getattr(ogr, oft) == field_type][0] except IndexError: raise KnownUnknown("Found an OGR field type I've never even seen: %d" % field_type) else: raise KnownUnknown("Found an OGR field type I don't know what to do with: ogr.%s" % name) name = field_definition.GetNameRef() if type(whitelist) in (list, dict) and name not in whitelist: continue property = type(whitelist) is dict and whitelist[name] or name properties[property] = feature.GetField(name) return properties def _append_with_delim(s, delim, data, key): if key in data: return s + delim + str(data[key]) else: return s def _open_layer(driver_name, parameters, dirpath): """ Open a layer, return it and its datasource. Dirpath comes from configuration, and is used to locate files. """ # # Set up the driver # okay_drivers = {'postgis': 'PostgreSQL', 'esri shapefile': 'ESRI Shapefile', 'postgresql': 'PostgreSQL', 'shapefile': 'ESRI Shapefile', 'geojson': 'GeoJSON', 'spatialite': 'SQLite', 'oracle': 'OCI', 'mysql': 'MySQL'} if driver_name.lower() not in okay_drivers: raise KnownUnknown('Got a driver type Vector doesn\'t understand: "%s". Need one of %s.' 
% (driver_name, ', '.join(okay_drivers.keys()))) driver_name = okay_drivers[driver_name.lower()] driver = ogr.GetDriverByName(str(driver_name)) # # Set up the datasource # if driver_name == 'PostgreSQL': if 'dbname' not in parameters: raise KnownUnknown('Need at least a "dbname" parameter for postgis') conn_parts = [] for part in ('dbname', 'user', 'host', 'password', 'port'): if part in parameters: conn_parts.append("%s='%s'" % (part, parameters[part])) source_name = 'PG:' + ' '.join(conn_parts) elif driver_name == 'MySQL': if 'dbname' not in parameters: raise KnownUnknown('Need a "dbname" parameter for MySQL') if 'table' not in parameters: raise KnownUnknown('Need a "table" parameter for MySQL') conn_parts = [] for part in ('host', 'port', 'user', 'password'): if part in parameters: conn_parts.append("%s=%s" % (part, parameters[part])) source_name = 'MySql:' + parameters["dbname"] + "," + ','.join(conn_parts) + ",tables=" + parameters['table'] elif driver_name == 'OCI': if 'host' not in parameters: raise KnownUnknown('Need a "host" parameter for oracle') if 'table' not in parameters: raise KnownUnknown('Need a "table" parameter for oracle') source_name = 'OCI:' source_name = _append_with_delim(source_name, '', parameters, 'user') source_name = _append_with_delim(source_name, '/', parameters, 'password') if 'user' in parameters: source_name = source_name + '@' source_name = source_name + parameters['host'] source_name = _append_with_delim(source_name, ':', parameters, 'port') source_name = _append_with_delim(source_name, '/', parameters, 'dbname') source_name = source_name + ":" + parameters['table'] elif driver_name in ('ESRI Shapefile', 'GeoJSON', 'SQLite'): if 'file' not in parameters: raise KnownUnknown('Need at least a "file" parameter for a shapefile') file_href = urljoin(dirpath, parameters['file']) scheme, h, file_path, q, p, f = urlparse(file_href) if scheme not in ('file', ''): raise KnownUnknown('Shapefiles need to be local, not %s' % file_href) 
source_name = file_path datasource = driver.Open(str(source_name)) if datasource is None: raise KnownUnknown('Couldn\'t open datasource %s' % source_name) # # Set up the layer # if driver_name == 'PostgreSQL' or driver_name == 'OCI' or driver_name == 'MySQL': if 'query' in parameters: layer = datasource.ExecuteSQL(str(parameters['query'])) elif 'table' in parameters: layer = datasource.GetLayerByName(str(parameters['table'])) else: raise KnownUnknown('Need at least a "query" or "table" parameter for postgis or oracle') elif driver_name == 'SQLite': layer = datasource.GetLayerByName(str(parameters['layer'])) else: layer = datasource.GetLayer(0) if layer.GetSpatialRef() is None and driver_name != 'SQLite': raise KnownUnknown('Couldn\'t get a layer from data source %s' % source_name) # # Return the layer and the datasource. # # Technically, the datasource is no longer needed # but layer segfaults when it falls out of scope. # return layer, datasource def _get_features(coord, properties, projection, layer, clipped, projected, spacing, id_property): """ Return a list of features in an OGR layer with properties in GeoJSON form. Optionally clip features to coordinate bounding box, and optionally limit returned features to only those separated by number of pixels given as spacing. """ # # Prepare output spatial reference - always WGS84. # if projected: output_sref = osr.SpatialReference() output_sref.ImportFromProj4(projection.srs) else: output_sref = _sref_4326() # # Load layer information # definition = layer.GetLayerDefn() layer_sref = layer.GetSpatialRef() if layer_sref == None: layer_sref = _sref_4326() # # Spatially filter the layer # bbox = _tile_perimeter_geom(coord, projection, clipped == 'padded') bbox.TransformTo(layer_sref) layer.SetSpatialFilter(bbox) features = [] mask = None if spacing is not None: buffer = spacing * _tile_perimeter_width(coord, projection) / 256. 
for feature in layer: geometry = feature.geometry().Clone() if not geometry.Intersect(bbox): continue if mask and geometry.Intersect(mask): continue if clipped: geometry = geometry.Intersection(bbox) if geometry is None: # may indicate a TopologyException continue # mask out subsequent features if spacing is defined if mask and buffer: mask = geometry.Buffer(buffer, 2).Union(mask) elif spacing is not None: mask = geometry.Buffer(buffer, 2) geometry.AssignSpatialReference(layer_sref) geometry.TransformTo(output_sref) geom = json_loads(geometry.ExportToJson()) prop = _feature_properties(feature, definition, properties) geojson_feature = {'type': 'Feature', 'properties': prop, 'geometry': geom} if id_property != None and id_property in prop: geojson_feature['id'] = prop[id_property] features.append(geojson_feature) return features class Provider: """ Vector Provider for OGR datasources. See module documentation for explanation of constructor arguments. """ def __init__(self, layer, driver, parameters, clipped, verbose, projected, spacing, properties, precision, id_property): self.layer = layer self.driver = driver self.clipped = clipped self.verbose = verbose self.projected = projected self.spacing = spacing self.parameters = parameters self.properties = properties self.precision = precision self.id_property = id_property def renderTile(self, width, height, srs, coord): """ Render a single tile, return a VectorResponse instance. 
""" layer, ds = _open_layer(self.driver, self.parameters, self.layer.config.dirpath) features = _get_features(coord, self.properties, self.layer.projection, layer, self.clipped, self.projected, self.spacing, self.id_property) response = {'type': 'FeatureCollection', 'features': features} if self.projected: sref = osr.SpatialReference() sref.ImportFromProj4(self.layer.projection.srs) response['crs'] = {'wkt': sref.ExportToWkt()} if srs == getProjectionByName('spherical mercator').srs: response['crs']['wkid'] = 102113 else: response['crs'] = {'srid': 4326, 'wkid': 4326} return VectorResponse(response, self.verbose, self.precision) def getTypeByExtension(self, extension): """ Get mime-type and format by file extension. This only accepts "geojson" for the time being. """ if extension.lower() == 'geojson': return 'text/json', 'GeoJSON' elif extension.lower() == 'arcjson': return 'text/json', 'ArcJSON' elif extension.lower() == 'geobson': return 'application/x-bson', 'GeoBSON' elif extension.lower() == 'arcbson': return 'application/x-bson', 'ArcBSON' elif extension.lower() == 'geoamf': return 'application/x-amf', 'GeoAMF' elif extension.lower() == 'arcamf': return 'application/x-amf', 'ArcAMF' elif extension.lower() == 'wkt': return 'text/x-wkt', 'WKT' raise KnownUnknown('Vector Provider only makes .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % extension)
mpuig/TileStache
TileStache/Vector/__init__.py
Python
bsd-3-clause
22,199
package org.frozenarc.zeframework.connector; import java.util.HashMap; /** * The class is used to connect action and view. when user submit a form the action occurs * and it will be handled by ActionHandler, after that the view loading starts. The two process handling * action and loading view should be connected by somthing. That purpose the class solves. * @author Manan */ public class ActionViewConnector { protected HashMap map; protected String action; protected String view; /** * Constructor */ public ActionViewConnector() { map=new HashMap(); } /** * Preserves some value to use after. * @param key * @param value */ public void putValue(Object key, Object value) { map.put(key, value); } /** * Use value which is preserved before. * @param key * @return value */ public Object getValue(Object key) { return map.get(key); } /** * Remove preserved value. * @param key */ public void removeValue(Object key) { map.remove(key); } /** * The class connects action and view, so the method returns action nane of the action. * @return actionname */ public String getAction() { return action; } /** * The class connects action and view, so the method set action nane of the action. * The method is called by framework * @param action */ public void setAction(String action) { this.action = action; } /** * The class connects action and view, so the method returns view nane of the view. * @return viewname */ public String getView() { return view; } /** * The class connects action and view, so the method set view nane of the view. * The method is called by framework * @param view */ public void setView(String view) { this.view = view; } }
frozenarc/zeframework
src/main/java/org/frozenarc/zeframework/connector/ActionViewConnector.java
Java
bsd-3-clause
1,996
<?php // WARNING, this is a read only file created by import scripts // WARNING // WARNING, Changes made to this file will be clobbered // WARNING // WARNING, Please make changes on poeditor instead of here // // ?> konu: {target.type} #{target.id} hakkında rapor {alternative:plain} Merhaba, Aktarma numarası {target.id} ile ilgili rapor bu e-postanın ekindedir. Saygılarımızla, {cfg:site_name} {alternative:html} <p> Merhaba, </p> <p> Aktarma numarası {target.id} ile ilgili rapor bu e-postanın ekindedir. </p> <p>Saygılarımızla,<br/> {cfg:site_name}</p>
filesender/filesender
language/tr_TR/report_attached.mail.php
PHP
bsd-3-clause
589
// Copyright (c) 2010-2022, Lawrence Livermore National Security, LLC. Produced // at the Lawrence Livermore National Laboratory. All Rights reserved. See files // LICENSE and NOTICE for details. LLNL-CODE-806117. // // This file is part of the MFEM library. For more information and source code // availability visit https://mfem.org. // // MFEM is free software; you can redistribute it and/or modify it under the // terms of the BSD-3 license. We welcome feedback and contributions, see file // CONTRIBUTING.md for details. #include "../general/forall.hpp" #include "bilininteg.hpp" #include "gridfunc.hpp" namespace mfem { template<int T_D1D = 0, int T_Q1D = 0> static void EAMassAssemble1D(const int NE, const Array<double> &basis, const Vector &padata, Vector &eadata, const bool add, const int d1d = 0, const int q1d = 0) { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? T_Q1D : q1d; MFEM_VERIFY(D1D <= MAX_D1D, ""); MFEM_VERIFY(Q1D <= MAX_Q1D, ""); auto B = Reshape(basis.Read(), Q1D, D1D); auto D = Reshape(padata.Read(), Q1D, NE); auto M = Reshape(eadata.ReadWrite(), D1D, D1D, NE); MFEM_FORALL_3D(e, NE, D1D, D1D, 1, { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? T_Q1D : q1d; constexpr int MQ1 = T_Q1D ? T_Q1D : MAX_Q1D; double r_Bi[MQ1]; double r_Bj[MQ1]; for (int q = 0; q < Q1D; q++) { r_Bi[q] = B(q,MFEM_THREAD_ID(x)); r_Bj[q] = B(q,MFEM_THREAD_ID(y)); } MFEM_FOREACH_THREAD(i1,x,D1D) { MFEM_FOREACH_THREAD(j1,y,D1D) { double val = 0.0; for (int k1 = 0; k1 < Q1D; ++k1) { val += r_Bi[k1] * r_Bj[k1] * D(k1, e); } if (add) { M(i1, j1, e) += val; } else { M(i1, j1, e) = val; } } } }); } template<int T_D1D = 0, int T_Q1D = 0> static void EAMassAssemble2D(const int NE, const Array<double> &basis, const Vector &padata, Vector &eadata, const bool add, const int d1d = 0, const int q1d = 0) { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? 
T_Q1D : q1d; MFEM_VERIFY(D1D <= MAX_D1D, ""); MFEM_VERIFY(Q1D <= MAX_Q1D, ""); auto B = Reshape(basis.Read(), Q1D, D1D); auto D = Reshape(padata.Read(), Q1D, Q1D, NE); auto M = Reshape(eadata.ReadWrite(), D1D, D1D, D1D, D1D, NE); MFEM_FORALL_3D(e, NE, D1D, D1D, 1, { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? T_Q1D : q1d; constexpr int MD1 = T_D1D ? T_D1D : MAX_D1D; constexpr int MQ1 = T_Q1D ? T_Q1D : MAX_Q1D; double r_B[MQ1][MD1]; for (int d = 0; d < D1D; d++) { for (int q = 0; q < Q1D; q++) { r_B[q][d] = B(q,d); } } MFEM_SHARED double s_D[MQ1][MQ1]; MFEM_FOREACH_THREAD(k1,x,Q1D) { MFEM_FOREACH_THREAD(k2,y,Q1D) { s_D[k1][k2] = D(k1,k2,e); } } MFEM_SYNC_THREAD; MFEM_FOREACH_THREAD(i1,x,D1D) { MFEM_FOREACH_THREAD(i2,y,D1D) { for (int j1 = 0; j1 < D1D; ++j1) { for (int j2 = 0; j2 < D1D; ++j2) { double val = 0.0; for (int k1 = 0; k1 < Q1D; ++k1) { for (int k2 = 0; k2 < Q1D; ++k2) { val += r_B[k1][i1] * r_B[k1][j1] * r_B[k2][i2] * r_B[k2][j2] * s_D[k1][k2]; } } if (add) { M(i1, i2, j1, j2, e) += val; } else { M(i1, i2, j1, j2, e) = val; } } } } } }); } template<int T_D1D = 0, int T_Q1D = 0> static void EAMassAssemble3D(const int NE, const Array<double> &basis, const Vector &padata, Vector &eadata, const bool add, const int d1d = 0, const int q1d = 0) { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? T_Q1D : q1d; MFEM_VERIFY(D1D <= MAX_D1D, ""); MFEM_VERIFY(Q1D <= MAX_Q1D, ""); auto B = Reshape(basis.Read(), Q1D, D1D); auto D = Reshape(padata.Read(), Q1D, Q1D, Q1D, NE); auto M = Reshape(eadata.ReadWrite(), D1D, D1D, D1D, D1D, D1D, D1D, NE); MFEM_FORALL_3D(e, NE, D1D, D1D, D1D, { const int D1D = T_D1D ? T_D1D : d1d; const int Q1D = T_Q1D ? T_Q1D : q1d; constexpr int MD1 = T_D1D ? T_D1D : MAX_D1D; constexpr int MQ1 = T_Q1D ? 
T_Q1D : MAX_Q1D; double r_B[MQ1][MD1]; for (int d = 0; d < D1D; d++) { for (int q = 0; q < Q1D; q++) { r_B[q][d] = B(q,d); } } MFEM_SHARED double s_D[MQ1][MQ1][MQ1]; MFEM_FOREACH_THREAD(k1,x,Q1D) { MFEM_FOREACH_THREAD(k2,y,Q1D) { MFEM_FOREACH_THREAD(k3,z,Q1D) { s_D[k1][k2][k3] = D(k1,k2,k3,e); } } } MFEM_SYNC_THREAD; MFEM_FOREACH_THREAD(i1,x,D1D) { MFEM_FOREACH_THREAD(i2,y,D1D) { MFEM_FOREACH_THREAD(i3,z,D1D) { for (int j1 = 0; j1 < D1D; ++j1) { for (int j2 = 0; j2 < D1D; ++j2) { for (int j3 = 0; j3 < D1D; ++j3) { double val = 0.0; for (int k1 = 0; k1 < Q1D; ++k1) { for (int k2 = 0; k2 < Q1D; ++k2) { for (int k3 = 0; k3 < Q1D; ++k3) { val += r_B[k1][i1] * r_B[k1][j1] * r_B[k2][i2] * r_B[k2][j2] * r_B[k3][i3] * r_B[k3][j3] * s_D[k1][k2][k3]; } } } if (add) { M(i1, i2, i3, j1, j2, j3, e) += val; } else { M(i1, i2, i3, j1, j2, j3, e) = val; } } } } } } } }); } void MassIntegrator::AssembleEA(const FiniteElementSpace &fes, Vector &ea_data, const bool add) { AssemblePA(fes); ne = fes.GetMesh()->GetNE(); const Array<double> &B = maps->B; if (dim == 1) { switch ((dofs1D << 4 ) | quad1D) { case 0x22: return EAMassAssemble1D<2,2>(ne,B,pa_data,ea_data,add); case 0x33: return EAMassAssemble1D<3,3>(ne,B,pa_data,ea_data,add); case 0x44: return EAMassAssemble1D<4,4>(ne,B,pa_data,ea_data,add); case 0x55: return EAMassAssemble1D<5,5>(ne,B,pa_data,ea_data,add); case 0x66: return EAMassAssemble1D<6,6>(ne,B,pa_data,ea_data,add); case 0x77: return EAMassAssemble1D<7,7>(ne,B,pa_data,ea_data,add); case 0x88: return EAMassAssemble1D<8,8>(ne,B,pa_data,ea_data,add); case 0x99: return EAMassAssemble1D<9,9>(ne,B,pa_data,ea_data,add); default: return EAMassAssemble1D(ne,B,pa_data,ea_data,add, dofs1D,quad1D); } } else if (dim == 2) { switch ((dofs1D << 4 ) | quad1D) { case 0x22: return EAMassAssemble2D<2,2>(ne,B,pa_data,ea_data,add); case 0x33: return EAMassAssemble2D<3,3>(ne,B,pa_data,ea_data,add); case 0x44: return EAMassAssemble2D<4,4>(ne,B,pa_data,ea_data,add); case 0x55: return 
EAMassAssemble2D<5,5>(ne,B,pa_data,ea_data,add); case 0x66: return EAMassAssemble2D<6,6>(ne,B,pa_data,ea_data,add); case 0x77: return EAMassAssemble2D<7,7>(ne,B,pa_data,ea_data,add); case 0x88: return EAMassAssemble2D<8,8>(ne,B,pa_data,ea_data,add); case 0x99: return EAMassAssemble2D<9,9>(ne,B,pa_data,ea_data,add); default: return EAMassAssemble2D(ne,B,pa_data,ea_data,add, dofs1D,quad1D); } } else if (dim == 3) { switch ((dofs1D << 4 ) | quad1D) { case 0x23: return EAMassAssemble3D<2,3>(ne,B,pa_data,ea_data,add); case 0x34: return EAMassAssemble3D<3,4>(ne,B,pa_data,ea_data,add); case 0x45: return EAMassAssemble3D<4,5>(ne,B,pa_data,ea_data,add); case 0x56: return EAMassAssemble3D<5,6>(ne,B,pa_data,ea_data,add); case 0x67: return EAMassAssemble3D<6,7>(ne,B,pa_data,ea_data,add); case 0x78: return EAMassAssemble3D<7,8>(ne,B,pa_data,ea_data,add); case 0x89: return EAMassAssemble3D<8,9>(ne,B,pa_data,ea_data,add); default: return EAMassAssemble3D(ne,B,pa_data,ea_data,add, dofs1D,quad1D); } } MFEM_ABORT("Unknown kernel."); } }
mfem/mfem
fem/bilininteg_mass_ea.cpp
C++
bsd-3-clause
9,841
package edu.rutgers.css.Rutgers.api.soc; /** * Created by jamchamb on 10/30/14. */ public interface Titleable { String getDisplayTitle(); }
rutgersmobile/android-client
app/src/main/java/edu/rutgers/css/Rutgers/api/soc/Titleable.java
Java
bsd-3-clause
147
// You have a total of n coins that you want to form in a staircase shape, where // every k-th row must have exactly k coins. // Given n, find the total number of full staircase rows that can be formed. // n is a non-negative integer and fits within the range of a 32-bit signed // integer. // Example 1: // n = 5 // The coins can form the following rows: // ¤ // ¤ ¤ // ¤ ¤ // Because the 3rd row is incomplete, we return 2. // Example 2: // n = 8 // The coins can form the following rows: // ¤ // ¤ ¤ // ¤ ¤ ¤ // ¤ ¤ // Because the 4th row is incomplete, we return 3. class Solution { public: int arrangeCoins(int n) { // (1 + x) * x / 2 = n return (sqrt(1 + 8LL * n) - 1) / 2; } };
ysmiles/leetcode-cpp
401-500/441-Arranging_Coins-e.cpp
C++
bsd-3-clause
733
// @flow import invariant from 'invariant'; import * as React from 'react'; import Animated from 'react-native-reanimated'; import { useMessageListData } from 'lib/selectors/chat-selectors'; import { messageKey } from 'lib/shared/message-utils'; import { colorIsDark, viewerIsMember } from 'lib/shared/thread-utils'; import type { ThreadInfo } from 'lib/types/thread-types'; import { KeyboardContext } from '../keyboard/keyboard-state'; import { OverlayContext } from '../navigation/overlay-context'; import { MultimediaMessageTooltipModalRouteName, RobotextMessageTooltipModalRouteName, TextMessageTooltipModalRouteName, } from '../navigation/route-names'; import { useSelector } from '../redux/redux-utils'; import type { ChatMessageInfoItemWithHeight, ChatMessageItemWithHeight, ChatRobotextMessageInfoItemWithHeight, ChatTextMessageInfoItemWithHeight, } from '../types/chat-types'; import type { LayoutCoordinates, VerticalBounds } from '../types/layout-types'; import type { AnimatedViewStyle } from '../types/styles'; import { ChatContext, useHeightMeasurer } from './chat-context'; import { clusterEndHeight } from './composed-message.react'; import { failedSendHeight } from './failed-send.react'; import { inlineSidebarHeight, inlineSidebarMarginBottom, inlineSidebarMarginTop, } from './inline-sidebar.react'; import { authorNameHeight } from './message-header.react'; import { multimediaMessageItemHeight } from './multimedia-message-utils'; import { getSidebarThreadInfo } from './sidebar-navigation'; import textMessageSendFailed from './text-message-send-failed'; import { timestampHeight } from './timestamp.react'; /* eslint-disable import/no-named-as-default-member */ const { Node, Extrapolate, interpolateNode, interpolateColors, block, call, eq, cond, sub, } = Animated; /* eslint-enable import/no-named-as-default-member */ function textMessageItemHeight( item: ChatTextMessageInfoItemWithHeight, ): number { const { messageInfo, contentHeight, startsCluster, endsCluster } = 
item; const { isViewer } = messageInfo.creator; let height = 5 + contentHeight; // 5 from marginBottom in ComposedMessage if (!isViewer && startsCluster) { height += authorNameHeight; } if (endsCluster) { height += clusterEndHeight; } if (textMessageSendFailed(item)) { height += failedSendHeight; } if (item.threadCreatedFromMessage) { height += inlineSidebarHeight + inlineSidebarMarginTop + inlineSidebarMarginBottom; } return height; } function robotextMessageItemHeight( item: ChatRobotextMessageInfoItemWithHeight, ): number { if (item.threadCreatedFromMessage) { return item.contentHeight + inlineSidebarHeight; } return item.contentHeight; } function messageItemHeight(item: ChatMessageInfoItemWithHeight): number { let height = 0; if (item.messageShapeType === 'text') { height += textMessageItemHeight(item); } else if (item.messageShapeType === 'multimedia') { height += multimediaMessageItemHeight(item); } else { height += robotextMessageItemHeight(item); } if (item.startsConversation) { height += timestampHeight; } return height; } function chatMessageItemHeight(item: ChatMessageItemWithHeight): number { if (item.itemType === 'loader') { return 56; } return messageItemHeight(item); } function useMessageTargetParameters( sourceMessage: ChatMessageInfoItemWithHeight, initialCoordinates: LayoutCoordinates, messageListVerticalBounds: VerticalBounds, currentInputBarHeight: number, targetInputBarHeight: number, sidebarThreadInfo: ?ThreadInfo, ): { +position: number, +color: string, } { const messageListData = useMessageListData({ searching: false, userInfoInputArray: [], threadInfo: sidebarThreadInfo, }); const [ messagesWithHeight, setMessagesWithHeight, ] = React.useState<?$ReadOnlyArray<ChatMessageItemWithHeight>>(null); const measureMessages = useHeightMeasurer(); React.useEffect(() => { if (messageListData) { measureMessages( messageListData, sidebarThreadInfo, setMessagesWithHeight, ); } }, [measureMessages, messageListData, sidebarThreadInfo]); const 
sourceMessageID = sourceMessage.messageInfo?.id; const targetDistanceFromBottom = React.useMemo(() => { if (!messagesWithHeight) { return 0; } let offset = 0; for (const message of messagesWithHeight) { offset += chatMessageItemHeight(message); if (message.messageInfo && message.messageInfo.id === sourceMessageID) { return offset; } } return ( messageListVerticalBounds.height + chatMessageItemHeight(sourceMessage) ); }, [ messageListVerticalBounds.height, messagesWithHeight, sourceMessage, sourceMessageID, ]); if (!sidebarThreadInfo) { return { position: 0, color: sourceMessage.threadInfo.color, }; } const authorNameComponentHeight = sourceMessage.messageInfo.creator.isViewer ? 0 : authorNameHeight; const currentDistanceFromBottom = messageListVerticalBounds.height + messageListVerticalBounds.y - initialCoordinates.y + timestampHeight + authorNameComponentHeight + currentInputBarHeight; return { position: targetDistanceFromBottom + targetInputBarHeight - currentDistanceFromBottom, color: sidebarThreadInfo.color, }; } type AnimatedMessageArgs = { +sourceMessage: ChatMessageInfoItemWithHeight, +initialCoordinates: LayoutCoordinates, +messageListVerticalBounds: VerticalBounds, +progress: Node, +targetInputBarHeight: ?number, }; function useAnimatedMessageTooltipButton({ sourceMessage, initialCoordinates, messageListVerticalBounds, progress, targetInputBarHeight, }: AnimatedMessageArgs): { +style: AnimatedViewStyle, +threadColorOverride: ?Node, +isThreadColorDarkOverride: ?boolean, } { const chatContext = React.useContext(ChatContext); invariant(chatContext, 'chatContext should be set'); const { currentTransitionSidebarSourceID, setCurrentTransitionSidebarSourceID, chatInputBarHeights, sidebarAnimationType, setSidebarAnimationType, } = chatContext; const viewerID = useSelector( state => state.currentUserInfo && state.currentUserInfo.id, ); const sidebarThreadInfo = React.useMemo(() => { return getSidebarThreadInfo(sourceMessage, viewerID); }, [sourceMessage, 
viewerID]); const currentInputBarHeight = chatInputBarHeights.get(sourceMessage.threadInfo.id) ?? 0; const keyboardState = React.useContext(KeyboardContext); const viewerIsSidebarMember = viewerIsMember(sidebarThreadInfo); React.useEffect(() => { const newSidebarAnimationType = !currentInputBarHeight || !targetInputBarHeight || keyboardState?.keyboardShowing || !viewerIsSidebarMember ? 'fade_source_message' : 'move_source_message'; setSidebarAnimationType(newSidebarAnimationType); }, [ currentInputBarHeight, keyboardState?.keyboardShowing, setSidebarAnimationType, sidebarThreadInfo, targetInputBarHeight, viewerIsSidebarMember, ]); const { position: targetPosition, color: targetColor, } = useMessageTargetParameters( sourceMessage, initialCoordinates, messageListVerticalBounds, currentInputBarHeight, targetInputBarHeight ?? currentInputBarHeight, sidebarThreadInfo, ); React.useEffect(() => { return () => setCurrentTransitionSidebarSourceID(null); }, [setCurrentTransitionSidebarSourceID]); const bottom = React.useMemo( () => interpolateNode(progress, { inputRange: [0.3, 1], outputRange: [targetPosition, 0], extrapolate: Extrapolate.CLAMP, }), [progress, targetPosition], ); const [ isThreadColorDarkOverride, setThreadColorDarkOverride, ] = React.useState<?boolean>(null); const setThreadColorBrightness = React.useCallback(() => { const isSourceThreadDark = colorIsDark(sourceMessage.threadInfo.color); const isTargetThreadDark = colorIsDark(targetColor); if (isSourceThreadDark !== isTargetThreadDark) { setThreadColorDarkOverride(isTargetThreadDark); } }, [sourceMessage.threadInfo.color, targetColor]); const threadColorOverride = React.useMemo(() => { if ( sourceMessage.messageShapeType !== 'text' || !currentTransitionSidebarSourceID ) { return null; } return block([ cond(eq(progress, 1), call([], setThreadColorBrightness)), interpolateColors(progress, { inputRange: [0, 1], outputColorRange: [ `#${targetColor}`, `#${sourceMessage.threadInfo.color}`, ], }), ]); }, [ 
currentTransitionSidebarSourceID, progress, setThreadColorBrightness, sourceMessage.messageShapeType, sourceMessage.threadInfo.color, targetColor, ]); const messageContainerStyle = React.useMemo(() => { return { bottom: currentTransitionSidebarSourceID ? bottom : 0, opacity: currentTransitionSidebarSourceID && sidebarAnimationType === 'fade_source_message' ? 0 : 1, }; }, [bottom, currentTransitionSidebarSourceID, sidebarAnimationType]); return { style: messageContainerStyle, threadColorOverride, isThreadColorDarkOverride, }; } function getMessageTooltipKey(item: ChatMessageInfoItemWithHeight): string { return `tooltip|${messageKey(item.messageInfo)}`; } function isMessageTooltipKey(key: string): boolean { return key.startsWith('tooltip|'); } function useOverlayPosition(item: ChatMessageInfoItemWithHeight) { const overlayContext = React.useContext(OverlayContext); invariant(overlayContext, 'should be set'); for (const overlay of overlayContext.visibleOverlays) { if ( (overlay.routeName === MultimediaMessageTooltipModalRouteName || overlay.routeName === TextMessageTooltipModalRouteName || overlay.routeName === RobotextMessageTooltipModalRouteName) && overlay.routeKey === getMessageTooltipKey(item) ) { return overlay.position; } } return undefined; } function useContentAndHeaderOpacity( item: ChatMessageInfoItemWithHeight, ): number | Node { const overlayPosition = useOverlayPosition(item); const chatContext = React.useContext(ChatContext); return React.useMemo( () => overlayPosition && chatContext?.sidebarAnimationType === 'move_source_message' ? 
sub( 1, interpolateNode(overlayPosition, { inputRange: [0.05, 0.06], outputRange: [0, 1], extrapolate: Extrapolate.CLAMP, }), ) : 1, [chatContext?.sidebarAnimationType, overlayPosition], ); } function useDeliveryIconOpacity( item: ChatMessageInfoItemWithHeight, ): number | Node { const overlayPosition = useOverlayPosition(item); const chatContext = React.useContext(ChatContext); return React.useMemo(() => { if ( !overlayPosition || !chatContext?.currentTransitionSidebarSourceID || chatContext?.sidebarAnimationType === 'fade_source_message' ) { return 1; } return interpolateNode(overlayPosition, { inputRange: [0.05, 0.06, 1], outputRange: [1, 0, 0], extrapolate: Extrapolate.CLAMP, }); }, [ chatContext?.currentTransitionSidebarSourceID, chatContext?.sidebarAnimationType, overlayPosition, ]); } export { chatMessageItemHeight, useAnimatedMessageTooltipButton, messageItemHeight, getMessageTooltipKey, isMessageTooltipKey, useContentAndHeaderOpacity, useDeliveryIconOpacity, };
Ashoat/squadcal
native/chat/utils.js
JavaScript
bsd-3-clause
11,758
// // "$Id: Fl_File_Browser.H 6614 2009-01-01 16:11:32Z matt $" // // FileBrowser definitions. // // Copyright 1999-2009 by Michael Sweet. // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Library General Public // License as published by the Free Software Foundation; either // version 2 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Library General Public License for more details. // // You should have received a copy of the GNU Library General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 // USA. // // Please report all bugs and problems on the following page: // // http://www.fltk.org/str.php // /* \file Fl_File_Browser widget . */ // // Include necessary header files... // #ifndef _Fl_File_Browser_H_ # define _Fl_File_Browser_H_ # include "Fl_Browser.H" # include "Fl_File_Icon.H" # include "filename.H" // // Fl_File_Browser class... // /** The Fl_File_Browser widget displays a list of filenames, optionally with file-specific icons. */ class FL_EXPORT Fl_File_Browser : public Fl_Browser { int filetype_; const char *directory_; uchar iconsize_; const char *pattern_; int full_height() const; int item_height(void *) const; int item_width(void *) const; void item_draw(void *, int, int, int, int) const; int incr_height() const { return (item_height(0)); } public: enum { FILES, DIRECTORIES }; /** The constructor creates the Fl_File_Browser widget at the specified position and size. The destructor destroys the widget and frees all memory that has been allocated. */ Fl_File_Browser(int, int, int, int, const char * = 0); /** Sets or gets the size of the icons. The default size is 20 pixels. 
*/ uchar iconsize() const { return (iconsize_); }; /** Sets or gets the size of the icons. The default size is 20 pixels. */ void iconsize(uchar s) { iconsize_ = s; redraw(); }; /** Sets or gets the filename filter. The pattern matching uses the fl_filename_match() function in FLTK. */ void filter(const char *pattern); /** Sets or gets the filename filter. The pattern matching uses the fl_filename_match() function in FLTK. */ const char *filter() const { return (pattern_); }; /** Loads the specified directory into the browser. If icons have been loaded then the correct icon is associated with each file in the list. <P>The sort argument specifies a sort function to be used with fl_filename_list(). */ int load(const char *directory, Fl_File_Sort_F *sort = fl_numericsort); Fl_Fontsize textsize() const { return Fl_Browser::textsize(); }; void textsize(Fl_Fontsize s) { Fl_Browser::textsize(s); iconsize_ = (uchar)(3 * s / 2); }; /** Sets or gets the file browser type, FILES or DIRECTORIES. When set to FILES, both files and directories are shown. Otherwise only directories are shown. */ int filetype() const { return (filetype_); }; /** Sets or gets the file browser type, FILES or DIRECTORIES. When set to FILES, both files and directories are shown. Otherwise only directories are shown. */ void filetype(int t) { filetype_ = t; }; }; #endif // !_Fl_File_Browser_H_ // // End of "$Id: Fl_File_Browser.H 6614 2009-01-01 16:11:32Z matt $". //
cocoageek/max-gui
maxgui.mod/fltkmaxgui.mod/FL/Fl_File_Browser.H
C++
bsd-3-clause
3,645
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/wm/workspace/workspace_manager.h" #include <algorithm> #include <functional> #include "ash/root_window_controller.h" #include "ash/shelf/shelf_layout_manager.h" #include "ash/shell.h" #include "ash/shell_window_ids.h" #include "ash/wm/base_layout_manager.h" #include "ash/wm/frame_painter.h" #include "ash/wm/property_util.h" #include "ash/wm/window_animations.h" #include "ash/wm/window_properties.h" #include "ash/wm/window_util.h" #include "ash/wm/workspace/auto_window_management.h" #include "ash/wm/workspace/desktop_background_fade_controller.h" #include "ash/wm/workspace/workspace_animations.h" #include "ash/wm/workspace/workspace_cycler.h" #include "ash/wm/workspace/workspace_cycler_animator.h" #include "ash/wm/workspace/workspace_cycler_configuration.h" #include "ash/wm/workspace/workspace_layout_manager.h" #include "ash/wm/workspace/workspace.h" #include "base/auto_reset.h" #include "base/logging.h" #include "base/stl_util.h" #include "ui/aura/client/aura_constants.h" #include "ui/aura/root_window.h" #include "ui/aura/window.h" #include "ui/aura/window_property.h" #include "ui/base/ui_base_types.h" #include "ui/compositor/layer.h" #include "ui/compositor/layer_animator.h" #include "ui/compositor/scoped_animation_duration_scale_mode.h" #include "ui/compositor/scoped_layer_animation_settings.h" #include "ui/views/widget/widget.h" DECLARE_WINDOW_PROPERTY_TYPE(ash::internal::Workspace*); DECLARE_EXPORTED_WINDOW_PROPERTY_TYPE(ASH_EXPORT, ui::WindowShowState); using aura::Window; namespace ash { namespace internal { DEFINE_WINDOW_PROPERTY_KEY(Workspace*, kWorkspaceKey, NULL); namespace { // Duration for fading out the desktop background when maximizing. const int kCrossFadeSwitchTimeMS = 700; // Amount of time to pause before animating anything. 
Only used during initial // animation (when logging in). const int kInitialPauseTimeMS = 750; // Changes the parent of |window| and all its transient children to // |new_parent|. If |stack_beneach| is non-NULL all the windows are stacked // beneath it. void ReparentWindow(Window* window, Window* new_parent, Window* stack_beneath) { new_parent->AddChild(window); if (stack_beneath) new_parent->StackChildBelow(window, stack_beneath); for (size_t i = 0; i < window->transient_children().size(); ++i) ReparentWindow(window->transient_children()[i], new_parent, stack_beneath); } } // namespace // Workspace ------------------------------------------------------------------- // LayoutManager installed on the parent window of all the Workspace window (eg // |WorkspaceManager::contents_window_|). class WorkspaceManager::LayoutManagerImpl : public BaseLayoutManager { public: explicit LayoutManagerImpl(WorkspaceManager* workspace_manager) : BaseLayoutManager( workspace_manager->contents_window_->GetRootWindow()), workspace_manager_(workspace_manager) { } virtual ~LayoutManagerImpl() {} // Overridden from BaseWorkspaceLayoutManager: virtual void OnWindowResized() OVERRIDE { for (size_t i = 0; i < window()->children().size(); ++i) window()->children()[i]->SetBounds(gfx::Rect(window()->bounds().size())); } virtual void OnWindowAddedToLayout(Window* child) OVERRIDE { // Only workspaces should be added as children. 
DCHECK((child->id() == kShellWindowId_WorkspaceContainer) || workspace_manager_->creating_fade_); child->SetBounds(gfx::Rect(window()->bounds().size())); } private: aura::Window* window() { return workspace_manager_->contents_window_; } WorkspaceManager* workspace_manager_; DISALLOW_COPY_AND_ASSIGN(LayoutManagerImpl); }; // WorkspaceManager ----------------------------------------------------------- WorkspaceManager::WorkspaceManager(Window* contents_window) : contents_window_(contents_window), active_workspace_(NULL), shelf_(NULL), in_move_(false), clear_unminimizing_workspace_factory_(this), unminimizing_workspace_(NULL), app_terminating_(false), creating_fade_(false), workspace_cycler_(NULL) { // Clobber any existing event filter. contents_window->SetEventFilter(NULL); // |contents_window| takes ownership of LayoutManagerImpl. contents_window->SetLayoutManager(new LayoutManagerImpl(this)); active_workspace_ = CreateWorkspace(false); workspaces_.push_back(active_workspace_); active_workspace_->window()->Show(); Shell::GetInstance()->AddShellObserver(this); if (ash::WorkspaceCyclerConfiguration::IsCyclerEnabled()) workspace_cycler_.reset(new WorkspaceCycler(this)); } WorkspaceManager::~WorkspaceManager() { Shell::GetInstance()->RemoveShellObserver(this); // Release the windows, they'll be destroyed when |contents_window_| is // destroyed. 
std::for_each(workspaces_.begin(), workspaces_.end(), std::mem_fun(&Workspace::ReleaseWindow)); std::for_each(pending_workspaces_.begin(), pending_workspaces_.end(), std::mem_fun(&Workspace::ReleaseWindow)); std::for_each(to_delete_.begin(), to_delete_.end(), std::mem_fun(&Workspace::ReleaseWindow)); STLDeleteElements(&workspaces_); STLDeleteElements(&pending_workspaces_); STLDeleteElements(&to_delete_); } // static bool WorkspaceManager::IsMaximized(Window* window) { return IsMaximizedState(window->GetProperty(aura::client::kShowStateKey)); } // static bool WorkspaceManager::IsMaximizedState(ui::WindowShowState state) { return state == ui::SHOW_STATE_MAXIMIZED || state == ui::SHOW_STATE_FULLSCREEN; } // static bool WorkspaceManager::WillRestoreMaximized(Window* window) { return wm::IsWindowMinimized(window) && IsMaximizedState(window->GetProperty(aura::client::kRestoreShowStateKey)); } WorkspaceWindowState WorkspaceManager::GetWindowState() const { if (!shelf_) return WORKSPACE_WINDOW_STATE_DEFAULT; const bool is_active_maximized = active_workspace_->is_maximized(); const gfx::Rect shelf_bounds(shelf_->GetIdealBounds()); const Window::Windows& windows(active_workspace_->window()->children()); bool window_overlaps_launcher = false; bool has_maximized_window = false; for (Window::Windows::const_iterator i = windows.begin(); i != windows.end(); ++i) { if (GetIgnoredByShelf(*i)) continue; ui::Layer* layer = (*i)->layer(); if (!layer->GetTargetVisibility() || layer->GetTargetOpacity() == 0.0f) continue; // Ignore maximized/fullscreen windows if we're in the desktop. Such a state // is transitory and means we haven't yet switched. If we did consider such // windows we'll return the wrong thing, which can lead to prematurely // changing the launcher state and clobbering restore bounds. if (is_active_maximized) { if (wm::IsWindowMaximized(*i)) { // An untracked window may still be fullscreen so we keep iterating when // we hit a maximized window. 
has_maximized_window = true; } else if (wm::IsWindowFullscreen(*i)) { return WORKSPACE_WINDOW_STATE_FULL_SCREEN; } } if (!window_overlaps_launcher && (*i)->bounds().Intersects(shelf_bounds)) window_overlaps_launcher = true; } if (has_maximized_window) return WORKSPACE_WINDOW_STATE_MAXIMIZED; return window_overlaps_launcher ? WORKSPACE_WINDOW_STATE_WINDOW_OVERLAPS_SHELF : WORKSPACE_WINDOW_STATE_DEFAULT; } void WorkspaceManager::SetShelf(ShelfLayoutManager* shelf) { shelf_ = shelf; } void WorkspaceManager::SetActiveWorkspaceByWindow(Window* window) { Workspace* workspace = FindBy(window); if (!workspace) return; if (workspace != active_workspace_) { // A window is being made active. In the following cases we reparent to // the active desktop: // . The window is not tracked by workspace code. This is used for tab // dragging. Since tab dragging needs to happen in the active workspace we // have to reparent the window (otherwise the window you dragged the tab // out of would disappear since the workspace changed). Since this case is // only transiently used (property reset on input release) we don't worry // about window state. In fact we can't consider window state here as we // have to allow dragging of a maximized window to work in this case. // . The window persists across all workspaces. For example, the task // manager is in the desktop worskpace and the current workspace is // maximized. If we swapped to the desktop you would lose context. Instead // we reparent. The exception to this is if the window is maximized (it // needs its own workspace then) or we're in the process of maximizing. If // we're in the process of maximizing the window needs its own workspace. 
if (!GetTrackedByWorkspace(window) || (GetPersistsAcrossAllWorkspaces(window) && !IsMaximized(window) && !(wm::IsWindowMinimized(window) && WillRestoreMaximized(window)))) { ReparentWindow(window, active_workspace_->window(), NULL); } else { SetActiveWorkspace(workspace, SWITCH_WINDOW_MADE_ACTIVE, base::TimeDelta()); } } if (workspace->is_maximized() && IsMaximized(window)) { // Clicking on the maximized window in a maximized workspace. Force all // other windows to drop to the desktop. MoveChildrenToDesktop(workspace->window(), NULL); } } Window* WorkspaceManager::GetActiveWorkspaceWindow() { return active_workspace_->window(); } Window* WorkspaceManager::GetParentForNewWindow(Window* window) { // Try to put windows with transient parents in the same workspace as their // transient parent. if (window->transient_parent() && !IsMaximized(window)) { Workspace* workspace = FindBy(window->transient_parent()); if (workspace) return workspace->window(); // Fall through to normal logic. } if (!GetTrackedByWorkspace(window)) return active_workspace_->window(); if (IsMaximized(window)) { // Wait for the window to be made active before showing the workspace. 
Workspace* workspace = CreateWorkspace(true); pending_workspaces_.insert(workspace); return workspace->window(); } if (!GetTrackedByWorkspace(window) || GetPersistsAcrossAllWorkspaces(window)) return active_workspace_->window(); return desktop_workspace()->window(); } bool WorkspaceManager::CanStartCyclingThroughWorkspaces() const { return workspace_cycler_.get() && workspaces_.size() > 1u; } void WorkspaceManager::InitWorkspaceCyclerAnimatorWithCurrentState( WorkspaceCyclerAnimator* animator) { if (animator) animator->Init(workspaces_, active_workspace_); } void WorkspaceManager::SetActiveWorkspaceFromCycler(Workspace* workspace) { if (!workspace || workspace == active_workspace_) return; SetActiveWorkspace(workspace, SWITCH_WORKSPACE_CYCLER, base::TimeDelta()); // Activate the topmost window in the newly activated workspace as // SetActiveWorkspace() does not do so. aura::Window* topmost_activatable_window = workspace->GetTopmostActivatableWindow(); if (topmost_activatable_window) wm::ActivateWindow(topmost_activatable_window); } void WorkspaceManager::DoInitialAnimation() { if (active_workspace_->is_maximized()) { RootWindowController* root_controller = GetRootWindowController( contents_window_->GetRootWindow()); if (root_controller) { aura::Window* background = root_controller->GetContainer( kShellWindowId_DesktopBackgroundContainer); background->Show(); ShowOrHideDesktopBackground(background, SWITCH_INITIAL, base::TimeDelta(), false); } } ShowWorkspace(active_workspace_, active_workspace_, SWITCH_INITIAL); } void WorkspaceManager::OnAppTerminating() { app_terminating_ = true; } void WorkspaceManager::UpdateShelfVisibility() { if (shelf_) shelf_->UpdateVisibilityState(); } Workspace* WorkspaceManager::FindBy(Window* window) const { while (window) { Workspace* workspace = window->GetProperty(kWorkspaceKey); if (workspace) return workspace; window = window->parent(); } return NULL; } void WorkspaceManager::SetActiveWorkspace(Workspace* workspace, SwitchReason 
reason, base::TimeDelta duration) { DCHECK(workspace); if (active_workspace_ == workspace) return; // It is possible for a user to use accelerator keys to restore windows etc // while the user is cycling through workspaces. if (workspace_cycler_) workspace_cycler_->AbortCycling(); pending_workspaces_.erase(workspace); // Adjust the z-order. No need to adjust the z-order for the desktop since // it always stays at the bottom. if (workspace != desktop_workspace() && FindWorkspace(workspace) == workspaces_.end()) { contents_window_->StackChildAbove(workspace->window(), workspaces_.back()->window()); workspaces_.push_back(workspace); } Workspace* last_active = active_workspace_; active_workspace_ = workspace; // The display work-area may have changed while |workspace| was not the active // workspace. Give it a chance to adjust its state for the new work-area. active_workspace_->workspace_layout_manager()-> OnDisplayWorkAreaInsetsChanged(); const bool is_unminimizing_maximized_window = unminimizing_workspace_ && unminimizing_workspace_ == active_workspace_ && active_workspace_->is_maximized(); if (is_unminimizing_maximized_window) { // If we're unminimizing a window it needs to be on the top, otherwise you // won't see the animation. contents_window_->StackChildAtTop(active_workspace_->window()); } else if (active_workspace_->is_maximized() && last_active->is_maximized() && reason != SWITCH_MAXIMIZED_FROM_MAXIMIZED_WORKSPACE) { // When switching between maximized windows we need the last active // workspace on top of the new, otherwise the animations won't look // right. Since only one workspace is visible at a time stacking order of // the workspace windows ultimately doesn't matter. contents_window_->StackChildAtTop(last_active->window()); } UpdateShelfVisibility(); // NOTE: duration supplied to this method is only used for desktop background. 
HideWorkspace(last_active, reason, is_unminimizing_maximized_window); ShowWorkspace(workspace, last_active, reason); RootWindowController* root_controller = GetRootWindowController( contents_window_->GetRootWindow()); if (root_controller) { aura::Window* background = root_controller->GetContainer( kShellWindowId_DesktopBackgroundContainer); if (last_active == desktop_workspace()) { ShowOrHideDesktopBackground(background, reason, duration, false); } else if (active_workspace_ == desktop_workspace() && !app_terminating_) { ShowOrHideDesktopBackground(background, reason, duration, true); } } // Showing or hiding a workspace may change the "solo window" status of // a window, requiring the header to be updated. FramePainter::UpdateSoloWindowHeader(contents_window_->GetRootWindow()); } WorkspaceManager::Workspaces::iterator WorkspaceManager::FindWorkspace(Workspace* workspace) { return std::find(workspaces_.begin(), workspaces_.end(), workspace); } Workspace* WorkspaceManager::CreateWorkspace(bool maximized) { return new Workspace(this, contents_window_, maximized); } void WorkspaceManager::MoveWorkspaceToPendingOrDelete( Workspace* workspace, Window* stack_beneath, SwitchReason reason) { // We're all ready moving windows. if (in_move_) return; DCHECK_NE(desktop_workspace(), workspace); // The user may have closed or minimized a window via accelerator keys while // cycling through workspaces. 
if (workspace_cycler_) workspace_cycler_->AbortCycling(); if (workspace == active_workspace_) SelectNextWorkspace(reason); base::AutoReset<bool> setter(&in_move_, true); MoveChildrenToDesktop(workspace->window(), stack_beneath); { Workspaces::iterator workspace_i(FindWorkspace(workspace)); if (workspace_i != workspaces_.end()) workspaces_.erase(workspace_i); } if (workspace->window()->children().empty()) { if (workspace == unminimizing_workspace_) unminimizing_workspace_ = NULL; pending_workspaces_.erase(workspace); ScheduleDelete(workspace); } else { pending_workspaces_.insert(workspace); } } void WorkspaceManager::MoveChildrenToDesktop(aura::Window* window, aura::Window* stack_beneath) { // Build the list of windows to move. Exclude maximized/fullscreen and windows // with transient parents. Window::Windows to_move; for (size_t i = 0; i < window->children().size(); ++i) { Window* child = window->children()[i]; if (!child->transient_parent() && !IsMaximized(child) && !WillRestoreMaximized(child)) { to_move.push_back(child); } } // Move the windows, but make sure the window is still a child of |window| // (moving may cascade and cause other windows to move). 
for (size_t i = 0; i < to_move.size(); ++i) { if (std::find(window->children().begin(), window->children().end(), to_move[i]) != window->children().end()) { ReparentWindow(to_move[i], desktop_workspace()->window(), stack_beneath); } } } void WorkspaceManager::SelectNextWorkspace(SwitchReason reason) { DCHECK_NE(active_workspace_, desktop_workspace()); Workspaces::const_iterator workspace_i(FindWorkspace(active_workspace_)); Workspaces::const_iterator next_workspace_i(workspace_i + 1); if (next_workspace_i != workspaces_.end()) SetActiveWorkspace(*next_workspace_i, reason, base::TimeDelta()); else SetActiveWorkspace(*(workspace_i - 1), reason, base::TimeDelta()); } void WorkspaceManager::ScheduleDelete(Workspace* workspace) { to_delete_.insert(workspace); delete_timer_.Stop(); delete_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(1), this, &WorkspaceManager::ProcessDeletion); } void WorkspaceManager::SetUnminimizingWorkspace(Workspace* workspace) { // The normal sequence of unminimizing a window is: Show() the window, which // triggers changing the kShowStateKey to NORMAL and lastly the window is made // active. This means at the time the window is unminimized we don't know if // the workspace it is in is going to become active. To track this // |unminimizing_workspace_| is set at the time we unminimize and a task is // schedule to reset it. This way when we get the activate we know we're in // the process unminimizing and can do the right animation. 
unminimizing_workspace_ = workspace; if (unminimizing_workspace_) { base::MessageLoop::current()->PostTask( FROM_HERE, base::Bind(&WorkspaceManager::SetUnminimizingWorkspace, clear_unminimizing_workspace_factory_.GetWeakPtr(), static_cast<Workspace*>(NULL))); } } void WorkspaceManager::FadeDesktop(aura::Window* window, base::TimeDelta duration) { if (views::corewm::WindowAnimationsDisabled(NULL) || ui::ScopedAnimationDurationScaleMode::duration_scale_mode() == ui::ScopedAnimationDurationScaleMode::ZERO_DURATION) return; base::AutoReset<bool> reseter(&creating_fade_, true); DesktopBackgroundFadeController::Direction direction; aura::Window* parent = NULL; aura::Window* stack_above = NULL; if (active_workspace_ == desktop_workspace()) { direction = DesktopBackgroundFadeController::FADE_IN; parent = desktop_workspace()->window(); stack_above = window; } else { direction = DesktopBackgroundFadeController::FADE_OUT; parent = contents_window_; stack_above = desktop_workspace()->window(); DCHECK_EQ(kCrossFadeSwitchTimeMS, static_cast<int>(duration.InMilliseconds())); duration = base::TimeDelta::FromMilliseconds(kCrossFadeSwitchTimeMS); } desktop_fade_controller_.reset( new DesktopBackgroundFadeController( parent, stack_above, duration, direction)); } void WorkspaceManager::ShowOrHideDesktopBackground( aura::Window* window, SwitchReason reason, base::TimeDelta duration, bool show) const { WorkspaceAnimationDetails details; details.direction = show ? WORKSPACE_ANIMATE_UP : WORKSPACE_ANIMATE_DOWN; details.duration = duration; switch (reason) { case SWITCH_WORKSPACE_CYCLER: // The workspace cycler has already animated the desktop background's // opacity. Do not do any further animation. break; case SWITCH_MAXIMIZED_FROM_MAXIMIZED_WORKSPACE: case SWITCH_MAXIMIZED_OR_RESTORED: // FadeDesktop() fades the desktop background by animating the opacity of // a black window immediately above the desktop background. 
Set the // workspace as animated to delay hiding the desktop background by // |duration|. details.animate = true; break; case SWITCH_INITIAL: details.animate = true; details.animate_scale = true; details.pause_time_ms = kInitialPauseTimeMS; break; default: details.animate = true; details.animate_scale = true; break; } if (show) ash::internal::ShowWorkspace(window, details); else ash::internal::HideWorkspace(window, details); } void WorkspaceManager::ShowWorkspace( Workspace* workspace, Workspace* last_active, SwitchReason reason) const { WorkspaceAnimationDetails details; details.direction = (last_active == desktop_workspace() || reason == SWITCH_INITIAL) ? WORKSPACE_ANIMATE_DOWN : WORKSPACE_ANIMATE_UP; switch (reason) { case SWITCH_WINDOW_MADE_ACTIVE: case SWITCH_TRACKED_BY_WORKSPACE_CHANGED: case SWITCH_WINDOW_REMOVED: case SWITCH_VISIBILITY_CHANGED: case SWITCH_MINIMIZED: details.animate = details.animate_scale = true; details.animate_opacity = last_active == desktop_workspace(); break; case SWITCH_INITIAL: details.animate = details.animate_opacity = details.animate_scale = true; details.pause_time_ms = kInitialPauseTimeMS; break; // Remaining cases require no animation. default: break; } ash::internal::ShowWorkspace(workspace->window(), details); } void WorkspaceManager::HideWorkspace( Workspace* workspace, SwitchReason reason, bool is_unminimizing_maximized_window) const { WorkspaceAnimationDetails details; details.direction = active_workspace_ == desktop_workspace() ? WORKSPACE_ANIMATE_UP : WORKSPACE_ANIMATE_DOWN; switch (reason) { case SWITCH_WINDOW_MADE_ACTIVE: case SWITCH_TRACKED_BY_WORKSPACE_CHANGED: details.animate_opacity = ((active_workspace_ == desktop_workspace() || workspace != desktop_workspace()) && !is_unminimizing_maximized_window); details.animate_scale = true; details.animate = true; break; case SWITCH_VISIBILITY_CHANGED: // The window is most likely closing. 
Make the workspace visible for the // duration of the switch so that the close animation is visible. details.animate = true; details.animate_scale = true; break; case SWITCH_MAXIMIZED_FROM_MAXIMIZED_WORKSPACE: case SWITCH_MAXIMIZED_OR_RESTORED: if (active_workspace_->is_maximized()) { // Delay the hide until the animation is done. details.duration = base::TimeDelta::FromMilliseconds(kCrossFadeSwitchTimeMS); details.animate = true; } break; // Remaining cases require no animation. default: break; } ash::internal::HideWorkspace(workspace->window(), details); } void WorkspaceManager::ProcessDeletion() { std::set<Workspace*> to_delete; to_delete.swap(to_delete_); for (std::set<Workspace*>::iterator i = to_delete.begin(); i != to_delete.end(); ++i) { Workspace* workspace = *i; if (workspace->window()->layer()->children().empty()) { delete workspace->ReleaseWindow(); delete workspace; } else { to_delete_.insert(workspace); } } if (!to_delete_.empty()) { delete_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(1), this, &WorkspaceManager::ProcessDeletion); } } void WorkspaceManager::OnWindowAddedToWorkspace(Workspace* workspace, Window* child) { child->SetProperty(kWorkspaceKey, workspace); // Don't make changes to window parenting as the right parent was chosen // by way of GetParentForNewWindow() or we explicitly moved the window // to the workspace. 
if (workspace == active_workspace_) { UpdateShelfVisibility(); FramePainter::UpdateSoloWindowHeader(child->GetRootWindow()); } RearrangeVisibleWindowOnShow(child); } void WorkspaceManager::OnWillRemoveWindowFromWorkspace(Workspace* workspace, Window* child) { if (child->TargetVisibility()) RearrangeVisibleWindowOnHideOrRemove(child); child->ClearProperty(kWorkspaceKey); } void WorkspaceManager::OnWindowRemovedFromWorkspace(Workspace* workspace, Window* child) { if (workspace->ShouldMoveToPending()) MoveWorkspaceToPendingOrDelete(workspace, NULL, SWITCH_WINDOW_REMOVED); UpdateShelfVisibility(); } void WorkspaceManager::OnWorkspaceChildWindowVisibilityChanged( Workspace* workspace, Window* child) { if (workspace->ShouldMoveToPending()) { MoveWorkspaceToPendingOrDelete(workspace, NULL, SWITCH_VISIBILITY_CHANGED); } else { if (child->TargetVisibility()) RearrangeVisibleWindowOnShow(child); else RearrangeVisibleWindowOnHideOrRemove(child); if (workspace == active_workspace_) { UpdateShelfVisibility(); FramePainter::UpdateSoloWindowHeader(child->GetRootWindow()); } } } void WorkspaceManager::OnWorkspaceWindowChildBoundsChanged( Workspace* workspace, Window* child) { if (workspace == active_workspace_) UpdateShelfVisibility(); } void WorkspaceManager::OnWorkspaceWindowShowStateChanged( Workspace* workspace, Window* child, ui::WindowShowState last_show_state, ui::Layer* old_layer) { // |child| better still be in |workspace| else things have gone wrong. DCHECK_EQ(workspace, child->GetProperty(kWorkspaceKey)); if (wm::IsWindowMinimized(child)) { if (workspace->ShouldMoveToPending()) MoveWorkspaceToPendingOrDelete(workspace, NULL, SWITCH_MINIMIZED); DCHECK(!old_layer); } else { // Set of cases to deal with: // . More than one maximized window: move newly maximized window into // own workspace. // . One maximized window and not in a maximized workspace: move window // into own workspace. // . 
No maximized window and not in desktop: move to desktop and further // any existing windows are stacked beneath |child|. const bool is_active = wm::IsActiveWindow(child); Workspace* new_workspace = NULL; const int max_count = workspace->GetNumMaximizedWindows(); base::TimeDelta duration = old_layer && !IsMaximized(child) ? GetCrossFadeDuration(old_layer->bounds(), child->bounds()) : base::TimeDelta::FromMilliseconds(kCrossFadeSwitchTimeMS); if (max_count == 0) { if (workspace != desktop_workspace()) { { base::AutoReset<bool> setter(&in_move_, true); ReparentWindow(child, desktop_workspace()->window(), NULL); } DCHECK(!is_active || old_layer); new_workspace = desktop_workspace(); SetActiveWorkspace(new_workspace, SWITCH_MAXIMIZED_OR_RESTORED, duration); MoveWorkspaceToPendingOrDelete(workspace, child, SWITCH_MAXIMIZED_OR_RESTORED); if (FindWorkspace(workspace) == workspaces_.end()) workspace = NULL; } } else if ((max_count == 1 && workspace == desktop_workspace()) || max_count > 1) { new_workspace = CreateWorkspace(true); pending_workspaces_.insert(new_workspace); ReparentWindow(child, new_workspace->window(), NULL); } if (is_active && new_workspace) { // |old_layer| may be NULL if as part of processing // WorkspaceLayoutManager::OnWindowPropertyChanged() the window is made // active. if (old_layer) { SetActiveWorkspace(new_workspace, max_count >= 2 ? SWITCH_MAXIMIZED_FROM_MAXIMIZED_WORKSPACE : SWITCH_MAXIMIZED_OR_RESTORED, duration); CrossFadeWindowBetweenWorkspaces(new_workspace->window(), child, old_layer); if (workspace == desktop_workspace() || new_workspace == desktop_workspace()) { FadeDesktop(child, duration); } } else { SetActiveWorkspace(new_workspace, SWITCH_OTHER, base::TimeDelta()); } } else { if (last_show_state == ui::SHOW_STATE_MINIMIZED) SetUnminimizingWorkspace(new_workspace ? 
new_workspace : workspace); DCHECK(!old_layer); } } UpdateShelfVisibility(); } void WorkspaceManager::OnTrackedByWorkspaceChanged(Workspace* workspace, aura::Window* window) { Workspace* new_workspace = NULL; if (IsMaximized(window)) { if (workspace->is_maximized() && workspace->GetNumMaximizedWindows() == 1) { // If |window| is the only window in a maximized workspace then leave // it there. Additionally animate it back to the origin. ui::ScopedLayerAnimationSettings settings(window->layer()->GetAnimator()); // All bounds changes get routed through WorkspaceLayoutManager and since // the window is maximized WorkspaceLayoutManager is going to force a // value. In other words, it doesn't matter what we supply to SetBounds() // here. window->SetBounds(gfx::Rect()); return; } new_workspace = CreateWorkspace(true); pending_workspaces_.insert(new_workspace); } else if (workspace->is_maximized()) { new_workspace = desktop_workspace(); } else { return; } // If the window is active we need to make sure the destination Workspace // window is showing. Otherwise the window will be parented to a hidden window // and lose activation. const bool is_active = wm::IsActiveWindow(window); if (is_active) new_workspace->window()->Show(); ReparentWindow(window, new_workspace->window(), NULL); if (is_active) { SetActiveWorkspace(new_workspace, SWITCH_TRACKED_BY_WORKSPACE_CHANGED, base::TimeDelta()); } } } // namespace internal } // namespace ash
loopCM/chromium
ash/wm/workspace/workspace_manager.cc
C++
bsd-3-clause
31,279
module CASA module Receiver module Strategy class AdjInStore def self.factory options = false options ? self.new(options) : nil end attr_reader :options def initialize options @options = options if @options.has_key?('class') and @options.has_key?('require') require @options['require'] class_object = @options['class'].split('::').inject(Object){|o,c| o.const_get c} @handler = class_object.new @options['options'] elsif @options.has_key?('handler') @handler = @options['handler'] end end def create payload_hash, options = nil identity = payload_hash['identity'] current = get identity if current return false if payload_hash['attributes']['timestamp'] <= current['attributes']['timestamp'] delete identity end @handler.create payload_hash, options true end def get payload_identity, options = nil @handler.get payload_identity, options end def delete payload_identity, options = nil @handler.delete payload_identity, options end def reset! options = nil @handler.reset! options end end end end end
AppSharing/casa-receiver
lib/casa/receiver/strategy/adj_in_store.rb
Ruby
bsd-3-clause
1,347
// ============================================================================= // PROJECT CHRONO - http://projectchrono.org // // Copyright (c) 2014 projectchrono.org // All rights reserved. // // Use of this source code is governed by a BSD-style license that can be found // in the LICENSE file at the top level of the distribution and at // http://projectchrono.org/license-chrono.txt. // // ============================================================================= // Authors: Alessandro Tasora // ============================================================================= // // FEA advanced demo: // - loading an Abaqus tetrahedron mesh // - using it as a wheel with contacts to ground // // ============================================================================= #include "chrono/physics/ChSystemSMC.h" #include "chrono/physics/ChBodyEasy.h" #include "chrono/physics/ChLoaderUV.h" #include "chrono/physics/ChLoadContainer.h" #include "chrono/fea/ChMesh.h" #include "chrono/fea/ChMeshFileLoader.h" #include "chrono/fea/ChContactSurfaceMesh.h" #include "chrono/fea/ChContactSurfaceNodeCloud.h" #include "chrono/fea/ChVisualizationFEAmesh.h" #include "chrono/fea/ChLinkPointFrame.h" #include "chrono_pardisomkl/ChSolverPardisoMKL.h" #include "chrono_irrlicht/ChIrrApp.h" using namespace chrono; using namespace chrono::fea; using namespace chrono::irrlicht; using namespace irr; int main(int argc, char* argv[]) { GetLog() << "Copyright (c) 2017 projectchrono.org\nChrono version: " << CHRONO_VERSION << "\n\n"; // Global parameter for tire: double tire_rad = 0.8; double tire_vel_z0 = -3; ChVector<> tire_center(0, 0.02 + tire_rad, 0.5); ChMatrix33<> tire_alignment(Q_from_AngAxis(CH_C_PI, VECT_Y)); // create rotated 180° on y double tire_w0 = tire_vel_z0 / tire_rad; // Create a Chrono::Engine physical system ChSystemSMC my_system; // Create the Irrlicht visualization (open the Irrlicht device, // bind a simple user interface, etc. etc.) 
ChIrrApp application(&my_system, L"FEA contacts", core::dimension2d<u32>(1280, 720), VerticalDir::Y, false, true); // Easy shortcuts to add camera, lights, logo and sky in Irrlicht scene: application.AddLogo(); application.AddSkyBox(); application.AddTypicalLights(); application.AddCamera(core::vector3dfCH(ChVector<>(1, 1.4, -1.2)), core::vector3dfCH(ChVector<>(0, tire_rad, 0))); // application.SetContactsDrawMode(irr::tools::CONTACT_DISTANCES); application.AddLightWithShadow(core::vector3dfCH(ChVector<>(1.5, 5.5, -2.5)), core::vector3df(0, 0, 0), 3, 2.2, 7.2, 40, 512, video::SColorf((f32)0.8, (f32)0.8, (f32)1.0)); // // CREATE THE PHYSICAL SYSTEM // // Create the surface material, containing information // about friction etc. auto mysurfmaterial = chrono_types::make_shared<ChMaterialSurfaceSMC>(); mysurfmaterial->SetYoungModulus(10e4); mysurfmaterial->SetFriction(0.3f); mysurfmaterial->SetRestitution(0.2f); mysurfmaterial->SetAdhesion(0); auto mysurfmaterial2 = chrono_types::make_shared<ChMaterialSurfaceSMC>(); mysurfmaterial->SetYoungModulus(30e4); mysurfmaterial->SetFriction(0.3f); mysurfmaterial->SetRestitution(0.2f); mysurfmaterial->SetAdhesion(0); // RIGID BODIES // Create some rigid bodies, for instance a floor: auto mfloor = chrono_types::make_shared<ChBodyEasyBox>(2, 0.2, 6, 2700, true, true, mysurfmaterial); mfloor->SetBodyFixed(true); my_system.Add(mfloor); auto mtexture = chrono_types::make_shared<ChTexture>(); mtexture->SetTextureFilename(GetChronoDataFile("textures/concrete.jpg")); mfloor->AddAsset(mtexture); // Create a step if (false) { auto mfloor_step = chrono_types::make_shared<ChBodyEasyBox>(1, 0.2, 0.5, 2700, true, true, mysurfmaterial); mfloor_step->SetPos(ChVector<>(0, 0.1, -0.2)); mfloor_step->SetBodyFixed(true); my_system.Add(mfloor_step); } // Create some bent rectangular fixed slabs if (false) { for (int i = 0; i < 50; ++i) { auto mcube = chrono_types::make_shared<ChBodyEasyBox>(0.25, 0.2, 0.25, 2700, true, true, mysurfmaterial); 
ChQuaternion<> vrot; vrot.Q_from_AngAxis(ChRandom() * CH_C_2PI, VECT_Y); mcube->Move(ChCoordsys<>(VNULL, vrot)); vrot.Q_from_AngAxis((ChRandom() - 0.5) * 2 * CH_C_DEG_TO_RAD * 20, ChVector<>(ChRandom() - 0.5, 0, ChRandom() - 0.5).Normalize()); mcube->Move(ChCoordsys<>(VNULL, vrot)); mcube->SetPos(ChVector<>((ChRandom() - 0.5) * 1.8, ChRandom() * 0.1, -ChRandom() * 3.2 + 0.9)); mcube->SetBodyFixed(true); my_system.Add(mcube); auto mcubecol = chrono_types::make_shared<ChColorAsset>(); mcubecol->SetColor(ChColor(0.3f, 0.3f, 0.3f)); mcube->AddAsset(mcubecol); } } // Create some stones / obstacles on the ground if (true) { for (int i = 0; i < 150; ++i) { auto mcube = chrono_types::make_shared<ChBodyEasyBox>(0.18, 0.04, 0.18, 2700, true, true, mysurfmaterial2); ChQuaternion<> vrot; vrot.Q_from_AngAxis(ChRandom() * CH_C_2PI, VECT_Y); mcube->Move(ChCoordsys<>(VNULL, vrot)); mcube->SetPos(ChVector<>((ChRandom() - 0.5) * 1.4, ChRandom() * 0.2 + 0.05, -ChRandom() * 2.6 + 0.2)); my_system.Add(mcube); auto mcubecol = chrono_types::make_shared<ChColorAsset>(); mcubecol->SetColor(ChColor(0.3f, 0.3f, 0.3f)); mcube->AddAsset(mcubecol); } } // FINITE ELEMENT MESH // Create a mesh, that is a container for groups // of FEA elements and their referenced nodes. auto my_mesh = chrono_types::make_shared<ChMesh>(); // Create a material, that must be assigned to each solid element in the mesh, // and set its parameters auto mmaterial = chrono_types::make_shared<ChContinuumElastic>(); mmaterial->Set_E(0.016e9); // rubber 0.01e9, steel 200e9 mmaterial->Set_v(0.4); mmaterial->Set_RayleighDampingK(0.004); mmaterial->Set_density(1000); // Load an ABAQUS .INP tetrahedron mesh file from disk, defining a tetrahedron mesh. // Note that not all features of INP files are supported. Also, quadratic tetrahedrons are promoted to linear. // This is much easier than creating all nodes and elements via C++ programming. // Ex. 
you can generate these .INP files using Abaqus or exporting from the SolidWorks simulation tool. std::map<std::string, std::vector<std::shared_ptr<ChNodeFEAbase>>> node_sets; try { ChMeshFileLoader::FromAbaqusFile(my_mesh, GetChronoDataFile("models/tractor_wheel/tractor_wheel_coarse.INP").c_str(), mmaterial, node_sets, tire_center, tire_alignment); } catch (ChException myerr) { GetLog() << myerr.what(); return 0; } // Create the contact surface(s). // In this case it is a ChContactSurfaceNodeCloud, so just pass // all nodes to it. auto mcontactsurf = chrono_types::make_shared<ChContactSurfaceNodeCloud>(mysurfmaterial); my_mesh->AddContactSurface(mcontactsurf); mcontactsurf->AddAllNodes(); // Apply initial speed and angular speed for (unsigned int i = 0; i < my_mesh->GetNnodes(); ++i) { ChVector<> node_pos = std::dynamic_pointer_cast<ChNodeFEAxyz>(my_mesh->GetNode(i))->GetPos(); ChVector<> tang_vel = Vcross(ChVector<>(tire_w0, 0, 0), node_pos - tire_center); std::dynamic_pointer_cast<ChNodeFEAxyz>(my_mesh->GetNode(i)) ->SetPos_dt(ChVector<>(0, 0, tire_vel_z0) + tang_vel); } // Remember to add the mesh to the system! my_system.Add(my_mesh); // Add a rim auto mwheel_rim = chrono_types::make_shared<ChBody>(); mwheel_rim->SetMass(80); mwheel_rim->SetInertiaXX(ChVector<>(60, 60, 60)); mwheel_rim->SetPos(tire_center); mwheel_rim->SetRot(tire_alignment); mwheel_rim->SetPos_dt(ChVector<>(0, 0, tire_vel_z0)); mwheel_rim->SetWvel_par(ChVector<>(tire_w0, 0, 0)); application.GetSystem()->Add(mwheel_rim); auto mobjmesh = chrono_types::make_shared<ChObjShapeFile>(); mobjmesh->SetFilename(GetChronoDataFile("models/tractor_wheel/tractor_wheel_rim.obj")); mwheel_rim->AddAsset(mobjmesh); // Connect rim and tire using constraints. 
// the BC_RIMTIRE nodeset, in the Abaqus INP file, lists the nodes involved auto nodeset_sel = "BC_RIMTIRE"; for (auto i = 0; i < node_sets.at(nodeset_sel).size(); ++i) { auto mlink = chrono_types::make_shared<ChLinkPointFrame>(); mlink->Initialize(std::dynamic_pointer_cast<ChNodeFEAxyz>(node_sets[nodeset_sel][i]), mwheel_rim); my_system.Add(mlink); } // Create a mesh surface, for applying loads: auto mmeshsurf = chrono_types::make_shared<ChMeshSurface>(); my_mesh->AddMeshSurface(mmeshsurf); // Nodes of the load surface are those of the nodeset with label BC_SURF: nodeset_sel = "BC_SURF"; mmeshsurf->AddFacesFromNodeSet(node_sets[nodeset_sel]); // Apply load to all surfaces in the mesh surface auto mloadcontainer = chrono_types::make_shared<ChLoadContainer>(); my_system.Add(mloadcontainer); for (auto i = 0; i < mmeshsurf->GetFacesList().size(); ++i) { auto aface = std::shared_ptr<ChLoadableUV>(mmeshsurf->GetFacesList()[i]); auto faceload = chrono_types::make_shared<ChLoad<ChLoaderPressure>>(aface); faceload->loader.SetPressure(10000); // low pressure... the tire has no ply! mloadcontainer->Add(faceload); } // // Optional... visualization // // ==Asset== attach a visualization of the FEM mesh. // This will automatically update a triangle mesh (a ChTriangleMeshShape // asset that is internally managed) by setting proper // coordinates and vertex colors as in the FEM elements. // Such triangle mesh can be rendered by Irrlicht or POVray or whatever // postprocessor that can handle a colored ChTriangleMeshShape). // Do not forget AddAsset() at the end! 
auto mvisualizemesh = chrono_types::make_shared<ChVisualizationFEAmesh>(*(my_mesh.get())); mvisualizemesh->SetFEMdataType(ChVisualizationFEAmesh::E_PLOT_NODE_SPEED_NORM); mvisualizemesh->SetColorscaleMinMax(0.0, 10); mvisualizemesh->SetSmoothFaces(true); my_mesh->AddAsset(mvisualizemesh); /* auto mvisualizemeshB = chrono_types::make_shared<ChVisualizationFEAmesh>(*(my_mesh.get())); mvisualizemeshB->SetFEMdataType(ChVisualizationFEAmesh::E_PLOT_SURFACE); mvisualizemeshB->SetWireframe(true); my_mesh->AddAsset(mvisualizemeshB); */ /* auto mvisualizemeshC = chrono_types::make_shared<ChVisualizationFEAmesh>(*(my_mesh.get())); mvisualizemeshC->SetFEMglyphType(ChVisualizationFEAmesh::E_GLYPH_NODE_DOT_POS); mvisualizemeshC->SetFEMdataType(ChVisualizationFEAmesh::E_PLOT_NONE); mvisualizemeshC->SetSymbolsThickness(0.006); my_mesh->AddAsset(mvisualizemeshC); */ // ==IMPORTANT!== Use this function for adding a ChIrrNodeAsset to all items // in the system. These ChIrrNodeAsset assets are 'proxies' to the Irrlicht meshes. // If you need a finer control on which item really needs a visualization proxy in // Irrlicht, just use application.AssetBind(myitem); on a per-item basis. application.AssetBindAll(); // ==IMPORTANT!== Use this function for 'converting' into Irrlicht meshes the assets // that you added to the bodies into 3D shapes, they can be visualized by Irrlicht! 
application.AssetUpdateAll(); // Use shadows in realtime view application.AddShadowAll(); // // SIMULATION LOOP // // Change solver to Pardiso from Chrono::PardisoMKL auto mkl_solver = chrono_types::make_shared<ChSolverPardisoMKL>(); mkl_solver->LockSparsityPattern(true); my_system.SetSolver(mkl_solver); my_system.Update(); // Change type of integrator: my_system.SetTimestepperType(ChTimestepper::Type::EULER_IMPLICIT_LINEARIZED); // fast, less precise // my_system.SetTimestepperType(chrono::ChTimestepper::Type::HHT); // precise,slower, might iterate each step // if later you want to change integrator settings: if (auto mystepper = std::dynamic_pointer_cast<ChTimestepperHHT>(my_system.GetTimestepper())) { mystepper->SetAlpha(-0.2); mystepper->SetMaxiters(2); mystepper->SetAbsTolerances(1e-6); } application.SetTimestep(0.001); while (application.GetDevice()->run()) { application.BeginScene(); application.DrawAll(); application.DoStep(); application.EndScene(); } return 0; }
projectchrono/chrono
src/demos/fea/demo_FEA_abaqus_wheel.cpp
C++
bsd-3-clause
12,980
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package com.google.protobuf.util; import com.google.common.io.BaseEncoding; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonIOException; import com.google.gson.JsonNull; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.JsonPrimitive; import com.google.gson.stream.JsonReader; import com.google.protobuf.Any; import com.google.protobuf.BoolValue; import com.google.protobuf.ByteString; import com.google.protobuf.BytesValue; import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.EnumDescriptor; import com.google.protobuf.Descriptors.EnumValueDescriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.Descriptors.FileDescriptor; import com.google.protobuf.Descriptors.OneofDescriptor; import com.google.protobuf.DoubleValue; import com.google.protobuf.Duration; import com.google.protobuf.DynamicMessage; import com.google.protobuf.FieldMask; import com.google.protobuf.FloatValue; import com.google.protobuf.Int32Value; import com.google.protobuf.Int64Value; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.ListValue; import com.google.protobuf.Message; import com.google.protobuf.MessageOrBuilder; import com.google.protobuf.NullValue; import com.google.protobuf.StringValue; import com.google.protobuf.Struct; import com.google.protobuf.Timestamp; import com.google.protobuf.UInt32Value; import com.google.protobuf.UInt64Value; import com.google.protobuf.Value; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.math.BigDecimal; import java.math.BigInteger; import java.text.ParseException; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import 
java.util.logging.Logger; /** * Utility classes to convert protobuf messages to/from JSON format. The JSON * format follows Proto3 JSON specification and only proto3 features are * supported. Proto2 only features (e.g., extensions and unknown fields) will * be discarded in the conversion. That is, when converting proto2 messages * to JSON format, extensions and unknown fields will be treated as if they * do not exist. This applies to proto2 messages embedded in proto3 messages * as well. */ public class JsonFormat { private static final Logger logger = Logger.getLogger(JsonFormat.class.getName()); private JsonFormat() {} /** * Creates a {@link Printer} with default configurations. */ public static Printer printer() { return new Printer(TypeRegistry.getEmptyTypeRegistry(), false, false, false); } /** * A Printer converts protobuf message to JSON format. */ public static class Printer { private final TypeRegistry registry; private final boolean includingDefaultValueFields; private final boolean preservingProtoFieldNames; private final boolean omittingInsignificantWhitespace; private Printer( TypeRegistry registry, boolean includingDefaultValueFields, boolean preservingProtoFieldNames, boolean omittingInsignificantWhitespace) { this.registry = registry; this.includingDefaultValueFields = includingDefaultValueFields; this.preservingProtoFieldNames = preservingProtoFieldNames; this.omittingInsignificantWhitespace = omittingInsignificantWhitespace; } /** * Creates a new {@link Printer} using the given registry. The new Printer * clones all other configurations from the current {@link Printer}. * * @throws IllegalArgumentException if a registry is already set. 
*/ public Printer usingTypeRegistry(TypeRegistry registry) { if (this.registry != TypeRegistry.getEmptyTypeRegistry()) { throw new IllegalArgumentException("Only one registry is allowed."); } return new Printer( registry, includingDefaultValueFields, preservingProtoFieldNames, omittingInsignificantWhitespace); } /** * Creates a new {@link Printer} that will also print fields set to their * defaults. Empty repeated fields and map fields will be printed as well. * The new Printer clones all other configurations from the current * {@link Printer}. */ public Printer includingDefaultValueFields() { return new Printer( registry, true, preservingProtoFieldNames, omittingInsignificantWhitespace); } /** * Creates a new {@link Printer} that is configured to use the original proto * field names as defined in the .proto file rather than converting them to * lowerCamelCase. The new Printer clones all other configurations from the * current {@link Printer}. */ public Printer preservingProtoFieldNames() { return new Printer( registry, includingDefaultValueFields, true, omittingInsignificantWhitespace); } /** * Create a new {@link Printer} that will omit all insignificant whitespace * in the JSON output. This new Printer clones all other configurations from the * current Printer. Insignificant whitespace is defined by the JSON spec as whitespace * that appear between JSON structural elements: * <pre> * ws = *( * %x20 / ; Space * %x09 / ; Horizontal tab * %x0A / ; Line feed or New line * %x0D ) ; Carriage return * </pre> * See <a href="https://tools.ietf.org/html/rfc7159">https://tools.ietf.org/html/rfc7159</a> * current {@link Printer}. */ public Printer omittingInsignificantWhitespace() { return new Printer(registry, includingDefaultValueFields, preservingProtoFieldNames, true); } /** * Converts a protobuf message to JSON format. * * @throws InvalidProtocolBufferException if the message contains Any types * that can't be resolved. 
* @throws IOException if writing to the output fails. */ public void appendTo(MessageOrBuilder message, Appendable output) throws IOException { // TODO(xiaofeng): Investigate the allocation overhead and optimize for // mobile. new PrinterImpl( registry, includingDefaultValueFields, preservingProtoFieldNames, output, omittingInsignificantWhitespace) .print(message); } /** * Converts a protobuf message to JSON format. Throws exceptions if there * are unknown Any types in the message. */ public String print(MessageOrBuilder message) throws InvalidProtocolBufferException { try { StringBuilder builder = new StringBuilder(); appendTo(message, builder); return builder.toString(); } catch (InvalidProtocolBufferException e) { throw e; } catch (IOException e) { // Unexpected IOException. throw new IllegalStateException(e); } } } /** * Creates a {@link Parser} with default configuration. */ public static Parser parser() { return new Parser(TypeRegistry.getEmptyTypeRegistry(), false, Parser.DEFAULT_RECURSION_LIMIT); } /** * A Parser parses JSON to protobuf message. */ public static class Parser { private final TypeRegistry registry; private final boolean ignoringUnknownFields; private final int recursionLimit; // The default parsing recursion limit is aligned with the proto binary parser. private static final int DEFAULT_RECURSION_LIMIT = 100; private Parser(TypeRegistry registry, boolean ignoreUnknownFields, int recursionLimit) { this.registry = registry; this.ignoringUnknownFields = ignoreUnknownFields; this.recursionLimit = recursionLimit; } /** * Creates a new {@link Parser} using the given registry. The new Parser * clones all other configurations from this Parser. * * @throws IllegalArgumentException if a registry is already set. 
*/ public Parser usingTypeRegistry(TypeRegistry registry) { if (this.registry != TypeRegistry.getEmptyTypeRegistry()) { throw new IllegalArgumentException("Only one registry is allowed."); } return new Parser(registry, ignoringUnknownFields, recursionLimit); } /** * Creates a new {@link Parser} configured to not throw an exception when an unknown field is * encountered. The new Parser clones all other configurations from this Parser. */ public Parser ignoringUnknownFields() { return new Parser(this.registry, true, recursionLimit); } /** * Parses from JSON into a protobuf message. * * @throws InvalidProtocolBufferException if the input is not valid JSON * format or there are unknown fields in the input. */ public void merge(String json, Message.Builder builder) throws InvalidProtocolBufferException { // TODO(xiaofeng): Investigate the allocation overhead and optimize for // mobile. new ParserImpl(registry, ignoringUnknownFields, recursionLimit).merge(json, builder); } /** * Parses from JSON into a protobuf message. * * @throws InvalidProtocolBufferException if the input is not valid JSON * format or there are unknown fields in the input. * @throws IOException if reading from the input throws. */ public void merge(Reader json, Message.Builder builder) throws IOException { // TODO(xiaofeng): Investigate the allocation overhead and optimize for // mobile. new ParserImpl(registry, ignoringUnknownFields, recursionLimit).merge(json, builder); } // For testing only. Parser usingRecursionLimit(int recursionLimit) { return new Parser(registry, ignoringUnknownFields, recursionLimit); } } /** * A TypeRegistry is used to resolve Any messages in the JSON conversion. * You must provide a TypeRegistry containing all message types used in * Any message fields, or the JSON conversion will fail because data * in Any message fields is unrecognizable. You don't need to supply a * TypeRegistry if you don't use Any message fields. 
*/ public static class TypeRegistry { private static class EmptyTypeRegistryHolder { private static final TypeRegistry EMPTY = new TypeRegistry(Collections.<String, Descriptor>emptyMap()); } public static TypeRegistry getEmptyTypeRegistry() { return EmptyTypeRegistryHolder.EMPTY; } public static Builder newBuilder() { return new Builder(); } /** * Find a type by its full name. Returns null if it cannot be found in * this {@link TypeRegistry}. */ public Descriptor find(String name) { return types.get(name); } private final Map<String, Descriptor> types; private TypeRegistry(Map<String, Descriptor> types) { this.types = types; } /** * A Builder is used to build {@link TypeRegistry}. */ public static class Builder { private Builder() {} /** * Adds a message type and all types defined in the same .proto file as * well as all transitively imported .proto files to this {@link Builder}. */ public Builder add(Descriptor messageType) { if (types == null) { throw new IllegalStateException("A TypeRegistry.Builer can only be used once."); } addFile(messageType.getFile()); return this; } /** * Adds message types and all types defined in the same .proto file as * well as all transitively imported .proto files to this {@link Builder}. */ public Builder add(Iterable<Descriptor> messageTypes) { if (types == null) { throw new IllegalStateException("A TypeRegistry.Builer can only be used once."); } for (Descriptor type : messageTypes) { addFile(type.getFile()); } return this; } /** * Builds a {@link TypeRegistry}. This method can only be called once for * one Builder. */ public TypeRegistry build() { TypeRegistry result = new TypeRegistry(types); // Make sure the built {@link TypeRegistry} is immutable. types = null; return result; } private void addFile(FileDescriptor file) { // Skip the file if it's already added. 
if (!files.add(file.getFullName())) { return; } for (FileDescriptor dependency : file.getDependencies()) { addFile(dependency); } for (Descriptor message : file.getMessageTypes()) { addMessage(message); } } private void addMessage(Descriptor message) { for (Descriptor nestedType : message.getNestedTypes()) { addMessage(nestedType); } if (types.containsKey(message.getFullName())) { logger.warning("Type " + message.getFullName() + " is added multiple times."); return; } types.put(message.getFullName(), message); } private final Set<String> files = new HashSet<String>(); private Map<String, Descriptor> types = new HashMap<String, Descriptor>(); } } /** * An interface for json formatting that can be used in * combination with the omittingInsignificantWhitespace() method */ interface TextGenerator { void indent(); void outdent(); void print(final CharSequence text) throws IOException; } /** * Format the json without indentation */ private static final class CompactTextGenerator implements TextGenerator { private final Appendable output; private CompactTextGenerator(final Appendable output) { this.output = output; } /** * ignored by compact printer */ public void indent() {} /** * ignored by compact printer */ public void outdent() {} /** * Print text to the output stream. */ public void print(final CharSequence text) throws IOException { output.append(text); } } /** * A TextGenerator adds indentation when writing formatted text. */ private static final class PrettyTextGenerator implements TextGenerator { private final Appendable output; private final StringBuilder indent = new StringBuilder(); private boolean atStartOfLine = true; private PrettyTextGenerator(final Appendable output) { this.output = output; } /** * Indent text by two spaces. After calling Indent(), two spaces will be * inserted at the beginning of each line of text. Indent() may be called * multiple times to produce deeper indents. 
*/ public void indent() { indent.append(" "); } /** * Reduces the current indent level by two spaces, or crashes if the indent * level is zero. */ public void outdent() { final int length = indent.length(); if (length < 2) { throw new IllegalArgumentException(" Outdent() without matching Indent()."); } indent.delete(length - 2, length); } /** * Print text to the output stream. */ public void print(final CharSequence text) throws IOException { final int size = text.length(); int pos = 0; for (int i = 0; i < size; i++) { if (text.charAt(i) == '\n') { write(text.subSequence(pos, i + 1)); pos = i + 1; atStartOfLine = true; } } write(text.subSequence(pos, size)); } private void write(final CharSequence data) throws IOException { if (data.length() == 0) { return; } if (atStartOfLine) { atStartOfLine = false; output.append(indent); } output.append(data); } } /** * A Printer converts protobuf messages to JSON format. */ private static final class PrinterImpl { private final TypeRegistry registry; private final boolean includingDefaultValueFields; private final boolean preservingProtoFieldNames; private final TextGenerator generator; // We use Gson to help handle string escapes. 
private final Gson gson; private final CharSequence blankOrSpace; private final CharSequence blankOrNewLine; private static class GsonHolder { private static final Gson DEFAULT_GSON = new GsonBuilder().disableHtmlEscaping().create(); } PrinterImpl( TypeRegistry registry, boolean includingDefaultValueFields, boolean preservingProtoFieldNames, Appendable jsonOutput, boolean omittingInsignificantWhitespace) { this.registry = registry; this.includingDefaultValueFields = includingDefaultValueFields; this.preservingProtoFieldNames = preservingProtoFieldNames; this.gson = GsonHolder.DEFAULT_GSON; // json format related properties, determined by printerType if (omittingInsignificantWhitespace) { this.generator = new CompactTextGenerator(jsonOutput); this.blankOrSpace = ""; this.blankOrNewLine = ""; } else { this.generator = new PrettyTextGenerator(jsonOutput); this.blankOrSpace = " "; this.blankOrNewLine = "\n"; } } void print(MessageOrBuilder message) throws IOException { WellKnownTypePrinter specialPrinter = wellKnownTypePrinters.get(message.getDescriptorForType().getFullName()); if (specialPrinter != null) { specialPrinter.print(this, message); return; } print(message, null); } private interface WellKnownTypePrinter { void print(PrinterImpl printer, MessageOrBuilder message) throws IOException; } private static final Map<String, WellKnownTypePrinter> wellKnownTypePrinters = buildWellKnownTypePrinters(); private static Map<String, WellKnownTypePrinter> buildWellKnownTypePrinters() { Map<String, WellKnownTypePrinter> printers = new HashMap<String, WellKnownTypePrinter>(); // Special-case Any. printers.put( Any.getDescriptor().getFullName(), new WellKnownTypePrinter() { @Override public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException { printer.printAny(message); } }); // Special-case wrapper types. 
// A single printer instance is shared by all nine wrapper types; each is
// printed as its bare "value" field rather than as a JSON object.
WellKnownTypePrinter wrappersPrinter =
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printWrapper(message);
      }
    };
printers.put(BoolValue.getDescriptor().getFullName(), wrappersPrinter);
printers.put(Int32Value.getDescriptor().getFullName(), wrappersPrinter);
printers.put(UInt32Value.getDescriptor().getFullName(), wrappersPrinter);
printers.put(Int64Value.getDescriptor().getFullName(), wrappersPrinter);
printers.put(UInt64Value.getDescriptor().getFullName(), wrappersPrinter);
printers.put(StringValue.getDescriptor().getFullName(), wrappersPrinter);
printers.put(BytesValue.getDescriptor().getFullName(), wrappersPrinter);
printers.put(FloatValue.getDescriptor().getFullName(), wrappersPrinter);
printers.put(DoubleValue.getDescriptor().getFullName(), wrappersPrinter);
// Special-case Timestamp.
printers.put(
    Timestamp.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printTimestamp(message);
      }
    });
// Special-case Duration.
printers.put(
    Duration.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printDuration(message);
      }
    });
// Special-case FieldMask.
printers.put(
    FieldMask.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printFieldMask(message);
      }
    });
// Special-case Struct.
printers.put(
    Struct.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printStruct(message);
      }
    });
// Special-case Value.
printers.put(
    Value.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printValue(message);
      }
    });
// Special-case ListValue.
printers.put(
    ListValue.getDescriptor().getFullName(),
    new WellKnownTypePrinter() {
      @Override
      public void print(PrinterImpl printer, MessageOrBuilder message) throws IOException {
        printer.printListValue(message);
      }
    });
return printers;
}

/** Prints google.protobuf.Any */
private void printAny(MessageOrBuilder message) throws IOException {
  if (Any.getDefaultInstance().equals(message)) {
    // A default (empty) Any is rendered as an empty JSON object.
    generator.print("{}");
    return;
  }
  Descriptor descriptor = message.getDescriptorForType();
  FieldDescriptor typeUrlField = descriptor.findFieldByName("type_url");
  FieldDescriptor valueField = descriptor.findFieldByName("value");
  // Validates type of the message. Note that we can't just cast the message
  // to com.google.protobuf.Any because it might be a DynamicMessage.
  if (typeUrlField == null
      || valueField == null
      || typeUrlField.getType() != FieldDescriptor.Type.STRING
      || valueField.getType() != FieldDescriptor.Type.BYTES) {
    throw new InvalidProtocolBufferException("Invalid Any type.");
  }
  String typeUrl = (String) message.getField(typeUrlField);
  String typeName = getTypeName(typeUrl);
  // The payload type must be resolvable through the registry supplied at
  // construction time; unknown types are an error, not silently skipped.
  Descriptor type = registry.find(typeName);
  if (type == null) {
    throw new InvalidProtocolBufferException("Cannot find type for url: " + typeUrl);
  }
  ByteString content = (ByteString) message.getField(valueField);
  Message contentMessage =
      DynamicMessage.getDefaultInstance(type).getParserForType().parseFrom(content);
  WellKnownTypePrinter printer = wellKnownTypePrinters.get(typeName);
  if (printer != null) {
    // If the type is one of the well-known types, we use a special
    // formatting.
// Well-known payloads are wrapped as {"@type": ..., "value": <special form>}.
generator.print("{" + blankOrNewLine);
generator.indent();
generator.print("\"@type\":" + blankOrSpace + gson.toJson(typeUrl) + "," + blankOrNewLine);
generator.print("\"value\":" + blankOrSpace);
printer.print(this, contentMessage);
generator.print(blankOrNewLine);
generator.outdent();
generator.print("}");
} else {
  // Print the content message instead (with a "@type" field added).
  print(contentMessage, typeUrl);
}
}

/** Prints wrapper types (e.g., google.protobuf.Int32Value) */
private void printWrapper(MessageOrBuilder message) throws IOException {
  Descriptor descriptor = message.getDescriptorForType();
  FieldDescriptor valueField = descriptor.findFieldByName("value");
  if (valueField == null) {
    throw new InvalidProtocolBufferException("Invalid Wrapper type.");
  }
  // When formatting wrapper types, we just print its value field instead of
  // the whole message.
  printSingleFieldValue(valueField, message.getField(valueField));
}

// Serializes a message or builder to bytes (builders are built first).
private ByteString toByteString(MessageOrBuilder message) {
  if (message instanceof Message) {
    return ((Message) message).toByteString();
  } else {
    return ((Message.Builder) message).build().toByteString();
  }
}

/** Prints google.protobuf.Timestamp */
private void printTimestamp(MessageOrBuilder message) throws IOException {
  // Re-parse through bytes so DynamicMessage instances are handled too.
  Timestamp value = Timestamp.parseFrom(toByteString(message));
  generator.print("\"" + Timestamps.toString(value) + "\"");
}

/** Prints google.protobuf.Duration */
private void printDuration(MessageOrBuilder message) throws IOException {
  Duration value = Duration.parseFrom(toByteString(message));
  generator.print("\"" + Durations.toString(value) + "\"");
}

/** Prints google.protobuf.FieldMask */
private void printFieldMask(MessageOrBuilder message) throws IOException {
  FieldMask value = FieldMask.parseFrom(toByteString(message));
  generator.print("\"" + FieldMaskUtil.toJsonString(value) + "\"");
}

/** Prints google.protobuf.Struct */
private void printStruct(MessageOrBuilder message) throws IOException {
  Descriptor descriptor = message.getDescriptorForType();
  FieldDescriptor field = descriptor.findFieldByName("fields");
  if (field == null) {
    throw new InvalidProtocolBufferException("Invalid Struct type.");
  }
  // Struct is formatted as a map object.
  printMapFieldValue(field, message.getField(field));
}

/** Prints google.protobuf.Value */
private void printValue(MessageOrBuilder message) throws IOException {
  // For a Value message, only the value of the field is formatted.
  Map<FieldDescriptor, Object> fields = message.getAllFields();
  if (fields.isEmpty()) {
    // No value set.
    generator.print("null");
    return;
  }
  // A Value message can only have at most one field set (it only contains
  // an oneof).
  if (fields.size() != 1) {
    throw new InvalidProtocolBufferException("Invalid Value type.");
  }
  for (Map.Entry<FieldDescriptor, Object> entry : fields.entrySet()) {
    printSingleFieldValue(entry.getKey(), entry.getValue());
  }
}

/** Prints google.protobuf.ListValue */
private void printListValue(MessageOrBuilder message) throws IOException {
  Descriptor descriptor = message.getDescriptorForType();
  FieldDescriptor field = descriptor.findFieldByName("values");
  if (field == null) {
    throw new InvalidProtocolBufferException("Invalid ListValue type.");
  }
  printRepeatedFieldValue(field, message.getField(field));
}

/** Prints a regular message with an optional type URL.
*/
private void print(MessageOrBuilder message, String typeUrl) throws IOException {
  generator.print("{" + blankOrNewLine);
  generator.indent();

  boolean printedField = false;
  if (typeUrl != null) {
    // Emitted first so Any payloads carry their type information up front.
    generator.print("\"@type\":" + blankOrSpace + gson.toJson(typeUrl));
    printedField = true;
  }
  Map<FieldDescriptor, Object> fieldsToPrint = null;
  if (includingDefaultValueFields) {
    // TreeMap keeps fields ordered by descriptor (field number) for stable output.
    fieldsToPrint = new TreeMap<FieldDescriptor, Object>();
    for (FieldDescriptor field : message.getDescriptorForType().getFields()) {
      if (field.isOptional()) {
        if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE
            && !message.hasField(field)){
          // Always skip empty optional message fields. If not we will recurse indefinitely if
          // a message has itself as a sub-field.
          continue;
        }
        OneofDescriptor oneof = field.getContainingOneof();
        if (oneof != null && !message.hasField(field)) {
          // Skip all oneof fields except the one that is actually set
          continue;
        }
      }
      fieldsToPrint.put(field, message.getField(field));
    }
  } else {
    fieldsToPrint = message.getAllFields();
  }
  for (Map.Entry<FieldDescriptor, Object> field : fieldsToPrint.entrySet()) {
    if (printedField) {
      // Add line-endings for the previous field.
      generator.print("," + blankOrNewLine);
    } else {
      printedField = true;
    }
    printField(field.getKey(), field.getValue());
  }

  // Add line-endings for the last field.
  if (printedField) {
    generator.print(blankOrNewLine);
  }
  generator.outdent();
  generator.print("}");
}

// Prints one field as `"name": <value>`, dispatching on map/repeated/single.
private void printField(FieldDescriptor field, Object value) throws IOException {
  if (preservingProtoFieldNames) {
    generator.print("\"" + field.getName() + "\":" + blankOrSpace);
  } else {
    // Default: lowerCamelCase JSON names per the proto3 JSON mapping.
    generator.print("\"" + field.getJsonName() + "\":" + blankOrSpace);
  }
  if (field.isMapField()) {
    printMapFieldValue(field, value);
  } else if (field.isRepeated()) {
    printRepeatedFieldValue(field, value);
  } else {
    printSingleFieldValue(field, value);
  }
}

@SuppressWarnings("rawtypes")
private void printRepeatedFieldValue(FieldDescriptor field, Object value) throws IOException {
  generator.print("[");
  boolean printedElement = false;
  for (Object element : (List) value) {
    if (printedElement) {
      generator.print("," + blankOrSpace);
    } else {
      printedElement = true;
    }
    printSingleFieldValue(field, element);
  }
  generator.print("]");
}

@SuppressWarnings("rawtypes")
private void printMapFieldValue(FieldDescriptor field, Object value) throws IOException {
  // Map fields are represented as repeated entry messages with key/value fields.
  Descriptor type = field.getMessageType();
  FieldDescriptor keyField = type.findFieldByName("key");
  FieldDescriptor valueField = type.findFieldByName("value");
  if (keyField == null || valueField == null) {
    throw new InvalidProtocolBufferException("Invalid map field.");
  }
  generator.print("{" + blankOrNewLine);
  generator.indent();
  boolean printedElement = false;
  for (Object element : (List) value) {
    Message entry = (Message) element;
    Object entryKey = entry.getField(keyField);
    Object entryValue = entry.getField(valueField);
    if (printedElement) {
      generator.print("," + blankOrNewLine);
    } else {
      printedElement = true;
    }
    // Key fields are always double-quoted.
printSingleFieldValue(keyField, entryKey, true);
generator.print(":" + blankOrSpace);
printSingleFieldValue(valueField, entryValue);
}
if (printedElement) {
  generator.print(blankOrNewLine);
}
generator.outdent();
generator.print("}");
}

// Convenience overload: values are only quoted when their JSON type requires it.
private void printSingleFieldValue(FieldDescriptor field, Object value) throws IOException {
  printSingleFieldValue(field, value, false);
}

/**
 * Prints a field's value in JSON format.
 *
 * @param alwaysWithQuotes whether to always add double-quotes to primitive
 * types.
 */
private void printSingleFieldValue(
    final FieldDescriptor field, final Object value, boolean alwaysWithQuotes)
    throws IOException {
  switch (field.getType()) {
    case INT32:
    case SINT32:
    case SFIXED32:
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      generator.print(((Integer) value).toString());
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      break;

    case INT64:
    case SINT64:
    case SFIXED64:
      // 64-bit values are always quoted: they can exceed JavaScript's safe
      // integer range, so the proto3 JSON mapping represents them as strings.
      generator.print("\"" + ((Long) value).toString() + "\"");
      break;

    case BOOL:
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      if (((Boolean) value).booleanValue()) {
        generator.print("true");
      } else {
        generator.print("false");
      }
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      break;

    case FLOAT:
      Float floatValue = (Float) value;
      if (floatValue.isNaN()) {
        // Non-finite values have no JSON number form; emitted as strings.
        generator.print("\"NaN\"");
      } else if (floatValue.isInfinite()) {
        if (floatValue < 0) {
          generator.print("\"-Infinity\"");
        } else {
          generator.print("\"Infinity\"");
        }
      } else {
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
        generator.print(floatValue.toString());
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
      }
      break;

    case DOUBLE:
      Double doubleValue = (Double) value;
      if (doubleValue.isNaN()) {
        generator.print("\"NaN\"");
      } else if (doubleValue.isInfinite()) {
        if (doubleValue < 0) {
          generator.print("\"-Infinity\"");
        } else {
          generator.print("\"Infinity\"");
        }
      } else {
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
        generator.print(doubleValue.toString());
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
      }
      break;

    case UINT32:
    case FIXED32:
      // Unsigned 32-bit values are widened before printing so negative Java
      // ints render as their unsigned decimal value.
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      generator.print(unsignedToString((Integer) value));
      if (alwaysWithQuotes) {
        generator.print("\"");
      }
      break;

    case UINT64:
    case FIXED64:
      generator.print("\"" + unsignedToString((Long) value) + "\"");
      break;

    case STRING:
      // Gson handles JSON string escaping (quotes, control characters, ...).
      generator.print(gson.toJson(value));
      break;

    case BYTES:
      generator.print("\"");
      generator.print(BaseEncoding.base64().encode(((ByteString) value).toByteArray()));
      generator.print("\"");
      break;

    case ENUM:
      // Special-case google.protobuf.NullValue (it's an Enum).
      if (field.getEnumType().getFullName().equals("google.protobuf.NullValue")) {
        // No matter what value it contains, we always print it as "null".
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
        generator.print("null");
        if (alwaysWithQuotes) {
          generator.print("\"");
        }
      } else {
        if (((EnumValueDescriptor) value).getIndex() == -1) {
          // Unknown enum value (proto3): fall back to its raw number.
          generator.print(String.valueOf(((EnumValueDescriptor) value).getNumber()));
        } else {
          generator.print("\"" + ((EnumValueDescriptor) value).getName() + "\"");
        }
      }
      break;

    case MESSAGE:
    case GROUP:
      // NOTE(review): recursion with no explicit depth guard here — deeply
      // nested messages rely on the caller/data being bounded. Confirm
      // whether a limit is enforced elsewhere.
      print((Message) value);
      break;
  }
}
}

/** Convert an unsigned 32-bit integer to a string. */
private static String unsignedToString(final int value) {
  if (value >= 0) {
    return Integer.toString(value);
  } else {
    // Mask to 32 bits inside a long to get the unsigned decimal form.
    return Long.toString(value & 0x00000000FFFFFFFFL);
  }
}

/** Convert an unsigned 64-bit integer to a string. */
private static String unsignedToString(final long value) {
  if (value >= 0) {
    return Long.toString(value);
  } else {
    // Pull off the most-significant bit so that BigInteger doesn't think
    // the number is negative, then set it again using setBit().
return BigInteger.valueOf(value & Long.MAX_VALUE).setBit(Long.SIZE - 1).toString();
}
}

// Extracts the type name (everything after the last '/') from an Any type URL,
// e.g. "type.googleapis.com/foo.Bar" -> "foo.Bar".
private static String getTypeName(String typeUrl) throws InvalidProtocolBufferException {
  String[] parts = typeUrl.split("/");
  if (parts.length == 1) {
    throw new InvalidProtocolBufferException("Invalid type url found: " + typeUrl);
  }
  return parts[parts.length - 1];
}

private static class ParserImpl {
  private final TypeRegistry registry;
  private final JsonParser jsonParser;
  private final boolean ignoringUnknownFields;
  // Depth guard for nested messages; enforcement happens outside this
  // excerpt — TODO(review): confirm where currentDepth is checked.
  private final int recursionLimit;
  private int currentDepth;

  ParserImpl(TypeRegistry registry, boolean ignoreUnknownFields, int recursionLimit) {
    this.registry = registry;
    this.ignoringUnknownFields = ignoreUnknownFields;
    this.jsonParser = new JsonParser();
    this.recursionLimit = recursionLimit;
    this.currentDepth = 0;
  }

  // Parses JSON from a Reader and merges the result into the builder.
  // Strict (non-lenient) parsing is enforced.
  void merge(Reader json, Message.Builder builder) throws IOException {
    try {
      JsonReader reader = new JsonReader(json);
      reader.setLenient(false);
      merge(jsonParser.parse(reader), builder);
    } catch (InvalidProtocolBufferException e) {
      throw e;
    } catch (JsonIOException e) {
      // Unwrap IOException.
      if (e.getCause() instanceof IOException) {
        throw (IOException) e.getCause();
      } else {
        throw new InvalidProtocolBufferException(e.getMessage());
      }
    } catch (Exception e) {
      // We convert all exceptions from JSON parsing to our own exceptions.
      throw new InvalidProtocolBufferException(e.getMessage());
    }
  }

  // Parses JSON from a String and merges the result into the builder.
  void merge(String json, Message.Builder builder) throws InvalidProtocolBufferException {
    try {
      JsonReader reader = new JsonReader(new StringReader(json));
      reader.setLenient(false);
      merge(jsonParser.parse(reader), builder);
    } catch (InvalidProtocolBufferException e) {
      throw e;
    } catch (Exception e) {
      // We convert all exceptions from JSON parsing to our own exceptions.
      throw new InvalidProtocolBufferException(e.getMessage());
    }
  }

  // Strategy interface mirroring WellKnownTypePrinter, for parsing.
  private interface WellKnownTypeParser {
    void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
        throws InvalidProtocolBufferException;
  }

  private static final Map<String, WellKnownTypeParser> wellKnownTypeParsers =
      buildWellKnownTypeParsers();

  // Builds the full-name -> parser dispatch table used by merge() below.
  private static Map<String, WellKnownTypeParser> buildWellKnownTypeParsers() {
    Map<String, WellKnownTypeParser> parsers = new HashMap<String, WellKnownTypeParser>();
    // Special-case Any.
    parsers.put(
        Any.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeAny(json, builder);
          }
        });
    // Special-case wrapper types.
    WellKnownTypeParser wrappersPrinter =
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeWrapper(json, builder);
          }
        };
    parsers.put(BoolValue.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(Int32Value.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(UInt32Value.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(Int64Value.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(UInt64Value.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(StringValue.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(BytesValue.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(FloatValue.getDescriptor().getFullName(), wrappersPrinter);
    parsers.put(DoubleValue.getDescriptor().getFullName(), wrappersPrinter);
    // Special-case Timestamp.
    parsers.put(
        Timestamp.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeTimestamp(json, builder);
          }
        });
    // Special-case Duration.
    parsers.put(
        Duration.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeDuration(json, builder);
          }
        });
    // Special-case FieldMask.
    parsers.put(
        FieldMask.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeFieldMask(json, builder);
          }
        });
    // Special-case Struct.
    parsers.put(
        Struct.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeStruct(json, builder);
          }
        });
    // Special-case ListValue.
    parsers.put(
        ListValue.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeListValue(json, builder);
          }
        });
    // Special-case Value.
    parsers.put(
        Value.getDescriptor().getFullName(),
        new WellKnownTypeParser() {
          @Override
          public void merge(ParserImpl parser, JsonElement json, Message.Builder builder)
              throws InvalidProtocolBufferException {
            parser.mergeValue(json, builder);
          }
        });
    return parsers;
  }

  // Central dispatch: well-known types get their special parser, everything
  // else is parsed as a regular message object.
  private void merge(JsonElement json, Message.Builder builder)
      throws InvalidProtocolBufferException {
    WellKnownTypeParser specialParser =
        wellKnownTypeParsers.get(builder.getDescriptorForType().getFullName());
    if (specialParser != null) {
      specialParser.merge(this, json, builder);
      return;
    }
    mergeMessage(json, builder, false);
  }

  // Maps from camel-case field names to FieldDescriptor.
private final Map<Descriptor, Map<String, FieldDescriptor>> fieldNameMaps =
    new HashMap<Descriptor, Map<String, FieldDescriptor>>();

// Returns (building and caching on first use) a lookup from both the proto
// field name and the JSON (camelCase) name to the field descriptor.
private Map<String, FieldDescriptor> getFieldNameMap(Descriptor descriptor) {
  if (!fieldNameMaps.containsKey(descriptor)) {
    Map<String, FieldDescriptor> fieldNameMap = new HashMap<String, FieldDescriptor>();
    for (FieldDescriptor field : descriptor.getFields()) {
      // Accept both spellings on input per the proto3 JSON mapping.
      fieldNameMap.put(field.getName(), field);
      fieldNameMap.put(field.getJsonName(), field);
    }
    fieldNameMaps.put(descriptor, fieldNameMap);
    return fieldNameMap;
  }
  return fieldNameMaps.get(descriptor);
}

// Merges a JSON object into a message builder field by field.
// skipTypeUrl is set when the object came from an Any and carries "@type".
private void mergeMessage(JsonElement json, Message.Builder builder, boolean skipTypeUrl)
    throws InvalidProtocolBufferException {
  if (!(json instanceof JsonObject)) {
    throw new InvalidProtocolBufferException("Expect message object but got: " + json);
  }
  JsonObject object = (JsonObject) json;
  Map<String, FieldDescriptor> fieldNameMap = getFieldNameMap(builder.getDescriptorForType());
  for (Map.Entry<String, JsonElement> entry : object.entrySet()) {
    if (skipTypeUrl && entry.getKey().equals("@type")) {
      continue;
    }
    FieldDescriptor field = fieldNameMap.get(entry.getKey());
    if (field == null) {
      if (ignoringUnknownFields) {
        continue;
      }
      throw new InvalidProtocolBufferException(
          "Cannot find field: "
              + entry.getKey()
              + " in message "
              + builder.getDescriptorForType().getFullName());
    }
    mergeField(field, entry.getValue(), builder);
  }
}

// Parses google.protobuf.Any: resolves "@type" through the registry, parses
// the payload (specially for well-known types), and stores it as bytes.
private void mergeAny(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  Descriptor descriptor = builder.getDescriptorForType();
  FieldDescriptor typeUrlField = descriptor.findFieldByName("type_url");
  FieldDescriptor valueField = descriptor.findFieldByName("value");
  // Validates type of the message. Note that we can't just cast the message
  // to com.google.protobuf.Any because it might be a DynamicMessage.
  if (typeUrlField == null
      || valueField == null
      || typeUrlField.getType() != FieldDescriptor.Type.STRING
      || valueField.getType() != FieldDescriptor.Type.BYTES) {
    throw new InvalidProtocolBufferException("Invalid Any type.");
  }

  if (!(json instanceof JsonObject)) {
    throw new InvalidProtocolBufferException("Expect message object but got: " + json);
  }
  JsonObject object = (JsonObject) json;
  if (object.entrySet().isEmpty()) {
    return; // builder never modified, so it will end up building the default instance of Any
  }
  JsonElement typeUrlElement = object.get("@type");
  if (typeUrlElement == null) {
    throw new InvalidProtocolBufferException("Missing type url when parsing: " + json);
  }
  String typeUrl = typeUrlElement.getAsString();
  Descriptor contentType = registry.find(getTypeName(typeUrl));
  if (contentType == null) {
    throw new InvalidProtocolBufferException("Cannot resolve type: " + typeUrl);
  }
  builder.setField(typeUrlField, typeUrl);
  Message.Builder contentBuilder =
      DynamicMessage.getDefaultInstance(contentType).newBuilderForType();
  WellKnownTypeParser specialParser = wellKnownTypeParsers.get(contentType.getFullName());
  if (specialParser != null) {
    // Well-known payloads are nested under a "value" key inside the Any object.
    JsonElement value = object.get("value");
    if (value != null) {
      specialParser.merge(this, value, contentBuilder);
    }
  } else {
    // Regular payloads share the Any's JSON object; "@type" is skipped.
    mergeMessage(json, contentBuilder, true);
  }
  builder.setField(valueField, contentBuilder.build().toByteString());
}

// Parses google.protobuf.FieldMask from its comma-separated JSON string form.
private void mergeFieldMask(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  FieldMask value = FieldMaskUtil.fromJsonString(json.getAsString());
  builder.mergeFrom(value.toByteString());
}

// Parses google.protobuf.Timestamp from its RFC 3339 JSON string form.
private void mergeTimestamp(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  try {
    Timestamp value = Timestamps.parse(json.getAsString());
    builder.mergeFrom(value.toByteString());
  } catch (ParseException e) {
    throw new InvalidProtocolBufferException("Failed to parse timestamp: " + json);
  }
}

private void mergeDuration(JsonElement json,
Message.Builder builder) throws InvalidProtocolBufferException {
  // Parses google.protobuf.Duration from its "<seconds>s" JSON string form.
  try {
    Duration value = Durations.parse(json.getAsString());
    builder.mergeFrom(value.toByteString());
  } catch (ParseException e) {
    throw new InvalidProtocolBufferException("Failed to parse duration: " + json);
  }
}

// Parses google.protobuf.Struct: its "fields" map carries the whole object.
private void mergeStruct(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  Descriptor descriptor = builder.getDescriptorForType();
  FieldDescriptor field = descriptor.findFieldByName("fields");
  if (field == null) {
    throw new InvalidProtocolBufferException("Invalid Struct type.");
  }
  mergeMapField(field, json, builder);
}

// Parses google.protobuf.ListValue: its "values" list carries the whole array.
private void mergeListValue(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  Descriptor descriptor = builder.getDescriptorForType();
  FieldDescriptor field = descriptor.findFieldByName("values");
  if (field == null) {
    throw new InvalidProtocolBufferException("Invalid ListValue type.");
  }
  mergeRepeatedField(field, json, builder);
}

// Parses google.protobuf.Value by choosing the oneof member matching the
// JSON element's kind (bool/number/string/object/array/null).
private void mergeValue(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  Descriptor type = builder.getDescriptorForType();
  if (json instanceof JsonPrimitive) {
    JsonPrimitive primitive = (JsonPrimitive) json;
    if (primitive.isBoolean()) {
      builder.setField(type.findFieldByName("bool_value"), primitive.getAsBoolean());
    } else if (primitive.isNumber()) {
      builder.setField(type.findFieldByName("number_value"), primitive.getAsDouble());
    } else {
      builder.setField(type.findFieldByName("string_value"), primitive.getAsString());
    }
  } else if (json instanceof JsonObject) {
    FieldDescriptor field = type.findFieldByName("struct_value");
    Message.Builder structBuilder = builder.newBuilderForField(field);
    merge(json, structBuilder);
    builder.setField(field, structBuilder.build());
  } else if (json instanceof JsonArray) {
    FieldDescriptor field = type.findFieldByName("list_value");
    Message.Builder listBuilder = builder.newBuilderForField(field);
    merge(json, listBuilder);
    builder.setField(field, listBuilder.build());
  } else if (json instanceof JsonNull) {
    builder.setField(
        type.findFieldByName("null_value"), NullValue.NULL_VALUE.getValueDescriptor());
  } else {
    throw new IllegalStateException("Unexpected json data: " + json);
  }
}

// Parses a wrapper type (e.g. google.protobuf.Int32Value): the JSON value is
// the wrapper's "value" field directly.
private void mergeWrapper(JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  Descriptor type = builder.getDescriptorForType();
  FieldDescriptor field = type.findFieldByName("value");
  if (field == null) {
    throw new InvalidProtocolBufferException("Invalid wrapper type: " + type.getFullName());
  }
  builder.setField(field, parseFieldValue(field, json, builder));
}

// Merges one JSON member into its field, rejecting duplicate assignments and
// conflicting oneof members before dispatching on map/repeated/single.
private void mergeField(FieldDescriptor field, JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  if (field.isRepeated()) {
    if (builder.getRepeatedFieldCount(field) > 0) {
      throw new InvalidProtocolBufferException(
          "Field " + field.getFullName() + " has already been set.");
    }
  } else {
    if (builder.hasField(field)) {
      throw new InvalidProtocolBufferException(
          "Field " + field.getFullName() + " has already been set.");
    }
    if (field.getContainingOneof() != null
        && builder.getOneofFieldDescriptor(field.getContainingOneof()) != null) {
      FieldDescriptor other = builder.getOneofFieldDescriptor(field.getContainingOneof());
      throw new InvalidProtocolBufferException(
          "Cannot set field "
              + field.getFullName()
              + " because another field "
              + other.getFullName()
              + " belonging to the same oneof has already been set ");
    }
  }
  if (field.isRepeated() && json instanceof JsonNull) {
    // We allow "null" as value for all field types and treat it as if the
    // field is not present.
    return;
  }
  if (field.isMapField()) {
    mergeMapField(field, json, builder);
  } else if (field.isRepeated()) {
    mergeRepeatedField(field, json, builder);
  } else {
    Object value = parseFieldValue(field, json, builder);
    if (value != null) {
      builder.setField(field, value);
    }
  }
}

// Parses a JSON object into a map field: each member becomes one entry
// message with parsed key and value.
private void mergeMapField(FieldDescriptor field, JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  if (!(json instanceof JsonObject)) {
    throw new InvalidProtocolBufferException("Expect a map object but found: " + json);
  }
  Descriptor type = field.getMessageType();
  FieldDescriptor keyField = type.findFieldByName("key");
  FieldDescriptor valueField = type.findFieldByName("value");
  if (keyField == null || valueField == null) {
    throw new InvalidProtocolBufferException("Invalid map field: " + field.getFullName());
  }
  JsonObject object = (JsonObject) json;
  for (Map.Entry<String, JsonElement> entry : object.entrySet()) {
    Message.Builder entryBuilder = builder.newBuilderForField(field);
    // JSON object keys are strings; re-wrap so numeric/bool keys parse too.
    Object key = parseFieldValue(keyField, new JsonPrimitive(entry.getKey()), entryBuilder);
    Object value = parseFieldValue(valueField, entry.getValue(), entryBuilder);
    if (value == null) {
      throw new InvalidProtocolBufferException("Map value cannot be null.");
    }
    entryBuilder.setField(keyField, key);
    entryBuilder.setField(valueField, value);
    builder.addRepeatedField(field, entryBuilder.build());
  }
}

/**
 * Gets the default value for a field type. Note that we use proto3
 * language defaults and ignore any default values set through the
 * proto "default" option.
 */
private Object getDefaultValue(FieldDescriptor field, Message.Builder builder) {
  switch (field.getType()) {
    case INT32:
    case SINT32:
    case SFIXED32:
    case UINT32:
    case FIXED32:
      return 0;
    case INT64:
    case SINT64:
    case SFIXED64:
    case UINT64:
    case FIXED64:
      return 0L;
    case FLOAT:
      return 0.0f;
    case DOUBLE:
      return 0.0;
    case BOOL:
      return false;
    case STRING:
      return "";
    case BYTES:
      return ByteString.EMPTY;
    case ENUM:
      // First declared value is the proto3 default.
      return field.getEnumType().getValues().get(0);
    case MESSAGE:
    case GROUP:
      return builder.newBuilderForField(field).getDefaultInstanceForType();
    default:
      throw new IllegalStateException("Invalid field type: " + field.getType());
  }
}

// Parses a JSON array into a repeated field; null elements are rejected.
private void mergeRepeatedField(
    FieldDescriptor field, JsonElement json, Message.Builder builder)
    throws InvalidProtocolBufferException {
  if (!(json instanceof JsonArray)) {
    throw new InvalidProtocolBufferException("Expect an array but found: " + json);
  }
  JsonArray array = (JsonArray) json;
  for (int i = 0; i < array.size(); ++i) {
    Object value = parseFieldValue(field, array.get(i), builder);
    if (value == null) {
      throw new InvalidProtocolBufferException("Repeated field elements cannot be null");
    }
    builder.addRepeatedField(field, value);
  }
}

private int parseInt32(JsonElement json) throws InvalidProtocolBufferException {
  try {
    return Integer.parseInt(json.getAsString());
  } catch (Exception e) {
    // Fall through.
  }
  // JSON doesn't distinguish between integer values and floating point values so "1" and
  // "1.000" are treated as equal in JSON. For this reason we accept floating point values for
  // integer fields as well as long as it actually is an integer (i.e., round(value) == value).
try { BigDecimal value = new BigDecimal(json.getAsString()); return value.intValueExact(); } catch (Exception e) { throw new InvalidProtocolBufferException("Not an int32 value: " + json); } } private long parseInt64(JsonElement json) throws InvalidProtocolBufferException { try { return Long.parseLong(json.getAsString()); } catch (Exception e) { // Fall through. } // JSON doesn't distinguish between integer values and floating point values so "1" and // "1.000" are treated as equal in JSON. For this reason we accept floating point values for // integer fields as well as long as it actually is an integer (i.e., round(value) == value). try { BigDecimal value = new BigDecimal(json.getAsString()); return value.longValueExact(); } catch (Exception e) { throw new InvalidProtocolBufferException("Not an int32 value: " + json); } } private int parseUint32(JsonElement json) throws InvalidProtocolBufferException { try { long result = Long.parseLong(json.getAsString()); if (result < 0 || result > 0xFFFFFFFFL) { throw new InvalidProtocolBufferException("Out of range uint32 value: " + json); } return (int) result; } catch (InvalidProtocolBufferException e) { throw e; } catch (Exception e) { // Fall through. } // JSON doesn't distinguish between integer values and floating point values so "1" and // "1.000" are treated as equal in JSON. For this reason we accept floating point values for // integer fields as well as long as it actually is an integer (i.e., round(value) == value). 
try {
  BigDecimal decimalValue = new BigDecimal(json.getAsString());
  // toBigIntegerExact() throws if the value has a fractional part.
  BigInteger value = decimalValue.toBigIntegerExact();
  if (value.signum() < 0 || value.compareTo(new BigInteger("FFFFFFFF", 16)) > 0) {
    throw new InvalidProtocolBufferException("Out of range uint32 value: " + json);
  }
  return value.intValue();
} catch (InvalidProtocolBufferException e) {
  throw e;
} catch (Exception e) {
  throw new InvalidProtocolBufferException("Not an uint32 value: " + json);
}
}

// Largest unsigned 64-bit value (2^64 - 1), used for range checking below.
private static final BigInteger MAX_UINT64 = new BigInteger("FFFFFFFFFFFFFFFF", 16);

// Parses an unsigned 64-bit integer into a Java long (two's-complement bits).
private long parseUint64(JsonElement json) throws InvalidProtocolBufferException {
  try {
    BigDecimal decimalValue = new BigDecimal(json.getAsString());
    BigInteger value = decimalValue.toBigIntegerExact();
    if (value.compareTo(BigInteger.ZERO) < 0 || value.compareTo(MAX_UINT64) > 0) {
      throw new InvalidProtocolBufferException("Out of range uint64 value: " + json);
    }
    return value.longValue();
  } catch (InvalidProtocolBufferException e) {
    throw e;
  } catch (Exception e) {
    throw new InvalidProtocolBufferException("Not an uint64 value: " + json);
  }
}

// Accepts only the literal strings "true"/"false" (JSON booleans stringify
// to exactly these via getAsString()).
private boolean parseBool(JsonElement json) throws InvalidProtocolBufferException {
  if (json.getAsString().equals("true")) {
    return true;
  }
  if (json.getAsString().equals("false")) {
    return false;
  }
  throw new InvalidProtocolBufferException("Invalid bool value: " + json);
}

// Relative tolerance applied to float/double range checks below.
private static final double EPSILON = 1e-6;

private float parseFloat(JsonElement json) throws InvalidProtocolBufferException {
  if (json.getAsString().equals("NaN")) {
    return Float.NaN;
  } else if (json.getAsString().equals("Infinity")) {
    return Float.POSITIVE_INFINITY;
  } else if (json.getAsString().equals("-Infinity")) {
    return Float.NEGATIVE_INFINITY;
  }
  try {
    // We don't use Float.parseFloat() here because that function simply
    // accepts all double values. Here we parse the value into a Double
    // and do explicit range check on it.
    double value = Double.parseDouble(json.getAsString());
    // When a float value is printed, the printed value might be a little
    // larger or smaller due to precision loss. Here we need to add a bit
    // of tolerance when checking whether the float value is in range.
    if (value > Float.MAX_VALUE * (1.0 + EPSILON)
        || value < -Float.MAX_VALUE * (1.0 + EPSILON)) {
      throw new InvalidProtocolBufferException("Out of range float value: " + json);
    }
    return (float) value;
  } catch (InvalidProtocolBufferException e) {
    throw e;
  } catch (Exception e) {
    throw new InvalidProtocolBufferException("Not a float value: " + json);
  }
}

private static final BigDecimal MORE_THAN_ONE = new BigDecimal(String.valueOf(1.0 + EPSILON));
// When a float value is printed, the printed value might be a little
// larger or smaller due to precision loss. Here we need to add a bit
// of tolerance when checking whether the float value is in range.
private static final BigDecimal MAX_DOUBLE =
    new BigDecimal(String.valueOf(Double.MAX_VALUE)).multiply(MORE_THAN_ONE);
private static final BigDecimal MIN_DOUBLE =
    new BigDecimal(String.valueOf(-Double.MAX_VALUE)).multiply(MORE_THAN_ONE);

private double parseDouble(JsonElement json) throws InvalidProtocolBufferException {
  if (json.getAsString().equals("NaN")) {
    return Double.NaN;
  } else if (json.getAsString().equals("Infinity")) {
    return Double.POSITIVE_INFINITY;
  } else if (json.getAsString().equals("-Infinity")) {
    return Double.NEGATIVE_INFINITY;
  }
  try {
    // We don't use Double.parseDouble() here because that function simply
    // accepts all values. Here we parse the value into a BigDecimal and do
    // explicit range check on it.
BigDecimal value = new BigDecimal(json.getAsString()); if (value.compareTo(MAX_DOUBLE) > 0 || value.compareTo(MIN_DOUBLE) < 0) { throw new InvalidProtocolBufferException("Out of range double value: " + json); } return value.doubleValue(); } catch (InvalidProtocolBufferException e) { throw e; } catch (Exception e) { throw new InvalidProtocolBufferException("Not an double value: " + json); } } private String parseString(JsonElement json) { return json.getAsString(); } private ByteString parseBytes(JsonElement json) throws InvalidProtocolBufferException { return ByteString.copyFrom(BaseEncoding.base64().decode(json.getAsString())); } private EnumValueDescriptor parseEnum(EnumDescriptor enumDescriptor, JsonElement json) throws InvalidProtocolBufferException { String value = json.getAsString(); EnumValueDescriptor result = enumDescriptor.findValueByName(value); if (result == null) { // Try to interpret the value as a number. try { int numericValue = parseInt32(json); if (enumDescriptor.getFile().getSyntax() == FileDescriptor.Syntax.PROTO3) { result = enumDescriptor.findValueByNumberCreatingIfUnknown(numericValue); } else { result = enumDescriptor.findValueByNumber(numericValue); } } catch (InvalidProtocolBufferException e) { // Fall through. This exception is about invalid int32 value we get from parseInt32() but // that's not the exception we want the user to see. Since result == null, we will throw // an exception later. 
} if (result == null) { throw new InvalidProtocolBufferException( "Invalid enum value: " + value + " for enum type: " + enumDescriptor.getFullName()); } } return result; } private Object parseFieldValue(FieldDescriptor field, JsonElement json, Message.Builder builder) throws InvalidProtocolBufferException { if (json instanceof JsonNull) { if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE && field.getMessageType().getFullName().equals(Value.getDescriptor().getFullName())) { // For every other type, "null" means absence, but for the special // Value message, it means the "null_value" field has been set. Value value = Value.newBuilder().setNullValueValue(0).build(); return builder.newBuilderForField(field).mergeFrom(value.toByteString()).build(); } else if (field.getJavaType() == FieldDescriptor.JavaType.ENUM && field.getEnumType().getFullName().equals(NullValue.getDescriptor().getFullName())) { // If the type of the field is a NullValue, then the value should be explicitly set. return field.getEnumType().findValueByNumber(0); } return null; } switch (field.getType()) { case INT32: case SINT32: case SFIXED32: return parseInt32(json); case INT64: case SINT64: case SFIXED64: return parseInt64(json); case BOOL: return parseBool(json); case FLOAT: return parseFloat(json); case DOUBLE: return parseDouble(json); case UINT32: case FIXED32: return parseUint32(json); case UINT64: case FIXED64: return parseUint64(json); case STRING: return parseString(json); case BYTES: return parseBytes(json); case ENUM: return parseEnum(field.getEnumType(), json); case MESSAGE: case GROUP: if (currentDepth >= recursionLimit) { throw new InvalidProtocolBufferException("Hit recursion limit."); } ++currentDepth; Message.Builder subBuilder = builder.newBuilderForField(field); merge(json, subBuilder); --currentDepth; return subBuilder.build(); default: throw new InvalidProtocolBufferException("Invalid field type: " + field.getType()); } } } }
Livefyre/protobuf
java/util/src/main/java/com/google/protobuf/util/JsonFormat.java
Java
bsd-3-clause
67,363
hqDefine('sso/js/edit_identity_provider', [ 'jquery', 'knockout', 'underscore', 'hqwebapp/js/utils/email', "hqwebapp/js/initial_page_data", 'sso/js/models', ], function ( $, ko, _, emailUtils, initialPageData, models ) { $(function () { var emailDomainManager = models.linkedObjectListModel({ asyncHandler: 'identity_provider_admin', requestContext: { idpSlug: initialPageData.get('idp_slug'), }, validateNewObjectFn: function (newObject) { return newObject.length > 4 && newObject.indexOf('.') !== -1 && newObject.indexOf('@') === -1 && !newObject.endsWith('.'); }, }); $('#email-domain-manager').koApplyBindings(emailDomainManager); emailDomainManager.init(); var ssoExemptUserManager = models.linkedObjectListModel({ asyncHandler: 'sso_exempt_users_admin', requestContext: { idpSlug: initialPageData.get('idp_slug'), }, validateNewObjectFn: emailUtils.validateEmail, }); $('#sso-exempt-user-manager').koApplyBindings(ssoExemptUserManager); ssoExemptUserManager.init(); }); });
dimagi/commcare-hq
corehq/apps/sso/static/sso/js/edit_identity_provider.js
JavaScript
bsd-3-clause
1,318
#include "graph2occupancy.h" using namespace std; using namespace Eigen; using namespace g2o; Graph2occupancy::Graph2occupancy(OptimizableGraph *graph, cv::Mat *image, SE2 pose, float resolution, float threhsold, float rows, float cols, float maxRange, float usableRange, float infinityFillingRange, float gain, float squareSize, float angle, float freeThrehsold){ _graph = graph; _mapImage = image; _resolution = resolution; _threshold = threhsold; _rows = rows; _cols = cols; _maxRange = maxRange; _usableRange = usableRange; _infinityFillingRange = infinityFillingRange; _gain = gain; _squareSize = squareSize; _angle = angle; _freeThreshold = freeThrehsold; } void Graph2occupancy::computeMap(){ // Sort verteces vector<int> vertexIds(_graph->vertices().size()); int k = 0; for(OptimizableGraph::VertexIDMap::iterator it = _graph->vertices().begin(); it != _graph->vertices().end(); ++it) { vertexIds[k++] = (it->first); } sort(vertexIds.begin(), vertexIds.end()); /************************************************************************ * Compute map size * ************************************************************************/ // Check the entire graph to find map bounding box Matrix2d boundingBox = Matrix2d::Zero(); std::vector<RobotLaser*> robotLasers; std::vector<SE2> robotPoses; double xmin=std::numeric_limits<double>::max(); double xmax=std::numeric_limits<double>::min(); double ymin=std::numeric_limits<double>::max(); double ymax=std::numeric_limits<double>::min(); SE2 baseTransform(0,0,_angle); bool initialPoseFound = false; SE2 initialPose; for(size_t i = 0; i < vertexIds.size(); ++i) { OptimizableGraph::Vertex *_v = _graph->vertex(vertexIds[i]); VertexSE2 *v = dynamic_cast<VertexSE2*>(_v); if(!v) { continue; } if (v->fixed() && !initialPoseFound){ initialPoseFound = true; initialPose = baseTransform*v->estimate(); } OptimizableGraph::Data *d = v->userData(); while(d) { RobotLaser *robotLaser = dynamic_cast<RobotLaser*>(d); if(!robotLaser) { d = d->next(); 
continue; } robotLasers.push_back(robotLaser); SE2 transformed_estimate = baseTransform*v->estimate(); robotPoses.push_back(transformed_estimate); double x = transformed_estimate.translation().x(); double y = transformed_estimate.translation().y(); xmax = xmax > x+_usableRange ? xmax : x + _usableRange; ymax = ymax > y+_usableRange ? ymax : y + _usableRange; xmin = xmin < x-_usableRange ? xmin : x - _usableRange; ymin = ymin < y-_usableRange ? ymin : y - _usableRange; d = d->next(); } } boundingBox(0,0)=xmin; boundingBox(1,0)=ymin; boundingBox(0,1)=xmax; boundingBox(1,1)=ymax; if(robotLasers.size() == 0) { std::cout << "No laser scans found ... quitting!" << std::endl; return; } /************************************************************************ * Compute the map * ************************************************************************/ // Create the map Vector2i size; Vector2f offset; if(_rows != 0 && _cols != 0) { size = Vector2i(_rows, _cols); } else { size = Vector2i((boundingBox(0, 1) - boundingBox(0, 0))/ _resolution, (boundingBox(1, 1) - boundingBox(1, 0))/ _resolution); } if(size.x() == 0 || size.y() == 0) { std::cout << "Zero map size ... quitting!" 
<< std::endl; return; } offset = Eigen::Vector2f(boundingBox(0, 0),boundingBox(1, 0)); FrequencyMapCell unknownCell; _map = FrequencyMap(_resolution, offset, size, unknownCell); for (size_t i = 0; i < robotPoses.size(); ++i) { _map.integrateScan(robotLasers[i], robotPoses[i], _maxRange, _usableRange, _infinityFillingRange, _gain, _squareSize); } /************************************************************************ * Convert frequency map into int[8] * ************************************************************************/ *_mapImage = cv::Mat(_map.rows(), _map.cols(), CV_8UC1); _mapImage->setTo(cv::Scalar(0)); for(int c = 0; c < _map.cols(); c++) { for(int r = 0; r < _map.rows(); r++) { if(_map(r, c).misses() == 0 && _map(r, c).hits() == 0) { _mapImage->at<unsigned char>(r, c) = _unknownColor; } else { float fraction = (float)_map(r, c).hits()/(float)(_map(r, c).hits()+_map(r, c).misses()); if (_freeThreshold && fraction < _freeThreshold){ _mapImage->at<unsigned char>(r, c) = _freeColor; } else if (_threshold && fraction > _threshold){ _mapImage->at<unsigned char>(r, c) = _occupiedColor; } else { _mapImage->at<unsigned char>(r, c) = _unknownColor; } } } } Eigen::Vector2f origin(0.0f, 0.0f); if (initialPoseFound){ Eigen::Vector2i originMap = _map.world2map(Eigen::Vector2f(initialPose.translation().x(), initialPose.translation().y())); origin = Eigen::Vector2f(((-_resolution * originMap.y())+initialPose.translation().y()), -(_resolution * (_mapImage->rows-originMap.x()) +initialPose.translation().x())); } _mapCenter = origin; } void Graph2occupancy::setResolution (const float resolution){ _resolution = resolution; } float Graph2occupancy::getResolution (){ return _resolution; } float Graph2occupancy::getThreshold (){ return _threshold; } float Graph2occupancy::getRows (){ return _rows; } float Graph2occupancy::getCols (){ return _cols; } float Graph2occupancy::getFreeThreshold (){ return _freeThreshold; } Vector2f Graph2occupancy::getMapCenter(){ return 
_mapCenter; }
mtlazaro/cg_mrslam
src/ros_map_publisher/graph2occupancy.cpp
C++
bsd-3-clause
5,798
<?php namespace hiapi\exceptions\domain; /** * Class InvariantException * * @author Dmytro Naumenko <d.naumenko.a@gmail.com> */ class InvariantException extends DomainException { }
hiqdev/hiapi
src/exceptions/domain/InvariantException.php
PHP
bsd-3-clause
188
// Copyright (c) 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ash/policy/handlers/device_wifi_allowed_handler.h" #include <vector> #include "base/bind.h" #include "chromeos/network/network_handler.h" #include "chromeos/network/prohibited_technologies_handler.h" #include "chromeos/settings/cros_settings_names.h" #include "chromeos/settings/cros_settings_provider.h" #include "third_party/cros_system_api/dbus/shill/dbus-constants.h" namespace policy { DeviceWiFiAllowedHandler::DeviceWiFiAllowedHandler( ash::CrosSettings* cros_settings) : cros_settings_(cros_settings) { wifi_policy_subscription_ = cros_settings_->AddSettingsObserver( chromeos::kDeviceWiFiAllowed, base::BindRepeating(&DeviceWiFiAllowedHandler::OnWiFiPolicyChanged, weak_factory_.GetWeakPtr())); // Fire it once so we're sure we get an invocation on startup. OnWiFiPolicyChanged(); } DeviceWiFiAllowedHandler::~DeviceWiFiAllowedHandler() = default; void DeviceWiFiAllowedHandler::OnWiFiPolicyChanged() { chromeos::CrosSettingsProvider::TrustedStatus status = cros_settings_->PrepareTrustedValues( base::BindOnce(&DeviceWiFiAllowedHandler::OnWiFiPolicyChanged, weak_factory_.GetWeakPtr())); if (status != chromeos::CrosSettingsProvider::TRUSTED) return; bool wifi_allowed = true; cros_settings_->GetBoolean(chromeos::kDeviceWiFiAllowed, &wifi_allowed); if (!wifi_allowed) { chromeos::NetworkHandler::Get() ->prohibited_technologies_handler() ->AddGloballyProhibitedTechnology(shill::kTypeWifi); } else { chromeos::NetworkHandler::Get() ->prohibited_technologies_handler() ->RemoveGloballyProhibitedTechnology(shill::kTypeWifi); } } } // namespace policy
nwjs/chromium.src
chrome/browser/ash/policy/handlers/device_wifi_allowed_handler.cc
C++
bsd-3-clause
1,917
############################################################################### ## ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the University of Utah nor the names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
## ############################################################################### # Check for testing """ This module defines the class Module """ import copy from itertools import izip import weakref from db.domain import DBModule from core.data_structures.point import Point from core.vistrail.annotation import Annotation from core.vistrail.location import Location from core.vistrail.module_function import ModuleFunction from core.vistrail.module_param import ModuleParam from core.vistrail.port import Port, PortEndPoint from core.vistrail.port_spec import PortSpec from core.utils import NoSummon, VistrailsInternalError, report_stack from core.modules.module_descriptor import OverloadedPort from core.modules.module_registry import get_module_registry, ModuleRegistry ################################################################################ # A Module stores not only the information, but a method (summon) that # creates a 'live' object, subclass of core/modules/vistrail_module/Module class Module(DBModule): """ Represents a module from a Pipeline """ ########################################################################## # Constructor and copy def __init__(self, *args, **kwargs): DBModule.__init__(self, *args, **kwargs) if self.cache is None: self.cache = 1 if self.id is None: self.id = -1 if self.location is None: self.location = Location(x=-1.0, y=-1.0) if self.name is None: self.name = '' if self.package is None: self.package = '' if self.version is None: self.version = '' self.set_defaults() def set_defaults(self, other=None): if other is None: self.portVisible = set() self.visible_input_ports = set() self.visible_output_ports = set() self.connected_input_ports = {} self.connected_output_ports = {} self.is_valid = False self.is_breakpoint = False self.is_watched = False self._descriptor_info = None self._module_descriptor = None else: self.portVisible = copy.copy(other.portVisible) self.visible_input_ports = copy.copy(other.visible_input_ports) 
self.visible_output_ports = copy.copy(other.visible_output_ports) self.connected_input_ports = copy.copy(other.connected_input_ports) self.connected_output_ports = \ copy.copy(other.connected_output_ports) self.is_valid = other.is_valid self.is_breakpoint = other.is_breakpoint self.is_watched = other.is_watched self._descriptor_info = None self._module_descriptor = other._module_descriptor if not self.namespace: self.namespace = None self.function_idx = self.db_functions_id_index self.setup_indices() def setup_indices(self): self._input_port_specs = [] self._output_port_specs = [] for port_spec in self.port_spec_list: if port_spec.type == 'input': self._input_port_specs.append(port_spec) elif port_spec.type == 'output': self._output_port_specs.append(port_spec) def __copy__(self): """__copy__() -> Module - Returns a clone of itself""" return Module.do_copy(self) def do_copy(self, new_ids=False, id_scope=None, id_remap=None): cp = DBModule.do_copy(self, new_ids, id_scope, id_remap) cp.__class__ = Module cp.set_defaults(self) return cp @staticmethod def convert(_module): if _module.__class__ == Module: return _module.__class__ = Module for _port_spec in _module.db_portSpecs: PortSpec.convert(_port_spec) if _module.db_location: Location.convert(_module.db_location) for _function in _module.db_functions: ModuleFunction.convert(_function) for _annotation in _module.db_get_annotations(): Annotation.convert(_annotation) _module.set_defaults() ########################################################################## id = DBModule.db_id cache = DBModule.db_cache annotations = DBModule.db_annotations location = DBModule.db_location center = DBModule.db_location name = DBModule.db_name label = DBModule.db_name namespace = DBModule.db_namespace package = DBModule.db_package version = DBModule.db_version port_spec_list = DBModule.db_portSpecs internal_version = '' # type check this (list, hash) def _get_functions(self): self.db_functions.sort(key=lambda x: x.db_pos) return 
self.db_functions def _set_functions(self, functions): # want to convert functions to hash...? self.db_functions = functions functions = property(_get_functions, _set_functions) def add_function(self, function): self.db_add_function(function) def has_function_with_real_id(self, f_id): return self.db_has_function_with_id(f_id) def get_function_by_real_id(self, f_id): return self.db_get_function_by_id(f_id) def add_annotation(self, annotation): self.db_add_annotation(annotation) def delete_annotation(self, annotation): self.db_delete_annotation(annotation) def has_annotation_with_key(self, key): return self.db_has_annotation_with_key(key) def get_annotation_by_key(self, key): return self.db_get_annotation_by_key(key) def toggle_breakpoint(self): self.is_breakpoint = not self.is_breakpoint def toggle_watched(self): self.is_watched = not self.is_watched def _get_port_specs(self): return self.db_portSpecs_id_index port_specs = property(_get_port_specs) def has_portSpec_with_name(self, name): return self.db_has_portSpec_with_name(name) def get_portSpec_by_name(self, name): return self.db_get_portSpec_by_name(name) def add_port_spec(self, spec): DBModule.db_add_portSpec(self, spec) if spec.type == 'input': self._input_port_specs.append(spec) elif spec.type == 'output': self._output_port_specs.append(spec) # override DBModule.db_add_portSpec so that _*_port_specs are updated db_add_portSpec = add_port_spec def delete_port_spec(self, spec): if spec.type == 'input': self._input_port_specs.remove(spec) elif spec.type == 'output': self._output_port_specs.remove(spec) DBModule.db_delete_portSpec(self, spec) # override DBModule.db_delete_portSpec so that _*_port_specs are updated db_delete_portSpec = delete_port_spec def _get_input_port_specs(self): return sorted(self._input_port_specs, key=lambda x: (x.sort_key, x.id)) input_port_specs = property(_get_input_port_specs) def _get_output_port_specs(self): return sorted(self._output_port_specs, key=lambda x: (x.sort_key, x.id), 
reverse=True) output_port_specs = property(_get_output_port_specs) def _get_descriptor_info(self): if self._descriptor_info is None: self._descriptor_info = (self.package, self.name, self.namespace, self.version, str(self.internal_version)) return self._descriptor_info descriptor_info = property(_get_descriptor_info) def _get_module_descriptor(self): if self._module_descriptor is None or \ self._module_descriptor() is None: reg = get_module_registry() self._module_descriptor = \ weakref.ref(reg.get_descriptor_by_name(*self.descriptor_info)) return self._module_descriptor() def _set_module_descriptor(self, descriptor): self._module_descriptor = weakref.ref(descriptor) module_descriptor = property(_get_module_descriptor, _set_module_descriptor) def get_port_spec(self, port_name, port_type): """get_port_spec(port_name: str, port_type: str: ['input' | 'output']) -> PortSpec """ if self.has_portSpec_with_name((port_name, port_type)): return self.get_portSpec_by_name((port_name, port_type)) desc = self.module_descriptor reg = get_module_registry() return reg.get_port_spec_from_descriptor(desc, port_name, port_type) def has_port_spec(self, port_name, port_type): if self.has_portSpec_with_name((port_name, port_type)): return True reg = get_module_registry() desc = self.module_descriptor return reg.has_port_spec_from_descriptor(desc, port_name, port_type) def summon(self): result = self.module_descriptor.module() if self.cache != 1: result.is_cacheable = lambda *args: False if hasattr(result, 'input_ports_order'): result.input_ports_order = [p.name for p in self.input_port_specs] if hasattr(result, 'output_ports_order'): result.output_ports_order = [p.name for p in self.output_port_specs] # output_ports are reversed for display purposes... result.output_ports_order.reverse() # FIXME this may not be quite right because we don't have self.registry # anymore. That said, I'm not sure how self.registry would have # worked for hybrids... 
result.registry = get_module_registry() return result def is_group(self): return False def is_abstraction(self): return False def getNumFunctions(self): """getNumFunctions() -> int - Returns the number of functions """ return len(self.functions) def sourcePorts(self): """sourcePorts() -> list of Port Returns list of source (output) ports module supports. """ registry = get_module_registry() desc = self.module_descriptor ports = registry.module_source_ports_from_descriptor(True, desc) ports.extend(self.output_port_specs) return ports def destinationPorts(self): """destinationPorts() -> list of Port Returns list of destination (input) ports module supports """ registry = get_module_registry() desc = self.module_descriptor ports = registry.module_destination_ports_from_descriptor(True, desc) ports.extend(self.input_port_specs) return ports ########################################################################## # Debugging def show_comparison(self, other): if type(other) != type(self): print "Type mismatch" print type(self), type(other) elif self.id != other.id: print "id mismatch" print self.id, other.id elif self.name != other.name: print "name mismatch" print self.name, other.name elif self.cache != other.cache: print "cache mismatch" print self.cache, other.cache elif self.location != other.location: print "location mismatch" # FIXME Location has no show_comparison # self.location.show_comparison(other.location) elif len(self.functions) != len(other.functions): print "function length mismatch" print len(self.functions), len(other.functions) else: for f, g in izip(self.functions, other.functions): if f != g: print "function mismatch" f.show_comparison(g) return print "No difference found" assert self == other ########################################################################## # Operators def __str__(self): """__str__() -> str Returns a string representation of itself. 
""" def get_name(): if self.namespace: return self.namespace + '|' + self.name return self.name return ("(Module '%s:%s' id=%s functions:%s port_specs:%s)@%X" % (self.package, get_name(), self.id, [str(f) for f in self.functions], [str(port_spec) for port_spec in self.db_portSpecs], id(self))) def __eq__(self, other): """ __eq__(other: Module) -> boolean Returns True if self and other have the same attributes. Used by == operator. """ if type(other) != type(self): return False if self.name != other.name: return False if self.namespace != other.namespace: return False if self.package != other.package: return False if self.cache != other.cache: return False if self.location != other.location: return False if len(self.functions) != len(other.functions): return False if len(self.annotations) != len(other.annotations): return False for f, g in izip(self.functions, other.functions): if f != g: return False for f, g in izip(self.annotations, other.annotations): if f != g: return False return True def __ne__(self, other): return not self.__eq__(other) ########################################################################## # Properties ################################################################################ # Testing import unittest class TestModule(unittest.TestCase): def create_module(self, id_scope=None): from db.domain import IdScope if id_scope is None: id_scope = IdScope() params = [ModuleParam(id=id_scope.getNewId(ModuleParam.vtType), type='Int', val='1')] functions = [ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType), name='value', parameters=params)] module = Module(id=id_scope.getNewId(Module.vtType), name='Float', package='edu.utah.sci.vistrails.basic', functions=functions) return module def test_copy(self): """Check that copy works correctly""" from db.domain import IdScope id_scope = IdScope() m1 = self.create_module(id_scope) m2 = copy.copy(m1) self.assertEquals(m1, m2) self.assertEquals(m1.id, m2.id) m3 = m1.do_copy(True, id_scope, {}) 
self.assertEquals(m1, m3) self.assertNotEquals(m1.id, m3.id) def test_serialization(self): """ Check that serialize and unserialize are working properly """ import core.db.io m1 = self.create_module() xml_str = core.db.io.serialize(m1) m2 = core.db.io.unserialize(xml_str, Module) self.assertEquals(m1, m2) self.assertEquals(m1.id, m2.id) def testEq(self): """Check correctness of equality operator.""" x = Module() self.assertNotEquals(x, None) def testAccessors(self): """Check that accessors are working.""" x = Module() self.assertEquals(x.id, -1) x.id = 10 self.assertEquals(x.id, 10) self.assertEquals(x.cache, 1) x.cache = 1 self.assertEquals(x.cache, 1) self.assertEquals(x.location.x, -1.0) x.location = Location(x=1, y=x.location.y) self.assertEquals(x.location.x, 1) self.assertEquals(x.name, "") def testSummonModule(self): """Check that summon creates a correct module""" x = Module() x.name = "String" x.package = 'edu.utah.sci.vistrails.basic' try: registry = get_module_registry() c = x.summon() m = registry.get_descriptor_by_name('edu.utah.sci.vistrails.basic', 'String').module assert type(c) == m except NoSummon: msg = "Expected to get a String object, got a NoSummon exception" self.fail(msg) def test_constructor(self): m1_param = ModuleParam(val="1.2", type="Float", alias="", ) m1_function = ModuleFunction(name="value", parameters=[m1_param], ) m1 = Module(id=0, name='Float', functions=[m1_function], ) m2 = Module() m2.name = "Float" m2.id = 0 f = ModuleFunction() f.name = "value" m2.functions.append(f) param = ModuleParam() param.strValue = "1.2" param.type = "Float" param.alias = "" f.params.append(param) assert m1 == m2 def test_str(self): m = Module(id=0, name='Float', functions=[ModuleFunction(name='value', parameters=[ModuleParam(type='Int', val='1', )], )], ) str(m) if __name__ == '__main__': unittest.main()
CMUSV-VisTrails/WorkflowRecommendation
vistrails/core/vistrail/module.py
Python
bsd-3-clause
19,617
<?php /* What kind of environment is this: development, test, or live (ie, production)? */ define('SS_ENVIRONMENT_TYPE', 'dev'); /* Database connection */ define('SS_DATABASE_SERVER', 'localhost'); define('SS_DATABASE_USERNAME', 'root'); define('SS_DATABASE_PASSWORD', '');
melechi/s3.net.au
example._ss_environment.php
PHP
bsd-3-clause
277
<?php use yupe\components\controllers\FrontController; class CategoryController extends FrontController { /** * @var ProductRepository */ protected $productRepository; /** * @var AttributeFilter */ protected $attributeFilter; public function init() { $this->productRepository = Yii::app()->getComponent('productRepository'); $this->attributeFilter = Yii::app()->getComponent('attributesFilter'); parent::init(); } public function actionIndex() { $this->render('index', [ 'dataProvider' => new CArrayDataProvider(StoreCategory::model()->getMenuList(1), [ 'id' => 'id', 'pagination' => false ]), ]); } /** * @param $path * @throws CHttpException */ public function actionView($path) { $category = StoreCategory::model()->findByPath($path); if (null === $category) { throw new CHttpException(404); } $data = Yii::app()->getRequest()->getQueryString() ? $this->productRepository->getByFilter( $this->attributeFilter->getMainAttributesForSearchFromQuery(Yii::app()->getRequest(), [AttributeFilter::MAIN_SEARCH_PARAM_CATEGORY => [$category->id]]), $this->attributeFilter->getEavAttributesForSearchFromQuery(Yii::app()->getRequest()) ) : $this->productRepository->getListForCategory($category); $this->render( 'view', [ 'dataProvider' => $data, 'category' => $category ] ); } }
mettoff/archive
protected/modules/store/controllers/CategoryController.php
PHP
bsd-3-clause
1,628
<?php /** * AspectPHP_Advice_Extractor * * @category PHP * @package AspectPHP_Advice * @author Matthias Molitor <matthias@matthimatiker.de> * @copyright 2012 Matthias Molitor * @license http://www.opensource.org/licenses/BSD-3-Clause BSD License * @link https://github.com/Matthimatiker/AspectPHP * @since 31.03.2012 */ /** * Extracts advices from aspects. * * @category PHP * @package AspectPHP_Advice * @author Matthias Molitor <matthias@matthimatiker.de> * @copyright 2012 Matthias Molitor * @license http://www.opensource.org/licenses/BSD-3-Clause BSD License * @link https://github.com/Matthimatiker/AspectPHP * @since 31.03.2012 */ class AspectPHP_Advice_Extractor { /** * Extracts the advices from the given aspect. * * Advices are represented by advisor objects, a combination of * pointcut and advice. * * @param AspectPHP_Aspect $aspect * @return AspectPHP_Advisor_Container A container that contains advisors grouped by type. * @throws AspectPHP_Reflection_Exception If advices or pointcuts are not valid. */ public function getAdvicesFrom(AspectPHP_Aspect $aspect) { $advisors = new AspectPHP_Advisor_Container(); $aspectInfo = new AspectPHP_Reflection_Aspect($aspect); foreach ($aspectInfo->getAdvices() as $advice) { /* @var $advice AspectPHP_Reflection_Advice */ foreach (AspectPHP_Advice_Type::all() as $type) { /* @var $type string */ foreach ($advice->getPointcutsByType($type) as $pointcutMethod) { /* @var $pointcutMethod AspectPHP_Reflection_Pointcut */ $pointcut = $pointcutMethod->createPointcut($aspect); $advisor = new AspectPHP_Advisor_Callback($pointcut, array($aspect, $advice->getName())); $advisors->{$type}()->add($advisor); } } } return $advisors; } }
Matthimatiker/AspectPHP
library/AspectPHP/Advice/Extractor.php
PHP
bsd-3-clause
1,984
import pyaf.Bench.TS_datasets as tsds

import tests.artificial.process_artificial_dataset as art

# Generate and process one artificial benchmark series: 1024 daily points
# with a constant trend, a 12-step cycle, the "Difference" transform,
# zero noise (sigma=0.0), 20 exogenous variables and no AR component.
art.process_dataset(
    N=1024,
    FREQ='D',
    seed=0,
    trendtype="ConstantTrend",
    cycle_length=12,
    transform="Difference",
    sigma=0.0,
    exog_count=20,
    ar_order=0,
)
antoinecarme/pyaf
tests/artificial/transf_Difference/trend_ConstantTrend/cycle_12/ar_/test_artificial_1024_Difference_ConstantTrend_12__20.py
Python
bsd-3-clause
271
/* Picasso - a vector graphics library
 *
 * Copyright (C) 2008 Zhang Ji Peng
 * Contact: onecoolx@gmail.com
 */

#include "common.h"
#include "device.h"
#include "interfaces.h"
#include "picasso.h"
#include "picasso_global.h"
#include "picasso_raster_adapter.h"
#include "picasso_objects.h"
#include "picasso_matrix.h"

namespace picasso {

// raster_adapter is a thin wrapper that owns a device-specific
// abstract_raster_adapter and forwards every call to it.

raster_adapter::raster_adapter()
{
    m_impl = get_system_device()->create_raster_adapter();
}

raster_adapter::~raster_adapter()
{
    get_system_device()->destroy_raster_adapter(m_impl);
}

void raster_adapter::set_gamma_power(scalar g)
{
    m_impl->set_gamma_power(g);
}

void raster_adapter::set_antialias(bool b)
{
    m_impl->set_antialias(b);
}

void raster_adapter::reset(void)
{
    m_impl->reset();
}

bool raster_adapter::is_empty(void) const
{
    return m_impl->is_empty();
}

void raster_adapter::add_shape(const vertex_source& vs, unsigned int id)
{
    m_impl->add_shape(vs, id);
}

void raster_adapter::set_stroke_dashes(scalar start, const scalar* dashes, unsigned int num)
{
    m_impl->set_stroke_dashes(start, dashes, num);
}

void raster_adapter::set_stroke_attr_val(int idx, scalar val)
{
    m_impl->set_stroke_attr_val(idx, val);
}

void raster_adapter::set_stroke_attr(int idx, int val)
{
    m_impl->set_stroke_attr(idx, val);
}

void raster_adapter::set_fill_attr(int idx, int val)
{
    m_impl->set_fill_attr(idx, val);
}

void raster_adapter::set_raster_method(unsigned int m)
{
    m_impl->set_raster_method(m);
}

void raster_adapter::set_transform(const trans_affine& mtx)
{
    m_impl->set_transform(mtx.impl());
}

void raster_adapter::commit(void)
{
    m_impl->commit();
}

// static methods

// Hit-tests point (x, y) against the filled interior of the shape,
// using a temporary raster adapter with an identity transform.
bool raster_adapter::fill_contents_point(const vertex_source& vs, scalar x, scalar y, filling_rule rule)
{
    abstract_raster_adapter* adapter = get_system_device()->create_raster_adapter();
    trans_affine identity;
    bool hit = false;
    if (adapter) {
        adapter->set_raster_method(raster_fill);
        adapter->set_fill_attr(FIA_FILL_RULE, rule);
        adapter->set_transform(identity.impl());
        adapter->add_shape(vs, 0);
        adapter->commit();
        hit = adapter->contains(x, y);
    }
    // NOTE(review): destroy is called even when creation failed, mirroring
    // the original flow — presumably destroy_raster_adapter tolerates null.
    get_system_device()->destroy_raster_adapter(adapter);
    return hit;
}

// Hit-tests point (x, y) against the stroked outline of the shape
// rendered with stroke width w and an identity transform.
bool raster_adapter::stroke_contents_point(const vertex_source& vs, scalar x, scalar y, scalar w)
{
    abstract_raster_adapter* adapter = get_system_device()->create_raster_adapter();
    trans_affine identity;
    bool hit = false;
    if (adapter) {
        adapter->set_raster_method(raster_stroke);
        adapter->set_stroke_attr_val(STA_WIDTH, w);
        adapter->set_transform(identity.impl());
        adapter->add_shape(vs, 0);
        adapter->commit();
        hit = adapter->contains(x, y);
    }
    // NOTE(review): same unconditional destroy as fill_contents_point.
    get_system_device()->destroy_raster_adapter(adapter);
    return hit;
}

}
onecoolx/picasso
src/picasso_raster_adapter.cpp
C++
bsd-3-clause
2,749