[Dataset-export artifact — Hugging Face dataset-viewer header: a single "text" column with string lengths 3 to 1.05M. Not part of the source code below.]
import unittest
from you_get.common import any_download
class JseEduTest(unittest.TestCase):
    """Smoke test: download a resource from mskzkt.jse.edu.cn via you-get.

    NOTE(review): network-dependent integration test — it performs a real
    download into the current directory (info_only=False) and merges the
    downloaded segments (merge=True).
    """

    def test_download(self):
        # Fixed sample resource id; presumably stable on the site — verify.
        any_download(
            'https://mskzkt.jse.edu.cn/cloudCourse/seyk/detail.php?resource_id=12961',
            output_dir='.',
            merge=True,
            info_only=False,
        )
[Dataset-export artifact — record separator between the Python test snippet above and the JavaScript bundle below.]
/*
 * Bliss (blissfuljs) utility library — minified third-party vendor bundle.
 * Provides the `$` / `$$` helpers, `$.Class`, `$.bind`, `$.fire`, `$.fetch`
 * etc. used by the Mavo source further down in this file.
 * NOTE(review): do not hand-edit minified code; update from the upstream
 * build instead. The hard line breaks below were introduced by the paste
 * (some fall after `return` or inside tokens) — presumably the original
 * bundle was a single line; restore from upstream to be safe. TODO confirm.
 */
!function(){"use strict";function t(e,n,i){return n=void 0===n?1:n,i=i||n+1,i-n<=1?function(){if(arguments.length<=n||"string"===r.type(arguments[n]))return e.apply(this,arguments);var t,i=arguments[n];for(var o in i){var s=Array.prototype.slice.call(arguments);s.splice(n,1,o,i[o]),t=e.apply(this,s)}return t}:t(t(e,n+1,i),n,i-1)}function e(t,r,i){var o=n(i);if("string"===o){var s=Object.getOwnPropertyDescriptor(r,i);!s||s.writable&&s.configurable&&s.enumerable&&!s.get&&!s.set?t[i]=r[i]:(delete t[i],Object.defineProperty(t,i,s))}else if("array"===o)i.forEach(function(n){n in r&&e(t,r,n)});else for(var a in r)i&&("regexp"===o&&!i.test(a)||"function"===o&&!i.call(r,a))||e(t,r,a);return t}function n(t){if(null===t)return"null";if(void 0===t)return"undefined";var e=(Object.prototype.toString.call(t).match(/^\[object\s+(.*?)\]$/)[1]||"").toLowerCase();return"number"==e&&isNaN(t)?"nan":e}var r=self.Bliss=e(function(t,e){return 2==arguments.length&&!e||!t?null:"string"===r.type(t)?(e||document).querySelector(t):t||null},self.Bliss);e(r,{extend:e,overload:t,type:n,property:r.property||"_",listeners:self.WeakMap?new WeakMap:new Map,original:{addEventListener:(self.EventTarget||Node).prototype.addEventListener,removeEventListener:(self.EventTarget||Node).prototype.removeEventListener},sources:{},noop:function(){},$:function(t,e){return t instanceof Node||t instanceof Window?[t]:2!=arguments.length||e?Array.prototype.slice.call("string"==typeof t?(e||document).querySelectorAll(t):t||[]):[]},defined:function(){for(var t=0;t<arguments.length;t++)if(void 0!==arguments[t])return arguments[t]},create:function(t,e){return t instanceof Node?r.set(t,e):(1===arguments.length&&("string"===r.type(t)?e={}:(e=t,t=e.tag,e=r.extend({},e,function(t){return"tag"!==t}))),r.set(document.createElement(t||"div"),e))},each:function(t,e,n){n=n||{};for(var r in t)n[r]=e.call(t,r,t[r]);return n},ready:function(t,e,n){if("function"!=typeof t||e||(e=t,t=void 
0),t=t||document,e&&("loading"!==t.readyState?e():r.once(t,"DOMContentLoaded",function(){e()})),!n)return new Promise(function(e){r.ready(t,e,!0)})},Class:function(t){var e,n=["constructor","extends","abstract","static"].concat(Object.keys(r.classProps)),i=t.hasOwnProperty("constructor")?t.constructor:r.noop;2==arguments.length?(e=arguments[0],t=arguments[1]):(e=function(){if(this.constructor.__abstract&&this.constructor===e)throw new Error("Abstract classes cannot be directly instantiated.");e["super"]&&e["super"].apply(this,arguments),i.apply(this,arguments)},e["super"]=t["extends"]||null,e.prototype=r.extend(Object.create(e["super"]?e["super"].prototype:Object),{constructor:e}),e.prototype["super"]=e["super"]?e["super"].prototype:null,e.__abstract=!!t["abstract"]);var o=function(t){return this.hasOwnProperty(t)&&n.indexOf(t)===-1};if(t["static"]){r.extend(e,t["static"],o);for(var s in r.classProps)s in t["static"]&&r.classProps[s](e,t["static"][s])}r.extend(e.prototype,t,o);for(var s in r.classProps)s in t&&r.classProps[s](e.prototype,t[s]);return e},classProps:{lazy:t(function(t,e,n){return Object.defineProperty(t,e,{get:function(){var t=n.call(this);return Object.defineProperty(this,e,{value:t,configurable:!0,enumerable:!0,writable:!0}),t},set:function(t){Object.defineProperty(this,e,{value:t,configurable:!0,enumerable:!0,writable:!0})},configurable:!0,enumerable:!0}),t}),live:t(function(t,e,n){return"function"===r.type(n)&&(n={set:n}),Object.defineProperty(t,e,{get:function(){var t=this["_"+e],r=n.get&&n.get.call(this,t);return void 0!==r?r:t},set:function(t){var r=this["_"+e],i=n.set&&n.set.call(this,t,r);this["_"+e]=void 0!==i?i:t},configurable:n.configurable,enumerable:n.enumerable}),t})},include:function(){var t=arguments[arguments.length-1],e=2===arguments.length&&arguments[0],n=document.createElement("script");return e?Promise.resolve():new 
Promise(function(e,i){r.set(n,{async:!0,onload:function(){e(),n.parentNode&&n.parentNode.removeChild(n)},onerror:function(){i()},src:t,inside:document.head})})},fetch:function(t,n){if(!t)throw new TypeError("URL parameter is mandatory and cannot be "+t);var i=e({url:new URL(t,location),data:"",method:"GET",headers:{},xhr:new XMLHttpRequest},n);i.method=i.method.toUpperCase(),r.hooks.run("fetch-args",i),"GET"===i.method&&i.data&&(i.url.search+=i.data),document.body.setAttribute("data-loading",i.url),i.xhr.open(i.method,i.url.href,i.async!==!1,i.user,i.password);for(var o in n)if("upload"===o)i.xhr.upload&&"object"==typeof n[o]&&r.extend(i.xhr.upload,n[o]);else if(o in i.xhr)try{i.xhr[o]=n[o]}catch(s){self.console&&console.error(s)}var a=Object.keys(i.headers).map(function(t){return t.toLowerCase()});"GET"!==i.method&&a.indexOf("content-type")===-1&&i.xhr.setRequestHeader("Content-type","application/x-www-form-urlencoded");for(var c in i.headers)void 0!==i.headers[c]&&i.xhr.setRequestHeader(c,i.headers[c]);var u=new Promise(function(t,e){i.xhr.onload=function(){document.body.removeAttribute("data-loading"),0===i.xhr.status||i.xhr.status>=200&&i.xhr.status<300||304===i.xhr.status?t(i.xhr):e(r.extend(Error(i.xhr.statusText),{xhr:i.xhr,get status(){return this.xhr.status}}))},i.xhr.onerror=function(){document.body.removeAttribute("data-loading"),e(r.extend(Error("Network Error"),{xhr:i.xhr}))},i.xhr.ontimeout=function(){document.body.removeAttribute("data-loading"),e(r.extend(Error("Network Timeout"),{xhr:i.xhr}))},i.xhr.send("GET"===i.method?null:i.data)});return u.xhr=i.xhr,u},value:function(t){var e="string"!==r.type(t);return r.$(arguments).slice(+e).reduce(function(t,e){return t&&t[e]},e?t:self)}}),r.Hooks=new r.Class({add:function(t,e,n){if("string"==typeof arguments[0])(Array.isArray(t)?t:[t]).forEach(function(t){this[t]=this[t]||[],e&&this[t][n?"unshift":"push"](e)},this);else for(var t in 
arguments[0])this.add(t,arguments[0][t],arguments[1])},run:function(t,e){this[t]=this[t]||[],this[t].forEach(function(t){t.call(e&&e.context?e.context:e,e)})}}),r.hooks=new r.Hooks;r.property;r.Element=function(t){this.subject=t,this.data={},this.bliss={}},r.Element.prototype={set:t(function(t,e){t in r.setProps?r.setProps[t].call(this,e):t in this?this[t]=e:this.setAttribute(t,e)},0),transition:function(t,e){return e=+e||400,new Promise(function(n,i){if("transition"in this.style){var o=r.extend({},this.style,/^transition(Duration|Property)$/);r.style(this,{transitionDuration:(e||400)+"ms",transitionProperty:Object.keys(t).join(", ")}),r.once(this,"transitionend",function(){clearTimeout(s),r.style(this,o),n(this)});var s=setTimeout(n,e+50,this);r.style(this,t)}else r.style(this,t),n(this)}.bind(this))},fire:function(t,e){var n=document.createEvent("HTMLEvents");return n.initEvent(t,!0,!0),this.dispatchEvent(r.extend(n,e))},bind:t(function(t,e){if(arguments.length>1&&("function"===r.type(e)||e.handleEvent)){var n=e;e="object"===r.type(arguments[2])?arguments[2]:{capture:!!arguments[2]},e.callback=n}var i=r.listeners.get(this)||{};t.trim().split(/\s+/).forEach(function(t){if(t.indexOf(".")>-1){t=t.split(".");var n=t[1];t=t[0]}i[t]=i[t]||[],0===i[t].filter(function(t){return t.callback===e.callback&&t.capture==e.capture}).length&&i[t].push(r.extend({className:n},e)),r.original.addEventListener.call(this,t,e.callback,e)},this),r.listeners.set(this,i)},0),unbind:t(function(t,e){if(e&&("function"===r.type(e)||e.handleEvent)){var n=e;e=arguments[2]}"boolean"==r.type(e)&&(e={capture:e}),e=e||{},e.callback=e.callback||n;var i=r.listeners.get(this);(t||"").trim().split(/\s+/).forEach(function(t){if(t.indexOf(".")>-1){t=t.split(".");var n=t[1];t=t[0]}if(t&&e.callback)return r.original.removeEventListener.call(this,t,e.callback,e.capture);if(i)for(var o in i)if(!t||o===t)for(var 
s,a=0;s=i[o][a];a++)n&&n!==s.className||e.callback&&e.callback!==s.callback||!!e.capture!=!!s.capture||(i[o].splice(a,1),r.original.removeEventListener.call(this,o,s.callback,s.capture),a--)},this)},0)},r.setProps={style:function(t){for(var e in t)e in this.style?this.style[e]=t[e]:this.style.setProperty(e,t[e])},attributes:function(t){for(var e in t)this.setAttribute(e,t[e])},properties:function(t){r.extend(this,t)},events:function(t){if(1!=arguments.length||!t||!t.addEventListener)return r.bind.apply(this,[this].concat(r.$(arguments)));var e=this;if(r.listeners){var n=r.listeners.get(t);for(var i in n)n[i].forEach(function(t){r.bind(e,i,t.callback,t.capture)})}for(var o in t)0===o.indexOf("on")&&(this[o]=t[o])},once:t(function(t,e){var n=this,i=function(){return r.unbind(n,t,i),e.apply(n,arguments)};r.bind(this,t,i,{once:!0})},0),delegate:t(function(t,e,n){r.bind(this,t,function(t){t.target.closest(e)&&n.call(this,t)})},0,2),contents:function(t){(t||0===t)&&(Array.isArray(t)?t:[t]).forEach(function(t){var e=r.type(t);/^(string|number)$/.test(e)?t=document.createTextNode(t+""):"object"===e&&(t=r.create(t)),t instanceof Node&&this.appendChild(t)},this)},inside:function(t){t&&t.appendChild(this)},before:function(t){t&&t.parentNode.insertBefore(this,t)},after:function(t){t&&t.parentNode.insertBefore(this,t.nextSibling)},start:function(t){t&&t.insertBefore(this,t.firstChild)},around:function(t){t&&t.parentNode&&r.before(this,t),this.appendChild(t)}},r.Array=function(t){this.subject=t},r.Array.prototype={all:function(t){var e=r.$(arguments).slice(1);return this[t].apply(this,e)}},r.add=t(function(t,e,n,i){n=r.extend({$:!0,element:!0,array:!0},n),"function"==r.type(e)&&(!n.element||t in r.Element.prototype&&i||(r.Element.prototype[t]=function(){return this.subject&&r.defined(e.apply(this.subject,arguments),this.subject)}),!n.array||t in r.Array.prototype&&i||(r.Array.prototype[t]=function(){var t=arguments;return this.subject.map(function(n){return 
n&&r.defined(e.apply(n,t),n)})}),n.$&&(r.sources[t]=r[t]=e,(n.array||n.element)&&(r[t]=function(){var e=[].slice.apply(arguments),i=e.shift(),o=n.array&&Array.isArray(i)?"Array":"Element";return r[o].prototype[t].apply({subject:i},e)})))},0),r.add(r.Array.prototype,{element:!1}),r.add(r.Element.prototype),r.add(r.setProps),r.add(r.classProps,{element:!1,array:!1});var i=document.createElement("_");r.add(r.extend({},HTMLElement.prototype,function(t){return"function"===r.type(i[t])}),null,!0)}();
/* jsep v0.3.2 (http://jsep.from.so/) */
/*
 * jsep — tiny JavaScript expression parser; Mavo uses it to parse property
 * expressions. Minified third-party vendor bundle — do not hand-edit.
 * NOTE(review): the paste appears to have broken this bundle across lines,
 * including inside a string literal ("JavaScript Expression / Parser") and
 * after a `return` (ASI hazard) — restore from the upstream minified build.
 */
!function(e){"use strict";var r=function(e,r){var t=new Error(e+" at character "+r);throw t.index=r,t.description=e,t},t={"-":!0,"!":!0,"~":!0,"+":!0},n={"||":1,"&&":2,"|":3,"^":4,"&":5,"==":6,"!=":6,"===":6,"!==":6,"<":7,">":7,"<=":7,">=":7,"<<":8,">>":8,">>>":8,"+":9,"-":9,"*":10,"/":10,"%":10},o=function(e){var r,t=0;for(var n in e)(r=n.length)>t&&e.hasOwnProperty(n)&&(t=r);return t},i=o(t),a=o(n),u={true:!0,false:!1,null:null},s=function(e){return n[e]||0},p=function(e,r,t){return{type:"||"===e||"&&"===e?"LogicalExpression":"BinaryExpression",operator:e,left:r,right:t}},f=function(e){return e>=48&&e<=57},c=function(e){return 36===e||95===e||e>=65&&e<=90||e>=97&&e<=122||e>=128&&!n[String.fromCharCode(e)]},l=function(e){return 36===e||95===e||e>=65&&e<=90||e>=97&&e<=122||e>=48&&e<=57||e>=128&&!n[String.fromCharCode(e)]},d=function(e){for(var o,d,h=0,v=e.charAt,x=e.charCodeAt,y=function(r){return v.call(e,r)},m=function(r){return x.call(e,r)},b=e.length,E=function(){for(var e=m(h);32===e||9===e||10===e||13===e;)e=m(++h)},g=function(){var e,t,n=w();return E(),63!==m(h)?n:(h++,(e=g())||r("Expected expression",h),E(),58===m(h)?(h++,(t=g())||r("Expected expression",h),{type:"ConditionalExpression",test:n,consequent:e,alternate:t}):void r("Expected :",h))},C=function(){E();for(var r=e.substr(h,a),t=r.length;t>0;){if(n.hasOwnProperty(r))return h+=t,r;r=r.substr(0,--t)}return!1},w=function(){var e,t,n,o,i,a,u,f;if(a=O(),!(t=C()))return a;for(i={value:t,prec:s(t)},(u=O())||r("Expected expression after "+t,h),o=[a,i,u];(t=C())&&0!==(n=s(t));){for(i={value:t,prec:n};o.length>2&&n<=o[o.length-2].prec;)u=o.pop(),t=o.pop().value,a=o.pop(),e=p(t,a,u),o.push(e);(e=O())||r("Expected expression after "+t,h),o.push(i,e)}for(e=o[f=o.length-1];f>1;)e=p(o[f-1].value,o[f-2],e),f-=2;return e},O=function(){var r,n,o;if(E(),r=m(h),f(r)||46===r)return U();if(39===r||34===r)return k();if(91===r)return S();for(o=(n=e.substr(h,i)).length;o>0;){if(t.hasOwnProperty(n))return 
h+=o,{type:"UnaryExpression",operator:n,argument:O(),prefix:!0};n=n.substr(0,--o)}return!(!c(r)&&40!==r)&&A()},U=function(){for(var e,t,n="";f(m(h));)n+=y(h++);if(46===m(h))for(n+=y(h++);f(m(h));)n+=y(h++);if("e"===(e=y(h))||"E"===e){for(n+=y(h++),"+"!==(e=y(h))&&"-"!==e||(n+=y(h++));f(m(h));)n+=y(h++);f(m(h-1))||r("Expected exponent ("+n+y(h)+")",h)}return t=m(h),c(t)?r("Variable names cannot start with a number ("+n+y(h)+")",h):46===t&&r("Unexpected period",h),{type:"Literal",value:parseFloat(n),raw:n}},k=function(){for(var e,t="",n=y(h++),o=!1;h<b;){if((e=y(h++))===n){o=!0;break}if("\\"===e)switch(e=y(h++)){case"n":t+="\n";break;case"r":t+="\r";break;case"t":t+="\t";break;case"b":t+="\b";break;case"f":t+="\f";break;case"v":t+="\v";break;default:t+=e}else t+=e}return o||r('Unclosed quote after "'+t+'"',h),{type:"Literal",value:t,raw:n+t+n}},L=function(){var t,n=m(h),o=h;for(c(n)?h++:r("Unexpected "+y(h),h);h<b&&(n=m(h),l(n));)h++;return t=e.slice(o,h),u.hasOwnProperty(t)?{type:"Literal",value:u[t],raw:t}:"this"===t?{type:"ThisExpression"}:{type:"Identifier",name:t}},j=function(e){for(var t,n,o=[],i=!1;h<b;){if(E(),(t=m(h))===e){i=!0,h++;break}44===t?h++:((n=g())&&"Compound"!==n.type||r("Expected comma",h),o.push(n))}return i||r("Expected "+String.fromCharCode(e),h),o},A=function(){var e,t;for(t=40===(e=m(h))?P():L(),E(),e=m(h);46===e||91===e||40===e;)h++,46===e?(E(),t={type:"MemberExpression",computed:!1,object:t,property:L()}):91===e?(t={type:"MemberExpression",computed:!0,object:t,property:g()},E(),93!==(e=m(h))&&r("Unclosed [",h),h++):40===e&&(t={type:"CallExpression",arguments:j(41),callee:t}),E(),e=m(h);return t},P=function(){h++;var e=g();if(E(),41===m(h))return h++,e;r("Unclosed (",h)},S=function(){return h++,{type:"ArrayExpression",elements:j(93)}},B=[];h<b;)59===(o=m(h))||44===o?h++:(d=g())?B.push(d):h<b&&r('Unexpected "'+y(h)+'"',h);return 1===B.length?B[0]:{type:"Compound",body:B}};if(d.version="0.3.2",d.toString=function(){return"JavaScript Expression 
Parser (JSEP) v"+d.version},d.addUnaryOp=function(e){return i=Math.max(e.length,i),t[e]=!0,this},d.addBinaryOp=function(e,r){return a=Math.max(e.length,a),n[e]=r,this},d.addLiteral=function(e,r){return u[e]=r,this},d.removeUnaryOp=function(e){return delete t[e],e.length===i&&(i=o(t)),this},d.removeAllUnaryOps=function(){return t={},i=0,this},d.removeBinaryOp=function(e){return delete n[e],e.length===a&&(a=o(n)),this},d.removeAllBinaryOps=function(){return n={},a=0,this},d.removeLiteral=function(e){return delete u[e],this},d.removeAllLiterals=function(){return u={},this},"undefined"==typeof exports){var h=e.jsep;e.jsep=d,d.noConflict=function(){return e.jsep===d&&(e.jsep=h),d}}else"undefined"!=typeof module&&module.exports?exports=module.exports=d:exports.parse=d}(this);
//# sourceMappingURL=jsep.min.js.map
/*
 * Stretchy — auto-sizing for form controls (textarea/input/select); Mavo
 * calls Stretchy.resizeAll() after rendering. Minified third-party vendor
 * bundle — do not hand-edit; update from upstream. Line breaks below were
 * introduced by the paste.
 */
!function(){function e(e,t){return e instanceof Node||e instanceof Window?[e]:[].slice.call("string"==typeof e?(t||document).querySelectorAll(e):e||[])}if(self.Element&&(Element.prototype.matches||(Element.prototype.matches=Element.prototype.webkitMatchesSelector||Element.prototype.mozMatchesSelector||Element.prototype.msMatchesSelector||Element.prototype.oMatchesSelector||null),Element.prototype.matches)){var t=self.Stretchy={selectors:{base:'textarea, select:not([size]), input:not([type]), input[type="'+"text number url email tel".split(" ").join('"], input[type="')+'"]',filter:"*"},script:document.currentScript||e("script").pop(),resize:function(e){if(t.resizes(e)){var i,n=getComputedStyle(e),o=0;!e.value&&e.placeholder&&(i=!0,e.value=e.placeholder);var l=e.nodeName.toLowerCase();if("textarea"==l)e.style.height="0","border-box"==n.boxSizing?o=e.offsetHeight:"content-box"==n.boxSizing&&(o=-e.clientHeight+parseFloat(n.minHeight)),
e.style.height=e.scrollHeight+o+"px";else if("input"==l)if(e.style.width="1000px",e.offsetWidth){e.style.width="0","border-box"==n.boxSizing?o=e.offsetWidth:"padding-box"==n.boxSizing?o=e.clientWidth:"content-box"==n.boxSizing&&(o=parseFloat(n.minWidth));var r=Math.max(o,e.scrollWidth-e.clientWidth);e.style.width=r+"px";for(var s=0;s<10&&(e.scrollLeft=1e10,0!=e.scrollLeft);s++)r+=e.scrollLeft,e.style.width=r+"px"}else e.style.width=e.value.length+1+"ch";else if("select"==l){var c=e.selectedIndex>0?e.selectedIndex:0,a=document.createElement("_");a.textContent=e.options[c].textContent,e.parentNode.insertBefore(a,e.nextSibling);var d;for(var h in n){var p=n[h];/^(width|webkitLogicalWidth|length)$/.test(h)||"string"!=typeof p||(a.style[h]=p,/appearance$/i.test(h)&&(d=h))}a.style.width="",a.offsetWidth>0&&(e.style.width=a.offsetWidth+"px",n[d]&&"none"===n[d]||(e.style.width="calc("+e.style.width+" + 2em)")),a.parentNode.removeChild(a),a=null}i&&(e.value="")}},resizeAll:function(i){
e(i||t.selectors.base).forEach(function(e){t.resize(e)})},active:!0,resizes:function(e){return e&&e.parentNode&&e.matches&&e.matches(t.selectors.base)&&e.matches(t.selectors.filter)},init:function(){t.selectors.filter=t.script.getAttribute("data-filter")||(e("[data-stretchy-filter]").pop()||document.body).getAttribute("data-stretchy-filter")||Stretchy.selectors.filter||"*",t.resizeAll()},$$:e};"loading"!==document.readyState?t.init():document.addEventListener("DOMContentLoaded",t.init);var i=function(e){t.active&&t.resize(e.target)};document.documentElement.addEventListener("input",i),document.documentElement.addEventListener("change",i),self.MutationObserver&&new MutationObserver(function(e){t.active&&e.forEach(function(e){"childList"==e.type&&Stretchy.resizeAll(e.addedNodes)})}).observe(document.documentElement,{childList:!0,subtree:!0})}}();
//# sourceMappingURL=stretchy.min.js.map
"use strict";
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
/**
* Mavo: Create web applications by writing HTML and CSS!
* @author Lea Verou and contributors
* @version v0.1.6
*/
(function ($, $$) {
var _ = self.Mavo = $.Class({
/**
* Mavo app instance: normalizes data-mv-* attributes, builds the node tree,
* wires up backends/permissions, the UI bar, autosave, keyboard shortcuts
* and in-page anchor scrolling for `element`.
* @param {Element} element - Root element of the app (carries mv-app).
*/
constructor: function constructor(element) {
var _this = this;
// Deferreds resolved once the tree is built / initial data is rendered
this.treeBuilt = Mavo.defer();
this.dataLoaded = Mavo.defer();
this.element = element;
this.inProgress = false;
// Index among other mavos in the page, 1 is first
this.index = Object.keys(_.all).length + 1;
Object.defineProperty(_.all, this.index - 1, { value: this });
// Convert any data-mv-* attributes to mv-*
var selector = _.attributes.map(function (attribute) {
return "[data-" + attribute + "]";
}).join(", ");
[this.element].concat(_toConsumableArray($$(selector, this.element))).forEach(function (element) {
_.attributes.forEach(function (attribute) {
var value = element.getAttribute("data-" + attribute);
if (value !== null) {
element.setAttribute(attribute, value);
}
});
});
// Assign a unique (for the page) id to this mavo instance
this.id = Mavo.getAttribute(this.element, "mv-app", "id") || "mavo" + this.index;
if (this.id in _.all) {
// Duplicate app name
for (var i = 2; this.id + i in _.all; i++) {}
this.id = this.id + i;
}
_.all[this.id] = this;
this.element.setAttribute("mv-app", this.id);
// Locale: nearest [lang] ancestor wins, else the global default
var lang = $.value(this.element.closest("[lang]"), "lang") || Mavo.locale;
this.locale = Mavo.Locale.get(lang);
// Should we start in edit mode?
this.autoEdit = this.element.classList.contains("mv-autoedit");
// Should we save automatically?
this.autoSave = this.element.hasAttribute("mv-autosave");
this.autoSaveDelay = (this.element.getAttribute("mv-autosave") || 3) * 1000;
this.element.setAttribute("typeof", "");
Mavo.hooks.run("init-start", this);
// Apply heuristic for groups
$$(_.selectors.primitive + "," + _.selectors.multiple, this.element).forEach(function (element) {
var hasChildren = $(_.selectors.not(_.selectors.formControl) + ", " + _.selectors.property, element);
if (hasChildren) {
var config = Mavo.Primitive.getConfig(element);
var isCollection = Mavo.is("multiple", element);
if (isCollection || !config.attribute && !config.hasChildren) {
element.setAttribute("typeof", "");
}
}
});
this.expressions = new Mavo.Expressions(this);
// Build mavo objects
Mavo.hooks.run("init-tree-before", this);
this.root = new Mavo.Group(this.element, this);
this.treeBuilt.resolve();
Mavo.hooks.run("init-tree-after", this);
this.permissions = new Mavo.Permissions();
var backendTypes = ["source", "storage", "init"]; // order is significant!
// Figure out backends for storage, data reads, and initialization respectively
backendTypes.forEach(function (role) {
return _this.updateBackend(role);
});
// React to runtime changes of the mv-source/mv-storage/mv-init attributes
this.backendObserver = new Mavo.Observer(this.element, backendTypes.map(function (role) {
return "mv-" + role;
}), function (records) {
var changed = {};
var roles = records.map(function (record) {
var role = record.attributeName.replace(/^mv-/, "");
changed[role] = _this.updateBackend(role);
return role;
});
// Do we need to re-load data?
if (changed.source) {
// if source changes, always reload
_this.load();
} else if (!_this.source) {
if (changed.storage || changed.init && !_this.root.data) {
_this.load();
}
}
});
this.permissions.can("login", function () {
// We also support a URL param to trigger login, in case the user doesn't want visible login UI
if (Mavo.Functions.url("login") !== null && _this.index == 1 || Mavo.Functions.url(_this.id + "-login") !== null) {
_this.primaryBackend.login();
}
});
// Update login status
$.bind(this.element, "mv-login.mavo", function (evt) {
if (evt.backend == (_this.source || _this.storage)) {
// If last time we rendered we got nothing, maybe now we'll have better luck?
if (!_this.root.data && !_this.unsavedChanges) {
_this.load();
}
}
});
this.bar = new Mavo.UI.Bar(this);
// Prevent editing properties inside <summary> to open and close the summary (fix bug #82)
if ($("summary [property]:not([typeof])")) {
this.element.addEventListener("click", function (evt) {
if (evt.target != document.activeElement) {
evt.preventDefault();
}
});
}
// Is there any control that requires an edit button?
this.needsEdit = this.calculateNeedsEdit();
this.setUnsavedChanges(false);
// Mirror permission changes into the mv-permissions attribute (for CSS/inspection)
this.permissions.onchange(function (_ref) {
var action = _ref.action,
value = _ref.value;
var permissions = _this.element.getAttribute("mv-permissions") || "";
permissions = permissions.trim().split(/\s+/).filter(function (a) {
return a != action;
});
if (value) {
permissions.push(action);
}
_this.element.setAttribute("mv-permissions", permissions.join(" "));
});
if (this.needsEdit) {
this.permissions.can(["edit", "add", "delete"], function () {
// Observe entire tree for mv-mode changes
_this.modeObserver = new Mavo.Observer(_this.element, "mv-mode", function (records) {
records.forEach(function (record) {
var element = record.target;
var nodes = _.Node.children(element);
nodeloop: for (var _i = 0; _i < nodes.length; _i++) {
var _node = nodes[_i];
var previousMode = _node.mode,
mode = void 0;
if (_node.element == element) {
// If attribute set directly on a Mavo node, then it forces it into that mode
// otherwise, descendant nodes still inherit, unless they are also mode-restricted
mode = _node.element.getAttribute("mv-mode");
_node.modes = mode;
} else {
// Inherited
if (_node.modes) {
// Mode-restricted, we cannot change to the other mode
continue nodeloop;
}
mode = _.getStyle(_node.element.parentNode, "--mv-mode");
}
_node.mode = mode;
if (previousMode != _node.mode) {
_node[_node.mode == "edit" ? "edit" : "done"]();
}
}
});
}, { subtree: true });
if (_this.autoEdit) {
_this.edit();
}
}, function () {
// cannot edit: stop tracking mv-mode changes
_this.modeObserver && _this.modeObserver.destroy();
});
}
if (this.storage || this.source) {
// Fetch existing data
this.permissions.can("read", function () {
return _this.load();
});
} else {
// No storage or source
requestAnimationFrame(function () {
_this.dataLoaded.resolve();
$.fire(_this.element, "mv-load");
});
});
// Dynamic ids
$.bind(this.element, "mv-load.mavo", function (evt) {
if (location.hash) {
// Scroll to the #hash target once it exists (it may be rendered later)
var callback = function callback(records) {
var target = document.getElementById(location.hash.slice(1));
if (target || !location.hash) {
if (_this.element.contains(target)) {
requestAnimationFrame(function () {
// Give the browser a chance to render
Mavo.scrollIntoViewIfNeeded(target);
});
}
if (observer) {
observer.destroy();
observer = null;
}
}
return target;
};
if (!callback()) {
// No target, perhaps not yet?
var observer = new Mavo.Observer(_this.element, "id", callback, { subtree: true });
}
}
requestAnimationFrame(function () {
return Stretchy.resizeAll();
});
});
// Autosave: debounce saves triggered by data changes, once data has loaded
if (this.autoSave) {
this.dataLoaded.then(function (evt) {
var debouncedSave = _.debounce(function () {
_this.save();
}, _this.autoSaveDelay);
var callback = function callback(evt) {
if (evt.node.saved) {
debouncedSave();
}
};
requestAnimationFrame(function () {
_this.permissions.can("save", function () {
$.bind(_this.element, "mv-change.mavo:autosave", callback);
}, function () {
$.unbind(_this.element, "mv-change.mavo:autosave", callback);
});
});
});
}
// Keyboard navigation
this.element.addEventListener("keydown", function (evt) {
// Ctrl + S or Cmd + S to save
if (_this.permissions.save && evt.keyCode == 83 && evt[_.superKey] && !evt.altKey) {
evt.preventDefault();
_this.save();
} else if (evt.keyCode == 38 || evt.keyCode == 40) {
// Up/Down arrows move between collection items
var element = evt.target;
if (element.matches("textarea, input[type=range], input[type=number]")) {
// Arrow keys are meaningful here
return;
}
if (element.matches(".mv-editor")) {
var editor = true;
element = element.parentNode;
}
var node = Mavo.Node.get(element);
if (node && node.closestCollection) {
var nextNode = node.getCousin(evt.keyCode == 38 ? -1 : 1, { wrap: true });
if (nextNode) {
if (editor && nextNode.editing) {
nextNode.edit({ immediately: true }).then(function () {
return nextNode.editor.focus();
});
} else {
nextNode.element.focus();
}
evt.preventDefault();
}
}
}
});
Mavo.hooks.run("init-end", this);
},
get editing() {
return this.root.editing;
},
getData: function getData(o) {
return this.root.getData(o);
},
toJSON: function toJSON() {
return _.toJSON(this.getData());
},
message: function message(_message, options) {
return new _.UI.Message(this, _message, options);
},
error: function error(message) {
	// Show `message` as a dismissable error bubble.
	this.message(message, {
		type: "error",
		dismiss: ["button", "timeout"]
	});

	// Log more info for programmers: every argument after the first is
	// mirrored to the console alongside a highlighted app-id prefix.
	var log = Array.prototype.slice.call(arguments, 1);

	if (log.length > 0) {
		console.log.apply(console, ["%c" + this.id + ": " + message, "color: red; font-weight: bold"].concat(log));
	}
},
render: function render(data) {
var _this2 = this;
this.expressions.active = false;
var env = { context: this, data: data };
_.hooks.run("render-start", env);
if (env.data) {
this.root.render(env.data);
}
this.unsavedChanges = false;
this.expressions.active = true;
requestAnimationFrame(function () {
return _this2.expressions.update();
});
_.hooks.run("render-end", env);
},
edit: function edit() {
this.root.edit();
$.bind(this.element, "mouseenter.mavo:edit mouseleave.mavo:edit", function (evt) {
if (evt.target.matches(_.selectors.multiple)) {
evt.target.classList.remove("mv-has-hovered-item");
var parent = evt.target.parentNode.closest(_.selectors.multiple);
if (parent) {
parent.classList.toggle("mv-has-hovered-item", evt.type == "mouseenter");
}
}
}, true);
this.setUnsavedChanges();
},
/**
* Set this mavo instance’s unsavedChanges flag.
* @param {Boolean} [value]
* If true, just sets the flag to true, no traversal.
* If false, sets the flag of the Mavo instance and every tree node to false
* If not provided, traverses the tree and recalculates the flag value.
*/
setUnsavedChanges: function setUnsavedChanges(value) {
var unsavedChanges = !!value;
if (!value) {
this.walk(function (obj) {
if (obj.unsavedChanges) {
unsavedChanges = true;
if (value === false) {
obj.unsavedChanges = false;
}
return false;
}
});
}
return this.unsavedChanges = unsavedChanges;
},
// Conclude editing
done: function done() {
this.root.done();
$.unbind(this.element, ".mavo:edit");
this.unsavedChanges = false;
},
/**
* Update the backend for a given role.
* Resolution order: app-wide URL param (first app only, via _.Functions.url),
* app-scoped URL param, then the element's mv-<role> attribute; the literal
* "none" disables the backend for that role.
* @param {String} role - "source", "storage" or "init"
* @return {Boolean} true if a change occurred, false otherwise
*/
updateBackend: function updateBackend(role) {
var previous = this[role],
backend,
changed;
// Only the first Mavo instance on the page honours the bare ?<role> param
if (this.index == 1) {
backend = _.Functions.url(role);
}
if (!backend) {
// Fall back to the app-scoped param, then the mv-<role> attribute
backend = _.Functions.url(this.id + "-" + role) || this.element.getAttribute("mv-" + role) || null;
}
if (backend) {
backend = backend.trim();
// Explicit opt-out
if (backend == "none") {
backend = null;
}
}
if (backend && (!previous || !previous.equals(backend))) {
// We have a string, convert to a backend object if different than existing
this[role] = backend = _.Backend.create(backend, {
mavo: this,
format: this.element.getAttribute("mv-" + role + "-format") || this.element.getAttribute("mv-format")
}, this.element.getAttribute("mv-" + role + "-type"), this[role]);
changed = true;
} else if (!backend) {
// We had a backend and now we will un-have it
this[role] = null;
}
// Also changed when a backend was removed, or replaced by a non-equal one
changed = changed || (backend ? !backend.equals(previous) : !!previous);
if (changed) {
// A change occured
if (!this.storage && !this.source && this.init) {
// If init is present with no storage and no source, init is equivalent to source
this.source = this.init;
this.init = null;
}
// Rebuild the permission chain: storage permissions (or an edit-only
// default), then source permissions, then this app's own permissions
var permissions = this.storage ? this.storage.permissions : new Mavo.Permissions({ edit: true, save: false });
permissions.parent = this.source && this.source.permissions;
this.permissions.parent = permissions;
this.primaryBackend = this.storage || this.source;
}
return changed;
},
/**
* load - Fetch data from source and render it.
* Falls back to the init backend on failure; a 404 (or an otherwise-handled
* error) renders null so the app still initializes.
* @return {Promise} A promise that resolves when the data is loaded.
*/
load: function load() {
var _this3 = this;
// Prefer mv-source for reads, fall back to mv-storage
var backend = this.source || this.storage;
if (!backend) {
return Promise.resolve();
}
this.inProgress = "Loading";
return backend.ready.then(function () {
return backend.load();
}).catch(function (err) {
// Try again with init
if (_this3.init && _this3.init != backend) {
backend = _this3.init;
return _this3.init.ready.then(function () {
return _this3.init.load();
});
}
// No init, propagate error
return Promise.reject(err);
}).catch(function (err) {
// 404 means "no data yet": render empty. Anything else is reported.
if (err) {
var xhr = err instanceof XMLHttpRequest ? err : err.xhr;
if (xhr && xhr.status == 404) {
_this3.render(null);
} else {
var message = _this3._("problem-loading");
if (xhr) {
message += xhr.status ? _this3._("http-error", err) : ": " + _this3._("cant-connect");
}
_this3.error(message, err);
}
}
// Swallow the error so the chain below still renders (null) and finishes
return null;
}).then(function (data) {
return _this3.render(data);
}).then(function () {
_this3.inProgress = false;
requestAnimationFrame(function () {
_this3.dataLoaded.resolve();
$.fire(_this3.element, "mv-load");
});
});
},
/**
 * store - Persist the current data to the storage backend.
 * Errors are reported via this.error() and swallowed: the returned
 * promise resolves with the backend's result on success, null on failure.
 *
 * @return {Promise} Resolves when the save attempt finishes.
 */
store: function store() {
var _this4 = this;
if (!this.storage) {
// No storage backend configured; nothing to do
return Promise.resolve();
}
this.inProgress = "Saving";
return this.storage.store(this.getData()).catch(function (err) {
if (err) {
var message = _this4._("problem-saving");
if (err instanceof XMLHttpRequest) {
message += ": " + (err.status ? _this4._("http-error", err) : _this4._("cant-connect"));
}
_this4.error(message, err);
}
// Swallow the failure; callers receive null instead of a rejection
return null;
}).then(function (saved) {
// Always clear the progress indicator, success or not
_this4.inProgress = false;
return saved;
});
},
upload: function upload(file) {
var _this5 = this;
var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "images/" + file.name;
if (!this.uploadBackend) {
return Promise.reject();
}
this.inProgress = this._("uploading");
return this.uploadBackend.upload(file, path).then(function (url) {
_this5.inProgress = false;
return url;
}).catch(function (err) {
_this5.error(_this5._("error-uploading"), err);
_this5.inProgress = false;
return null;
});
},
/**
 * save - Store the data, then perform post-save bookkeeping.
 * Only on a truthy store() result: fires mv-save, records the save time,
 * commits node state via root.save(), and clears the unsaved flag.
 *
 * @return {Promise} Resolves when saving and bookkeeping finish.
 */
save: function save() {
var _this6 = this;
return this.store().then(function (saved) {
if (saved) {
$.fire(_this6.element, "mv-save", saved);
_this6.lastSaved = Date.now();
_this6.root.save();
_this6.unsavedChanges = false;
}
});
},
walk: function walk() {
var _root;
return (_root = this.root).walk.apply(_root, arguments);
},
/**
 * calculateNeedsEdit - Decide whether an edit control is needed:
 * true when any non-group node in the tree allows both modes (no mv-mode lock).
 * NOTE(review): the `test` parameter is never used in this body —
 * confirm whether it is vestigial or an intended filter hook.
 *
 * @return {boolean}
 */
calculateNeedsEdit: function calculateNeedsEdit(test) {
var needsEdit = false;
this.walk(function (obj, path) {
if (needsEdit) {
// If already true, no need to descend further
return false;
}
// True if both modes are allowed and node is not group
needsEdit = !obj.modes && obj.nodeType != "Group";
// Only descend into nodes whose mode is not locked
return !obj.modes;
}, undefined, { descentReturn: true });
return needsEdit;
},
// Live properties: each function runs as a setter side effect when the
// corresponding instance property is assigned (framework convention).
live: {
// Reflect load/save progress in attributes and a CSS custom property
inProgress: function inProgress(value) {
$.toggleAttribute(this.element, "mv-progress", value, value);
$.toggleAttribute(this.element, "aria-busy", !!value, !!value);
this.element.style.setProperty("--mv-progress-text", value ? "\"" + this._(value) + "\"" : "");
},
unsavedChanges: function unsavedChanges(value) {
this.element.classList.toggle("mv-unsaved-changes", value);
},
needsEdit: function needsEdit(value) {
// Show the edit button only when editing is both needed and permitted
this.bar.toggle("edit", value && this.permissions.edit);
},
storage: function storage(value) {
// When storage is removed, fall back to edit-only permissions
if (value !== this._storage && !value) {
var permissions = new Mavo.Permissions({ edit: true, save: false });
permissions.parent = this.permissions.parent;
this.permissions.parent = permissions;
}
},
primaryBackend: function primaryBackend(value) {
// Normalize falsy values to null; returning a value stores it
value = value || null;
if (value != this._primaryBackend) {
return value;
}
},
uploadBackend: {
get: function get() {
if (this.storage && this.storage.upload) {
// Prioritize storage
return this.storage;
}
}
}
},
static: {
version: "v0.1.6",
// Registry of all Mavo instances, keyed by id
all: {},
// Look up an instance by root element, numeric index, or id string
get: function get(id) {
if (id instanceof Element) {
// Get by element
for (var name in _.all) {
if (_.all[name].element == id) {
return _.all[name];
}
}
return null;
}
var name = typeof id === "number" ? Object.keys(_.all)[id] : id;
return _.all[name] || null;
},
// Cmd on Mac, Ctrl elsewhere
superKey: navigator.platform.indexOf("Mac") === 0 ? "metaKey" : "ctrlKey",
// Base URL for resolving relative paths (handles about: pages, e.g. sandboxes)
base: location.protocol == "about:" ? document.currentScript ? document.currentScript.src : "http://mavo.io" : location,
dependencies: [],
// Instantiate Mavo on all matching, not-yet-inited elements in container
init: function init() {
var container = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : document;
var mavos = Array.isArray(arguments[0]) ? arguments[0] : $$(_.selectors.init, container);
var ret = mavos.filter(function (element) {
return !_.get(element);
}) // not already inited
.map(function (element) {
return new _(element);
});
return ret;
},
UI: {},
hooks: new $.Hooks(),
// Attributes observed/recognized by Mavo.
// NOTE(review): "mv-permisssions" has a triple "s" — looks misspelled;
// confirm against the attribute name actually consumed elsewhere.
attributes: ["mv-app", "mv-storage", "mv-source", "mv-init", "mv-path", "mv-multiple-path", "mv-format", "mv-attribute", "mv-default", "mv-mode", "mv-edit", "mv-permisssions", "mv-rel", "mv-value"],
// Lazily-evaluated static properties (framework convention)
lazy: {
locale: function locale() {
return document.documentElement.lang || "en-GB";
},
toNode: function toNode() {
return Symbol("toNode");
},
toProxy: function toProxy() {
return Symbol("toProxy");
}
}
}
});
// Give the Mavo.all registry an array-like, live `length` property
Object.defineProperty(_.all, "length", {
get: function get() {
return Object.keys(this).length;
}
});
{
// Central registry of CSS selectors Mavo uses to recognize markup
var s = _.selectors = {
init: ".mv-app, [mv-app], [data-mv-app]",
property: "[property], [itemprop]",
specificProperty: function specificProperty(name) {
return "[property=" + name + "], [itemprop=" + name + "]";
},
group: "[typeof], [itemscope], [itemtype], [mv-group]",
multiple: "[mv-multiple]",
formControl: "input, select, option, textarea",
textInput: ["text", "email", "url", "tel", "search"].map(function (t) {
return "input[type=" + t + "]";
}).join(", ") + ", input:not([type]), textarea",
ui: ".mv-ui",
// Maps child element names to their required container selectors
container: {
// "li": "ul, ol",
"tr": "table",
"option": "select"
// "dt": "dl",
// "dd": "dl"
}
};
// Split a selector list into its comma-separated parts
var arr = s.arr = function (selector) {
return selector.split(/\s*,\s*/g);
};
// Negate every part: "a, b" -> ":not(a):not(b)"
var not = s.not = function (selector) {
return arr(selector).map(function (s) {
return ":not(" + s + ")";
}).join("");
};
// Union of two selector lists
var or = s.or = function (selector1, selector2) {
return selector1 + ", " + selector2;
};
// Cartesian concatenation: every part of selector1 combined with
// every part of selector2 ("a,b" and "c,d" -> "ac, ad, bc, bd")
var and = s.and = function (selector1, selector2) {
var ret = [],
arr2 = arr(selector2);
arr(selector1).forEach(function (s1) {
return ret.push.apply(ret, _toConsumableArray(arr2.map(function (s2) {
return s1 + s2;
})));
});
return ret.join(", ");
};
// Matches selector1 but not selector2
var andNot = s.andNot = function (selector1, selector2) {
return and(selector1, not(selector2));
};
$.extend(_.selectors, {
primitive: andNot(s.property, s.group),
rootGroup: andNot(s.group, s.property),
item: or(s.multiple, s.group),
output: or(s.specificProperty("output"), ".mv-output")
});
}
// Init mavo. Async to give other scripts a chance to modify stuff.
requestAnimationFrame(function () {
// Feature-detect; collect ids of polyfills the browser needs
var polyfills = [];
$.each({
"blissfuljs": Array.from && document.documentElement.closest && self.URL && "searchParams" in URL.prototype,
"Intl.~locale.en": self.Intl,
"IntersectionObserver": self.IntersectionObserver,
"Symbol": self.Symbol,
"Element.prototype.remove": Element.prototype.remove
}, function (id, supported) {
if (!supported) {
polyfills.push(id);
}
});
// "|gated" makes polyfill.io re-check support before applying each polyfill
var polyfillURL = "https://cdn.polyfill.io/v2/polyfill.min.js?unknown=polyfill&features=" + polyfills.map(function (a) {
return a + "|gated";
}).join(",");
_.dependencies.push(
// Plugins.load() must be run after DOM load to pick up all mv-plugins attributes
$.ready().then(function () {
return _.Plugins.load();
// $.include(condition, url): skips the fetch when condition is truthy,
// i.e. the polyfill script only loads if something was missing
}), $.include(!polyfills.length, polyfillURL));
_.inited = $.ready().then(function () {
// Show a loading indicator on all Mavo roots ASAP
$.attributes($$(_.selectors.init), { "mv-progress": "Loading" });
// _.ready is assigned synchronously below, before this callback runs
return _.ready;
}).catch(console.error).then(function () {
return Mavo.init();
});
_.ready = _.thenAll(_.dependencies);
});
// Restrict which elements the Stretchy library may autosize
// NOTE(review): presumably excludes property-bound Mavo editors to avoid
// double-handling — confirm against Stretchy's filter semantics.
Stretchy.selectors.filter = ".mv-editor:not([property]), .mv-autosize";
// Define $ and $$ if they are not already defined
// Primarily for backwards compat since we used to use Bliss Full.
self.$ = self.$ || $;
self.$$ = self.$$ || $$;
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = $.extend(Mavo, {
/**
* Load a file, only once
*/
load: function load(url) {
// Resolve relative URLs against the including script by default
var base = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : document.currentScript ? document.currentScript.src : location;
// De-dupe: remember every URL (as originally passed) ever requested
_.loaded = _.loaded || new Set();
if (_.loaded.has(url + "")) {
return;
}
url = new URL(url, base);
if (/\.css$/.test(url.pathname)) {
// CSS file
$.create("link", {
"href": url,
"rel": "stylesheet",
"inside": document.head
});
// No need to wait for stylesheets
return Promise.resolve();
}
// JS file
return $.include(url);
},
// Promisified FileReader: reads `file` as the given format
// ("DataURL", "Text", "ArrayBuffer", ...; maps to FileReader.readAs<format>).
// Resolves with reader.result, rejects on error or abort.
readFile: function readFile(file) {
var format = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "DataURL";
var reader = new FileReader();
return new Promise(function (resolve, reject) {
reader.onload = function (f) {
return resolve(reader.result);
};
reader.onerror = reader.onabort = reject;
reader["readAs" + format](file);
});
},
toJSON: function toJSON(data) {
if (data === null) {
return "";
}
if (typeof data === "string") {
// Do not stringify twice!
return data;
}
return JSON.stringify(data, null, "\t");
},
/**
* toJSON without cycles
*/
safeToJSON: function safeToJSON(o) {
var cache = self.WeakSet ? new WeakSet() : new Set();
return JSON.stringify(o, function (key, value) {
if ((typeof value === "undefined" ? "undefined" : _typeof(value)) === "object" && value !== null) {
// No circular reference found
if (cache.has(value)) {
return; // Circular reference found!
}
cache.add(value);
}
return value;
});
},
// Wrap a primitive (or null) into an object so extra properties can be
// attached to it, while valueOf()/Symbol.toPrimitive still yield the
// original primitive. Objects are extended in place.
objectify: function objectify(value, properties) {
var primitive = Mavo.value(value);
if ((typeof value === "undefined" ? "undefined" : _typeof(value)) !== "object" || value === null) {
if (value === null) {
// null has no wrapper class; build a stand-in that serializes back to null
var _value;
value = (_value = {}, _defineProperty(_value, Symbol.toStringTag, "Null"), _defineProperty(_value, "toJSON", function toJSON() {
return null;
}), _value);
} else {
// Use the primitive's own wrapper (Number, String, Boolean, ...)
var constructor = value.constructor;
value = new constructor(primitive);
value[Symbol.toStringTag] = constructor.name;
}
// Make the wrapper transparently coerce back to the primitive
value.valueOf = value[Symbol.toPrimitive] = function () {
return primitive;
};
}
return $.extend(value, properties);
},
value: function value(_value2) {
return _value2 && _value2.valueOf ? _value2.valueOf() : _value2;
},
/**
* Array & set utlities
*/
// If the passed value is not an array, convert to an array
toArray: function toArray(arr) {
return arr === undefined ? [] : Array.isArray(arr) ? arr : [arr];
},
// Delete an element from an array
// @param all {Boolean} Delete more than one?
delete: function _delete(arr, element, all) {
do {
var index = arr && arr.indexOf(element);
if (index > -1) {
arr.splice(index, 1);
}
} while (index > -1 && all);
},
// Recursively flatten a multi-dimensional array
flatten: function flatten(arr) {
if (!Array.isArray(arr)) {
return [arr];
}
return arr.reduce(function (prev, c) {
return _.toArray(prev).concat(_.flatten(c));
}, []);
},
// Push an item to an array iff it's not already in there
pushUnique: function pushUnique(arr, item) {
if (arr.indexOf(item) === -1) {
arr.push(item);
}
},
union: function union(set1, set2) {
return new Set([].concat(_toConsumableArray(set1 || []), _toConsumableArray(set2 || [])));
},
// Filter an array in place
// TODO add index to callback
filter: function filter(arr, callback) {
for (var i = 0; i < arr.length; i++) {
if (!callback(arr[i])) {
arr.splice(i, 1);
i--;
}
}
},
/**
* DOM element utilities
*/
// Test whether any of the given elements matches the Mavo selector
// registered under `thing` (a key of _.selectors, e.g. "group").
// Returns false when no element matches or when no elements are passed.
is: function is(thing) {
// Collect rest arguments (transpiled rest-parameter pattern)
for (var _len2 = arguments.length, elements = Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
elements[_key2 - 1] = arguments[_key2];
}
for (var i = 0, element; i < elements.length; i++) {
element = elements[i];
// Guard: skip non-elements (e.g. text nodes lack .matches)
if (element && element.matches && element.matches(_.selectors[thing])) {
return true;
}
}
return false;
},
/**
* Get the current value of a CSS property on an element
*/
/**
 * Get the current value of a CSS property on an element.
 * Returns the trimmed computed value, or undefined when the element
 * is falsy or the property has no value.
 */
getStyle: function getStyle(element, property) {
if (element) {
var value = getComputedStyle(element).getPropertyValue(property);
if (value) {
return value.trim();
}
}
},
/**
* Get/set data on an element
*/
/**
 * Get/set data on an element (per-element key/value store in a WeakMap).
 * 2 arguments: get data[name].
 * 3 arguments with value === undefined: delete data[name].
 * Otherwise: set data[name] = value and return it.
 */
data: function data(element, name, value) {
var data = _.elementData.get(element) || {},
ret;
if (arguments.length == 2) {
ret = data[name];
} else if (value === undefined) {
delete data[name];
} else {
ret = data[name] = value;
}
// Write back (also registers a fresh object on first use)
_.elementData.set(element, data);
return ret;
},
// Backing store; WeakMap keys don't keep elements alive
elementData: new WeakMap(),
/**
* Get node from path or get path of a node to an ancestor
* For maximum robustness, all but the last path segment refer to elements only.
* The last part of the path is a decimal: the integer part of the decimal is element index,
* the decimal part is node index *after* that element and starts from 1.
* If the node has no previous element sibling, the integer part of the index will be -1.
*/
elementPath: function elementPath(ancestor, element) {
if (Array.isArray(element)) {
// Get element by path
var path = element;
// Walk down children; `cur >> 0` truncates the decimal index part
var ret = path.reduce(function (acc, cur) {
return acc.children[cur >> 0] || acc;
}, ancestor);
var last = path[path.length - 1];
if (last != last >> 0) {
// We are returning a non-element node
// Decimal part = how many siblings past the element to advance
var offset = +(last + "").split(".")[1];
if (last >> 0 < 0) {
// -1.x means: no previous element sibling; count from firstChild
ret = ret.firstChild;
offset--;
}
for (var i = 0; i < offset; i++) {
ret = ret.nextSibling;
}
}
return ret;
} else {
// Get path
var path = [];
for (var parent = element; parent && parent != ancestor; parent = parent.parentNode) {
var index = 0;
// Only the deepest segment may target a non-element node
var countNonElementSiblings = parent === element && element.nodeType !== 1;
var offset = countNonElementSiblings ? 1 : 0;
var sibling = parent;
// Count preceding element siblings; for the non-element leaf also
// count nodes back to the nearest preceding element
while (sibling = sibling["previous" + (countNonElementSiblings ? "" : "Element") + "Sibling"]) {
if (countNonElementSiblings) {
offset++;
if (sibling.nodeType == 1) {
countNonElementSiblings = false;
}
} else {
index++;
}
}
if (offset > 0) {
// Encode as "elementIndex.nodeOffset" (see doc comment above)
index = index - 1 + "." + offset;
}
path.unshift(index);
}
// null when element is not actually a descendant of ancestor
return parent ? path : null;
}
},
/**
* Revocably add/remove elements from the DOM
*/
revocably: {
// Re-insert an element previously removed via revocably.remove()
// (swapping it back with its placeholder comment), or append it to
// `parent` if it was never in the DOM. Returns the placeholder, if any.
add: function add(element, parent) {
var comment = _.revocably.isRemoved(element);
if (comment && comment.parentNode) {
comment.parentNode.replaceChild(element, comment);
} else if (element && parent && !element.parentNode) {
// Has not been revocably removed because it has never even been added
parent.appendChild(element);
}
return comment;
},
// Replace an element with a placeholder comment so it can be restored
// later. Returns the (possibly newly created) placeholder comment.
remove: function remove(element, commentText) {
if (!element) {
return;
}
var comment = _.data(element, "commentstub");
if (!comment) {
// Label the placeholder for easier DOM debugging
commentText = commentText || element.id || element.className || element.nodeName;
comment = _.data(element, "commentstub", document.createComment(commentText));
}
if (element.parentNode) {
// In DOM, remove
element.parentNode.replaceChild(comment, element);
}
return comment;
},
// If the element was revocably removed, return its placeholder
// comment; otherwise false.
isRemoved: function isRemoved(element) {
if (!element || element.parentNode) {
return false;
}
var comment = _.data(element, "commentstub");
if (comment && comment.parentNode) {
return comment;
}
return false;
},
// Set an attribute, remembering the original value for restoration
setAttribute: function setAttribute(element, attribute, value) {
var previousValue = _.data(element, "attribute-" + attribute);
if (previousValue === undefined) {
// Only set this when there's no old value stored, otherwise
// if called multiple times, it could result in losing the original value
_.data(element, "attribute-" + attribute, element.getAttribute(attribute));
}
element.setAttribute(attribute, value);
},
// Restore an attribute previously changed via revocably.setAttribute()
restoreAttribute: function restoreAttribute(element, attribute) {
var previousValue = _.data(element, "attribute-" + attribute);
if (previousValue !== undefined) {
// toggleAttribute removes the attribute when previousValue is null
$.toggleAttribute(element, attribute, previousValue);
_.data(element, "attribute-" + attribute, undefined);
}
}
},
inView: {
is: function is(element) {
var r = element.getBoundingClientRect();
return (0 <= r.bottom && r.bottom <= innerHeight || 0 <= r.top && r.top <= innerHeight) && ( // vertical
0 <= r.right && r.right <= innerWidth || 0 <= r.left && r.left <= innerWidth); // horizontal
},
when: function when(element) {
var observer = _.inView.observer = _.inView.observer || new IntersectionObserver(function (entries) {
var _this7 = this;
entries.forEach(function (entry) {
_this7.unobserve(entry.target);
$.fire(entry.target, "mv-inview", { entry: entry });
});
});
return new Promise(function (resolve) {
if (_.is(element)) {
resolve();
}
observer.observe(element);
var callback = function callback(evt) {
element.removeEventListener("mv-inview", callback);
evt.stopPropagation();
resolve();
};
element.addEventListener("mv-inview", callback);
});
}
},
scrollIntoViewIfNeeded: function scrollIntoViewIfNeeded(element) {
if (element && !Mavo.inView.is(element)) {
element.scrollIntoView({ behavior: "smooth" });
}
},
/**
* Set attribute only if it doesn’t exist
*/
setAttributeShy: function setAttributeShy(element, attribute, value) {
if (!element.hasAttribute(attribute)) {
element.setAttribute(attribute, value);
}
},
/**
* Get the value of an attribute, with fallback attributes in priority order.
*/
getAttribute: function getAttribute(element) {
for (var _len3 = arguments.length, attributes = Array(_len3 > 1 ? _len3 - 1 : 0), _key3 = 1; _key3 < _len3; _key3++) {
attributes[_key3 - 1] = arguments[_key3];
}
for (var i = 0, attribute; attribute = attributes[i]; i++) {
var value = element.getAttribute(attribute);
if (value) {
return value;
}
}
return null;
},
/**
* Get the element identified by the URL hash
*/
getTarget: function getTarget() {
var id = location.hash.substr(1);
return document.getElementById(id);
},
/**
* Object utilities
*/
/**
* Check if property exists in object. Like the in operator but more robust and does not throw.
* Why not just in? E.g. "foo".length is 3 but "length" in "foo" throws
*/
// Safe membership check: `in` operator for objects, defined-value check
// for primitives (where `in` would throw, e.g. "length" in "foo").
// Returns undefined (falsy) when obj itself is falsy.
in: function _in(obj, property) {
if (obj) {
return (typeof obj === "undefined" ? "undefined" : _typeof(obj)) === "object" && property in obj || obj[property] !== undefined;
}
},
/**
* Get real property name from case insensitive property
*/
/**
 * Get real property name from case insensitive property.
 * Checks, in order: exact match, all-lowercase match, then any key
 * whose lowercase form matches. Returns undefined when none match
 * or when obj/property is missing (0 is a valid property).
 */
getCanonicalProperty: function getCanonicalProperty(obj, property) {
if (obj && (property || property === 0)) {
// Property in object?
if (_.in(obj, property)) {
return property;
}
// Numbers/symbols have no toLowerCase; skip the fuzzy steps for them
if (property.toLowerCase) {
// Lowercase property in object?
var propertyL = property.toLowerCase();
if (_.in(obj, propertyL)) {
return propertyL;
}
// Any case property in object?
var properties = Object.keys(obj);
var i = properties.map(function (p) {
return p.toLowerCase();
}).indexOf(propertyL);
if (i > -1) {
return properties[i];
}
}
}
},
// Get or set a nested subtree of obj addressed by a path array.
// 3 arguments: set obj at path to value and return obj (arrays are
// merged in place rather than nested). 2 arguments: resolve path and
// return the value found. Mutates path segments to their resolved
// property names as a side effect of lookups.
subset: function subset(obj, path, value) {
if (arguments.length == 3) {
// Put
if (path.length) {
var last = path[path.length - 1];
// Resolve the parent container of the final segment
var parent = $.value.apply($, [obj].concat(_toConsumableArray(path.slice(0, -1))));
if (Array.isArray(parent) && Array.isArray(value)) {
// Merge arrays instead of adding array inside array
parent.splice.apply(parent, [last, 1].concat(_toConsumableArray(value)));
} else if (parent) {
parent[path[path.length - 1]] = value;
}
return obj;
}
// Empty path: replace the whole object
return value;
} else if ((typeof obj === "undefined" ? "undefined" : _typeof(obj)) == "object" && path && path.length) {
// Get
return path.reduce(function (obj, property, i) {
var meta = {};
var ret = Mavo.Functions.get(obj, property, meta);
// We don't yet support multiple properties at the same level
// i.e. the path can't be for the 2nd and 3rd item
path[i] = Array.isArray(meta.property) ? meta.property[0] : meta.property;
if (ret === undefined && meta.query) {
// Not found, return dummy if query
ret = _defineProperty({}, meta.query.property, meta.query.value);
}
return ret;
}, obj);
} else {
// Non-object or empty path: nothing to descend into
return obj;
}
},
clone: function clone(o) {
return JSON.parse(_.safeToJSON(o));
},
// Credit: https://remysharp.com/2010/07/21/throttling-function-calls
// Debounce fn by `delay` ms; trailing-edge only. A pending call is
// flushed on beforeunload so the final invocation isn't lost when the
// page closes. With no delay, fn is returned unwrapped.
debounce: function debounce(fn, delay) {
if (!delay) {
// No throttling
return fn;
}
var timer = null,
_code;
return function () {
var context = this,
args = arguments;
// The flush handler both invokes fn and deregisters itself
_code = function code() {
fn.apply(context, args);
removeEventListener("beforeunload", _code);
};
clearTimeout(timer);
timer = setTimeout(_code, delay);
// Ensure the pending call runs even if the page unloads first
addEventListener("beforeunload", _code);
};
},
timeout: function timeout(delay) {
return new Promise(function (resolve) {
return setTimeout(resolve, delay);
});
},
escapeRegExp: function escapeRegExp(s) {
return s.replace(/[-\/\\^$*+?.()|[\]{}]/g, "\\$&");
},
matches: function matches(str, regex) {
var ret = (str + "").match(regex);
return ret ? ret : [];
},
match: function match(str, regex) {
var i = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
return _.matches(str, regex)[i] || "";
},
// Observe element resizes, ignoring events where the size did not
// actually change. Accepts either a callback (a new ResizeObserver is
// created and returned) or an existing ResizeObserver to reuse.
// Returns undefined when ResizeObserver is unsupported.
observeResize: function observeResize(element, callbackOrObserver) {
if (!self.ResizeObserver) {
return;
}
var previousRect = null;
var ro = callbackOrObserver instanceof ResizeObserver ? callbackOrObserver : new ResizeObserver(function (entries) {
// Only the most recent entry is inspected for de-duplication
var contentRect = entries[entries.length - 1].contentRect;
if (previousRect && previousRect.width == contentRect.width && previousRect.height == contentRect.height) {
return;
}
callbackOrObserver(entries);
previousRect = contentRect;
});
ro.observe(element);
return ro;
},
// Thin wrapper over MutationObserver with start/stop/sneak helpers.
// `attribute` may be a name (or array of names), "all", or falsy;
// falsy or "all" also observes character data and the subtree.
Observer: $.Class({
constructor: function constructor(element, attribute, callback) {
var o = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
// Allow reusing an existing MutationObserver in place of a callback
if (callback instanceof MutationObserver) {
this.observer = callback;
}
this.observer = this.observer || new MutationObserver(callback);
this.element = element;
this.callback = callback;
this.attribute = attribute;
this.options = $.extend({}, o);
if (attribute) {
$.extend(this.options, {
attributes: true,
// "all" means: no filter, observe every attribute
attributeFilter: this.attribute == "all" ? undefined : Mavo.toArray(this.attribute),
attributeOldValue: !!o.oldValue
});
}
if (!this.attribute || this.attribute == "all") {
$.extend(this.options, {
characterData: true,
childList: true,
subtree: true,
characterDataOldValue: !!o.oldValue
});
}
// Start observing immediately
this.run();
},
// Stop observing (can be resumed with run())
stop: function stop() {
if (this.observer) {
this.observer.disconnect();
}
this.running = false;
return this;
},
// Start (or resume) observing with the configured options
run: function run() {
if (this.observer) {
this.observer.observe(this.element, this.options);
this.running = true;
}
return this;
},
/**
 * Disconnect an observer, run some code, then observe again
 */
sneak: function sneak(callback) {
if (this.running) {
this.stop();
var ret = callback();
this.run();
} else {
// Not running: just execute, nothing to pause
var ret = callback();
}
return ret;
},
// Permanently stop and release references
destroy: function destroy() {
this.stop();
this.observer = this.element = null;
},
static: {
// Null-safe variant: runs callback directly when observer is falsy
sneak: function sneak(observer, callback) {
return observer ? observer.sneak(callback) : callback();
}
}
}),
defer: function defer(constructor) {
var res, rej;
var promise = new Promise(function (resolve, reject) {
if (constructor) {
constructor(resolve, reject);
}
res = resolve;
rej = reject;
});
promise.resolve = function (a) {
res(a);
return promise;
};
promise.reject = function (a) {
rej(a);
return promise;
};
return promise;
},
/**
* Similar to Promise.all() but can handle post-hoc additions
* and does not reject if one promise rejects.
*/
thenAll: function thenAll(iterable) {
// Turn rejected promises into resolved ones
$$(iterable).forEach(function (promise) {
if ($.type(promise) == "promise") {
promise = promise.catch(function (err) {
return err;
});
}
});
return Promise.all(iterable).then(function (resolved) {
if (iterable.length != resolved.length) {
// The list of promises or values changed. Return a new Promise.
// The original promise won't resolve until the new one does.
return _.thenAll(iterable);
}
// The list of promises or values stayed the same.
// Return results immediately.
return resolved;
});
},
/**
* Run & Return a function
*/
rr: function rr(f) {
f();
return f;
},
// Get out of bounds array index to wrap around
wrap: function wrap(index, length) {
return index < 0 ? length - 1 : index >= length ? 0 : index;
},
/**
* Parses a simple CSS-like text format for declaring key-value options:
* Pairs are comma or semicolon-separated, key and value are colon separated.
* Escapes are supported, via backslash. Useful for attributes.
*/
options: function options(str) {
var ret = {};
(str.trim().match(/(?:\\[,;]|[^,;])+/g) || []).forEach(function (option) {
if (option) {
option = option.trim().replace(/\\([,;])/g, "$1");
var pair = option.match(/^\s*((?:\\:|[^:])+?)\s*:\s*(.+)$/);
if (pair) {
ret[pair[1].replace(/\\:/g, ":")] = pair[2];
} else {
// If no value, it's boolean
ret[option] = true;
}
}
});
return ret;
}
});
// Bliss plugins
// Bliss element plugin: set the attribute when `test` is truthy
// (defaults to value !== null), remove it otherwise.
$.add("toggleAttribute", function (name, value) {
var test = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : value !== null;
if (test) {
this.setAttribute(name, value);
} else {
this.removeAttribute(name);
}
});
// Provide shortcuts to long property chains
// Provide shortcuts to long property chains:
// defines obj[property] as a live accessor delegating to obj[proxy][property].
// $.overload lets callers pass {property: proxy, ...} maps as well.
$.proxy = $.classProps.proxy = $.overload(function (obj, property, proxy) {
Object.defineProperty(obj, property, {
get: function get() {
return this[proxy][property];
},
set: function set(value) {
this[proxy][property] = value;
},
configurable: true,
enumerable: true
});
return obj;
});
// Class prop: wrap each named method so that after it runs, the call is
// propagated to child nodes via this.propagate(name) — unless the
// original method returned exactly false.
// Note: the wrapper returns undefined, discarding the original's result.
$.classProps.propagated = function (proto, names) {
Mavo.toArray(names).forEach(function (name) {
var existing = proto[name];
proto[name] = function () {
var ret = existing && existing.apply(this, arguments);
if (this.propagate && ret !== false) {
this.propagate(name);
}
};
});
};
// :target-within shim
// :target-within shim
// Marks the hash-targeted element and all its ancestors with a class,
// clearing the previous marks first.
function updateTargetWithin() {
var element = _.getTarget();
var cl = "mv-target-within";
$$("." + cl).forEach(function (el) {
return el.classList.remove(cl);
});
while (element && element.classList) {
element.classList.add(cl);
element = element.parentNode;
}
};
addEventListener("hashchange", updateTargetWithin);
// Re-run when the id attribute changes.
// NOTE(review): per the Observer options above, an attribute filter
// without subtree only watches <html> itself — confirm whether subtree
// id changes were intended to be caught here.
var idObserver = new Mavo.Observer(document.documentElement, "id", updateTargetWithin);
})(Bliss, Bliss.$);
(function ($, $$) {
// Localization: a Locale holds a phrase dictionary for one language tag
// and falls back to its base language, then to the default locale.
var _ = Mavo.Locale = $.Class({
constructor: function constructor(lang, phrases) {
this.lang = lang;
this.phrases = {};
this.extend(phrases);
},
get fallback() {
// TODO should we fallback to other dialects? I.e. should en-US fallback to en-GB if en didn't exist?
if (_.all[this.baseLang]) {
return _.all[this.baseLang];
}
if (this !== _.default) {
return _.default;
}
// implicit undefined: this IS the default locale, nothing to fall back to
},
// Merge new phrases into this locale (later registrations win)
extend: function extend(phrases) {
$.extend(this.phrases, phrases);
},
// Resolve a phrase by (case-insensitive) id, substituting {name}
// placeholders from vars. Unresolvable ids become human-readable text.
phrase: function phrase(id, vars) {
var key = id.toLowerCase();
var phrase = this.phrases[key];
if (phrase === undefined && this.fallback) {
phrase = this.fallback.phrase(key);
}
if (phrase === undefined) {
// Everything failed, use id
phrase = Mavo.Functions.readable(key);
} else if (vars) {
// Collect the distinct {placeholder} names used in the phrase
var keys = Mavo.matches(phrase, /\{\w+(?=\})/g).map(function (v) {
return v.slice(1);
});
Mavo.Functions.unique(keys).forEach(function (name) {
if (name in vars) {
// "gi": placeholders are matched case-insensitively
phrase = phrase.replace(RegExp("{" + name + "}", "gi"), vars[name]);
}
});
}
return phrase;
},
live: {
// Setter side effect: cache the base language ("en" for "en-US"),
// or null when the tag has no dialect part
lang: function lang(_lang) {
this.baseLang = _.getBaseLang(_lang);
if (_lang == this.baseLang) {
this.baseLang = null;
}
}
},
static: {
// All registered locales, keyed by language tag
all: {},
/**
 * Register new locale or extend existing locale
 */
register: function register(lang, phrases) {
if (_.all[lang]) {
_.all[lang].extend(phrases);
} else {
_.all[lang] = new _(lang, phrases);
}
},
// Exact tag match, then base-language match; may return undefined
match: function match() {
var lang = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "";
return _.all[lang] || _.all[_.getBaseLang(lang)];
},
// Like match() but always returns a locale (default as last resort)
get: function get(lang) {
return _.match(lang) || _.default;
},
getBaseLang: function getBaseLang(lang) {
return lang.split("-")[0];
},
lazy: {
// Default locale: the page language, or English
default: function _default() {
return _.match(Mavo.locale) || _.all.en;
}
}
}
});
/**
* Use phrase
*/
/**
 * Use phrase: look up a localized phrase by id with optional variable
 * substitution; falls back to the id itself when there is no locale.
 */
Mavo.prototype._ = function (id, vars) {
if (this.locale && id) {
return this.locale.phrase(id, vars);
}
return id;
};
// After DOM load, pick up author-provided translations declared as
// <datalist class="mv-phrases" lang="…"><option value="id">text</option>…
$.ready().then(function () {
$$("datalist.mv-phrases[lang]").forEach(function (datalist) {
var phrases = $$("option", datalist).reduce(function (o, option) {
o[option.value] = option.textContent.trim();
return o;
}, {});
Mavo.Locale.register(datalist.lang, phrases);
});
});
})(Bliss, Bliss.$);
// Built-in English phrases (the ultimate fallback locale).
// Values may contain {placeholders} substituted by Locale.phrase().
Mavo.Locale.register("en", {
"edit": "Edit",
"save": "Save",
"import": "Import",
"export": "Export",
"logout": "Logout",
"login": "Login",
"loading": "Loading",
"uploading": "Uploading",
"saving": "Saving",
"logged-in-as": "Logged in to {id} as ",
"login-to": "Login to {id}",
"error-uploading": "Error uploading file",
"cannot-load-uploaded-file": "Cannot load uploaded file",
"filename": "Filename?",
"problem-saving": "Problem saving data",
"problem-loading": "Problem loading data",
"cannot-parse": "Can’t understand this file",
"http-error": "HTTP error {status}: {statusText}",
"cant-connect": "Can’t connect to the Internet",
"add-item": "Add {name}",
"add-item-before": "Add new {name} before",
"add-item-after": "Add new {name} after",
"drag-to-reorder": "Drag to reorder {name}",
"delete-item": "Delete this {name}",
"gh-updated-file": "Updated {name}",
"gh-edit-suggestion-saved-in-profile": "Your edits are saved to <a href=\"{previewURL}\" target=\"_blank\">your own profile</a>, because you are not allowed to edit this page.",
"gh-edit-suggestion-instructions": "Write a short description of your edits below to suggest them to the page admins:",
"gh-edit-suggestion-notreviewed": "You have selected to suggest your edits to the page admins. Your suggestions have not been reviewed yet.",
"gh-edit-suggestion-send": "Send edit suggestion",
"gh-edit-suggestion-revoke": "Revoke edit suggestion",
"gh-edit-suggestion-reason-placeholder": "I added / corrected / deleted ...",
"gh-edit-suggestion-cancelled": "Edit suggestion cancelled successfully!",
"gh-edit-suggestion-title": "Suggested edits to data",
"gh-edit-suggestion-body": "Hello there! I used Mavo to suggest the following edits:\n{description}\nPreview my changes here: {previewURL}",
"gh-edit-suggestion-sent": "Edit suggestion sent successfully!"
});
(function ($, $$) {
Mavo.attributes.push("mv-plugins");
// Plugin loader: discovers mv-plugins attributes, fetches the plugin
// index, and loads each requested plugin's script.
var _ = Mavo.Plugins = {
// Plugins registered so far, keyed by name
loaded: {},
load: function load() {
// Collect the union of all plugin names requested on the page
_.plugins = new Set();
$$("[mv-plugins]").forEach(function (element) {
element.getAttribute("mv-plugins").trim().split(/\s+/).forEach(function (plugin) {
return _.plugins.add(plugin);
});
});
if (!_.plugins.size) {
return Promise.resolve();
}
// Fetch plugin index
return $.fetch(_.url + "/plugins.json", {
responseType: "json"
}).then(function (xhr) {
// Fetch plugins
// NOTE(review): index schema key is "plugin" (singular) — confirm
// against the published plugins.json format.
return Mavo.thenAll(xhr.response.plugin.filter(function (plugin) {
return _.plugins.has(plugin.id);
}).map(function (plugin) {
// Load plugin
var filename = "mavo-" + plugin.id + ".js";
if (plugin.repo) {
// Plugin hosted in a separate repo
var url = "https://raw.githubusercontent.com/" + plugin.repo + "/master/" + filename;
// raw.githubusercontent.com serves text/plain, so inject as inline script
return _.loaded[plugin.id] ? Promise.resolve() : $.fetch(url).then(function (xhr) {
$.create("script", {
textContent: xhr.responseText,
inside: document.head
});
});
} else {
// Plugin hosted in the mavo-plugins repo
var url = _.url + "/" + plugin.id + "/" + filename;
// $.include(condition, url): skips loading when already registered
return $.include(_.loaded[plugin.id], url);
}
}));
});
},
// Called by plugin scripts to register hooks, class extensions,
// stylesheet/script dependencies and an init function.
register: function register(name) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (_.loaded[name]) {
// Do not register same plugin twice
return;
}
Mavo.hooks.add(o.hooks);
// Extend Mavo itself or any Mavo.* class/object with plugin members
for (var Class in o.extend) {
var existing = Class == "Mavo" ? Mavo : Mavo[Class];
if ($.type(existing) === "function") {
$.Class(existing, o.extend[Class]);
} else {
$.extend(existing, o.extend[Class]);
}
}
var ready = [];
if (o.ready) {
ready.push(o.ready);
}
if (o.dependencies) {
// Resolve dependency URLs relative to the plugin's own script
var base = document.currentScript ? document.currentScript.src : location;
var dependencies = o.dependencies.map(function (url) {
return Mavo.load(url, base);
});
ready.push.apply(ready, _toConsumableArray(dependencies));
}
if (ready.length) {
// Mavo waits for these before it is considered ready
var _Mavo$dependencies;
(_Mavo$dependencies = Mavo.dependencies).push.apply(_Mavo$dependencies, ready);
}
_.loaded[name] = o;
if (o.init) {
// Run plugin init once all its dependencies have loaded
Promise.all(ready).then(function () {
return o.init();
});
}
},
url: "https://plugins.mavo.io"
};
})(Bliss, Bliss.$);
(function ($, $$) {
Mavo.attributes.push("mv-bar");
var _ = Mavo.UI.Bar = $.Class({
// Build the Mavo toolbar: reuse an author-provided .mv-bar element when
// present (merging its mv-bar spec and pre-existing controls), otherwise
// create one. Then instantiate the controls listed in the template and
// wire their permissions, conditions, events and delegated actions.
constructor: function constructor(mavo) {
var _this8 = this;
this.mavo = mavo;
this.element = $(".mv-bar", this.mavo.element);
this.template = this.mavo.element.getAttribute("mv-bar") || "";
if (this.element) {
// Author supplied their own bar markup
this.custom = true;
this.template += " " + (this.element.getAttribute("mv-bar") || "");
this.template = this.template.trim();
// Adopt any pre-existing control elements found inside the custom bar
for (var id in _.controls) {
this[id] = $(".mv-" + id, this.element);
if (this[id]) {
this.template = this.template || "with";
this.template += " " + id;
}
}
} else {
// No custom bar: create one. The throwaway button gives offsetHeight
// a realistic single-row value for the measurement below.
this.element = $.create({
className: "mv-bar mv-ui",
start: this.mavo.element,
innerHTML: "<button> </button>"
});
}
if (this.element.classList.contains("mv-compact")) {
this.noResize = true;
}
// yes- is deprecated and will be removed
// NOTE(review): the message says "removed in v0.1.6" but static.version
// already IS v0.1.6 — the version in this warning looks stale; confirm.
if (/\byes-\w+/.test(this.template)) {
console.warn(this.mavo.id + ": You used mv-bar=\"" + this.template + "\". Note that yes-* in mv-bar is deprecated and will be removed in v0.1.6. Please use the new syntax: http://mavo.io/docs/ui/#bar");
}
this.controls = _.getControls(this.template);
if (this.controls.length) {
// Measure height of 1 row
this.targetHeight = this.element.offsetHeight;
}
if (!this.custom) {
this.element.innerHTML = "";
}
this.controls.forEach(function (id) {
var o = _.controls[id];
if (_this8[id]) {
// Custom control, remove to not mess up order
_this8[id].remove();
}
if (o.create) {
_this8[id] = o.create.call(_this8.mavo, _this8[id]);
} else if (!_this8[id]) {
// Default control: a simple localized button
_this8[id] = $.create("button", {
className: "mv-" + id,
textContent: _this8.mavo._(id)
});
}
// We initially add all of them to retain order,
// then we remove revocably when/if needed
_this8.add(id);
if (o.permission) {
// Show while permitted (and condition holds), hide otherwise
_this8.permissions.can(o.permission, function () {
_this8.toggle(id, !o.condition || o.condition.call(_this8.mavo));
}, function () {
_this8.remove(id);
});
} else if (o.condition && !o.condition.call(_this8.mavo)) {
_this8.remove(id);
}
for (var events in o.events) {
$.bind(_this8[id], events, o.events[events].bind(_this8.mavo));
}
});
// Delegate each control's click action on the whole Mavo root, gated
// by its permission (also covers author-placed .mv-* buttons outside the bar)
var _loop = function _loop(_id) {
var o = _.controls[_id];
if (o.action) {
$.delegate(_this8.mavo.element, "click", ".mv-" + _id, function (evt) {
if (!o.permission || _this8.permissions.is(o.permission)) {
o.action.call(_this8.mavo);
evt.preventDefault();
}
});
}
};
for (var _id in _.controls) {
_loop(_id);
}
if (this.controls.length && !this.noResize) {
// Keep the bar fitting on one row as its container resizes
this.resize();
if (self.ResizeObserver) {
this.resizeObserver = Mavo.observeResize(this.element, function (entries) {
_this8.resize();
});
}
}
},
resize: function resize() {
if (!this.targetHeight) {
// We don't have a correct measurement for target height, abort
this.targetHeight = this.element.offsetHeight;
return;
}
this.resizeObserver && this.resizeObserver.disconnect();
this.element.classList.remove("mv-compact", "mv-tiny");
// Exceeded single row?
if (this.element.offsetHeight > this.targetHeight * 1.6) {
this.element.classList.add("mv-compact");
if (this.element.offsetHeight > this.targetHeight * 1.2) {
// Still too tall
this.element.classList.add("mv-tiny");
}
}
this.resizeObserver && this.resizeObserver.observe(this.element);
},
add: function add(id) {
var _this9 = this;
var o = _.controls[id];
if (o.prepare) {
o.prepare.call(this.mavo);
}
Mavo.revocably.add(this[id], this.element);
if (!this.resizeObserver && !this.noResize) {
requestAnimationFrame(function () {
return _this9.resize();
});
}
},
remove: function remove(id) {
var _this10 = this;
var o = _.controls[id];
Mavo.revocably.remove(this[id], "mv-" + id);
if (o.cleanup) {
o.cleanup.call(this.mavo);
}
if (!this.resizeObserver && !this.noResize) {
requestAnimationFrame(function () {
return _this10.resize();
});
}
},
toggle: function toggle(id, add) {
return this[add ? "add" : "remove"](id);
},
proxy: {
"permissions": "mavo"
},
static: {
getControls: function getControls(template) {
var all = Object.keys(_.controls);
if (template && (template = template.trim())) {
if (template == "none") {
return [];
}
var relative = /^with\s|\b(yes|no)-\w+\b/.test(template);
template = template.replace(/\byes-|^with\s+/g, "");
var ids = template.split(/\s+/);
// Drop duplicates (last one wins)
ids = Mavo.Functions.unique(ids.reverse()).reverse();
if (relative) {
return all.filter(function (id) {
var positive = ids.lastIndexOf(id);
var negative = ids.lastIndexOf("no-" + id);
var keep = positive > Math.max(-1, negative);
var drop = negative > Math.max(-1, positive);
return keep || !_.controls[id].optional && !drop;
});
}
return ids;
}
// No template, return default set
return all.filter(function (id) {
return !_.controls[id].optional;
});
},
controls: {
status: {
create: function create(custom) {
return custom || $.create({
className: "mv-status"
});
},
prepare: function prepare() {
var backend = this.primaryBackend;
if (backend && backend.user) {
var user = backend.user;
var html = user.name || "";
if (user.avatar) {
html = "<img class=\"mv-avatar\" src=\"" + user.avatar + "\" /> " + html;
}
if (user.url) {
html = "<a href=\"" + user.url + "\" target=\"_blank\">" + html + "</a>";
}
this.bar.status.innerHTML = "<span>" + this._("logged-in-as", backend) + "</span> " + html;
}
},
permission: "logout"
},
edit: {
action: function action() {
if (this.editing) {
this.done();
} else {
this.edit();
}
},
permission: ["edit", "add", "delete"],
cleanup: function cleanup() {
if (this.editing) {
this.done();
}
},
condition: function condition() {
return this.needsEdit;
}
},
save: {
action: function action() {
this.save();
},
events: {
"mouseenter focus": function mouseenterFocus() {
this.element.classList.add("mv-highlight-unsaved");
},
"mouseleave blur": function mouseleaveBlur() {
this.element.classList.remove("mv-highlight-unsaved");
}
},
permission: "save",
condition: function condition() {
return !this.autoSave || this.autoSaveDelay > 0;
}
},
export: {
create: function create(custom) {
var a;
if (custom) {
a = custom.matches("a") ? custom : $.create("a", {
className: "mv-button",
around: custom
});
} else {
a = $.create("a", {
className: "mv-export mv-button",
textContent: this._("export")
});
}
a.setAttribute("download", this.id + ".json");
return a;
},
events: {
mousedown: function mousedown() {
this.bar.export.href = "data:application/json;charset=UTF-8," + encodeURIComponent(this.toJSON());
}
},
permission: "edit",
optional: true
},
import: {
create: function create(custom) {
var _this11 = this;
var button = custom || $.create("span", {
role: "button",
tabIndex: "0",
className: "mv-import mv-button",
textContent: this._("import"),
events: {
focus: function focus(evt) {
input.focus();
}
}
});
var input = $.create("input", {
type: "file",
inside: button,
events: {
change: function change(evt) {
var file = evt.target.files[0];
if (file) {
var reader = $.extend(new FileReader(), {
onload: function onload(evt) {
_this11.inProgress = false;
try {
var json = JSON.parse(reader.result);
_this11.render(json);
} catch (e) {
_this11.error(_this11._("cannot-parse"));
}
},
onerror: function onerror(evt) {
_this11.error(_this11._("problem-loading"));
}
});
_this11.inProgress = _this11._("uploading");
reader.readAsText(file);
}
}
}
});
return button;
},
optional: true
},
login: {
action: function action() {
this.primaryBackend.login();
},
permission: "login"
},
logout: {
action: function action() {
this.primaryBackend.logout();
},
permission: "logout"
}
}
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
  // Mavo.UI.Message — a transient notification shown next to the bar.
  // `o` options: type (e.g. "error"), classes, dismiss (button/timeout/submit).
  var _ = Mavo.UI.Message = $.Class({
    constructor: function constructor(mavo, message, o) {
      var _this12 = this;
      this.mavo = mavo;
      this.message = message;
      // Deferred that resolves when the message is dismissed
      this.closed = Mavo.defer();
      // The _defineProperty trick sets either "after" (when a bar exists)
      // or "start" as the placement key for $.create
      this.element = $.create(_defineProperty({
        className: "mv-ui mv-message" + (o.type ? " mv-" + o.type : ""),
        innerHTML: this.message,
        events: {
          click: function click(e) {
            return Mavo.scrollIntoViewIfNeeded(_this12.mavo.element);
          }
        }
      }, this.mavo.bar ? "after" : "start", (this.mavo.bar || this.mavo).element));
      if (o.classes) {
        this.element.classList.add(o.classes);
      }
      // Announce errors assertively; everything else politely
      if (o.type == "error") {
        this.element.setAttribute("role", "alert");
      } else {
        this.element.setAttribute("aria-live", "polite");
      }
      // Normalize o.dismiss: a string or array becomes an object of flags
      o.dismiss = o.dismiss || {};
      if (typeof o.dismiss == "string" || Array.isArray(o.dismiss)) {
        var dismiss = {};
        Mavo.toArray(o.dismiss).forEach(function (prop) {
          dismiss[prop] = true;
        });
        o.dismiss = dismiss;
      }
      if (o.dismiss.button) {
        // Prepend an explicit close button
        $.create("button", {
          className: "mv-close mv-ui",
          textContent: "×",
          events: {
            "click": function click(evt) {
              return _this12.close();
            }
          },
          start: this.element
        });
      }
      if (o.dismiss.timeout) {
        // Auto-close after `timeout` ms (default 5000); hovering pauses it
        var timeout = typeof o.dismiss.timeout === "number" ? o.dismiss.timeout : 5000;
        var closeTimeout;
        $.bind(this.element, {
          mouseenter: function mouseenter(e) {
            return clearTimeout(closeTimeout);
          },
          mouseleave: Mavo.rr(function (e) {
            return closeTimeout = setTimeout(function () {
              return _this12.close();
            }, timeout);
          })
        });
      }
      if (o.dismiss.submit) {
        // Close when a form inside the message is submitted,
        // resolving this.closed with the form element
        this.element.addEventListener("submit", function (evt) {
          evt.preventDefault();
          _this12.close(evt.target);
        });
      }
    },
    // Fade out, remove from the DOM, then resolve this.closed with `resolve`
    close: function close(resolve) {
      var _this13 = this;
      $.transition(this.element, { opacity: 0 }).then(function () {
        $.remove(_this13.element);
        _this13.closed.resolve(resolve);
      });
    }
  });
})(Bliss, Bliss.$);
(function ($, $$) {
  // Mavo.Permissions — an observable set of boolean capabilities
  // (read, edit, save, login, …). Callbacks fire on transitions, and a
  // permissions object can inherit unset values from a parent object.
  var _ = Mavo.Permissions = $.Class({
    constructor: function constructor(o) {
      this.triggers = [];
      this.hooks = new $.Hooks();
      // If we don’t do this, there is no way to retrieve this from inside parentChanged
      this.parentChanged = _.prototype.parentChanged.bind(this);
      this.set(o);
    },
    // Set multiple permissions at once
    set: function set(o) {
      for (var action in o) {
        this[action] = o[action];
      }
    },
    // Set a bunch of permissions to true. Chainable.
    on: function on(actions) {
      var _this14 = this;
      Mavo.toArray(actions).forEach(function (action) {
        return _this14[action] = true;
      });
      return this;
    },
    // Set a bunch of permissions to false. Chainable.
    off: function off(actions) {
      var _this15 = this;
      actions = Array.isArray(actions) ? actions : [actions];
      actions.forEach(function (action) {
        return _this15[action] = false;
      });
      return this;
    },
    // Fired once at least one of the actions passed can be performed
    // Kind of like a Promise that can be resolved multiple times.
    can: function can(actions, callback, cannot) {
      this.observe(actions, true, callback);
      if (cannot) {
        // Fired once the action cannot be done anymore, even though it could be done before
        this.cannot(actions, cannot);
      }
    },
    // Fired once NONE of the actions can be performed
    cannot: function cannot(actions, callback) {
      this.observe(actions, false, callback);
    },
    // Schedule a callback for when a set of permissions changes value
    observe: function observe(actions, value, callback) {
      actions = Mavo.toArray(actions);
      if (this.is(actions, value)) {
        // Should be fired immediately
        callback();
      }
      // For future transitions
      this.triggers.push({ actions: actions, value: value, callback: callback, active: true });
    },
    // Compare a set of permissions with true or false
    // If comparing with true, we want at least one to be true, i.e. OR
    // If comparing with false, we want ALL to be false, i.e. NOR
    is: function is(actions) {
      var _this16 = this;
      var able = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
      var or = Mavo.toArray(actions).map(function (action) {
        return !!_this16[action];
      }).reduce(function (prev, current) {
        return prev || current;
      });
      return able ? or : !or;
    },
    // Monitor all changes
    onchange: function onchange(callback) {
      var _this17 = this;
      // Future changes
      this.hooks.add("change", callback);
      // Fire for current values
      _.actions.forEach(function (action) {
        callback.call(_this17, { action: action, value: _this17[action] });
      });
    },
    // Invoked when a permission changes on our parent permissions object
    parentChanged: function parentChanged() {
      var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
      var localValue = this["_" + o.action];
      if (localValue !== undefined || o.from == o.value) {
        // We have a local value so we don’t care about parent changes OR nothing changed
        return;
      }
      this.fireTriggers(o.action);
      this.hooks.run("change", $.extend({ context: this }, o));
    },
    // A single permission changed value
    changed: function changed(action, value, from) {
      from = !!from;
      value = !!value;
      if (value == from) {
        // Nothing changed
        return;
      }
      // $.live() calls the setter before the actual property is set so we
      // need to set it manually, otherwise it still has its previous value
      this["_" + action] = value;
      this.fireTriggers(action);
      this.hooks.run("change", { action: action, value: value, from: from, context: this });
    },
    // Run any pending triggers that involve `action` and whose condition now holds
    fireTriggers: function fireTriggers(action) {
      var _this18 = this;
      this.triggers.forEach(function (trigger) {
        var match = _this18.is(trigger.actions, trigger.value);
        if (trigger.active && trigger.actions.indexOf(action) > -1 && match) {
          trigger.active = false;
          trigger.callback();
        } else if (!match) {
          // This is so that triggers can only be executed in an actual transition
          // And that if there is a trigger for [a,b] it won't be executed twice
          // if a and b are set to true one after the other
          trigger.active = true;
        }
      });
    },
    // Merge another permissions object into this one (logical OR). Chainable.
    or: function or(permissions) {
      var _this19 = this;
      _.actions.forEach(function (action) {
        _this19[action] = _this19[action] || permissions[action];
      });
      return this;
    },
    live: {
      // Inherit from a parent permissions object; rewires change listeners
      // and fires triggers for any values that change as a result
      parent: function parent(_parent) {
        var _this20 = this;
        var oldParent = this._parent;
        if (oldParent == _parent) {
          return;
        }
        this._parent = _parent;
        // Remove previous trigger, if any
        if (oldParent) {
          Mavo.delete(oldParent.hooks.change, this.parentChanged);
        }
        // What changes does this cause? Fire triggers for them
        _.actions.forEach(function (action) {
          _this20.parentChanged({
            action: action,
            value: _parent ? _parent[action] : undefined,
            from: oldParent ? oldParent[action] : undefined
          });
        });
        if (_parent) {
          // Add new trigger
          _parent.onchange(this.parentChanged);
        }
      }
    },
    static: {
      actions: [],
      // Register a new permission type
      register: function register(action, setter) {
        if (Array.isArray(action)) {
          action.forEach(function (action) {
            return _.register(action, setter);
          });
          return;
        }
        // Live accessor: unset local values fall back to the parent
        $.live(_.prototype, action, {
          get: function get() {
            var ret = this["_" + action];
            if (ret === undefined && this.parent) {
              return this.parent[action];
            }
            return ret;
          },
          set: function set(able, previous) {
            if (setter) {
              setter.call(this, able, previous);
            }
            this.changed(action, able, previous);
          }
        });
        _.actions.push(action);
      }
    }
  });
  _.register(["read", "save"]);
  // login and logout are mutually exclusive
  _.register("login", function (can) {
    if (can && this.logout) {
      this.logout = false;
    }
  });
  _.register("logout", function (can) {
    if (can && this.login) {
      this.login = false;
    }
  });
  // edit implies add & delete; losing add/delete revokes edit
  _.register("edit", function (can) {
    if (can) {
      this.add = this.delete = true;
    }
  });
  _.register(["add", "delete"], function (can) {
    if (!can) {
      this.edit = false;
    }
  });
})(Bliss, Bliss.$);
(function ($, $$) {
  /**
   * Base class for all backends
   */
  var _ = Mavo.Backend = $.Class({
    constructor: function constructor(url) {
      var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
      this.update(url, o);
      // Permissions of this particular backend.
      this.permissions = new Mavo.Permissions();
    },
    // (Re)configure this backend for a given URL and options
    update: function update(url) {
      var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
      this.source = url;
      this.url = new URL(this.source, Mavo.base);
      this.mavo = o.mavo;
      this.format = Mavo.Formats.create(o.format, this);
    },
    // Fetch the raw text at this backend's URL; resolves with null on failure
    get: function get() {
      var url = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : new URL(this.url);
      url.searchParams.set("timestamp", Date.now()); // ensure fresh copy
      return $.fetch(url.href).then(function (xhr) {
        return Promise.resolve(xhr.responseText);
      }, function () {
        return Promise.resolve(null);
      });
    },
    // Fetch and parse data through this backend's format
    load: function load() {
      var _this21 = this;
      return this.ready.then(function () {
        return _this21.get();
      }).then(function (response) {
        if (typeof response != "string") {
          // Backend did the parsing, we're done here
          return response;
        }
        response = response.replace(/^\ufeff/, ""); // Remove Unicode BOM
        return _this21.format.parse(response);
      });
    },
    // Serialize data (unless already a string) and put() it at `path`.
    // Resolves with {data, serialized}.
    store: function store(data) {
      var _this22 = this;
      var _ref2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
          path = _ref2.path,
          _ref2$format = _ref2.format,
          format = _ref2$format === undefined ? this.format : _ref2$format;
      return this.ready.then(function () {
        var serialize = typeof data === "string" ? Promise.resolve(data) : format.stringify(data);
        return serialize.then(function (serialized) {
          return _this22.put(serialized, path).then(function () {
            return { data: data, serialized: serialized };
          });
        });
      });
    },
    // To be be overriden by subclasses
    ready: Promise.resolve(),
    login: function login() {
      return Promise.resolve();
    },
    logout: function logout() {
      return Promise.resolve();
    },
    // Default: storing is not supported
    put: function put() {
      return Promise.reject();
    },
    isAuthenticated: function isAuthenticated() {
      return !!this.accessToken;
    },
    // Any extra params to be passed to the oAuth URL.
    oAuthParams: function oAuthParams() {
      return "";
    },
    toString: function toString() {
      return this.id + " (" + this.url + ")";
    },
    // Two backends are equal when identical, or same id and source
    equals: function equals(backend) {
      return backend === this || backend && this.id == backend.id && this.source == backend.source;
    },
    /**
     * Helper for making OAuth requests with JSON-based APIs.
     */
    request: function request(call, data) {
      var _this23 = this;
      var method = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : "GET";
      var req = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
      req = $.extend({}, req); // clone
      req.method = req.method || method;
      req.responseType = req.responseType || "json";
      req.headers = $.extend({
        "Content-Type": "application/json; charset=utf-8"
      }, req.headers || {});
      req.data = data;
      if (this.isAuthenticated()) {
        req.headers["Authorization"] = req.headers["Authorization"] || "Bearer " + this.accessToken;
      }
      // Object payloads become query strings for GET, JSON bodies otherwise
      if ($.type(req.data) === "object") {
        if (req.method == "GET") {
          req.data = Object.keys(req.data).map(function (p) {
            return p + "=" + encodeURIComponent(req.data[p]);
          }).join("&");
        } else {
          req.data = JSON.stringify(req.data);
        }
      }
      call = new URL(call, this.constructor.apiDomain);
      // Prevent getting a cached response. Cache-control is often not allowed via CORS
      if (req.method == "GET") {
        call.searchParams.set("timestamp", Date.now());
      }
      return $.fetch(call, req).catch(function (err) {
        if (err && err.xhr) {
          return Promise.reject(err.xhr);
        } else {
          _this23.mavo.error("Something went wrong while connecting to " + _this23.id, err);
        }
      }).then(function (xhr) {
        return req.method == "HEAD" ? xhr : xhr.response;
      });
    },
    /**
     * Helper method for authenticating in OAuth APIs
     */
    oAuthenticate: function oAuthenticate(passive) {
      var _this24 = this;
      return this.ready.then(function () {
        if (_this24.isAuthenticated()) {
          return Promise.resolve();
        }
        return new Promise(function (resolve, reject) {
          var id = _this24.id.toLowerCase();
          if (passive) {
            // Passive mode: only reuse a token cached in localStorage
            _this24.accessToken = localStorage["mavo:" + id + "token"];
            if (_this24.accessToken) {
              resolve(_this24.accessToken);
            }
          } else {
            // Show window
            var popup = {
              width: Math.min(1000, innerWidth - 100),
              height: Math.min(800, innerHeight - 100)
            };
            popup.top = (screen.height - popup.height) / 2;
            popup.left = (screen.width - popup.width) / 2;
            var state = {
              url: location.href,
              backend: _this24.id
            };
            _this24.authPopup = open(_this24.constructor.oAuth + "?client_id=" + _this24.key + "&state=" + encodeURIComponent(JSON.stringify(state)) + _this24.oAuthParams(), "popup", "width=" + popup.width + ",height=" + popup.height + ",left=" + popup.left + ",top=" + popup.top);
            if (!_this24.authPopup) {
              var message = "Login popup was blocked! Please check your popup blocker settings.";
              _this24.mavo.error(message);
              reject(Error(message));
            }
            // The popup posts the token back via postMessage
            addEventListener("message", function (evt) {
              if (evt.source === _this24.authPopup) {
                if (evt.data.backend == _this24.id) {
                  _this24.accessToken = localStorage["mavo:" + id + "token"] = evt.data.token;
                }
                if (!_this24.accessToken) {
                  reject(Error("Authentication error"));
                }
                resolve(_this24.accessToken);
                // Log in to other similar backends that are logged out
                for (var appid in Mavo.all) {
                  var storage = Mavo.all[appid].primaryBackend;
                  if (storage && storage.id === _this24.id && storage !== _this24 && !storage.isAuthenticated()) {
                    storage.login(true);
                  }
                }
              }
            });
          }
        });
      });
    },
    /**
     * oAuth logout helper
     */
    oAuthLogout: function oAuthLogout() {
      if (this.isAuthenticated()) {
        var id = this.id.toLowerCase();
        localStorage.removeItem("mavo:" + id + "token");
        delete this.accessToken;
        // Drop write permissions, re-enable login
        this.permissions.off(["edit", "add", "delete", "save"]).on("login");
        $.fire(this.mavo.element, "mv-logout", { backend: this });
      }
      return Promise.resolve();
    },
    static: {
      // Return the appropriate backend(s) for this url
      create: function create(url, o, type, existing) {
        var Backend;
        if (type) {
          Backend = Mavo.Functions.get(_, type);
        }
        if (url && !Backend) {
          Backend = _.types.filter(function (Backend) {
            return Backend.test(url);
          })[0] || _.Remote;
        }
        // Can we re-use the existing object perhaps?
        if (Backend && existing && existing.constructor === Backend && existing.constructor.prototype.hasOwnProperty("update")) {
          existing.update(url, o);
          return existing;
        }
        return Backend ? new Backend(url, o) : null;
      },
      types: [],
      // Register a backend class under its prototype id and in the type list
      register: function register(Class) {
        _[Class.prototype.id] = Class;
        _.types.push(Class);
        return Class;
      }
    }
  });
  /**
   * Save in an HTML element
   */
  _.register($.Class({
    id: "Element",
    extends: _,
    constructor: function constructor() {
      this.permissions.on(["read", "edit", "save"]);
    },
    update: function update(url, o) {
      this.super.update.call(this, url, o);
      // The source is a fragment selector ("#id"); create the element if missing
      this.element = $(this.source) || $.create("script", {
        type: "application/json",
        id: this.source.slice(1),
        inside: document.body
      });
    },
    get: function get() {
      return Promise.resolve(this.element.textContent);
    },
    put: function put(serialized) {
      return Promise.resolve(this.element.textContent = serialized);
    },
    static: {
      test: function test(url) {
        return url.indexOf("#") === 0;
      }
    }
  }));
  // Load from a remote URL, no save
  _.register($.Class({
    id: "Remote",
    extends: _,
    constructor: function constructor() {
      this.permissions.on("read");
    },
    static: {
      // Never matches by test(); used only as the fallback in _.create()
      test: function test(url) {
        return false;
      }
    }
  }));
  // Save in localStorage
  _.register($.Class({
    extends: _,
    id: "Local",
    constructor: function constructor() {
      this.permissions.on(["read", "edit", "save"]);
      this.key = this.mavo.id;
    },
    // Rejects when no data has been stored under this key yet
    get: function get() {
      return Promise[this.key in localStorage ? "resolve" : "reject"](localStorage[this.key]);
    },
    put: function put(serialized) {
      if (!serialized) {
        delete localStorage[this.key];
      } else {
        localStorage[this.key] = serialized;
      }
      return Promise.resolve(serialized);
    },
    static: {
      test: function test(value) {
        return value == "local";
      }
    }
  }));
})(Bliss, Bliss.$);
(function ($, $$) {
  // Mavo.Formats — pluggable (de)serialization formats used by backends
  var _ = Mavo.Formats = {};
  var base = _.Base = $.Class({
    abstract: true,
    constructor: function constructor(backend) {
      this.backend = backend;
    },
    proxy: {
      "mavo": "backend"
    },
    // So that child classes can only override the static methods if they don't
    // need access to any instance variables.
    parse: function parse(content) {
      return this.constructor.parse(content, this);
    },
    stringify: function stringify(data) {
      return this.constructor.stringify(data, this);
    },
    static: {
      // Identity defaults; concrete formats override these
      parse: function parse(serialized) {
        return Promise.resolve(serialized);
      },
      stringify: function stringify(data) {
        return Promise.resolve(data);
      },
      extensions: [],
      dependencies: [],
      // Load any external libraries this format depends on
      ready: function ready() {
        return Promise.all(this.dependencies.map(function (d) {
          return $.include(d.test(), d.url);
        }));
      }
    }
  });
  var json = _.JSON = $.Class({
    extends: _.Base,
    static: {
      parse: function parse(serialized) {
        return Promise.resolve(serialized ? JSON.parse(serialized) : null);
      },
      stringify: function stringify(data) {
        return Promise.resolve(Mavo.toJSON(data));
      },
      extensions: [".json", ".jsonld"]
    }
  });
  // Plain text: maps the file to/from the app's first primitive property
  var text = _.Text = $.Class({
    extends: _.Base,
    constructor: function constructor(backend) {
      this.property = this.mavo.root.getNames("Primitive")[0];
    },
    static: {
      extensions: [".txt"],
      parse: function parse(serialized, me) {
        return Promise.resolve(_defineProperty({}, me ? me.property : "content", serialized));
      },
      stringify: function stringify(data, me) {
        return Promise.resolve(data[me ? me.property : "content"]);
      }
    }
  });
  // CSV/TSV via Papa Parse, loaded on demand from a CDN
  var csv = _.CSV = $.Class({
    extends: _.Base,
    constructor: function constructor(backend) {
      this.property = this.mavo.root.getNames("Collection")[0];
      this.options = $.extend({}, _.CSV.defaultOptions);
    },
    static: {
      extensions: [".csv", ".tsv"],
      defaultOptions: {
        header: true,
        dynamicTyping: true,
        skipEmptyLines: true
      },
      dependencies: [{
        test: function test() {
          return self.Papa;
        },
        url: "https://cdnjs.cloudflare.com/ajax/libs/PapaParse/4.1.4/papaparse.min.js"
      }],
      ready: base.ready,
      parse: function parse(serialized, me) {
        return csv.ready().then(function () {
          var data = Papa.parse(serialized, csv.defaultOptions);
          var property = me ? me.property : "content";
          if (me) {
            // Get delimiter & linebreak for serialization
            me.options.delimiter = data.meta.delimiter;
            me.options.linebreak = data.meta.linebreak;
          }
          if (data.meta.aborted) {
            throw data.meta.errors.pop();
          }
          return _defineProperty({}, property, data.data);
        });
      },
      stringify: function stringify(data, me) {
        return csv.ready().then(function () {
          var property = me ? me.property : "content";
          var options = me ? me.options : csv.defaultOptions;
          return Papa.unparse(data[property], options);
        });
      }
    }
  });
  // Pick a format: pass through an object, look up by id string,
  // or infer from the URL's file extension (default: JSON)
  Object.defineProperty(_, "create", {
    value: function value(format, backend) {
      if (format && (typeof format === "undefined" ? "undefined" : _typeof(format)) === "object") {
        return format;
      }
      if (typeof format === "string") {
        // Search by id
        format = format.toLowerCase();
        for (var id in _) {
          var Format = _[id];
          if (id.toLowerCase() == format) {
            return new Format(backend);
          }
        }
      }
      if (!format) {
        var url = backend.url ? backend.url.pathname : backend.source;
        var extension = Mavo.match(url, /\.\w+$/) || ".json";
        var Format = _.JSON;
        for (var id in _) {
          if (_[id].extensions.indexOf(extension) > -1) {
            // Do not return match, as we may find another match later
            // and last match wins
            Format = _[id];
          }
        }
        return new Format(backend);
      }
    }
  });
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.Node = $.Class({
abstract: true,
// Build a node for `element` in app `mavo`.
// `options` may include: group, template, collection, plus any extra members.
constructor: function constructor(element, mavo) {
  var _this25 = this;
  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  if (!element || !mavo) {
    throw new Error("Mavo.Node constructor requires an element argument and a mavo object");
  }
  var env = { context: this, options: options };
  // Set these first, for debug reasons
  this.uid = ++_.maxId;
  this.nodeType = this.nodeType;
  this.property = null;
  this.element = element;
  $.extend(this, env.options);
  // One element can host several nodes; track all of them in _.all
  _.all.set(element, [].concat(_toConsumableArray(_.all.get(this.element) || []), [this]));
  this.mavo = mavo;
  this.group = this.parentGroup = env.options.group;
  this.template = env.options.template;
  this.alias = this.element.getAttribute("mv-alias");
  if (this.template) {
    this.template.copies.push(this);
  } else {
    // First (or only) of its kind
    this.copies = [];
  }
  // Template copies inherit property/type instead of re-parsing the element
  if (!this.fromTemplate("property", "type")) {
    this.property = _.getProperty(element);
    this.type = Mavo.Group.normalize(element);
    this.storage = this.element.getAttribute("mv-storage");
  }
  this.modes = this.element.getAttribute("mv-mode");
  Mavo.hooks.run("node-init-start", env);
  this.mode = Mavo.getStyle(this.element, "--mv-mode") || "read";
  this.collection = env.options.collection;
  if (this.collection) {
    // This is a collection item
    this.group = this.parentGroup = this.collection.parentGroup;
  }
  // Must run before collections have a marker which messes up paths
  var template = this.template;
  if (template && template.expressions) {
    // We know which expressions we have, don't traverse again
    this.expressions = template.expressions.map(function (et) {
      return new Mavo.DOMExpression({
        template: et,
        item: _this25,
        mavo: _this25.mavo
      });
    });
  }
  if (this instanceof Mavo.Group || this.collection) {
    // Handle mv-value
    // TODO integrate with the code in Primitive that decides whether this is a computed property
    var et = Mavo.DOMExpression.search(this.element).filter(function (et) {
      return et.originalAttribute == "mv-value";
    })[0];
    if (et) {
      // mv-value makes the node computed: read-only and unsaved by default
      et.mavoNode = this;
      this.expressionText = et;
      this.storage = this.storage || "none";
      this.modes = "read";
      if (this.collection) {
        this.collection.expressions = [].concat(_toConsumableArray(this.collection.expressions || []), [et]);
        et.mavoNode = this.collection;
        this.collection.storage = this.collection.storage || "none";
        this.collection.modes = "read";
      }
    }
  }
  Mavo.hooks.run("node-init-end", env);
},
// Whether the node is currently in edit mode
get editing() {
  return this.mode == "edit";
},
// The root node has no property name
get isRoot() {
  return !this.property;
},
// Human-readable name derived from property (or type as fallback)
get name() {
  return Mavo.Functions.readable(this.property || this.type).toLowerCase();
},
// False when mv-storage="none", i.e. the node is not persisted
get saved() {
  return this.storage !== "none";
},
// The logical parent: owning collection if any, else the parent group
get parent() {
  return this.collection || this.parentGroup;
},
/**
 * Runs after the constructor is done (including the constructor of the inheriting class), synchronously
 */
postInit: function postInit() {
  // mv-mode="edit" means the node starts out editable
  if (this.modes == "edit") {
    this.edit();
  }
},
// Tear down: unregister from the template, destroy expressions and item bar
destroy: function destroy() {
  if (this.template) {
    Mavo.delete(this.template.copies, this);
  }
  if (this.expressions) {
    this.expressions.forEach(function (expression) {
      return expression.destroy();
    });
  }
  if (this.itembar) {
    this.itembar.destroy();
  }
},
// Base implementation: only handles the null case; subclasses
// (Group/Primitive/Collection) return actual data otherwise
getData: function getData() {
  var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
  if (this.isDataNull(o)) {
    return o.forceObjects ? Mavo.objectify(null) : null;
  }
},
// Data counts as null when the node is deleted, or unsaved and not live.
// Plugins may override the result via the unit-isdatanull hook.
isDataNull: function isDataNull(o) {
  var env = {
    context: this,
    options: o,
    result: this.deleted || !this.saved && !o.live
  };
  Mavo.hooks.run("unit-isdatanull", env);
  return env.result;
},
/**
 * Execute a callback on every node of the Mavo tree
 * If callback returns (strict) false, walk stops.
 * @param callback {Function}
 * @param path {Array} Initial path. Mostly used internally.
 * @param o {Object} Options:
 * - descentReturn {Boolean} If callback returns false, just don't descend
 * @return false if was stopped via a false return value, true otherwise
 */
walk: function walk(callback) {
  var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
  var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var walker = function walker(obj, path) {
    var ret = callback(obj, path);
    if (ret !== false) {
      for (var i in obj.children) {
        var _node2 = obj.children[i];
        if (_node2 instanceof Mavo.Node) {
          // NOTE: this `var ret` hoists to the same variable as the outer one,
          // so the final `return ret !== false` reflects the last descent's
          // result rather than only the callback's — preserved as-is.
          var ret = walker.call(_node2, _node2, [].concat(_toConsumableArray(path), [i]));
          if (ret === false && !o.descentReturn) {
            return false;
          }
        }
      }
    }
    return ret !== false;
  };
  return walker(this, path);
},
walkUp: function walkUp(callback) {
var group = this;
while (group = group.parentGroup) {
var ret = callback(group);
if (ret !== undefined) {
return ret;
}
}
},
// Enter edit mode. Setting this.mode goes through a live setter which may
// veto the change (e.g. due to mv-mode); if it didn't stick, return false.
edit: function edit() {
  this.mode = "edit";
  if (this.mode != "edit") {
    return false;
  }
  $.fire(this.element, "mv-edit", {
    mavo: this.mavo,
    node: this
  });
  Mavo.hooks.run("node-edit-end", this);
},
// Leave edit mode, reverting to the mode inherited from the parent element.
// Returns false if the mode change was vetoed.
done: function done() {
  this.mode = Mavo.getStyle(this.element.parentNode, "--mv-mode") || "read";
  if (this.mode != "read") {
    return false;
  }
  // Drop edit-time event listeners and notify listeners & children
  $.unbind(this.element, ".mavo:edit");
  $.fire(this.element, "mv-done", {
    mavo: this.mavo,
    node: this
  });
  this.propagate("done");
  Mavo.hooks.run("node-done-end", this);
},
// Apply `callback` to every child Mavo node: call it as a function,
// or — when given a string — invoke the child's method of that name.
propagate: function propagate(callback) {
  for (var i in this.children) {
    var _node3 = this.children[i];
    if (_node3 instanceof Mavo.Node) {
      if (typeof callback === "function") {
        callback.call(_node3, _node3);
      } else if (callback in _node3) {
        _node3[callback]();
      }
    }
  }
},
// Method names that are automatically propagated to children
propagated: ["save", "destroy"],
toJSON: Mavo.prototype.toJSON,
fromTemplate: function fromTemplate() {
var _this26 = this;
if (this.template) {
for (var _len4 = arguments.length, properties = Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
properties[_key4] = arguments[_key4];
}
properties.forEach(function (property) {
return _this26[property] = _this26.template[property];
});
}
return !!this.template;
},
// Render (possibly partial, per this.inPath) data on this node
render: function render(data) {
  this.oldData = this.data;
  this.data = data;
  data = Mavo.subset(data, this.inPath);
  var env = { context: this, data: data };
  Mavo.hooks.run("node-render-start", env);
  if (this.nodeType != "Collection" && Array.isArray(data)) {
    // We are rendering an array on a singleton, what to do?
    var properties;
    if (this.isRoot && (properties = this.getNames("Collection")).length === 1) {
      // If it's root with only one collection property, render on that property
      env.data = _defineProperty({}, properties[0], env.data);
    } else {
      // Otherwise, render first item
      this.inPath.push("0");
      env.data = env.data[0];
    }
  }
  // Rendering while in edit mode would clobber editor state,
  // so temporarily toggle out of it
  if (this.editing) {
    this.done();
    this.dataRender(env.data);
    this.edit();
  } else {
    this.dataRender(env.data);
  }
  this.save();
  Mavo.hooks.run("node-render-end", env);
},
// Fire an mv-change event describing a data mutation on this node.
// `o` may carry extra event detail (e.g. element, index).
dataChanged: function dataChanged(action) {
  var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  $.fire(o.element || this.element, "mv-change", $.extend({
    property: this.property,
    action: action,
    mavo: this.mavo,
    node: this
  }, o));
},
toString: function toString() {
return "#" + this.uid + ": " + this.nodeType + " (" + this.property + ")";
},
getClosestCollection: function getClosestCollection() {
var closestItem = this.closestItem;
return closestItem ? closestItem.collection : null;
},
getClosestItem: function getClosestItem() {
if (this.collection && this.collection.mutable) {
return this;
}
return this.parentGroup ? this.parentGroup.closestItem : null;
},
/**
* Check if this unit is either deleted or inside a deleted group
*/
isDeleted: function isDeleted() {
var ret = this.deleted;
if (this.deleted) {
return true;
}
return !!this.parentGroup && this.parentGroup.isDeleted();
},
// Resolve a property name from this node
// Search order: descendants first, then ancestors (walking up), then
// anywhere in the entire Mavo tree.
resolve: function resolve(property) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
// First look in descendants
var ret = this.find(property, o);
if (ret === undefined) {
// Still not found, look in ancestors
ret = this.walkUp(function (group) {
if (group.property == property) {
return group;
}
if (property in group.children) {
return group.children[property];
}
});
}
if (ret === undefined) {
// Still not found, look anywhere
ret = this.mavo.root.find(property, o);
}
return ret;
},
// Wrap data in a Proxy that resolves special properties ($index, $next,
// $previous), relative property references, and references to other
// Mavo apps. Resolved values are memoized in this.proxyCache.
// Falls back to returning the raw data where Proxy is unsupported.
relativizeData: self.Proxy ? function (data) {
var _this27 = this;
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : { live: true };
return new Proxy(data, {
get: function get(data, property, proxy) {
if (property in data) {
return data[property];
}
// Checking if property is in proxy might add it to the cache
if (property in proxy && property in _this27.proxyCache) {
return _this27.proxyCache[property];
}
},
has: function has(data, property) {
if (property in data || property in _this27.proxyCache) {
return true;
}
// Property does not exist, look for it elsewhere
// Special values
switch (property) {
case "$index":
_this27.proxyCache[property] = _this27.index || 0;
return true; // if index is 0 it's falsy and has would return false!
case "$next":
case "$previous":
if (_this27.closestCollection) {
_this27.proxyCache[property] = _this27.closestCollection.getData(options)[_this27.index + (property == "$next" ? 1 : -1)];
return true;
}
_this27.proxyCache[property] = null;
return false;
}
// First look in descendants
var ret = _this27.resolve(property);
if (ret !== undefined) {
if (Array.isArray(ret)) {
// Multiple matches: serialize each, dropping null values.
ret = ret.map(function (item) {
return item.getData(options);
}).filter(function (item) {
return item !== null;
});
} else if (ret instanceof Mavo.Node) {
ret = ret.getData(options);
}
_this27.proxyCache[property] = ret;
return true;
}
// Does it reference another Mavo?
if (property in Mavo.all && isNaN(property) && Mavo.all[property].root) {
return _this27.proxyCache[property] = Mavo.all[property].root.getData(options);
}
return false;
},
set: function set(data) {
var property = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "";
var value = arguments[2];
console.warn("You cannot set data via expressions. Attempt to set " + property.toString() + " to " + value + " ignored.");
return value;
}
});
} : function (data) {
return data;
},
// Create the object used as live (expression-facing) data for this node,
// tagging it with back-references to the node and its relativizing proxy.
createLiveData: function createLiveData() {
var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
this.liveData = obj;
this.liveData[Mavo.toNode] = this;
this.liveData[Mavo.toProxy] = this.relativizeData(this.liveData);
return this.liveData;
},
// Return this node's path relative to the given ancestor node,
// i.e. this.path with the shared prefix removed.
pathFrom: function pathFrom(node) {
var path = this.path;
var nodePath = node.path;
// Advance i past the common prefix (loop body intentionally empty;
// i stays in scope after the loop thanks to var hoisting).
for (var i = 0; i < path.length && nodePath[i] == path[i]; i++) {}
return path.slice(i);
},
getDescendant: function getDescendant(path) {
return path.reduce(function (acc, cur) {
return acc.children[cur];
}, this);
},
/**
* Get same node in other item in same collection
* E.g. for same node in the next item, use an offset of -1
*/
getCousin: function getCousin(offset) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (!this.closestCollection) {
return null;
}
var collection = this.closestCollection;
var distance = Math.abs(offset);
var direction = offset < 0 ? -1 : 1;
if (collection.length < distance + 1) {
return null;
}
var index = this.closestItem.index + offset;
if (o.wrap) {
index = Mavo.wrap(index, collection.length);
}
// Skip over deleted items, moving further in the same direction.
// (item is intentionally read after the loop — var hoisting.)
for (var i = 0; i < collection.length; i++) {
var ind = index + i * direction;
ind = o.wrap ? Mavo.wrap(ind, collection.length) : ind;
var item = collection.children[ind];
if (!item || !item.isDeleted()) {
break;
}
}
if (!item || item.isDeleted() || item == this.closestItem) {
return null;
}
if (this.collection) {
// This node is the collection item itself: the cousin is the item.
return item;
}
// Otherwise descend into the cousin item along the same relative path.
var relativePath = this.pathFrom(this.closestItem);
return item.getDescendant(relativePath);
},
contains: function contains(node) {
do {
if (node === this) {
return true;
}
node = node.parent;
} while (node);
return false;
},
// Lazily evaluated (memoized on first access) properties.
lazy: {
closestCollection: function closestCollection() {
return this.getClosestCollection();
},
closestItem: function closestItem() {
return this.getClosestItem();
},
// Are we only rendering and editing a subset of the data?
inPath: function inPath() {
var attribute = this.nodeType == "Collection" ? "mv-multiple-path" : "mv-path";
return (this.element.getAttribute(attribute) || "").split("/").filter(function (p) {
return p.length;
});
},
// Set of all property names used in this subtree (shared with the
// collection template when present).
properties: function properties() {
if (this.template) {
return this.template.properties;
}
var ret = new Set(this.property && [this.property]);
if (this.nodeType == "Group") {
for (var property in this.children) {
ret = Mavo.union(ret, this.children[property].properties);
}
} else if (this.nodeType == "Collection") {
ret = Mavo.union(ret, this.itemTemplate.properties);
}
return ret;
},
// Memoization store for relativizeData() proxy lookups.
proxyCache: function proxyCache() {
return {};
}
},
// Live properties: setters with side effects, run on every assignment.
live: {
store: function store(value) {
$.toggleAttribute(this.element, "mv-storage", value);
},
unsavedChanges: function unsavedChanges(value) {
// Unsaved changes only make sense on saved, currently-edited nodes.
if (value && (!this.saved || !this.editing)) {
value = false;
}
this.element.classList.toggle("mv-unsaved-changes", value);
return value;
},
mode: function mode(value) {
var _this28 = this;
if (this._mode != value) {
// Is it allowed?
if (this.modes && value != this.modes) {
value = this.modes;
}
// If we don't do this, setting the attribute below will
// result in infinite recursion
this._mode = value;
if (!(this instanceof Mavo.Collection) && [null, "", "read", "edit"].indexOf(this.element.getAttribute("mv-mode")) > -1) {
// If attribute is not one of the recognized values, leave it alone
var set = this.modes || value == "edit";
Mavo.Observer.sneak(this.mavo.modeObserver, function () {
$.toggleAttribute(_this28.element, "mv-mode", value, set);
});
}
return value;
}
},
modes: function modes(value) {
// Only "read"/"edit" (or empty) are valid; anything else is dropped.
if (value && value != "read" && value != "edit") {
return null;
}
this._modes = value;
if (value && this.mode != value) {
this.mode = value;
}
},
deleted: function deleted(value) {
var _this29 = this;
this.element.classList.toggle("mv-deleted", value);
if (value) {
// Soft delete, store element contents in a fragment
// and replace them with an undo prompt.
this.elementContents = document.createDocumentFragment();
$$(this.element.childNodes).forEach(function (node) {
_this29.elementContents.appendChild(node);
});
$.contents(this.element, [{
tag: "button",
className: "mv-close mv-ui",
textContent: "×",
events: {
"click": function click(evt) {
$.remove(this.parentNode);
}
}
}, "Deleted " + this.name, {
tag: "button",
className: "mv-undo mv-ui",
textContent: "Undo",
events: {
"click": function click(evt) {
return _this29.deleted = false;
}
}
}]);
this.element.classList.remove("mv-highlight");
this.itembar.remove();
} else if (this.deleted) {
// Undelete
this.element.textContent = "";
this.element.appendChild(this.elementContents);
// otherwise expressions won't update because this will still seem as deleted
// Alternatively, we could fire datachange with a timeout.
this._deleted = false;
this.dataChanged("undelete");
this.itembar.add();
}
},
path: {
get: function get() {
// Property path from the root group down to this node.
var path = this.parent ? this.parent.path : [];
return this.property ? [].concat(_toConsumableArray(path), [this.property]) : path;
}
}
},
static: {
// Counter for unique node ids (presumably incremented at node
// construction, outside this chunk — TODO confirm).
maxId: 0,
// element → [nodes] registry of all created Mavo nodes.
all: new WeakMap(),
// Factory: pick the right node type (Collection, Group or Primitive)
// for the given element.
create: function create(element, mavo) {
var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
if (Mavo.is("multiple", element) && !o.collection) {
return new Mavo.Collection(element, mavo, o);
}
return new Mavo[Mavo.is("group", element) ? "Group" : "Primitive"](element, mavo, o);
},
/**
* Get & normalize property name, if exists
*/
getProperty: function getProperty(element) {
var property = element.getAttribute("property") || element.getAttribute("itemprop");
if (!property) {
if (element.hasAttribute("property")) {
// property used without a value
property = element.name || element.id || element.classList[0];
} else if (element.matches(Mavo.selectors.multiple)) {
// mv-multiple used without property, generate name
property = element.getAttribute("mv-multiple") || "collection";
}
}
if (property) {
// Normalize: write the resolved name back to the property attribute.
element.setAttribute("property", property);
}
return property;
},
get: function get(element, prioritizePrimitive) {
var nodes = (_.all.get(element) || []).filter(function (node) {
return !(node instanceof Mavo.Collection);
});
if (nodes.length < 2 || !prioritizePrimitive) {
return nodes[0];
}
if (nodes[0] instanceof Mavo.Group) {
return node[1];
}
},
// Walk up the DOM until an element with an associated node is found.
getClosest: function getClosest(element, prioritizePrimitive) {
var node;
do {
node = _.get(element, prioritizePrimitive);
} while (!node && (element = element.parentNode));
return node;
},
/**
* Get all properties that are inside an element but not nested into other properties
*/
children: function children(element) {
var ret = Mavo.Node.get(element);
if (ret) {
// element is a Mavo node
return [ret];
}
ret = $$(Mavo.selectors.property, element).map(function (e) {
return Mavo.Node.get(e);
}).filter(function (e) {
return !element.contains(e.parentGroup.element);
}) // drop nested properties
.map(function (e) {
// Collection items are represented by their collection.
return e.collection || e;
});
return Mavo.Functions.unique(ret);
}
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.Group = $.Class({
extends: Mavo.Node,
nodeType: "Group",
constructor: function constructor(element, mavo, o) {
var _this30 = this;
this.children = {};
this.group = this;
Mavo.hooks.run("group-init-start", this);
// Should this element also create a primitive?
if (Mavo.Primitive.getValueAttribute(this.element)) {
var obj = this.children[this.property] = new Mavo.Primitive(this.element, this.mavo, { group: this });
}
// Create Mavo objects for all properties in this group (primitives or groups),
// but not properties in descendant groups (they will be handled by their group)
var properties = $$(Mavo.selectors.property + ", " + Mavo.selectors.multiple, this.element).filter(function (element) {
return _this30.element === element.parentNode.closest(Mavo.selectors.group);
});
var propertyNames = properties.map(function (element) {
return Mavo.Node.getProperty(element);
});
properties.forEach(function (element, i) {
var property = propertyNames[i];
var template = _this30.template ? _this30.template.children[property] : null;
var options = { template: template, group: _this30 };
if (_this30.children[property]) {
// Already exists, must be a collection
var collection = _this30.children[property];
collection.add(element);
collection.mutable = collection.mutable || Mavo.is("multiple", element);
} else if (propertyNames.indexOf(property) != propertyNames.lastIndexOf(property)) {
// There are duplicates, so this should be a collection.
_this30.children[property] = new Mavo.Collection(element, _this30.mavo, options);
} else {
// Normal case
_this30.children[property] = Mavo.Node.create(element, _this30.mavo, options);
}
});
this.childrenNames = Object.keys(this.children);
var vocabElement = (this.isRoot ? this.element.closest("[vocab]") : null) || this.element;
this.vocab = vocabElement.getAttribute("vocab");
this.postInit();
Mavo.hooks.run("group-init-end", this);
},
get isRoot() {
return !this.property;
},
getNames: function getNames() {
var _this31 = this;
var type = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "Node";
return Object.keys(this.children).filter(function (p) {
return _this31.children[p] instanceof Mavo[type];
});
},
// Serialize this group. With o.live, returns the proxied live-data
// object used by expressions; otherwise returns plain stored data
// (with JSON-LD metadata and mv-path subsetting applied).
getData: function getData() {
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var env = {
context: this,
options: o,
data: this.super.getData.call(this, o)
};
if (env.data !== undefined) {
// Super method returned something
return env.data;
}
env.data = this.liveData;
if (this.childrenNames.length == 1 && this.childrenNames[0] == this.property) {
// {foo: {foo: 5}} should become {foo: 5}
var options = $.extend($.extend({}, env.options), { forceObjects: true });
env.data = this.children[this.property].getData(options);
} else {
for (var property in this.children) {
var obj = this.children[property];
if (obj.saved || env.options.live) {
var data = obj.getData(env.options);
}
if (env.options.live || obj.saved && Mavo.value(data) !== null) {
env.data[obj.property] = data;
} else {
delete env.data[obj.property];
}
}
}
if (env.options.live) {
// Fresh serialization invalidates memoized expression lookups.
this.proxyCache = {};
} else {
// Stored data again
// If storing, use the rendered data too
env.data = Mavo.subset(this.data, this.inPath, env.data);
if (!this.childrenNames.length && !this.isRoot) {
// Avoid {} in the data
env.data = null;
} else if (env.data && _typeof(env.data) === "object") {
// Add JSON-LD stuff
if (this.type && this.type != _.DEFAULT_TYPE) {
env.data["@type"] = this.type;
}
if (this.vocab) {
env.data["@context"] = this.vocab;
}
}
}
Mavo.hooks.run("node-getdata-end", env);
return (env.options.live ? env.data[Mavo.toProxy] : env.data) || env.data;
},
/**
* Search entire subtree for property, return relative value
* @return {Mavo.Node}
*/
find: function find(property) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (o.exclude === this) {
return;
}
if (this.property == property) {
return this;
}
if (property in this.children) {
// Direct child wins; it applies o.exclude itself.
return this.children[property].find(property, o);
}
if (!this.properties.has(property)) {
// Fast path: name not used anywhere in this subtree.
return;
}
var results = [],
returnArray,
ret;
for (var prop in this.children) {
ret = this.children[prop].find(property, o);
if (ret !== undefined) {
if (Array.isArray(ret)) {
// Collections return arrays; flatten and remember to keep
// returning an array even for a single flattened result.
results.push.apply(results, _toConsumableArray(ret));
returnArray = true;
} else {
results.push(ret);
}
}
}
return returnArray || results.length > 1 ? results : results[0];
},
// Enter edit mode on this group and all of its children; resolves
// when every child has switched.
edit: function edit() {
var _this32 = this;
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
if (this.super.edit.call(this) === false) {
return false;
}
return Promise.all(Object.keys(this.children).map(function (prop) {
return _this32.children[prop].edit(o);
}));
},
save: function save() {
this.unsavedChanges = false;
},
propagated: ["save", "import"],
// Do not call directly, call this.render() instead
dataRender: function dataRender(data) {
var _this33 = this;
if (!data) {
return;
}
// What if data is not an object?
if ((typeof data === "undefined" ? "undefined" : _typeof(data)) !== "object") {
var wasPrimitive = true;
// Data is a primitive, render it on this.property or failing that, any writable property
if (this.property in this.children) {
var property = this.property;
} else {
var type = $.type(data);
// Prefer writable primitives whose datatype matches the data.
var score = function score(prop) {
return (_this33.children[prop] instanceof Mavo.Primitive) + (_this33.children[prop].datatype == type);
};
var property = Object.keys(this.children).filter(function (p) {
return !_this33.children[p].expressionText;
}).sort(function (prop1, prop2) {
return score(prop1) - score(prop2);
}).reverse()[0];
}
data = _defineProperty({}, property, data);
this.data = Mavo.subset(this.data, this.inPath, data);
}
var copy; // to handle renaming
this.propagate(function (obj) {
var propertyData = data[obj.property];
if (obj.alias && data[obj.alias] !== undefined) {
copy = copy || $.extend({}, data);
propertyData = data[obj.alias];
}
obj.render(propertyData);
});
// Rename properties. This needs to be done separately to handle swapping.
if (copy) {
this.propagate(function (obj) {
if (obj.alias) {
data[obj.property] = copy[obj.alias];
if (!(obj.alias in _this33.children)) {
delete data[obj.alias];
}
}
});
}
if (!wasPrimitive) {
// Fire mv-change events for properties not in the template,
// since nothing else will and they can still be referenced in expressions
var oldData = Mavo.subset(this.oldData, this.inPath);
for (var property in data) {
if (!(property in this.children)) {
var value = data[property];
if ((typeof value === "undefined" ? "undefined" : _typeof(value)) != "object" && (!oldData || oldData[property] != value)) {
this.dataChanged("propertychange", { property: property });
}
}
}
}
this.createLiveData(data);
},
lazy: {
// Live-data object for expressions, created on first access.
liveData: function liveData() {
return this.createLiveData();
}
},
static: {
all: new WeakMap(),
// Default JSON-LD @type for groups.
DEFAULT_TYPE: "Item",
normalize: function normalize(element) {
// Get & normalize typeof name, if exists
if (Mavo.is("group", element)) {
var type = Mavo.getAttribute(element, "typeof", "itemtype") || _.DEFAULT_TYPE;
element.setAttribute("typeof", type);
return type;
}
return null;
}
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
// Mavo.Primitive: a leaf node holding a single value, stored either in
// an element attribute or in the element's content, with an optional
// editing widget.
var _ = Mavo.Primitive = $.Class({
extends: Mavo.Node,
nodeType: "Primitive",
constructor: function constructor(element, mavo, o) {
var _this34 = this;
// Collection items inherit config/attribute/editor info from their template.
if (!this.fromTemplate("config", "attribute", "templateValue", "originalEditor")) {
this.config = _.getConfig(element);
// Which attribute holds the data, if any?
// "null" or null for none (i.e. data is in content).
this.attribute = this.config.attribute;
}
this.datatype = this.config.datatype;
if ("modes" in this.config) {
// If modes are related to element type, this overrides everything
// because it means the other mode makes no sense for that element
this.modes = this.config.modes;
this.element.setAttribute("mv-mode", this.config.modes);
}
Mavo.hooks.run("primitive-init-start", this);
// Link primitive with its expressionText object
// We need to do this before any editing UI is generated
this.expressionText = this.expressionText || Mavo.DOMExpression.search(this.element, this.attribute);
if (this.expressionText && !this.expressionText.mavoNode) {
// Computed property
this.expressionText.primitive = this;
this.storage = this.storage || "none";
this.modes = "read";
this.element.setAttribute("aria-live", "polite");
}
/**
* Set up input widget
*/
// Linked widgets
if (!this.editor && this.element.hasAttribute("mv-edit")) {
if (!this.originalEditor) {
this.originalEditor = $(this.element.getAttribute("mv-edit"));
}
if (this.originalEditor) {
// Update editor if original mutates
// This means that expressions on mv-edit for individual collection items will not be picked up
if (!this.template) {
this.originalEditorObserver = new Mavo.Observer(this.originalEditor, "all", function (records) {
_this34.copies.concat(_this34).forEach(function (primitive) {
if (primitive.defaultSource == "editor") {
primitive.default = _this34.originalEditor.value;
}
if (primitive.editor) {
primitive.editor = _this34.originalEditor.cloneNode(true);
}
primitive.setValue(primitive.value, { force: true, silent: true });
});
});
}
}
}
// Nested widgets
if (!this.editor && !this.originalEditor && !this.attribute) {
this.editor = $$(this.element.children).filter(function (el) {
return el.matches(Mavo.selectors.formControl) && !el.matches(Mavo.selectors.property);
})[0];
if (this.editor) {
$.remove(this.editor);
}
}
// If no explicit datatype, adopt the editor's (number/boolean only).
var editorValue = this.editorValue;
if (!this.datatype && (typeof editorValue == "number" || typeof editorValue == "boolean")) {
this.datatype = typeof editorValue === "undefined" ? "undefined" : _typeof(editorValue);
}
if (this.config.init) {
this.config.init.call(this, this.element);
}
if (this.config.changeEvents) {
// Config-declared DOM events signal value changes on the element itself.
$.bind(this.element, this.config.changeEvents, function (evt) {
if (evt.target === _this34.element) {
_this34.value = _this34.getValue();
}
});
}
this.templateValue = this.getValue();
// Work out the default value and where it comes from:
// the mv-default attribute, the template markup, or a linked editor.
this._default = this.element.getAttribute("mv-default");
if (this.default === null) {
// no mv-default
this._default = this.modes ? this.templateValue : editorValue;
this.defaultSource = this.modes ? "template" : "editor";
} else if (this.default === "") {
// mv-default exists, no value, default is template value
this._default = this.templateValue;
this.defaultSource = "template";
} else {
// mv-default with value
this.defaultObserver = new Mavo.Observer(this.element, "mv-default", function (record) {
_this34.default = _this34.element.getAttribute("mv-default");
});
this.defaultSource = "attribute";
}
var keepTemplateValue = !this.template // not in a collection or first item
|| this.template.templateValue != this.templateValue // or different template value than first item
|| this.modes == "edit"; // or is always edited
if (this.default === undefined && keepTemplateValue) {
this.initialValue = this.templateValue;
} else {
this.initialValue = this.default;
}
if (this.initialValue === undefined) {
this.initialValue = this.emptyValue;
}
this.setValue(this.initialValue, { silent: true });
Mavo.setAttributeShy(this.element, "aria-label", this.label);
if (!this.attribute) {
Mavo.setAttributeShy(this.element, "mv-attribute", "none");
}
if (this.config.observer !== false) {
// Observe future mutations to this property, if possible
// Properties like input.checked or input.value cannot be observed that way
// so we cannot depend on mutation observers for everything :(
this.observer = new Mavo.Observer(this.element, this.attribute, function (records) {
if (_this34.observer.running && (_this34.attribute || !_this34.editing || _this34.config.subtree)) {
_this34.value = _this34.getValue();
}
}, { subtree: this.config.subtree, childList: this.config.subtree });
}
this.postInit();
Mavo.hooks.run("primitive-init-end", this);
},
// Current value of the editor widget (or the linked original editor);
// undefined when there is no editor.
get editorValue() {
var editor = this.editor || this.originalEditor;
if (editor) {
if (editor.matches(Mavo.selectors.formControl)) {
return _.getValue(editor, { datatype: this.datatype });
}
// if we're here, this.editor is an entire HTML structure
var output = $(Mavo.selectors.output + ", " + Mavo.selectors.formControl, editor);
if (output) {
return _.getValue(output);
}
}
},
set editorValue(value) {
// Element config may take over setting the editor value — except for
// booleans, which always go through the default form-control path here.
if (this.config.setEditorValue && this.datatype !== "boolean") {
return this.config.setEditorValue.call(this, value);
}
if (this.editor) {
if (this.editor.matches(Mavo.selectors.formControl)) {
_.setValue(this.editor, value, { config: this.editorDefaults });
} else {
// if we're here, this.editor is an entire HTML structure
var output = $(Mavo.selectors.output + ", " + Mavo.selectors.formControl, this.editor);
if (output) {
_.setValue(output, value);
}
}
}
},
destroy: function destroy() {
this.super.destroy.call(this);
this.defaultObserver && this.defaultObserver.destroy();
this.observer && this.observer.destroy();
},
// Serialize this primitive. With o.live, collection items are
// objectified so $index etc. resolve relative to them; otherwise
// mv-path subsetting is applied to the stored data.
getData: function getData() {
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var env = {
context: this,
options: o,
data: this.super.getData.call(this, o)
};
if (env.data === undefined) {
env.data = this.value;
if (env.data === "") {
env.data = null;
}
}
if (env.options.live) {
if (this.collection || o.forceObjects) {
env.data = Mavo.objectify(env.data, _defineProperty({}, Mavo.toNode, this));
env.data[Mavo.toProxy] = this.relativizeData(env.data);
if (this.collection) {
// Turn primitive collection items into objects, so we can have $index etc, and their property
// name etc resolve relative to them, not their parent group
env.data[this.property] = env.data;
}
Mavo.hooks.run("node-getdata-end", env);
this.proxyCache = {};
return env.data[Mavo.toProxy];
}
} else if (this.inPath.length) {
env.data = Mavo.subset(this.data, this.inPath, env.data);
}
Mavo.hooks.run("node-getdata-end", env);
return env.data;
},
// Run callback without triggering this primitive's mutation observer.
sneak: function sneak(callback) {
return Mavo.Observer.sneak(this.observer, callback);
},
save: function save() {
this.savedValue = this.value;
this.unsavedChanges = false;
},
// Called only the first time this primitive is edited
// Creates the editor widget (cloned, nested, or datatype default),
// wires its events, and sets up collection keyboard shortcuts.
initEdit: function initEdit() {
var _this35 = this;
if (!this.editor && this.originalEditor) {
this.editor = this.originalEditor.cloneNode(true);
}
if (!this.editor) {
// No editor provided, use default for element type
// Find default editor for datatype
var editor = this.config.editor;
if (!editor || this.datatype == "boolean") {
editor = Mavo.Elements.defaultConfig[this.datatype || "string"].editor;
}
this.editor = $.create($.type(editor) === "function" ? editor.call(this) : editor);
this.editorValue = this.value;
}
$.bind(this.editor, {
"input change": function inputChange(evt) {
_this35.value = _this35.editorValue;
},
"mv-change": function mvChange(evt) {
// Re-emit a nested Mavo's output change as an input event.
if (evt.property === "output") {
evt.stopPropagation();
$.fire(_this35.editor, "input");
}
}
});
var multiline = this.editor.matches("textarea");
if (!multiline) {
// Select all text on focus for single-line editors.
this.editor.addEventListener("focus", function (evt) {
_this35.editor.select && _this35.editor.select();
});
}
// Enter should go to the next item or insert a new one
if (!this.popup && this.closestCollection && this.editor.matches(Mavo.selectors.textInput)) {
this.editor.addEventListener("keydown", function (evt) {
if (evt.keyCode == 13 && _this35.closestCollection.editing && (evt.shiftKey || !multiline)) {
// Enter
var copy = _this35.getCousin(1);
if (!copy) {
// It's the last item, insert new if top-down
if (_this35.bottomUp) {
return;
}
var next = _this35.closestCollection.add();
_this35.closestCollection.editItem(next, { immediately: true });
}
copy = _this35.getCousin(1);
copy.edit({ immediately: true }).then(function () {
return copy.editor.focus();
});
if (multiline) {
evt.preventDefault();
}
} else if (evt.keyCode == 8 && (_this35.empty && _this35.collection || evt[Mavo.superKey])) {
// Backspace on empty primitive or Cmd/Ctrl + Backspace should delete item
_this35.closestCollection.delete(_this35.closestItem);
// Focus on sibling
var sibling = _this35.getCousin(1) || _this35.getCousin(-1);
if (sibling) {
sibling.edit({ immediately: true }).then(function () {
return sibling.editor.focus();
});
}
}
});
}
if ("placeholder" in this.editor) {
this.editor.placeholder = "(" + this.label + ")";
}
// Copy any mv-edit-* attributes from the element to the editor
var dataInput = /^mv-edit-/i;
$$(this.element.attributes).forEach(function (attribute) {
if (dataInput.test(attribute.name)) {
this.editor.setAttribute(attribute.name.replace(dataInput, ""), attribute.value);
}
}, this);
if (this.attribute || this.config.popup) {
this.popup = new Mavo.UI.Popup(this);
}
if (!this.popup) {
this.editor.classList.add("mv-editor");
}
// One-shot: clear the method so edit() won't run the setup again.
this.initEdit = null;
},
edit: function edit() {
var _this36 = this;
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
if (this.super.edit.call(this) === false) {
return false;
}
// Make element focusable, so it can actually receive focus
if (this.element.tabIndex === -1) {
Mavo.revocably.setAttribute(this.element, "tabindex", "0");
}
// Prevent default actions while editing
// e.g. following links etc
if (!this.modes) {
$.bind(this.element, "click.mavo:edit", function (evt) {
return evt.preventDefault();
});
}
this.preEdit = Mavo.defer(function (resolve) {
if (o.immediately) {
return resolve();
}
var timer;
var events = "click focus dragover dragenter".split(" ").map(function (e) {
return e + ".mavo:preedit";
}).join(" ");
$.bind(_this36.element, events, resolve);
}).then(function () {
return $.unbind(_this36.element, ".mavo:preedit");
});
if (this.config.edit) {
this.config.edit.call(this);
return;
}
return this.preEdit.then(function () {
_this36.sneak(function () {
// Actual edit
if (_this36.initEdit) {
_this36.initEdit();
}
if (_this36.popup) {
_this36.popup.prepare();
_this36.popup.show();
}
if (!_this36.attribute && !_this36.popup) {
if (_this36.editor.parentNode != _this36.element) {
_this36.editorValue = _this36.value;
_this36.element.textContent = "";
_this36.element.appendChild(_this36.editor);
}
if (!_this36.collection) {
if (document.activeElement === _this36.element) {
_this36.editor.focus();
}
Mavo.revocably.restoreAttribute(_this36.element, "tabindex");
}
}
});
});
}, // edit
done: function done() {
var _this37 = this;
if (this.super.done.call(this) === false) {
return false;
}
if ("preEdit" in this) {
$.unbind(this.element, ".mavo:preedit .mavo:edit");
}
// Swap the editor back out without triggering the mutation observer.
this.sneak(function () {
if (_this37.config.done) {
_this37.config.done.call(_this37);
return;
}
if (_this37.popup) {
_this37.popup.close();
} else if (!_this37.attribute && _this37.editor) {
$.remove(_this37.editor);
_.setValue(_this37.element, _this37.editorValue, {
config: _this37.config,
attribute: _this37.attribute,
datatype: _this37.datatype,
map: _this37.originalEditor || _this37.editor
});
}
});
if (!this.collection) {
Mavo.revocably.restoreAttribute(this.element, "tabindex");
}
},
// Render data on this primitive. Objects are reduced to a primitive
// value via Symbol.toPrimitive or a candidate property; undefined
// keeps the default/template value.
dataRender: function dataRender(data) {
if (data && (typeof data === "undefined" ? "undefined" : _typeof(data)) === "object") {
if (Symbol.toPrimitive in data) {
data = data[Symbol.toPrimitive]();
} else {
// Candidate properties to get a value from
var properties = Object.keys(data),
property;
if (properties.length === 1) {
property = properties[0];
} else {
var _arr = [this.property, "value", "content"];
for (var _i2 = 0; _i2 < _arr.length; _i2++) {
var p = _arr[_i2];
if (p in data) {
property = p;
break;
}
}
}
if (property) {
data = data[property];
this.inPath.push(property);
}
}
}
if (data === undefined) {
// New property has been added to the schema and nobody has saved since
if (!this.modes) {
this.value = this.closestCollection ? this.default : this.templateValue;
}
} else {
this.value = data;
}
},
find: function find(property) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (this.property == property && o.exclude !== this) {
return this;
}
},
/**
* Get value from the DOM
*/
getValue: function getValue(o) {
return _.getValue(this.element, {
config: this.config,
attribute: this.attribute,
datatype: this.datatype
});
},
lazy: {
// Human-readable label derived from the property name.
label: function label() {
return Mavo.Functions.readable(this.property);
},
// Datatype-appropriate "empty" value used when there is no default.
emptyValue: function emptyValue() {
switch (this.datatype) {
case "boolean":
return false;
case "number":
return 0;
}
return "";
},
editorDefaults: function editorDefaults() {
return this.editor && _.getConfig(this.editor);
}
},
// Set this primitive's value: normalize and cast it, sync the editor
// and the DOM, then update empty/unsaved state and fire mv-change
// (unless o.silent). Runs inside sneak() so the observer doesn't loop.
setValue: function setValue(value) {
var _this38 = this;
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
this.sneak(function () {
// Convert nulls and undefineds to empty string
value = value || value === 0 || value === false ? value : "";
var oldDatatype = _this38.datatype;
// If there's no datatype, adopt that of the value
if (!_this38.datatype && (typeof value == "number" || typeof value == "boolean")) {
_this38.datatype = typeof value === "undefined" ? "undefined" : _typeof(value);
}
value = _.safeCast(value, _this38.datatype);
if (!o.force && value == _this38._value && oldDatatype == _this38.datatype) {
// Do nothing if value didn't actually change, unless forced to
return value;
}
if (_this38.editor && document.activeElement != _this38.editor) {
// If external forces are changing the value (i.e. not the editor)
// and an editor is present, set its value to match
_this38.editorValue = value;
}
if (_this38.popup || !_this38.editor || _this38.editor !== document.activeElement) {
// Prevent loops
if (_this38.config.setValue) {
_this38.config.setValue.call(_this38, _this38.element, value);
} else if (!o.dataOnly) {
_.setValue(_this38.element, value, {
config: _this38.config,
attribute: _this38.attribute,
datatype: _this38.datatype,
map: _this38.originalEditor || _this38.editor
});
}
}
_this38.empty = !value && value !== 0;
_this38._value = value;
if (!o.silent) {
if (_this38.saved) {
_this38.unsavedChanges = _this38.mavo.unsavedChanges = true;
}
_this38.dataChanged("propertychange", { value: value });
}
});
return value;
},
dataChanged: function dataChanged() {
var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "propertychange";
var o = arguments[1];
return this.super.dataChanged.call(this, action, o);
},
live: {
// Live properties: these setters run (via $.Class) whenever the property is assigned.
// New default assigned: if the current value still tracks the old default, follow it.
default: function _default(value) {
if (this.value == this._default) {
this.value = value;
}
},
value: function value(_value3) {
return this.setValue(_value3);
},
datatype: function datatype(value) {
if (value !== this._datatype) {
// Booleans with no attribute adopt the default boolean attribute (content)
if (value == "boolean" && !this.attribute) {
this.attribute = Mavo.Elements.defaultConfig.boolean.attribute;
}
// Reflect the datatype on the element, except for the default "string"
$.toggleAttribute(this.element, "datatype", value, value && value !== "string");
}
},
empty: function empty(value) {
var hide = value && // is empty
!this.modes && // and supports both modes
!(this.attribute && $(Mavo.selectors.property, this.element)); // and has no property inside
this.element.classList.toggle("mv-empty", !!hide);
}
},
static: {
all: new WeakMap(),
getValueAttribute: function getValueAttribute(element) {
var config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : Mavo.Elements.search(element);
var ret = element.getAttribute("mv-attribute") || config.attribute;
if (!ret || ret === "null" || ret === "none") {
ret = null;
}
return ret;
},
/**
* Only cast if conversion is lossless:
* values that would lose information are returned unchanged.
*/
safeCast: function safeCast(value, datatype) {
// NOTE(review): existingType is computed but unused here — kept as-is.
var existingType = typeof value === "undefined" ? "undefined" : _typeof(value);
var cast = _.cast(value, datatype);
if (value === null || value === undefined) {
return value;
}
if (datatype == "boolean") {
if (value === "false" || value === 0 || value === "") {
return false;
}
if (value === "true" || value > 0) {
return true;
}
// Anything else (e.g. negative numbers, other strings) is returned uncast
return value;
}
if (datatype == "number") {
// Only numeric-looking strings (sign, digits, ".", "e") are converted
if (/^[-+]?[0-9.e]+$/i.test(value + "")) {
return cast;
}
return value;
}
return cast;
},
/**
* Cast to a different primitive datatype
*/
cast: function cast(value, datatype) {
switch (datatype) {
case "number":
return +value;
case "boolean":
return !!value;
case "string":
return value + "";
}
return value;
},
getValue: function getValue(element) {
var _ref4 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
config = _ref4.config,
attribute = _ref4.attribute,
datatype = _ref4.datatype;
// Read the element's current value per its config: custom getter,
// DOM property, attribute, or text/content fallback.
if (!config) {
config = _.getConfig(element, attribute);
}
// NOTE(review): the caller-supplied attribute/datatype are unconditionally
// overwritten by the config's values here, which also makes the
// `attribute == config.attribute` check below always true — confirm intended.
attribute = config.attribute;
datatype = config.datatype;
if (config.getValue && attribute == config.attribute) {
return config.getValue(element);
}
var ret;
if (attribute in element && _.useProperty(element, attribute)) {
// Returning properties (if they exist) instead of attributes
// is needed for dynamic elements such as checkboxes, sliders etc
ret = element[attribute];
} else if (attribute) {
ret = element.getAttribute(attribute);
} else {
// No attribute: prefer content="", fall back to text content
ret = element.getAttribute("content") || element.textContent || null;
}
return _.safeCast(ret, datatype);
},
getConfig: function getConfig(element, attribute, datatype) {
// Resolve the element config for (element, attribute, datatype),
// reading mv-attribute / datatype="" from the DOM when not provided.
if (attribute === undefined) {
attribute = element.getAttribute("mv-attribute") || undefined;
}
// "null"/"none" explicitly mean "no attribute, use content"
if (attribute == "null" || attribute == "none") {
attribute = null;
}
// Only trust the datatype attribute when it describes the value attribute
if (!datatype && attribute == _.getValueAttribute(element)) {
datatype = element.getAttribute("datatype") || undefined;
}
var config = Mavo.Elements.search(element, attribute, datatype);
if (config.attribute === undefined) {
config.attribute = attribute || null;
}
if (config.datatype === undefined) {
config.datatype = datatype;
}
return config;
},
setValue: function setValue(element, value) {
var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
// Write value onto element via (in order): config setValue hook,
// DOM property, attribute, or formatted text content.
if (element.nodeType === 1) {
if (!o.config) {
o.config = _.getConfig(element, o.attribute);
}
o.attribute = o.attribute !== undefined ? o.attribute : o.config.attribute;
o.datatype = o.datatype !== undefined ? o.datatype : o.config.datatype;
if (o.config.setValue && o.attribute == o.config.attribute) {
return o.config.setValue(element, value, o.attribute);
}
}
if (o.attribute) {
if (o.attribute in element && _.useProperty(element, o.attribute) && element[o.attribute] !== value) {
// Setting properties (if they exist) instead of attributes
// is needed for dynamic elements such as checkboxes, sliders etc
try {
// var-hoisted on purpose so both stay visible after the try block
var previousValue = element[o.attribute];
var newValue = element[o.attribute] = value;
} catch (e) {}
if (previousValue != newValue && o.config.changeEvents) {
// Simulate the events the control would fire on a user-driven change
o.config.changeEvents.split(/\s+/).forEach(function (type) {
return $.fire(element, type);
});
}
}
// Set attribute anyway, even if we set a property because when
// they're not in sync it gets really fucking confusing.
if (o.datatype == "boolean") {
if (value != element.hasAttribute(o.attribute)) {
$.toggleAttribute(element, o.attribute, value, value);
}
} else if (element.getAttribute(o.attribute) != value) {
// intentionally non-strict, e.g. "3." !== 3
element.setAttribute(o.attribute, value);
}
} else {
// No attribute: show a formatted version, keep the raw value in content=""
var presentational = _.format(value, o);
if (presentational !== value) {
element.textContent = presentational;
if (element.setAttribute) {
element.setAttribute("content", value);
}
} else {
element.textContent = value;
}
}
},
/**
* Set/get a property or an attribute?
* @return {Boolean} true to use a property, false to use the attribute
*/
useProperty: function useProperty(element, attribute) {
if (["href", "src"].indexOf(attribute) > -1) {
// URL properties resolve "" as location.href, fucking up emptiness checks
return false;
}
if (element.namespaceURI == "http://www.w3.org/2000/svg") {
// SVG has a fucked up DOM, do not use these properties
return false;
}
return true;
},
format: function format(value) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
// Produce a human-readable representation of value for display.
if (o.map && /^select$/i.test(o.map.nodeName)) {
// A <select> maps values to labels: show the matching option's text
for (var i = 0, option; option = o.map.options[i]; i++) {
if (option.value == value) {
return option.textContent;
}
}
}
if ($.type(value) === "number" || o.datatype == "number") {
// Don't locale-format numbers inside attributes or style/pre elements
var skipNumberFormatting = o.attribute || o.element && o.element.matches("style, pre");
if (!skipNumberFormatting) {
return _.formatNumber(value);
}
}
if (Array.isArray(value)) {
return value.map(_.format).join(", ");
}
if ($.type(value) === "object") {
// Oops, we have an object. Print something more useful than [object Object]
return Mavo.toJSON(value);
}
return value;
},
lazy: {
// Lazily create one shared locale-aware number formatter (max 2 fraction digits).
formatNumber: function formatNumber() {
var numberFormat = new Intl.NumberFormat(Mavo.locale, { maximumFractionDigits: 2 });
return function (value) {
if (value === Infinity || value === -Infinity) {
// Pretty print infinity
return value < 0 ? "-∞" : "∞";
}
return numberFormat.format(value);
};
}
}
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.UI.Popup = $.Class({
constructor: function constructor(primitive) {
var _this39 = this;
this.primitive = primitive;
// Need to be defined here so that this is what expected
this.position = function (evt) {
var bounds = _this39.primitive.element.getBoundingClientRect();
var x = bounds.left;
var y = bounds.bottom;
var pointDown = false;
if (_this39.element.offsetHeight) {
// Is in the DOM, check if it fits
_this39.height = _this39.element.getBoundingClientRect().height || _this39.height;
}
if (_this39.height + y + 20 > innerHeight) {
// Normal positioning means the popup would be cut off or too close to the edge, adjust
// Perhaps placing it above is better
if (bounds.top - _this39.height > 20) {
var pointDown = true;
y = bounds.top - _this39.height - 20;
} else {
// Nah, just raise it a bit
y = innerHeight - _this39.height - 20;
}
}
_this39.element.classList.toggle("mv-point-down", pointDown);
$.style(_this39.element, { top: y + "px", left: x + "px" });
};
this.element = $.create("div", {
className: "mv-popup",
hidden: true,
contents: {
tag: "fieldset",
contents: [{
tag: "legend",
textContent: this.primitive.label + ":"
}, this.editor]
},
events: {
keyup: function keyup(evt) {
if (evt.keyCode == 13 || evt.keyCode == 27) {
if (_this39.element.contains(document.activeElement)) {
_this39.primitive.element.focus();
}
evt.stopPropagation();
_this39.hide();
}
},
transitionend: this.position
}
});
// No point in having a dropdown in a popup
if (this.editor.matches("select")) {
this.editor.size = Math.min(10, this.editor.children.length);
}
},
show: function show() {
var _this40 = this;
// Display the popup: measure & position it while invisible, then un-hide
// after insertion so the CSS transition plays.
$.unbind([this.primitive.element, this.element], ".mavo:showpopup");
this.shown = true;
// Dismiss when focus or clicks land outside both the popup and its primitive
this.hideCallback = function (evt) {
if (!_this40.element.contains(evt.target) && !_this40.primitive.element.contains(evt.target)) {
_this40.hide();
}
};
// Temporarily disable transitions so the measurement un-hide is not visible
this.element.style.transition = "none";
this.element.removeAttribute("hidden");
this.position();
this.element.setAttribute("hidden", "");
this.element.style.transition = "";
document.body.appendChild(this.element);
setTimeout(function () {
_this40.element.removeAttribute("hidden");
}, 100); // trigger transition. rAF or timeouts < 100 don't seem to, oddly.
$.bind(document, "focus click", this.hideCallback, true);
// Keep the popup glued to the primitive while scrolling
window.addEventListener("scroll", this.position, { passive: true });
},
hide: function hide() {
var _this41 = this;
// Hide the popup, then remove it from the DOM once the transition has had time to run.
$.unbind(document, "focus click", this.hideCallback, true);
window.removeEventListener("scroll", this.position, { passive: true });
this.element.setAttribute("hidden", ""); // trigger transition
this.shown = false;
setTimeout(function () {
$.remove(_this41.element);
}, parseFloat(getComputedStyle(this.element).transitionDuration) * 1000 || 400); // TODO transition-duration could override this
},
prepare: function prepare() {
var _this42 = this;
$.bind(this.primitive.element, {
"click.mavo:edit": function clickMavoEdit(evt) {
_this42.show();
},
"keyup.mavo:edit": function keyupMavoEdit(evt) {
if ([13, 113].indexOf(evt.keyCode) > -1) {
// Enter or F2
_this42.show();
_this42.editor.focus();
}
}
});
},
// Tear the popup down entirely: hide it and drop all edit-related listeners.
close: function close() {
this.hide();
$.unbind(this.primitive.element, ".mavo:edit .mavo:preedit .mavo:showpopup");
},
// this.editor is forwarded to this.primitive.editor
proxy: {
"editor": "primitive"
}
});
})(Bliss, Bliss.$);
/**
* Configuration for different types of elements. Options:
* - attribute {String}
* - useProperty {Boolean}
* - datatype {"number"|"boolean"|"string"} Default is "string"
* - modes
* - editor {Object|Function}
* - setEditorValue temporary
* - edit
* - done
* - observe
* - default: If there is no attribute, can we use that rule to pick one?
 *
*/
(function ($, $$) {
var _ = Mavo.Elements = {};
Object.defineProperties(_, {
"register": {
value: function value(id, config) {
if (_typeof(arguments[0]) === "object") {
// Multiple definitions
for (var s in arguments[0]) {
_.register(s, arguments[0][s]);
}
return;
}
if (config.extend) {
var base = _[config.extend];
config = $.extend($.extend({}, base), config);
}
if (id.indexOf("@") > -1) {
var parts = id.split("@");
config.selector = config.selector || parts[0] || "*";
if (config.attribute === undefined) {
config.attribute = parts[1];
}
}
config.selector = config.selector || id;
config.id = id;
if (Array.isArray(config.attribute)) {
config.attribute.forEach(function (attribute) {
var o = $.extend({}, config);
o.attribute = attribute;
_[id + "@" + attribute] = o;
});
} else {
_[id] = config;
}
return _;
}
},
"search": {
value: function value(element, attribute, datatype) {
var matches = _.matches(element, attribute, datatype);
var lastMatch = matches[matches.length - 1];
if (lastMatch) {
return lastMatch;
}
var config = $.extend({}, _.defaultConfig[datatype || "string"]);
config.attribute = attribute === undefined ? config.attribute : attribute;
return config;
}
},
"matches": {
value: function value(element, attribute, datatype) {
var matches = [];
selectorloop: for (var id in _) {
var o = _[id];
// Passes attribute test?
var attributeMatches = attribute === undefined && o.default || attribute === o.attribute;
if (!attributeMatches) {
continue;
}
// Passes datatype test?
if (datatype !== undefined && datatype !== "string" && datatype !== o.datatype) {
continue;
}
// Passes selector test?
var selector = o.selector || id;
if (!element.matches(selector)) {
continue;
}
// Passes arbitrary test?
if (o.test && !o.test(element, attribute, datatype)) {
continue;
}
// All tests have passed
matches.push(o);
}
return matches;
}
},
isSVG: {
value: function value(e) {
return e.namespaceURI == "http://www.w3.org/2000/svg";
}
},
defaultConfig: {
// Fallback configs per datatype, used when no registered config matches.
value: {
"string": {
editor: { tag: "input" }
},
"number": {
editor: { tag: "input", type: "number" }
},
"boolean": {
attribute: "content",
editor: { tag: "input", type: "checkbox" }
}
}
}
});
_.register({
// hidden is a boolean attribute on any element
"@hidden": {
datatype: "boolean"
},
// SVG coordinate attributes are numeric
"@y": {
test: _.isSVG,
datatype: "number"
},
"@x": {
default: true,
test: _.isSVG,
datatype: "number"
},
"media": {
default: true,
selector: "img, video, audio",
attribute: "src",
editor: function editor() {
var _this43 = this;
var mainInput = $.create("input", {
"type": "url",
"placeholder": "http://example.com/image.png",
"className": "mv-output",
"aria-label": "URL to image"
});
if (this.mavo.uploadBackend && self.FileReader) {
var popup;
var type = this.element.nodeName.toLowerCase();
type = type == "img" ? "image" : type;
var path = this.element.getAttribute("mv-uploads") || type + "s";
var upload = function upload(file) {
var name = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : file.name;
if (!file || file.type.indexOf(type + "/") !== 0) {
return;
}
var tempURL = URL.createObjectURL(file);
_this43.sneak(function () {
return _this43.element.src = tempURL;
});
_this43.mavo.upload(file, path + "/" + name).then(function (url) {
// Backend claims image is uploaded, we should load it from remote to make sure everything went well
var attempts = 0;
var load = Mavo.rr(function () {
return Mavo.timeout(1000 + attempts * 500).then(function () {
attempts++;
_this43.element.src = url;
});
});
var cleanup = function cleanup() {
URL.revokeObjectURL(tempURL);
_this43.element.removeEventListener("load", onload);
_this43.element.removeEventListener("error", onload);
};
var onload = function onload(evt) {
if (_this43.element.src != tempURL) {
// Actual uploaded image has loaded, yay!
_this43.element.src = url;
cleanup();
}
};
var onerror = function onerror(evt) {
// Oops, failed. Put back temp URL and try again
if (attempts <= 10) {
_this43.sneak(function () {
return _this43.element.src = tempURL;
});
load();
} else {
// 11 + 0.5*10*11/2 = 38.5 seconds later, giving up
_this43.mavo.error(_this43.mavo._("cannot-load-uploaded-file") + " " + url);
cleanup();
}
};
mainInput.value = url;
_this43.element.addEventListener("load", onload);
_this43.element.addEventListener("error", onerror);
});
};
var uploadEvents = {
"paste": function paste(evt) {
var item = evt.clipboardData.items[0];
if (item.kind == "file" && item.type.indexOf(type + "/") === 0) {
// Is a file of the correct type, upload!
var defaultName = "pasted-" + type + "-" + Date.now() + "." + item.type.slice(6); // image, video, audio are all 5 chars
var name = prompt(_this43.mavo._("filename"), defaultName);
if (name === "") {
name = defaultName;
}
if (name !== null) {
upload(item.getAsFile(), name);
evt.preventDefault();
}
}
},
"drag dragstart dragend dragover dragenter dragleave drop": function dragDragstartDragendDragoverDragenterDragleaveDrop(evt) {
evt.preventDefault();
evt.stopPropagation();
},
"dragover dragenter": function dragoverDragenter(evt) {
popup.classList.add("mv-dragover");
_this43.element.classList.add("mv-dragover");
},
"dragleave dragend drop": function dragleaveDragendDrop(evt) {
popup.classList.remove("mv-dragover");
_this43.element.classList.remove("mv-dragover");
},
"drop": function drop(evt) {
upload(evt.dataTransfer.files[0]);
}
};
$.bind(this.element, uploadEvents);
return popup = $.create({
className: "mv-upload-popup",
contents: [mainInput, {
tag: "input",
type: "file",
"aria-label": "Upload image",
accept: type + "/*",
events: {
change: function change(evt) {
var file = evt.target.files[0];
if (!file) {
return;
}
upload(file);
}
}
}, {
className: "mv-tip",
innerHTML: "<strong>Tip:</strong> You can also drag & drop or paste!"
}],
events: uploadEvents
});
} else {
return mainInput;
}
}
},
"video, audio": {
attribute: ["autoplay", "buffered", "loop"],
datatype: "boolean"
},
"details": {
attribute: "open",
datatype: "boolean"
},
"a, link": {
default: true,
attribute: "href"
},
"input, select, button, textarea": {
attribute: "disabled",
datatype: "boolean"
},
"formControl": {
selector: "input",
default: true,
attribute: "value",
modes: "edit",
changeEvents: "input change",
edit: function edit() {},
done: function done() {},
init: function init() {
this.editor = this.element;
}
},
"select": {
extend: "formControl",
selector: "select",
subtree: true
},
"textarea": {
extend: "formControl",
selector: "textarea",
attribute: null,
getValue: function getValue(element) {
return element.value;
},
setValue: function setValue(element, value) {
return element.value = value;
}
},
"formNumber": {
extend: "formControl",
selector: "input[type=range], input[type=number]",
datatype: "number",
setValue: function setValue(element, value) {
element.value = value;
element.setAttribute("value", value);
var attribute = value > element.value ? "max" : "min";
if (!isNaN(value) && element.value != value && !Mavo.data(element, "boundObserver")) {
// Value out of bounds, maybe race condition? See #295
// Observe min/max attrs until user interaction or data change
var observer = new Mavo.Observer(element, attribute, function (r) {
element.value = value;
});
requestAnimationFrame(function () {
$.bind(element, "input mv-change", function handler() {
observer.destroy();
Mavo.data(element, "boundObserver", undefined);
$.unbind(element, "input mv-change", handler);
});
});
// Prevent creating same observer twice
Mavo.data(element, "boundObserver", observer);
}
}
},
"checkbox": {
extend: "formControl",
selector: "input[type=checkbox]",
attribute: "checked",
datatype: "boolean",
changeEvents: "click"
},
"radio": {
extend: "formControl",
selector: "input[type=radio]",
attribute: "checked",
modes: "edit",
getValue: function getValue(element) {
if (element.form) {
return element.form[element.name].value;
}
var checked = $("input[type=radio][name=\"" + element.name + "\"]:checked");
return checked && checked.value;
},
setValue: function setValue(element, value) {
if (element.form) {
element.form[element.name].value = value;
return;
}
var toCheck = $("input[type=radio][name=\"" + element.name + "\"][value=\"" + value + "\"]");
$.properties(toCheck, { checked: true });
},
init: function init(element) {
var _this44 = this;
this.mavo.element.addEventListener("change", function (evt) {
if (evt.target.name == element.name) {
_this44.value = _this44.getValue();
}
});
}
},
"counter": {
extend: "formControl",
selector: "button, .counter",
attribute: "mv-clicked",
datatype: "number",
init: function init(element) {
var _this45 = this;
if (this.attribute === "mv-clicked") {
element.setAttribute("mv-clicked", "0");
element.addEventListener("click", function (evt) {
var clicked = +element.getAttribute("mv-clicked") || 0;
_this45.value = ++clicked;
});
}
}
},
"meter, progress": {
default: true,
attribute: "value",
datatype: "number",
edit: function edit() {
var _this46 = this;
var min = +this.element.getAttribute("min") || 0;
var max = +this.element.getAttribute("max") || 1;
var range = max - min;
var step = +this.element.getAttribute("mv-edit-step") || (range > 1 ? 1 : range / 100);
$.bind(this.element, "mousemove.mavo:edit", function (evt) {
// Change property as mouse moves
var left = _this46.element.getBoundingClientRect().left;
var offset = Math.max(0, (evt.clientX - left) / _this46.element.offsetWidth);
var newValue = min + range * offset;
var mod = newValue % step;
newValue += mod > step / 2 ? step - mod : -mod;
newValue = Math.max(min, Math.min(newValue, max));
_this46.sneak(function () {
return _this46.element.setAttribute("value", newValue);
});
});
$.bind(this.element, "mouseleave.mavo:edit", function (evt) {
// Return to actual value
_this46.sneak(function () {
return _this46.element.setAttribute("value", _this46.value);
});
});
$.bind(this.element, "click.mavo:edit", function (evt) {
// Register change
_this46.value = _this46.getValue();
});
$.bind(this.element, "keydown.mavo:edit", function (evt) {
// Edit with arrow keys
if (evt.target == _this46.element && (evt.keyCode == 37 || evt.keyCode == 39)) {
var increment = step * (evt.keyCode == 39 ? 1 : -1) * (evt.shiftKey ? 10 : 1);
var newValue = _this46.value + increment;
newValue = Math.max(min, Math.min(newValue, max));
_this46.element.setAttribute("value", newValue);
evt.preventDefault();
}
});
},
done: function done() {
$.unbind(this.element, ".mavo:edit");
}
},
"meta": {
default: true,
attribute: "content"
},
"block": {
default: true,
selector: "p, div, dt, dd, h1, h2, h3, h4, h5, h6, article, section, address",
editor: function editor() {
var cs = getComputedStyle(this.element);
var display = cs.display;
var tag = display.indexOf("inline") === 0 ? "input" : "textarea";
var editor = $.create(tag);
if (tag == "textarea") {
// Actually multiline
var width = this.element.offsetWidth;
if (width) {
editor.width = width;
}
// We cannot collapse whitespace because then users
// are adding characters they don’t see (#300).
editor.style.whiteSpace = {
"normal": "pre-wrap",
"nowrap": "pre"
}[cs.whiteSpace] || "inherit";
}
return editor;
},
setEditorValue: function setEditorValue(value) {
if (this.datatype && this.datatype != "string") {
value = value + "";
}
var cs = getComputedStyle(this.element);
value = value || "";
if (["normal", "nowrap"].indexOf(cs.whiteSpace) > -1) {
// Collapse lines
value = value.replace(/\r?\n/g, " ");
}
if (["normal", "nowrap", "pre-line"].indexOf(cs.whiteSpace) > -1) {
// Collapse whitespace
value = value.replace(/^[ \t]+|[ \t]+$/gm, "").replace(/[ \t]+/g, " ");
}
this.editor.value = value;
return true;
}
},
"time": {
attribute: "datetime",
default: true,
init: function init() {
if (!this.fromTemplate("dateType")) {
var dateFormat = Mavo.DOMExpression.search(this.element, null);
var datetime = this.element.getAttribute("datetime") || "YYYY-MM-DD";
for (var type in this.config.dateTypes) {
if (this.config.dateTypes[type].test(datetime)) {
break;
}
}
this.dateType = type;
if (!dateFormat) {
// TODO what about mv-expressions?
this.element.textContent = this.config.defaultFormats[this.dateType](this.property);
this.mavo.expressions.extract(this.element, null);
}
}
},
dateTypes: {
"month": /^[Y\d]{4}-[M\d]{2}$/i,
"time": /^[H\d]{2}:[M\d]{2}/i,
"datetime-local": /^[Y\d]{4}-[M\d]{2}-[D\d]{2} [H\d]{2}:[Mi\d]{2}/i,
"date": /^[Y\d]{4}-[M\d]{2}-[D\d]{2}$/i
},
defaultFormats: {
"date": function date(name) {
return "[day(" + name + ")] [month(" + name + ").shortname] [year(" + name + ")]";
},
"month": function month(name) {
return "[month(" + name + ").name] [year(" + name + ")]";
},
"time": function time(name) {
return "[hour(" + name + ").twodigit]:[minute(" + name + ").twodigit]";
},
"datetime-local": function datetimeLocal(name) {
return this.date(name) + " " + this.time(name);
}
},
editor: function editor() {
return { tag: "input", type: this.dateType };
}
},
"circle@r": {
default: true,
datatype: "number"
},
"circle": {
attribute: ["cx", "cy"],
datatype: "number"
},
"text": {
default: true,
popup: true
},
".mv-toggle": {
default: true,
attribute: "aria-checked",
datatype: "boolean",
edit: function edit() {
var _this47 = this;
Mavo.revocably.setAttribute(this.element, "role", "checkbox");
$.bind(this.element, "click.mavo:edit keyup.mavo:edit keydown.mavo:edit", function (evt) {
if (evt.type == "click" || evt.key == " " || evt.key == "Enter") {
if (evt.type != "keydown") {
_this47.value = !_this47.value;
}
evt.preventDefault();
evt.stopPropagation();
}
});
},
done: function done() {
Mavo.revocably.restoreAttribute(this.element, "role");
$.unbind(this.element, ".mavo:edit");
}
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
Mavo.attributes.push("mv-multiple", "mv-order", "mv-accepts");
var _ = Mavo.Collection = $.Class({
extends: Mavo.Node,
nodeType: "Collection",
constructor: function constructor(element, mavo, o) {
/*
* Create the template, remove it from the DOM and store it
*/
this.templateElement = this.element;
this.children = [];
// ALL descendant property names as an array
if (!this.fromTemplate("mutable", "templateElement", "accepts", "optional", "like", "likeNode")) {
this.mutable = this.templateElement.matches(Mavo.selectors.multiple);
this.accepts = (this.templateElement.getAttribute("mv-accepts") || "").split(/\s+/);
this.like = this.templateElement.getAttribute("mv-like");
if (this.like) {
this.likeNode = this.resolve(this.like, { exclude: this });
this.likeNode = this.likeNode || this.likeNode.template;
if (!this.likeNode) {
this.like = null;
}
}
this.optional = !!this.like || this.templateElement.hasAttribute("mv-optional");
// Must clone because otherwise once expressions are parsed on the template element
// we will not be able to pick them up from subsequent items
this.templateElement = this.templateElement.cloneNode(true);
}
if (this.likeNode) {
this.itemTemplate = this.likeNode;
var templateElement = $.value(this.likeNode.collection, "templateElement") || this.likeNode.element;
this.templateElement = templateElement.cloneNode(true);
this.templateElement.setAttribute("property", this.property);
this.properties = this.likeNode.properties;
} else if (!this.optional || !this.template) {
var item = this.createItem(this.element);
this.add(item, undefined, { silent: true });
if (this.optional) {
this.delete(item, true);
}
}
if (this.optional) {
this.element.remove();
}
this.postInit();
Mavo.hooks.run("collection-init-end", this);
},
// Number of items currently in the collection (including deletion-pending ones)
get length() {
return this.children.length;
},
getData: function getData() {
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var env = {
context: this,
options: o,
data: this.liveData
};
if (env.options.live) {
this.proxyCache = {};
}
for (var i = 0, j = 0; item = this.children[i]; i++) {
if (!item.deleted || env.options.live) {
var itemData = item.getData(env.options);
if (env.options.live || Mavo.value(itemData) !== null) {
env.data[j] = itemData;
j++;
}
}
}
env.data.length = j;
if (!this.mutable) {
// If immutable, drop nulls
Mavo.filter(env.data, function (item) {
return Mavo.value(item) !== null;
});
if (env.options.live && env.data.length === 1) {
// If immutable with only 1 item, return the item
// See https://github.com/LeaVerou/mavo/issues/50#issuecomment-266079652
env.data = env.data[0];
} else if (this.data && !env.options.live) {
var rendered = Mavo.subset(this.data, this.inPath);
env.data = env.data.concat(rendered.slice(env.data.length));
}
}
if (!env.options.live) {
env.data = Mavo.subset(this.data, this.inPath, env.data);
}
Mavo.hooks.run("node-getdata-end", env);
return (env.options.live ? env.data[Mavo.toProxy] : env.data) || env.data;
},
// Create item but don't insert it anywhere
// Mostly used internally
createItem: function createItem(element) {
if (!element) {
element = this.templateElement.cloneNode(true);
}
var template = this.itemTemplate || (this.template ? this.template.itemTemplate : null);
var item = Mavo.Node.create(element, this.mavo, {
collection: this,
template: template,
property: this.property,
type: this.type
});
if (!this.itemTemplate) {
this.itemTemplate = template || item;
}
return item;
},
/**
* Add a new item to this collection
* @param item {Node|Mavo.Node} Optional. Element or Mavo object for the new item
* @param index {Number} Optional. Index of existing item, will be added opposite to list direction
* @param o {Object} Optional. o.silent: don't throw a datachange event (mainly used internally)
* @return {Mavo.Node} the added item
*/
add: function add(item, index) {
var _this48 = this;
var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
if (item instanceof Node) {
item = Mavo.Node.get(item) || this.createItem(item);
} else {
item = item || this.createItem();
}
if (item.collection != this) {
this.adopt(item);
}
if (this.mutable) {
// Add it to the DOM, or fix its place
var rel = this.children[index] ? this.children[index].element : this.marker;
$[this.bottomUp ? "after" : "before"](item.element, rel);
if (index === undefined) {
index = this.bottomUp ? 0 : this.length;
}
} else {
index = this.length;
}
var env = { context: this, item: item };
env.previousIndex = item.index;
// Update internal data model
env.changed = this.splice({
remove: env.item
}, {
index: index,
add: env.item
});
if (env.item.itembar) {
env.item.itembar.reposition();
}
if (this.mavo.expressions.active && !o.silent) {
// Fire add/move events and recalculate expressions on the next frame
requestAnimationFrame(function () {
env.changed.forEach(function (i) {
i.dataChanged(i == env.item && env.previousIndex === undefined ? "add" : "move");
i.unsavedChanges = true;
});
_this48.unsavedChanges = _this48.mavo.unsavedChanges = true;
_this48.mavo.expressions.update(env.item);
});
}
Mavo.hooks.run("collection-add-end", env);
return env.item;
},
// Apply one or more {index, remove, add} actions to this.children and
// return the items whose index changed as a result.
splice: function splice() {
var _this49 = this;
for (var _len5 = arguments.length, actions = Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
actions[_key5] = arguments[_key5];
}
actions.forEach(function (action) {
if (action.index === undefined && action.remove && isNaN(action.remove)) {
// Remove is an item
action.index = _this49.children.indexOf(action.remove);
action.remove = 1;
}
});
// Sort in reverse index order
actions.sort(function (a, b) {
return b.index - a.index;
});
// FIXME this could still result in buggy behavior.
// Think of e.g. adding items on i, then removing > 1 items on i-1.
// The new items would get removed instead of the old ones.
// Not a pressing issue though since we always remove 1 max when adding things too.
actions.forEach(function (action) {
if (action.index > -1 && (action.remove || action.add)) {
var _children;
action.remove = action.remove || 0;
action.add = Mavo.toArray(action.add);
(_children = _this49.children).splice.apply(_children, [action.index, +action.remove].concat(_toConsumableArray(action.add)));
}
});
// Reindex and collect every item whose position changed
var changed = [];
for (var i = 0; i < this.length; i++) {
var _item = this.children[i];
if (_item && _item.index !== i) {
_item.index = i;
changed.push(_item);
}
}
return changed;
},
// Take ownership of an item that may currently belong to another collection.
adopt: function adopt(item) {
var _this50 = this;
if (item.collection) {
// It belongs to another collection, delete from there first
item.collection.splice({ remove: item });
item.collection.dataChanged("delete");
}
// Update collection & closestCollection properties
this.walk(function (obj) {
if (obj.closestCollection === item.collection) {
obj.closestCollection = _this50;
}
// Belongs to another Mavo?
if (item.mavo != _this50.mavo) {
item.mavo = _this50.mavo;
}
});
item.collection = this;
// Adjust templates and their copies
if (item.template) {
Mavo.delete(item.template.copies, item);
item.template = this.itemTemplate;
}
},
// Delete an item: soft delete (fade out, mark as deleted) by default;
// hard delete removes the element and destroys the node immediately.
delete: function _delete(item, hard) {
var _this51 = this;
if (hard) {
// Hard delete
$.remove(item.element);
this.splice({ remove: item });
item.destroy();
return;
}
return $.transition(item.element, { opacity: 0 }).then(function () {
item.deleted = true; // schedule for deletion
item.element.style.opacity = "";
item.dataChanged("delete");
_this51.unsavedChanges = item.unsavedChanges = _this51.mavo.unsavedChanges = true;
});
},
/**
* Move existing item to a new position. Wraps around if position is out of bounds.
* @offset relative position
*/
move: function move(item, offset) {
var index = item.index + offset + (offset > 0);
index = Mavo.wrap(index, this.children.length + 1);
this.add(item, index);
if (item instanceof Mavo.Primitive && item.itembar) {
item.itembar.reposition();
}
},
// Put one item into edit mode (adding its item bar first), once it is in view
// (or immediately when o.immediately is set).
editItem: function editItem(item) {
var _this52 = this;
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var when = o.immediately ? Promise.resolve() : Mavo.inView.when(item.element);
return when.then(function () {
if (_this52.mutable) {
if (!item.itembar) {
item.itembar = new Mavo.UI.Itembar(item);
}
item.itembar.add();
}
return item.edit(o);
});
},
edit: function edit() {
var _this53 = this;
var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
// Enter edit mode: show the add button, enable drag & drop, edit all items.
if (this.super.edit.call(this) === false) {
return false;
}
if (this.mutable) {
// Insert the add button if it's not already in the DOM
if (!this.addButton.parentNode) {
var tag = this.element.tagName.toLowerCase();
var containerSelector = Mavo.selectors.container[tag];
var rel = containerSelector ? this.marker.parentNode.closest(containerSelector) : this.marker;
$[this.bottomUp ? "before" : "after"](this.addButton, rel);
}
// Set up drag & drop
_.dragula.then(function () {
_this53.getDragula();
});
}
// Edit items, maybe insert item bar
return Promise.all(this.children.map(function (item) {
return _this53.editItem(item, o);
}));
},
done: function done() {
if (this.super.done.call(this) === false) {
return false;
}
if (this.mutable) {
if (this.addButton.parentNode) {
this.addButton.remove();
}
this.propagate(function (item) {
if (item.itembar) {
item.itembar.remove();
}
});
}
},
dataChanged: function dataChanged(action) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
o.element = o.element || this.marker;
return this.super.dataChanged.call(this, action, o);
},
save: function save() {
var _this54 = this;
this.children.forEach(function (item) {
if (item.deleted) {
_this54.delete(item, true);
} else {
item.unsavedChanges = false;
}
});
},
propagated: ["save"],
dataRender: function dataRender(data) {
var _this55 = this;
if (data === undefined) {
return;
}
data = data === null ? [] : Mavo.toArray(data).filter(function (i) {
return i !== null;
});
if (!this.mutable) {
this.children.forEach(function (item, i) {
return item.render(data && data[i]);
});
} else {
// First render on existing items
for (var i = 0; i < this.children.length; i++) {
var item = this.children[i];
if (i < data.length) {
item.render(data[i]);
} else {
item.dataChanged("delete");
this.delete(item, true);
i--;
}
}
if (data.length > i) {
// There are still remaining items
// Using document fragments improves performance by 60%
var fragment = document.createDocumentFragment();
for (var j = i; j < data.length; j++) {
var item = this.createItem();
item.render(data[j]);
this.children.push(item);
item.index = j;
fragment.appendChild(item.element);
var env = { context: this, item: item };
Mavo.hooks.run("collection-add-end", env);
}
if (this.bottomUp) {
$.after(fragment, i > 0 ? this.children[i - 1].element : this.marker);
} else {
$.before(fragment, this.marker);
}
for (var j = i; j < this.children.length; j++) {
this.children[j].dataChanged("add");
if (this.mavo.expressions.active) {
requestAnimationFrame(function () {
return _this55.mavo.expressions.update(_this55.children[j]);
});
}
}
}
}
this.createLiveData(data || []);
},
find: function find(property) {
var o = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
if (o.exclude === this) {
return;
}
var items = this.children.filter(function (item) {
return !item.deleted && !item.hidden;
});
if (this.property == property) {
return o.collections ? this : items;
}
if (this.properties.has(property)) {
var ret = items.map(function (item) {
return item.find(property, o);
});
return Mavo.flatten(ret);
}
},
isCompatible: function isCompatible(c) {
return c && this.itemTemplate.nodeType == c.itemTemplate.nodeType && (c === this || c.template == this || this.template == c || this.template && this.template == c.template || this.accepts.indexOf(c.property) > -1);
},
live: {
    // Live setter for `mutable`: runs every time the value changes, not
    // just at construction time.
    mutable: function mutable(value) {
        if (value && value !== this.mutable) {
            // Why is all this code here? Because we want it executed
            // every time mutable changes, not just in the constructor
            // (think multiple elements with the same property name, where only one has mv-multiple)
            this._mutable = value;

            // Keep position of the template in the DOM, since we might remove it
            this.marker = document.createComment("mv-marker");
            Mavo.data(this.marker, "collection", this);

            // Anchor after the template element while it is still attached,
            // otherwise after the last item's element
            var ref = this.templateElement.parentNode ? this.templateElement : this.children[this.length - 1].element;
            $.after(this.marker, ref);
        }
    }
},
// Make sure to only call after dragula has loaded
// Lazily build (or reuse) the dragula instance powering drag & drop for
// this collection. Collections derived from a template share the
// template's instance; the root instance wires up container detection,
// drag-handle gating and cross-collection drop handling.
getDragula: function getDragula() {
    var _this56 = this;

    if (this.dragula) {
        return this.dragula;
    }

    if (this.template) {
        // Share the template's dragula, registering our container with it
        Mavo.pushUnique(this.template.getDragula().containers, this.marker.parentNode);
        return this.dragula = this.template.dragula || this.template.getDragula();
    }

    // NOTE(review): `me` appears to be unused below — candidate for removal.
    var me = this;

    this.dragula = dragula({
        containers: [this.marker.parentNode],
        // Also treat containers of explicitly accepted collections as valid
        isContainer: function isContainer(el) {
            if (_this56.accepts.length) {
                return Mavo.flatten(_this56.accepts.map(function (property) {
                    return _this56.mavo.root.find(property, { collections: true });
                })).filter(function (c) {
                    return c && c instanceof _;
                }).map(function (c) {
                    return c.marker.parentNode;
                }).indexOf(el) > -1;
            }

            return false;
        },
        // Only start a drag from this item's own drag handle
        moves: function moves(el, container, handle) {
            return handle.classList.contains("mv-drag-handle") && handle.closest(Mavo.selectors.multiple) == el;
        },
        // Allow a drop only into a compatible collection
        accepts: function accepts(el, target, source, next) {
            if (el.contains(target)) {
                return false;
            }

            // Identify the collection at the drop position via its neighbors
            var previous = next ? next.previousElementSibling : target.lastElementChild;
            var collection = _.get(previous) || _.get(next);

            if (!collection) {
                return false;
            }

            var item = Mavo.Node.get(el);

            return item && item.collection.isCompatible(collection);
        }
    });

    this.dragula.on("drop", function (el, target, source) {
        var item = Mavo.Node.get(el);
        var oldIndex = item && item.index;

        // Work out the target collection and index from the drop neighbors
        var next = el.nextElementSibling;
        var previous = el.previousElementSibling;
        var collection = _.get(previous) || _.get(next);
        var closestItem = Mavo.Node.get(previous) || Mavo.Node.get(next);

        if (closestItem && closestItem.collection != collection) {
            closestItem = null;
        }

        if (item.collection.isCompatible(collection)) {
            var index = closestItem ? closestItem.index + (closestItem.element === previous) : collection.length;
            collection.add(item, index);
        } else {
            return _this56.dragula.cancel(true);
        }
    });

    _.dragulas.push(this.dragula);

    return this.dragula;
},
lazy: {
bottomUp: function bottomUp() {
/*
* Add new items at the top or bottom?
*/
if (!this.mutable) {
return false;
}
var order = this.templateElement.getAttribute("mv-order");
if (order !== null) {
// Attribute has the highest priority and overrides any heuristics
return (/^desc\b/i.test(order)
);
}
if (!this.addButton.parentNode) {
// If add button not in DOM, do the default
return false;
}
// If add button is already in the DOM and *before* our template, then we default to prepending
return !!(this.addButton.compareDocumentPosition(this.marker) & Node.DOCUMENT_POSITION_FOLLOWING);
},
closestCollection: function closestCollection() {
var parent = this.marker ? this.marker.parentNode : this.templateElement.parentNode;
return parent.closest(Mavo.selectors.multiple);
},
addButton: function addButton() {
var _this57 = this;
// Find add button if provided, or generate one
var selector = "button.mv-add-" + this.property;
var group = this.closestCollection || this.marker.parentNode.closest(Mavo.selectors.group);
if (group) {
var button = $$(selector, group).filter(function (button) {
return !_this57.templateElement.contains(button) // is outside the template element
&& !Mavo.data(button, "collection"); // and does not belong to another collection
})[0];
}
if (!button) {
button = $.create("button", {
className: "mv-add",
textContent: this.mavo._("add-item", this)
});
};
button.classList.add("mv-ui", "mv-add");
Mavo.data(button, "collection", this);
if (this.property) {
button.classList.add("mv-add-" + this.property);
}
button.addEventListener("click", function (evt) {
evt.preventDefault();
_this57.editItem(_this57.add());
});
return button;
},
liveData: function liveData() {
return this.createLiveData([]);
}
},
static: {
    // All dragula instances created by any collection
    dragulas: [],

    // Resolve the collection associated with an arbitrary element: add
    // buttons and markers carry it directly via Mavo.data; otherwise fall
    // back to the collection of the item the element belongs to.
    get: function get(element) {
        // Is it an add button or a marker?
        var collection = Mavo.data(element, "collection");

        if (collection) {
            return collection;
        }

        // Maybe it's a collection item?
        var item = Mavo.Node.get(element);

        return item && item.collection || null;
    },

    lazy: {
        // Load the dragula library on demand
        dragula: function dragula() {
            return $.include(self.dragula, "https://cdnjs.cloudflare.com/ajax/libs/dragula/3.7.2/dragula.min.js");
        }
    }
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.UI.Itembar = $.Class({
constructor: function constructor(item) {
var _this58 = this;
this.item = item;
this.element = $$(".mv-item-bar:not([mv-rel]), .mv-item-bar[mv-rel=\"" + this.item.property + "\"]", this.item.element).filter(function (el) {
// Remove item controls meant for other collections
return el.closest(Mavo.selectors.multiple) == _this58.item.element && !Mavo.data(el, "item");
})[0];
if (!this.element && this.item.template && this.item.template.itembar) {
// We can clone the buttons from the template
this.element = this.item.template.itembar.element.cloneNode(true);
this.dragHandle = $(".mv-drag-handle", this.element) || this.item.element;
} else {
// First item of this type
this.element = this.element || $.create({
className: "mv-item-bar mv-ui"
});
var buttons = [{
tag: "button",
title: this.mavo._("delete-item", this.item),
className: "mv-delete"
}, {
tag: "button",
title: this.mavo._("add-item-" + (this.collection.bottomUp ? "after" : "before"), this.item),
className: "mv-add"
}];
if (this.item instanceof Mavo.Group) {
this.dragHandle = $.create({
tag: "button",
title: this.mavo._("drag-to-reorder", this.item),
className: "mv-drag-handle"
});
buttons.push(this.dragHandle);
} else {
this.dragHandle = this.item.element;
}
$.set(this.element, {
"mv-rel": this.item.property,
contents: buttons
});
}
this.element.setAttribute("hidden", "");
$.bind([this.item.element, this.element], "focusin mouseover", this);
$.bind(this.element, {
mouseenter: function mouseenter(evt) {
_this58.item.element.classList.add("mv-highlight");
},
mouseleave: function mouseleave(evt) {
_this58.item.element.classList.remove("mv-highlight");
}
});
this.dragHandle.addEventListener("keydown", function (evt) {
if (evt.target === _this58.dragHandle && _this58.item.editing && evt.keyCode >= 37 && evt.keyCode <= 40) {
// Arrow keys
_this58.collection.move(_this58.item, evt.keyCode <= 38 ? -1 : 1);
evt.stopPropagation();
evt.preventDefault();
evt.target.focus();
}
});
var selectors = {
add: this.buttonSelector("add"),
delete: this.buttonSelector("delete"),
drag: this.buttonSelector("drag")
};
this.element.addEventListener("click", function (evt) {
if (_this58.item.collection.editing) {
if (evt.target.matches(selectors.add)) {
var newItem = _this58.collection.add(null, _this58.item.index);
if (evt[Mavo.superKey]) {
newItem.render(_this58.item.getData());
}
Mavo.scrollIntoViewIfNeeded(newItem.element);
return _this58.collection.editItem(newItem);
} else if (evt.target.matches(selectors.delete)) {
_this58.item.collection.delete(item);
} else if (evt.target.matches(selectors["drag-handle"])) {
(function (evt) {
return evt.target.focus();
});
}
}
});
Mavo.data(this.element, "item", this.item);
},
// Tear down: delegate to hide()
destroy: function destroy() {
    this.hide();
},

// Show this item bar, hiding other visible bars first. When `sticky`
// (non-mouse, i.e. focus-driven) the bar stays up until dismissed.
show: function show(sticky) {
    var _this59 = this;

    // Only one item bar should effectively be visible at a time
    _.visible.forEach(function (instance) {
        if (instance != _this59 && (!_this59.sticky || instance.sticky)) {
            clearTimeout(instance.hideTimeout);
            instance.hide(sticky, _.DELAY);
        }
    });

    _.visible.add(this);

    if (this.element.hasAttribute("hidden") || sticky && !this.sticky) {
        this.element.removeAttribute("hidden");
        this.sticky = this.sticky || sticky;

        // Start listening for the events that should hide the bar again
        $.bind([this.item.element, this.element], "focusout mouseleave", this);

        if (this.adjacent) {
            // Position: the bar lives *outside* the item element, so expose
            // the item's box metrics as CSS custom properties
            $.style(this.element, {
                "--mv-item-width": this.item.element.offsetWidth + "px",
                "--mv-item-height": this.item.element.offsetHeight + "px",
                "--mv-item-left": this.item.element.offsetLeft + "px",
                "--mv-item-top": this.item.element.offsetTop + "px"
            });
        }
    }
},
// Shared listener for the focus/mouse events bound with `this` as the
// handler. Mouse events toggle transient visibility; focus events make
// the bar sticky.
handleEvent: function handleEvent(evt) {
    var sticky = evt.type.indexOf("mouse") === -1;

    if (this.isWithinItem(evt.target)) {
        clearTimeout(this.hideTimeout);

        if (["mouseleave", "focusout", "blur"].indexOf(evt.type) > -1) {
            // Only hide when focus/pointer truly left both item and bar
            if (!this.isWithinItem(evt.relatedTarget)) {
                this.hide(sticky, _.DELAY);
            }
        } else {
            this.show(sticky);
            evt.stopPropagation();
        }
    }
},

// Does `element` belong to this item bar, or to the bar's own item?
isWithinItem: function isWithinItem(element) {
    if (!element) {
        return false;
    }

    var itemBar = element.closest(".mv-item-bar");

    // Inside some item bar: must be ours. Otherwise: must be inside our item.
    return itemBar ? itemBar === this.element : element.closest(Mavo.selectors.item) === this.item.element;
},
add: function add() {
if (!this.element.parentNode) {
if (!Mavo.revocably.add(this.element)) {
if (this.item instanceof Mavo.Primitive && !this.item.attribute) {
this.adjacent = true;
$.after(this.element, this.item.element);
} else {
this.item.element.appendChild(this.element);
}
}
}
if (this.dragHandle == this.item.element) {
this.item.element.classList.add("mv-drag-handle");
}
},
remove: function remove() {
Mavo.revocably.remove(this.element);
if (this.dragHandle == this.item.element) {
this.item.element.classList.remove("mv-drag-handle");
}
},
reposition: function reposition() {
if (this.item instanceof Mavo.Primitive) {
// This is only needed for lists of primitives, because the item element
// does not contain the minibar. In lists of groups, this can be harmful
// because it will remove custom positioning
this.element.remove();
this.add();
}
},
buttonSelector: function buttonSelector(type) {
return ".mv-" + type + "[mv-rel=\"" + this.item.property + "\"], [mv-rel=\"" + this.item.property + "\"] > .mv-" + type;
},
live: {
    // Reflect stickiness and adjacency onto CSS hook classes
    sticky: function sticky(v) {
        this.element.classList.toggle("mv-sticky", v);
    },
    adjacent: function adjacent(v) {
        this.element.classList.toggle("mv-adjacent", v);
    }
},

// `collection` and `mavo` are proxied through the owning item
proxy: {
    collection: "item",
    mavo: "item"
},

static: {
    // ms to wait before hiding a bar after pointer/focus leaves
    DELAY: 100,
    // All currently visible item bars
    visible: new Set()
}
});
})(Bliss, Bliss.$);
(function ($, $$) {
Mavo.attributes.push("mv-expressions");

// A single expression (the text between syntax delimiters). Compilation
// is lazy: the string is compiled by Mavo.Script.compile on first eval
// and the resulting function cached until the expression changes.
var _ = Mavo.Expression = $.Class({
    constructor: function constructor(expression) {
        this.expression = expression;
    },

    // Evaluate against `data`. Errors are logged and *returned* rather
    // than thrown, so callers can substitute a fallback value.
    eval: function _eval(data) {
        Mavo.hooks.run("expression-eval-beforeeval", this);

        try {
            if (!this.function) {
                this.function = Mavo.Script.compile(this.expression);
            }

            return this.function(data);
        } catch (exception) {
            console.info("%cExpression error!", "color: red; font-weight: bold", exception.message + " in expression " + this.expression, "\nNot an expression? Use mv-expressions=\"none\" to disable expressions on an element and its descendants.");
            Mavo.hooks.run("expression-eval-error", { context: this, exception: exception });

            return exception;
        }
    },

    toString: function toString() {
        return this.expression;
    },

    changedBy: function changedBy(evt) {
        return _.changedBy(this.identifiers, evt);
    },

    live: {
        // Setting a new expression invalidates the compiled function and
        // re-extracts candidate identifiers (a rough lexing, not a parse)
        expression: function expression(value) {
            this.function = null;
            this.identifiers = value.match(/[$a-z][$\w]*/ig) || [];
        }
    },

    static: {
        // Could a change event plausibly affect an expression that uses
        // these identifiers? (conservative: may return true spuriously)
        changedBy: function changedBy(identifiers, evt) {
            if (!evt) {
                return true;
            }

            if (!identifiers) {
                return false;
            }

            if (identifiers.indexOf(evt.property) > -1) {
                return true;
            }

            if (Mavo.Functions.intersects(evt.properties, identifiers)) {
                return true;
            }

            if (evt.action == "propertychange") {
                return Mavo.Functions.intersects(identifiers, evt.node.path);
            } else {
                // Positional specials can change whenever a collection changes
                if (Mavo.Functions.intersects(["$index", "$previous", "$next"], identifiers)) {
                    return true;
                }

                var collection = evt.node.collection || evt.node;

                if (Mavo.Functions.intersects(collection.properties, identifiers)) {
                    return true;
                }
            }

            return false;
        }
    }
});
// Represents an expression delimiter pair (e.g. "[" and "]") and knows
// how to detect and tokenize expressions inside a string.
_.Syntax = $.Class({
    constructor: function constructor(start, end) {
        this.start = start;
        this.end = end;
        // Non-greedy match of anything between the (escaped) delimiters
        this.regex = RegExp(Mavo.escapeRegExp(start) + "([\\S\\s]+?)" + Mavo.escapeRegExp(end), "gi");
    },

    test: function test(str) {
        // The regex is global and therefore stateful; always reset lastIndex
        this.regex.lastIndex = 0;

        return this.regex.test(str);
    },

    // Split `str` into an array of literal strings and Mavo.Expression
    // objects, in document order.
    tokenize: function tokenize(str) {
        var match,
            ret = [],
            lastIndex = 0;

        this.regex.lastIndex = 0;

        while ((match = this.regex.exec(str)) !== null) {
            // Literal before the expression
            if (match.index > lastIndex) {
                ret.push(str.substring(lastIndex, match.index));
            }

            lastIndex = this.regex.lastIndex;

            ret.push(new Mavo.Expression(match[1]));
        }

        // Literal at the end
        if (lastIndex < str.length) {
            ret.push(str.substring(lastIndex));
        }

        return ret;
    },

    static: {
        // Build a Syntax from an element's mv-expressions attribute:
        // "start end" creates a custom delimiter pair; any other non-empty
        // value disables expression parsing in the subtree (ESCAPE).
        create: function create(element) {
            if (element) {
                var syntax = element.getAttribute("mv-expressions");

                if (syntax) {
                    syntax = syntax.trim();

                    return (/\s/.test(syntax) ? new (Function.prototype.bind.apply(_.Syntax, [null].concat(_toConsumableArray(syntax.split(/\s+/)))))() : _.Syntax.ESCAPE
                    );
                }
            }
        },

        // Sentinel: expressions disabled in this subtree
        ESCAPE: -1
    }
});

_.Syntax.default = new _.Syntax("[", "]");
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.DOMExpression = $.Class({
// A DOMExpression binds one or more Mavo.Expressions to a concrete DOM
// location (a text node or an element attribute) and keeps the output up
// to date. `o.template` allows cloning parse results from a template.
constructor: function constructor() {
    var _this61 = this;

    var o = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};

    this.mavo = o.mavo;
    this.template = o.template && o.template.template || o.template;

    // Inherit anything not explicitly provided from the template
    var _arr2 = ["item", "path", "syntax", "fallback", "attribute", "originalAttribute", "expression", "parsed"];
    for (var _i3 = 0; _i3 < _arr2.length; _i3++) {
        var prop = _arr2[_i3];
        this[prop] = o[prop] === undefined && this.template ? this.template[prop] : o[prop];
    }

    this.node = o.node;

    if (!this.node) {
        // No node provided, figure it out from path
        this.node = Mavo.elementPath(this.item.element, this.path);
    }

    this.element = this.node;
    this.attribute = this.attribute || null;

    Mavo.hooks.run("domexpression-init-start", this);

    // mv-value: the element's whole value is one expression
    if (this.attribute == "mv-value") {
        this.originalAttribute = "mv-value";
        this.attribute = Mavo.Primitive.getValueAttribute(this.element);
        this.fallback = this.fallback || Mavo.Primitive.getValue(this.element, { attribute: this.attribute });
        var expression = this.element.getAttribute("mv-value");
        this.element.removeAttribute("mv-value");

        this.parsed = [new Mavo.Expression(expression)];
        this.expression = this.syntax.start + expression + this.syntax.end;
    }

    if (this.node.nodeType === 3 && this.element === this.node) {
        this.element = this.node.parentNode;

        // If no element siblings make this.node the element, which is more robust
        // Same if attribute, there are no attributes on a text node!
        if (!this.node.parentNode.children.length || this.attribute) {
            this.node = this.element;
            this.element.normalize();
        }
    }

    if (!this.expression) {
        // Still unhandled?
        if (this.attribute) {
            // Some web components (e.g. AFrame) hijack getAttribute()
            var value = Element.prototype.getAttribute.call(this.node, this.attribute);
            this.expression = (value || "").trim();
        } else {
            // Move whitespace outside to prevent it from messing with types
            this.node.normalize();

            if (this.node.firstChild && this.node.childNodes.length === 1 && this.node.firstChild.nodeType === 3) {
                // For non-empty text, match[0] is the leading whitespace run
                // and match[1] the trailing one
                var whitespace = this.node.firstChild.textContent.match(/^\s*|\s*$/g);

                if (whitespace[1]) {
                    // Split the trailing whitespace off and move it after the node
                    this.node.firstChild.splitText(this.node.firstChild.textContent.length - whitespace[1].length);
                    $.after(this.node.lastChild, this.node);
                }

                if (whitespace[0]) {
                    // Split the leading whitespace off and move it before the node
                    this.node.firstChild.splitText(whitespace[0].length);
                    this.node.parentNode.insertBefore(this.node.firstChild, this.node);
                }
            }

            this.expression = this.node.textContent;
        }

        this.parsed = o.template ? o.template.parsed : this.syntax.tokenize(this.expression);
    }

    // Initial value: the raw expression strings / literals themselves
    this.oldValue = this.value = this.parsed.map(function (x) {
        return x instanceof Mavo.Expression ? x.expression : x;
    });

    this.item = Mavo.Node.get(this.element.closest(Mavo.selectors.item));

    this.mavo.treeBuilt.then(function () {
        if (!_this61.template && !_this61.item) {
            // Only collection items and groups can have their own expressions arrays
            _this61.item = Mavo.Node.getClosest(_this61.element);

            if (_this61.item.nodeType === "Primitive" && !_this61.item.collection) {
                _this61.item = _this61.item.parent;
            }
        }

        if (_this61.originalAttribute == "mv-value" && _this61.mavoNode && _this61.mavoNode == _this61.item.collection) {
            Mavo.delete(_this61.item.expressions, _this61);
        }

        Mavo.hooks.run("domexpression-init-treebuilt", _this61);
    });

    Mavo.hooks.run("domexpression-init-end", this);

    // Register in the per-element expression registry
    _.elements.set(this.element, [].concat(_toConsumableArray(_.elements.get(this.element) || []), [this]));
},
destroy: function destroy() {
    // De-register from any special-identifier observers
    _.special.delete(this);
},

// Could the given change event affect this expression's value?
changedBy: function changedBy(evt) {
    if (this.originalAttribute == "mv-value" && this.mavoNode && !(this.mavoNode instanceof Mavo.Primitive)) {
        // Just prevent the same node from triggering changes, everything else is game
        return !evt || !this.mavoNode.contains(evt.node);
    }

    if (!this.identifiers) {
        // Lazily collect identifiers from all sub-expressions
        this.identifiers = Mavo.flatten(this.parsed.map(function (x) {
            return x.identifiers || [];
        }));

        // Any identifiers that need additional updating?
        _.special.add(this);
    }

    return Mavo.Expression.changedBy(this.identifiers, evt);
},
// Re-evaluate the expression token(s) against `data` and write the result
// to the DOM. Only sub-expressions plausibly affected by `event` are
// re-run; the rest keep their previous values.
update: function update() {
    var _this62 = this;

    var data = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.data;
    var event = arguments[1];

    var env = { context: this, event: event };
    var parentEnv = env;

    this.data = data;

    Mavo.hooks.run("domexpression-update-start", env);

    this.oldValue = this.value;

    var changed = false;

    env.value = this.value = this.parsed.map(function (expr, i) {
        if (expr instanceof Mavo.Expression) {
            if (expr.changedBy(parentEnv.event)) {
                // Note: this inner `env` intentionally shadows the outer one
                var env = { context: _this62, expr: expr, parentEnv: parentEnv };

                Mavo.hooks.run("domexpression-update-beforeeval", env);

                env.value = Mavo.value(env.expr.eval(data));

                Mavo.hooks.run("domexpression-update-aftereval", env);

                changed = true;

                if (env.value instanceof Error) {
                    // Evaluation failed: show the fallback, or the raw
                    // delimited expression text
                    return _this62.fallback !== undefined ? _this62.fallback : _this62.syntax.start + env.expr.expression + _this62.syntax.end;
                }

                if (env.value === undefined || env.value === null) {
                    // Don’t print things like "undefined" or "null"
                    return "";
                }

                return env.value;
            } else {
                // Unaffected by this event: keep the previous value
                return _this62.oldValue[i];
            }
        }

        // Plain literal token: pass through unchanged
        return expr;
    });

    if (!changed) {
        // If nothing changed, no need to do anything
        return;
    }

    if (env.value.length === 1) {
        // Single token: output it as-is (no formatting/joining)
        env.value = env.value[0];
    } else {
        env.value = env.value.map(function (v) {
            return Mavo.Primitive.format(v, {
                attribute: _this62.attribute,
                element: _this62.element
            });
        }).join("");
    }

    this.output(env.value);

    Mavo.hooks.run("domexpression-update-end", env);
},
output: function output(value) {
if (this.primitive) {
this.primitive.value = value;
} else if (this.mavoNode) {
this.mavoNode.render(value);
} else {
Mavo.Primitive.setValue(this.node, value, { attribute: this.attribute });
}
},
live: {
    // Transpiler artifact: the outer IIFE wraps the real setter so that
    // toString still returns the original source. The setter itself
    // (second function) re-registers this DOMExpression on the item it
    // belongs to whenever `item` changes.
    item: function (_item2) {
        function item(_x52) {
            return _item2.apply(this, arguments);
        }

        item.toString = function () {
            return _item2.toString();
        };

        return item;
    }(function (item) {
        if (item && this._item != item) {
            if (this._item) {
                // Previous item, delete from its expressions
                Mavo.delete(this._item.expressions, this);
            }

            item.expressions = item.expressions || [];
            item.expressions.push(this);
        }
    })
},
static: {
    // element -> [DOMExpression, ...] registry
    elements: new WeakMap(),

    /**
     * Search for Mavo.DOMExpression object(s) associated with a given element
     * and optionally an attribute.
     *
     * @return If one argument, array of matching DOMExpression objects.
     * If two arguments, the matching DOMExpression object or null
     */
    search: function search(element, attribute) {
        if (element === null) {
            return element;
        }

        var all = _.elements.get(element) || [];

        if (arguments.length > 1) {
            if (!all.length) {
                return null;
            }

            return all.filter(function (et) {
                return et.attribute === attribute;
            })[0] || null;
        }

        return all;
    },

    // Machinery for "special" identifiers (e.g. $now, $mouse, $hash)
    // whose values change independently of Mavo data. Each entry in
    // `vars` tracks the set of DOMExpressions using it and starts/stops
    // observing accordingly.
    special: {
        // Register a DOMExpression with one (or, without `name`, all)
        // special identifiers it actually uses
        add: function add(domexpression, name) {
            if (name) {
                var o = this.vars[name];

                if (o && domexpression.identifiers.indexOf(name) > -1) {
                    o.all = o.all || new Set();
                    o.all.add(domexpression);

                    // Start observing on the first user
                    if (o.all.size === 1) {
                        o.observe();
                    } else if (!o.all.size) {
                        o.unobserve();
                    }
                }
            } else {
                // All names
                for (var name in this.vars) {
                    this.add(domexpression, name);
                }
            }
        },

        // Inverse of add(): stop observing when no expression uses the var
        delete: function _delete(domexpression, name) {
            if (name) {
                var o = this.vars[name];
                o.all = o.all || new Set();
                o.all.delete(domexpression);

                if (!o.all.size) {
                    o.unobserve();
                }
            } else {
                // All names
                for (var name in this.vars) {
                    this.delete(domexpression, name);
                }
            }
        },

        // Called with a var entry as `this`: refresh its value (when the
        // entry defines its own update) and re-render all its users
        update: function update() {
            if (this.update) {
                this.update.apply(this, arguments);
            }

            this.all.forEach(function (domexpression) {
                return domexpression.update();
            });
        },

        // Declare a special identifier backed by a DOM event
        event: function event(name) {
            var _ref5 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
                type = _ref5.type,
                update = _ref5.update,
                _ref5$target = _ref5.target,
                target = _ref5$target === undefined ? document : _ref5$target;

            this.vars[name] = {
                observe: function observe() {
                    this.callback = this.callback || _.special.update.bind(this);
                    target.addEventListener(type, this.callback);
                },
                unobserve: function unobserve() {
                    target.removeEventListener(type, this.callback);
                }
            };

            if (update) {
                // The event payload becomes the identifier's current value
                this.vars[name].update = function (evt) {
                    Mavo.Functions[name] = update(evt);
                };
            }
        },

        vars: {
            // $now: refreshed on every animation frame while in use
            "$now": {
                observe: function observe() {
                    var _this63 = this;

                    var callback = function callback() {
                        _.special.update.call(_this63);
                        _this63.timer = requestAnimationFrame(callback);
                    };

                    this.timer = requestAnimationFrame(callback);
                },
                unobserve: function unobserve() {
                    cancelAnimationFrame(this.timer);
                }
            }
        }
    }
}
});
// Built-in "special" identifiers driven by global events:
// $mouse tracks the pointer position, $hash follows the URL fragment.
_.special.event("$mouse", {
    type: "mousemove",
    update: function update(evt) {
        return { x: evt.clientX, y: evt.clientY };
    }
});

_.special.event("$hash", {
    type: "hashchange",
    target: window
});
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.Expressions = $.Class({
// Manages all DOMExpressions inside one Mavo instance: discovers them by
// traversing the DOM, then re-evaluates them on mv-change events.
constructor: function constructor(mavo) {
    var _this64 = this;

    this.mavo = mavo;
    this.active = true;

    this.expressions = [];

    // Honor a custom (or disabled) expression syntax from an ancestor
    var syntax = Mavo.Expression.Syntax.create(this.mavo.element.closest("[mv-expressions]")) || Mavo.Expression.Syntax.default;
    this.traverse(this.mavo.element, undefined, syntax);

    this.scheduled = {};

    this.mavo.treeBuilt.then(function () {
        _this64.expressions = [];

        // Watch changes and update value
        document.documentElement.addEventListener("mv-change", function (evt) {
            if (!_this64.active) {
                return;
            }

            // One scheduling set per action type
            var scheduled = _this64.scheduled[evt.action] = _this64.scheduled[evt.action] || new Set();

            if (evt.node.template) {
                // Throttle events in collections and events from other Mavos
                if (!scheduled.has(evt.node.template)) {
                    setTimeout(function () {
                        scheduled.delete(evt.node.template);
                        _this64.update(evt);
                    }, _.THROTTLE);

                    scheduled.add(evt.node.template);
                }
            } else {
                requestAnimationFrame(function () {
                    return _this64.update(evt);
                });
            }
        });

        _this64.update();
    });
},
// Re-evaluate expressions under the subtree affected by `evt`, which may
// be a change event, a Mavo node, a DOM element, or nothing (= whole root).
update: function update(evt) {
    if (!this.active) {
        return;
    }

    var root, rootObject;

    if (evt instanceof Mavo.Node) {
        rootObject = evt;
        evt = null;
    } else if (evt instanceof Element) {
        root = evt.closest(Mavo.selectors.item);
        rootObject = Mavo.Node.get(root);
        evt = null;
    } else {
        rootObject = this.mavo.root;
    }

    var allData = rootObject.getData({ live: true });

    rootObject.walk(function (obj, path) {
        if (obj.expressions && obj.expressions.length && !obj.isDeleted()) {
            // Resolve this node's data by path, then update only the
            // expressions the event could have affected
            var data = $.value.apply($, [allData].concat(_toConsumableArray(path)));

            obj.expressions.forEach(function (et) {
                if (et.changedBy(evt)) {
                    et.update(data, evt);
                }
            });
        }
    });
},
extract: function extract(node, attribute, path) {
var syntax = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : Mavo.Expression.Syntax.default;
if (attribute && attribute.name == "mv-expressions") {
return;
}
if (attribute && _.directives.indexOf(attribute.name) > -1 || syntax !== Mavo.Expression.Syntax.ESCAPE && syntax.test(attribute ? attribute.value : node.textContent)) {
if (path === undefined) {
path = Mavo.elementPath(node.closest(Mavo.selectors.item), node);
}
this.expressions.push(new Mavo.DOMExpression({
node: node, syntax: syntax, path: path,
attribute: attribute && attribute.name,
mavo: this.mavo
}));
}
},
// Traverse an element, including attribute nodes, text nodes and all descendants
traverse: function traverse(node) {
    var _this65 = this;

    var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
    var syntax = arguments[2];

    if (node.nodeType === 8) {
        // We don't want expressions to be picked up from comments!
        // Commenting stuff out is a common debugging technique
        return;
    }

    if (node.nodeType === 3) {
        // Text node
        // Leaf node, extract references from content
        this.extract(node, null, path, syntax);
    } else {
        node.normalize();

        // An element can override the expression syntax for its subtree
        syntax = Mavo.Expression.Syntax.create(node) || syntax;

        if (Mavo.is("item", node)) {
            // Paths are relative to the closest item
            path = [];
        }

        $$(node.attributes).forEach(function (attribute) {
            return _this65.extract(node, attribute, path, syntax);
        });

        // Child path segments are "elementIndex" for elements and
        // "elementIndex.offset" for non-element nodes following them
        var index = -1,
            offset = 0;

        if (!node.matches("script:not([mv-expressions])")) {
            $$(node.childNodes).forEach(function (child) {
                if (child.nodeType == 1) {
                    offset = 0;
                    index++;
                } else {
                    offset++;
                }

                if (child.nodeType == 1 || child.nodeType == 3) {
                    var segment = offset > 0 ? index + "." + offset : index;
                    _this65.traverse(child, [].concat(_toConsumableArray(path || []), [segment]), syntax);
                }
            });
        }
    }
},
static: {
    // Attributes whose value is always treated as an expression
    directives: ["mv-value"],
    // ms to coalesce change events coming from collection templates
    THROTTLE: 50,

    // Register a new expression-driven attribute (used by plugins, e.g. mv-if)
    directive: function directive(name, o) {
        _.directives.push(name);
        Mavo.attributes.push(name);
        Mavo.Plugins.register(name, o);
    }
}
});
})(Bliss, Bliss.$);
// mv-if plugin
// mv-if: conditionally keeps an element in the DOM based on an
// expression, marking the Mavo properties inside as hidden while removed.
(function ($, $$) {
    Mavo.Expressions.directive("mv-if", {
        extend: {
            "Primitive": {
                live: {
                    // Hidden primitives must announce a data change so
                    // dependent expressions recalculate
                    "hidden": function hidden(value) {
                        if (this._hidden !== value) {
                            this._hidden = value;
                            this.dataChanged();
                        }
                    }
                }
            },
            "DOMExpression": {
                lazy: {
                    // Mavo properties that live directly inside this
                    // mv-if subtree (not inside a nested mv-if)
                    "childProperties": function childProperties() {
                        var _this66 = this;

                        var properties = $$(Mavo.selectors.property, this.element).filter(function (el) {
                            return el.closest("[mv-if]") == _this66.element;
                        }).map(function (el) {
                            return Mavo.Node.get(el);
                        });

                        // When the element is detached, datachange events from properties
                        // do not propagate up to the group so expressions do not recalculate.
                        // We must do this manually.
                        this.element.addEventListener("mv-change", function (evt) {
                            // Cannot redispatch synchronously [why??]
                            requestAnimationFrame(function () {
                                if (!_this66.element.parentNode) {
                                    // out of the DOM?
                                    _this66.item.element.dispatchEvent(evt);
                                }
                            });
                        });

                        return properties;
                    }
                }
            }
        },
        hooks: {
            // Turn the mv-if attribute into a single-expression parse and
            // link nested mv-ifs into a parent/child chain
            "domexpression-init-start": function domexpressionInitStart() {
                if (this.attribute != "mv-if") {
                    return;
                }

                this.expression = this.element.getAttribute("mv-if");
                this.parsed = [new Mavo.Expression(this.expression)];
                this.expression = this.syntax.start + this.expression + this.syntax.end;

                this.parentIf = this.element.parentNode && Mavo.DOMExpression.search(this.element.parentNode.closest("[mv-if]"), "mv-if");

                if (this.parentIf) {
                    this.parentIf.childIfs = (this.parentIf.childIfs || new Set()).add(this);
                }
            },

            // Apply the recomputed condition: attach/detach the element and
            // propagate hidden state to contained properties and child ifs
            "domexpression-update-end": function domexpressionUpdateEnd() {
                var _this67 = this;

                if (this.attribute != "mv-if") {
                    return;
                }

                var value = this.value[0];
                var oldValue = this.oldValue[0];

                // Only apply this after the tree is built, otherwise any properties inside the if will go missing!
                this.item.mavo.treeBuilt.then(function () {
                    // NOTE(review): `parentValue` is function-scoped via var
                    // hoisting — when there is no parentIf it stays undefined,
                    // which makes the `!== false` check below pass. Confirm
                    // this reliance on hoisting is intended.
                    if (_this67.parentIf) {
                        var parentValue = _this67.parentIf.value[0];
                        _this67.value[0] = value = value && parentValue;
                    }

                    if (parentValue !== false) {
                        // If parent if was false, it wouldn't matter whether this is in the DOM or not
                        if (value) {
                            // Is removed from the DOM and needs to get back
                            Mavo.revocably.add(_this67.element);
                        } else if (_this67.element.parentNode) {
                            // Is in the DOM and needs to be removed
                            Mavo.revocably.remove(_this67.element, "mv-if");
                        }
                    }

                    if (value !== oldValue) {
                        // Mark any properties inside as hidden or not
                        if (_this67.childProperties) {
                            _this67.childProperties.forEach(function (property) {
                                return property.hidden = !value;
                            });
                        }

                        if (_this67.childIfs) {
                            _this67.childIfs.forEach(function (childIf) {
                                return childIf.update();
                            });
                        }
                    }
                });
            },

            // A hidden node counts as null data in live mode
            "unit-isdatanull": function unitIsdatanull(env) {
                env.result = env.result || this.hidden && env.options.live;
            }
        }
    });
})(Bliss, Bliss.$);
/**
* Functions available inside Mavo expressions
*/
(function ($, val) {
var _ = Mavo.Functions = {
operators: {
"=": "eq"
},
/**
 * Get a property of an object. Used by the . operator to prevent TypeErrors
 */
// Also powers property queries on arrays ("name=value") and mapping a
// property over an array of objects. `meta` is an out-parameter recording
// which property/indices were actually accessed.
get: function get(obj, property) {
    var meta = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};

    property = meta.property = val(property);

    var canonicalProperty = Mavo.getCanonicalProperty(obj, property);

    if (canonicalProperty !== undefined) {
        meta.property = canonicalProperty;
        var ret = obj[canonicalProperty];

        if (typeof ret === "function" && ret.name.indexOf("bound") !== 0) {
            // Bind methods to their object (unless already bound)
            return ret.bind(obj);
        }

        return ret;
    }

    if (Array.isArray(obj) && property && isNaN(property)) {
        // Array and non-numerical property
        var eqIndex = property.indexOf("=");

        if (eqIndex > -1) {
            // Property query
            meta.query = {
                property: property.slice(0, eqIndex),
                value: property.slice(eqIndex + 1)
            };

            meta.property = [];

            // NOTE: reuses the hoisted `ret` declared in the branch above
            ret = obj.filter(function (e, i) {
                var passes = _.get(e, meta.query.property) == meta.query.value;

                if (passes) {
                    meta.property.push(i);
                }

                return passes;
            });

            if (meta.query.property == "id") {
                // id queries yield a single element, not an array
                meta.property = meta.property[0];
                ret = ret[0];
            }

            if (ret === undefined) {
                meta.property = obj.length;
            } else if (ret.length === 0) {
                meta.property = [obj.length];
            }

            return ret;
        } else {
            // Not a property query, get from objects inside
            // TODO meta.property = ??
            return obj.map(function (e) {
                return _.get(e, property);
            });
        }
    }

    // Not found :(
    return null;
},
url: function url(id) {
var _url = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : location;
if (id === undefined) {
return location.href;
}
if (id) {
id = str(id).replace(/[^\w-:]/g);
var ret = _url.search.match(RegExp("[?&]" + id + "(?:=(.+?))?(?=$|&)")) || _url.pathname.match(RegExp("(?:^|\\/)" + id + "\\/([^\\/]*)"));
}
return ret === null || !id ? null : decodeURIComponent(ret[1]) || "";
},
// TODO return first/last non-null?
/** First element of an array, or "" if absent/empty */
first: function first(arr) {
  return arr && arr[0] || "";
},
/** Last element of an array, or "" if absent/empty */
last: function last(arr) {
  return arr && arr[arr.length - 1] || "";
},
/** De-duplicate an array (by resolved primitive value); non-arrays pass through */
unique: function unique(arr) {
  if (!Array.isArray(arr)) {
    return arr;
  }
  return [].concat(_toConsumableArray(new Set(arr.map(val))));
},
/**
 * Do two arrays or sets have a non-empty intersection?
 * NOTE: returns undefined (falsy) when either argument is missing.
 * @return {Boolean}
 */
intersects: function intersects(arr1, arr2) {
  if (arr1 && arr2) {
    var set2 = new Set(arr2.map ? arr2.map(val) : arr2);
    arr1 = arr1.map ? arr1.map(val) : [].concat(_toConsumableArray(arr1));
    // Non-empty intersection ⇔ not every element of arr1 is outside set2
    return !arr1.every(function (el) {
      return !set2.has(el);
    });
  }
},
/*********************
 * Number functions
 *********************/
/**
 * Aggregate sum
 * Non-numeric elements are filtered out first; stragglers coerce to 0.
 */
sum: function sum(array) {
  return $u.numbers(array, arguments).reduce(function (prev, current) {
    return +prev + (+current || 0);
  }, 0);
},
/**
 * Average of an array of numbers
 * Returns 0 for an empty array (length of 0 short-circuits the &&).
 */
average: function average(array) {
  array = $u.numbers(array, arguments);
  return array.length && _.sum(array) / array.length;
},
/**
 * Min of an array of numbers
 */
min: function min(array) {
  return Math.min.apply(Math, _toConsumableArray($u.numbers(array, arguments)));
},
/**
 * Max of an array of numbers
 */
max: function max(array) {
  return Math.max.apply(Math, _toConsumableArray($u.numbers(array, arguments)));
},
/** Count of non-empty (not null/false/"") elements */
count: function count(array) {
  return Mavo.toArray(array).filter(function (a) {
    return !empty(a);
  }).length;
},
/** Reversed copy of an array (does not mutate the original) */
reverse: function reverse(array) {
  return Mavo.toArray(array).slice().reverse();
},
/**
 * Round to a given number of decimals.
 * Falls back to Math.round for missing decimals or non-finite input.
 */
round: function round(num, decimals) {
  if (not(num) || not(decimals) || !isFinite(num)) {
    return Math.round(num);
  }
  // toLocaleString with en-US and no grouping rounds without FP artifacts
  return +num.toLocaleString("en-US", {
    useGrouping: false,
    maximumFractionDigits: decimals
  });
},
ordinal: function ordinal(num) {
if (empty(num)) {
return "";
}
if (ord < 10 || ord > 20) {
var ord = ["th", "st", "nd", "th"][num % 10];
}
return ord || "th";
},
iff: function iff(condition) {
var iftrue = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : condition;
var iffalse = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : "";
if (Array.isArray(condition)) {
return condition.map(function (c, i) {
var ret = val(c) ? iftrue : iffalse;
return Array.isArray(ret) ? ret[Math.min(i, ret.length - 1)] : ret;
});
}
return val(condition) ? iftrue : iffalse;
},
/*********************
 * String functions
 *********************/
/**
 * Replace all occurences of a string with another string
 * @param {string|Array} haystack - Value to search in; arrays are mapped
 * @param {string} needle - Literal substring (regex-escaped before use)
 * @param {string} [replacement=""]
 * @param {number} [iterations=1] - Re-run up to this many times, until a
 *   fixed point, to catch matches created by earlier replacements.
 *   NOTE(review): iterations is not forwarded in the array branch — confirm
 *   whether that is intended.
 */
replace: function replace(haystack, needle) {
  var replacement = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : "";
  var iterations = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 1;
  if (Array.isArray(haystack)) {
    return haystack.map(function (item) {
      return _.replace(item, needle, replacement);
    });
  }
  // Simple string replacement
  var needleRegex = RegExp(Mavo.escapeRegExp(needle), "g");
  var ret = haystack,
    prev;
  var counter = 0;
  // Repeat until output stops changing or the iteration cap is reached
  while (ret != prev && counter++ < iterations) {
    prev = ret;
    ret = ret.replace(needleRegex, replacement);
  }
  return ret;
},
/** Length of the stringified value */
len: function len(text) {
  return str(text).length;
},
/**
 * Case insensitive search
 * @returns {number} Index of needle in haystack, or -1
 */
search: function search(haystack, needle) {
  return haystack && needle ? str(haystack).toLowerCase().indexOf((needle + "").toLowerCase()) : -1;
},
/** Does haystack start with needle? (case-insensitive) */
starts: function starts(haystack, needle) {
  return _.search(str(haystack), str(needle)) === 0;
},
/** Does haystack end with needle? (case-insensitive) */
ends: function ends(haystack, needle) {
  var _ref6 = [str(haystack), str(needle)];
  haystack = _ref6[0];
  needle = _ref6[1];
  var i = _.search(haystack, needle);
  return i > -1 && i === haystack.length - needle.length;
},
/** Join array elements with a glue string */
join: function join(array, glue) {
  return Mavo.toArray(array).join(str(glue));
},
/** Convert readable text into an ASCII, hyphen-separated, lowercase id */
idify: function idify(readable) {
  return str(readable).normalize("NFD").replace(/[\u0300-\u036f]/g, "") // Convert accented letters to ASCII
  .replace(/[^\w\s-]/g, "") // Remove remaining non-ASCII characters
  .trim().replace(/\s+/g, "-") // Convert whitespace to hyphens
  .toLowerCase();
},
// Convert an identifier to readable text that can be used as a label
readable: function readable(identifier) {
  // Is it camelCase?
  return str(identifier).replace(/([a-z])([A-Z])(?=[a-z])/g, function ($0, $1, $2) {
    return $1 + " " + $2.toLowerCase();
  }) // camelCase?
  .replace(/([a-z0-9])[_\/-](?=[a-z0-9])/g, "$1 ") // Hyphen-separated / Underscore_separated?
  .replace(/^[a-z]/, function ($0) {
    return $0.toUpperCase();
  }); // Capitalize
},
uppercase: function uppercase(text) {
  return str(text).toUpperCase();
},
lowercase: function lowercase(text) {
  return str(text).toLowerCase();
},
/** Substring after the first occurrence of needle */
from: function from(haystack, needle) {
  return _.between(haystack, needle);
},
/** Substring after the last occurrence of needle */
fromlast: function fromlast(haystack, needle) {
  return _.between(haystack, needle, "", true);
},
/** Substring up to the last occurrence of needle */
to: function to(haystack, needle) {
  return _.between(haystack, "", needle);
},
/** Substring up to the first occurrence of needle */
tofirst: function tofirst(haystack, needle) {
  return _.between(haystack, "", needle, true);
},
/**
 * Substring between delimiters `from` and `to`.
 * @param {boolean} [tight] - Use the narrowest span (last `from`, first `to`)
 * NOTE(review): the slice starts at i1 + 1, which assumes a one-character
 * `from` delimiter — confirm behavior for longer needles.
 */
between: function between(haystack, from, to, tight) {
  var _ref7 = [str(haystack), str(from), str(to)];
  haystack = _ref7[0];
  from = _ref7[1];
  to = _ref7[2];
  var i1 = from ? haystack[tight ? "lastIndexOf" : "indexOf"](from) : -1;
  var i2 = haystack[tight ? "indexOf" : "lastIndexOf"](to);
  if (from && i1 === -1 || i2 === -1) {
    return "";
  }
  return haystack.slice(i1 + 1, i2 === -1 || !to ? haystack.length : i2);
},
/** Filename (last path segment) of a URL */
filename: function filename(url) {
  return Mavo.match(new URL(str(url), Mavo.base).pathname, /[^/]+?$/);
},
/** Serialize to JSON via Mavo's cycle-safe serializer */
json: function json(data) {
  return Mavo.safeToJSON(data);
},
/*********************
 * Date functions
 *********************/
/** Current date-time; re-evaluated on every access */
get $now() {
  return new Date();
},
$startup: new Date(), // Like $now, but doesn't update
/** Today's date as a YYYY-MM-DD string */
get $today() {
  return _.date(new Date());
},
// Component accessors; each returns a Number (String for year) object with
// extra .twodigit / .name / .shortname properties where applicable
year: getDateComponent("year"),
month: getDateComponent("month"),
day: getDateComponent("day"),
weekday: getDateComponent("weekday"),
hour: getDateComponent("hour"),
minute: getDateComponent("minute"),
second: getDateComponent("second"),
ms: getDateComponent("ms"),
/** Format a date-like value as YYYY-MM-DD, or "" if unparseable */
date: function date(_date) {
  _date = $u.date(_date);
  return _date ? _.year(_date) + "-" + _.month(_date).twodigit + "-" + _.day(_date).twodigit : "";
},
/** Format a date-like value as HH:MM:SS, or "" if unparseable */
time: function time(date) {
  date = $u.date(date);
  return date ? _.hour(date).twodigit + ":" + _.minute(date).twodigit + ":" + _.second(date).twodigit : "";
},
// Duration conversions: whole units contained in a number of seconds
// (absolute value; NaN input collapses to 0 via || 0)
minutes: function minutes(seconds) {
  return Math.floor(Math.abs(seconds) / 60) || 0;
},
hours: function hours(seconds) {
  return Math.floor(Math.abs(seconds) / 3600) || 0;
},
days: function days(seconds) {
  return Math.floor(Math.abs(seconds) / 86400) || 0;
},
weeks: function weeks(seconds) {
  return Math.floor(Math.abs(seconds) / 604800) || 0;
},
// Months/years use the average Gregorian month length (30.4368 days)
months: function months(seconds) {
  return Math.floor(Math.abs(seconds) / (30.4368 * 86400)) || 0;
},
years: function years(seconds) {
  return Math.floor(Math.abs(seconds) / (30.4368 * 86400 * 12)) || 0;
},
// Minutes east of UTC (sign flipped relative to getTimezoneOffset())
localTimezone: -new Date().getTimezoneOffset(),
// Log to the console and return
// (returns the first argument so it can be dropped into an expression)
log: function log() {
  var _console2;
  for (var _len6 = arguments.length, args = Array(_len6), _key6 = 0; _key6 < _len6; _key6++) {
    args[_key6] = arguments[_key6];
  }
  (_console2 = console).log.apply(_console2, _toConsumableArray(args.map(val)));
  return args[0];
},
// Other special variables (some updated via events)
$mouse: { x: 0, y: 0 },
/** Current location hash, without the leading "#" */
get $hash() {
  return location.hash.slice(1);
},
// "Private" helpers
util: {
numbers: function numbers(array, args) {
array = Array.isArray(array) ? array : args ? $$(args) : [array];
return array.filter(function (number) {
return !isNaN(number) && val(number) !== "" && val(number) !== null;
}).map(function (n) {
return +n;
});
},
fixDateString: function fixDateString(date) {
date = date.trim();
var hasDate = /^\d{4}-\d{2}(-\d{2})?/.test(date);
var hasTime = date.indexOf(":") > -1;
if (!hasDate && !hasTime) {
return null;
}
// Fix up time format
if (!hasDate) {
// No date, add today’s
date = _.$today + " " + date;
} else {
// Only year-month, add day
date = date.replace(/^(\d{4}-\d{2})(?!-\d{2})/, "$1-01");
}
if (!hasTime) {
// Add a time if one doesn't exist
date += "T00:00:00";
} else {
// Make sure time starts with T, due to Safari bug
date = date.replace(/\-(\d{2})\s+(?=\d{2}:)/, "-$1T");
}
// Remove all whitespace
date = date.replace(/\s+/g, "");
return date;
},
date: function date(_date2) {
_date2 = val(_date2);
if (!_date2) {
return null;
}
if ($.type(_date2) === "string") {
_date2 = $u.fixDateString(_date2);
if (_date2 === null) {
return null;
}
var timezone = Mavo.match(_date2, /[+-]\d{2}:?\d{2}|Z$/);
if (timezone) {
// parse as ISO format
_date2 = new Date(_date2);
} else {
// construct date in local timezone
var fields = _date2.match(/\d+/g);
_date2 = new Date(
// year, month, date,
fields[0], (fields[1] || 1) - 1, fields[2] || 1,
// hours, minutes, seconds, milliseconds,
fields[3] || 0, fields[4] || 0, fields[5] || 0, fields[6] || 0);
}
} else {
_date2 = new Date(_date2);
}
if (isNaN(_date2)) {
return null;
}
return _date2;
}
}
};
var $u = _.util;
// Make function names case insensitive
// _Trap is the object compiled expressions actually evaluate against: it
// resolves names in Mavo.Functions, then Math, then globals.
_._Trap = self.Proxy ? new Proxy(_, {
  get: function get(functions, property) {
    var ret;
    // Case/alias-insensitive lookup, Mavo.Functions first, then Math
    var canonicalProperty = Mavo.getCanonicalProperty(functions, property) || Mavo.getCanonicalProperty(Math, property);
    if (canonicalProperty) {
      ret = functions[canonicalProperty];
      if (ret === undefined) {
        ret = Math[canonicalProperty];
      }
    }
    if (ret !== undefined) {
      if (typeof ret === "function") {
        // For when function names are used as unquoted strings, see #160
        ret.toString = function () {
          return property;
        };
      }
      return ret;
    }
    // Still not found? Maybe it's a global
    if (property in self) {
      return self[property];
    }
    // Prevent undefined at all costs
    return property;
  },
  // Super ugly hack, but otherwise data is not
  // the local variable it should be, but the string "data"
  // so all property lookups fail.
  has: function has(functions, property) {
    return property != "data";
  }
}) : _;
/**
 * Private helper methods
 */
// Coerce the first argument to a display string. null/undefined/false/""
// all become "", but the number 0 survives (as "0").
function str() {
  var value = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "";
  value = val(value);
  if (!value && value !== 0) {
    return "";
  }
  return value + "";
}
// Values Mavo treats as "no data": null, false, or the empty string.
function empty(v) {
  var resolved = Mavo.value(v);
  return resolved === null || resolved === false || resolved === "";
}
// Logical negation of the resolved (primitive) value.
function not(v) {
  return !val(v);
}
// Locale-aware formatting with U+200E marks stripped (Edge inserts them).
function toLocaleString(date, options) {
  return date.toLocaleString(Mavo.locale, options).replace(/\u200e/g, "");
}
// Numeric extractors per date component (months are 1-based; Sunday → 7)
var numeric = {
  year: function year(d) {
    return d.getFullYear();
  },
  month: function month(d) {
    return d.getMonth() + 1;
  },
  day: function day(d) {
    return d.getDate();
  },
  weekday: function weekday(d) {
    return d.getDay() || 7;
  },
  hour: function hour(d) {
    return d.getHours();
  },
  minute: function minute(d) {
    return d.getMinutes();
  },
  second: function second(d) {
    return d.getSeconds();
  },
  ms: function ms(d) {
    return d.getMilliseconds();
  }
};
/**
 * Build an accessor function for one date component.
 * The result is a Number (String for year) *object*, so extra properties can
 * be attached: .name/.shortname for months and weekdays, and .twodigit.
 */
function getDateComponent(component) {
  return function (date) {
    date = $u.date(date);
    if (!date) {
      return "";
    }
    var ret = numeric[component](date);
    // We don't want years to be formatted like 2,017!
    ret = new self[component == "year" ? "String" : "Number"](ret);
    if (component == "month" || component == "weekday") {
      ret.name = toLocaleString(date, _defineProperty({}, component, "long"));
      ret.shortname = toLocaleString(date, _defineProperty({}, component, "short"));
    }
    if (component != "weekday") {
      ret.twodigit = (ret % 100 < 10 ? "0" : "") + ret % 100;
    }
    return ret;
  };
}
})(Bliss, Mavo.value);
(function ($, val, $u) {
var _ = Mavo.Script = {
/**
 * Register a unary operator: map its symbol(s) for the rewriter, register
 * it with jsep, and expose it as an array-aware Mavo function.
 */
addUnaryOperator: function addUnaryOperator(name, o) {
  if (o.symbol) {
    // Build map of symbols to function names for easy rewriting
    Mavo.toArray(o.symbol).forEach(function (symbol) {
      Mavo.Script.unarySymbols[symbol] = name;
      jsep.addUnaryOp(symbol);
    });
  }
  return Mavo.Functions[name] = function (operand) {
    // Element-wise on arrays, plain application on scalars
    return Array.isArray(operand) ? operand.map(val).map(o.scalar) : o.scalar(val(operand));
  };
},
/**
 * Extend a scalar operator to arrays, or arrays and scalars
 * The operation between arrays is applied element-wise.
 * The operation operation between a scalar and an array will result in
 * the operation being applied between the scalar and every array element.
 */
addBinaryOperator: function addBinaryOperator(name, o) {
  if (o.symbol) {
    // Build map of symbols to function names for easy rewriting
    Mavo.toArray(o.symbol).forEach(function (symbol) {
      Mavo.Script.symbols[symbol] = name;
      if (o.precedence) {
        jsep.addBinaryOp(symbol, o.precedence);
      }
    });
  }
  o.identity = o.identity === undefined ? 0 : o.identity;
  return Mavo.Functions[name] = o.code || function () {
    for (var _len7 = arguments.length, operands = Array(_len7), _key7 = 0; _key7 < _len7; _key7++) {
      operands[_key7] = arguments[_key7];
    }
    if (operands.length === 1) {
      if (Array.isArray(operands[0])) {
        // Operand is an array of operands, expand it out
        operands = [].concat(_toConsumableArray(operands[0]));
      }
    }
    if (!o.raw) {
      operands = operands.map(val);
    }
    // Logical (comparison) operators compare adjacent operand pairs and AND
    // the results (so 1 < x < 5 chains); others fold left-to-right.
    var prev = o.logical ? o.identity : operands[0],
      result;
    var _loop2 = function _loop2(i) {
      var a = o.logical ? operands[i - 1] : prev;
      var b = operands[i];
      if (Array.isArray(b)) {
        if (typeof o.identity == "number") {
          b = $u.numbers(b);
        }
        if (Array.isArray(a)) {
          // Array op array: element-wise, keeping a's extra tail elements
          result = [].concat(_toConsumableArray(b.map(function (n, i) {
            return o.scalar(a[i] === undefined ? o.identity : a[i], n);
          })), _toConsumableArray(a.slice(b.length)));
        } else {
          result = b.map(function (n) {
            return o.scalar(a, n);
          });
        }
      } else if (Array.isArray(a)) {
        result = a.map(function (n) {
          return o.scalar(n, b);
        });
      } else {
        result = o.scalar(a, b);
      }
      if (o.reduce) {
        prev = o.reduce(prev, result, a, b);
      } else if (o.logical) {
        prev = prev && result;
      } else {
        prev = result;
      }
    };
    for (var i = 1; i < operands.length; i++) {
      _loop2(i);
    }
    return prev;
  };
},
/**
 * Mapping of operator symbols to function name.
 * Populated via addOperator() and addLogicalOperator()
 */
symbols: {},
unarySymbols: {},
/** Resolve an operator symbol to its function name (falls back to the symbol) */
getOperatorName: function getOperatorName(op, unary) {
  return Mavo.Script[unary ? "unarySymbols" : "symbols"][op] || op;
},
/**
 * Operations for elements and scalars.
 * Operations between arrays happen element-wise.
 * Operations between a scalar and an array will result in the operation being performed between the scalar and every array element.
 * Ordered by precedence (higher to lower)
 * @param scalar {Function} The operation between two scalars
 * @param identity The operation’s identity element. Defaults to 0.
 */
operators: {
  // Unary logical NOT (arity-1 scalar ⇒ registered as a unary operator)
  "not": {
    symbol: "!",
    scalar: function scalar(a) {
      return !a;
    }
  },
  "multiply": {
    scalar: function scalar(a, b) {
      return a * b;
    },
    identity: 1,
    symbol: "*"
  },
  "divide": {
    scalar: function scalar(a, b) {
      return a / b;
    },
    identity: 1,
    symbol: "/"
  },
  "add": {
    scalar: function scalar(a, b) {
      return +a + +b;
    },
    symbol: "+"
  },
  // Unary + (numeric coercion)
  "plus": {
    scalar: function scalar(a) {
      return +a;
    },
    symbol: "+"
  },
  "subtract": {
    scalar: function scalar(a, b) {
      if (isNaN(a) || isNaN(b)) {
        // Handle dates: the difference of two dates is in seconds
        var dateA = $u.date(a),
          dateB = $u.date(b);
        if (dateA && dateB) {
          return (dateA - dateB) / 1000;
        }
      }
      return a - b;
    },
    symbol: "-"
  },
  // Unary - (negation)
  "minus": {
    scalar: function scalar(a) {
      return -a;
    },
    symbol: "-"
  },
  // True mathematical modulo: result carries the sign of b, unlike JS %
  "mod": {
    scalar: function scalar(a, b) {
      var ret = a % b;
      ret += ret < 0 ? b : 0;
      return ret;
    },
    symbol: "mod",
    precedence: 6
  },
  // Comparisons are logical (chainable); non-numeric operands are
  // compared as dates when both parse as dates
  "lte": {
    logical: true,
    scalar: function scalar(a, b) {
      var _Mavo$Script$getNumer = Mavo.Script.getNumericalOperands(a, b);
      var _Mavo$Script$getNumer2 = _slicedToArray(_Mavo$Script$getNumer, 2);
      a = _Mavo$Script$getNumer2[0];
      b = _Mavo$Script$getNumer2[1];
      return a <= b;
    },
    identity: true,
    symbol: "<="
  },
  "lt": {
    logical: true,
    scalar: function scalar(a, b) {
      var _Mavo$Script$getNumer3 = Mavo.Script.getNumericalOperands(a, b);
      var _Mavo$Script$getNumer4 = _slicedToArray(_Mavo$Script$getNumer3, 2);
      a = _Mavo$Script$getNumer4[0];
      b = _Mavo$Script$getNumer4[1];
      return a < b;
    },
    identity: true,
    symbol: "<"
  },
  "gte": {
    logical: true,
    scalar: function scalar(a, b) {
      var _Mavo$Script$getNumer5 = Mavo.Script.getNumericalOperands(a, b);
      var _Mavo$Script$getNumer6 = _slicedToArray(_Mavo$Script$getNumer5, 2);
      a = _Mavo$Script$getNumer6[0];
      b = _Mavo$Script$getNumer6[1];
      return a >= b;
    },
    identity: true,
    symbol: ">="
  },
  "gt": {
    logical: true,
    scalar: function scalar(a, b) {
      var _Mavo$Script$getNumer7 = Mavo.Script.getNumericalOperands(a, b);
      var _Mavo$Script$getNumer8 = _slicedToArray(_Mavo$Script$getNumer7, 2);
      a = _Mavo$Script$getNumer8[0];
      b = _Mavo$Script$getNumer8[1];
      return a > b;
    },
    identity: true,
    symbol: ">"
  },
  // Loose equality on purpose: "5" = 5 in MavoScript
  "eq": {
    logical: true,
    scalar: function scalar(a, b) {
      return a == b;
    },
    symbol: ["=", "=="],
    identity: true,
    precedence: 6
  },
  "neq": {
    logical: true,
    scalar: function scalar(a, b) {
      return a != b;
    },
    symbol: ["!="],
    identity: true
  },
  "and": {
    logical: true,
    scalar: function scalar(a, b) {
      return !!a && !!b;
    },
    identity: true,
    symbol: ["&&", "and"],
    precedence: 2
  },
  // OR reduces with || so truthy values (not just true) survive
  "or": {
    logical: true,
    scalar: function scalar(a, b) {
      return a || b;
    },
    reduce: function reduce(p, r) {
      return p || r;
    },
    identity: false,
    symbol: ["||", "or"],
    precedence: 2
  },
  // String concatenation; null/undefined/"" render as nothing
  "concatenate": {
    symbol: "&",
    identity: "",
    scalar: function scalar(a, b) {
      return "" + (a || "") + (b || "");
    },
    precedence: 10
  },
  // Filter is listed here because it's an easy way to handle multiple
  // array filters without having to code it
  "filter": {
    scalar: function scalar(a, b) {
      return val(b) ? a : null;
    },
    raw: true
  }
},
/**
 * Coerce a pair of operands for comparison: if either is non-numeric and
 * both parse as dates, compare them as dates; otherwise leave unchanged.
 */
getNumericalOperands: function getNumericalOperands(a, b) {
  if (isNaN(a) || isNaN(b)) {
    // Try comparing as dates
    var da = $u.date(a),
      db = $u.date(b);
    if (da && db) {
      // Both valid dates
      return [da, db];
    }
  }
  return [a, b];
},
/**
 * These serializers transform the AST into JS
 */
serializers: {
  "BinaryExpression": function BinaryExpression(node) {
    return _.serialize(node.left) + " " + node.operator + " " + _.serialize(node.right);
  },
  "UnaryExpression": function UnaryExpression(node) {
    return "" + node.operator + _.serialize(node.argument);
  },
  "CallExpression": function CallExpression(node) {
    return _.serialize(node.callee) + "(" + node.arguments.map(_.serialize).join(", ") + ")";
  },
  "ConditionalExpression": function ConditionalExpression(node) {
    return _.serialize(node.test) + "? " + _.serialize(node.consequent) + " : " + _.serialize(node.alternate);
  },
  // Property access is routed through Mavo.Functions.get() to avoid TypeErrors
  "MemberExpression": function MemberExpression(node) {
    var property = node.computed ? _.serialize(node.property) : "\"" + node.property.name + "\"";
    return "get(" + _.serialize(node.object) + ", " + property + ")";
  },
  "ArrayExpression": function ArrayExpression(node) {
    return "[" + node.elements.map(_.serialize).join(", ") + "]";
  },
  "Literal": function Literal(node) {
    return node.raw;
  },
  "Identifier": function Identifier(node) {
    return node.name;
  },
  "ThisExpression": function ThisExpression(node) {
    return "this";
  },
  "Compound": function Compound(node) {
    return node.body.map(_.serialize).join(" ");
  }
},
/**
 * These are run before the serializers and transform the expression to support MavoScript
 */
transformations: {
  // Flatten chains of the same operator (a + b + c) into one variadic
  // call: add(a, b, c). Returns undefined for single-operand chains so
  // the plain serializer takes over.
  "BinaryExpression": function BinaryExpression(node) {
    var name = Mavo.Script.getOperatorName(node.operator);
    // Flatten same operator calls
    var nodeLeft = node;
    var args = [];
    do {
      args.unshift(nodeLeft.right);
      nodeLeft = nodeLeft.left;
    } while (Mavo.Script.getOperatorName(nodeLeft.operator) === name);
    args.unshift(nodeLeft);
    if (args.length > 1) {
      return name + "(" + args.map(_.serialize).join(", ") + ")";
    }
  },
  "UnaryExpression": function UnaryExpression(node) {
    var name = Mavo.Script.getOperatorName(node.operator, true);
    if (name) {
      return name + "(" + _.serialize(node.argument) + ")";
    }
  },
  "CallExpression": function CallExpression(node) {
    if (node.callee.type == "Identifier") {
      if (node.callee.name == "if") {
        // if is a JS reserved word; route it to iff()
        node.callee.name = "iff";
      }
      // Resolve every function call through the case-insensitive trap
      node.callee.name = "Mavo.Functions._Trap." + node.callee.name;
    }
  }
},
/** Serialize an AST node: transformations first, plain serializers as fallback */
serialize: function serialize(node) {
  var ret = _.transformations[node.type] && _.transformations[node.type](node);
  if (ret !== undefined) {
    return ret;
  }
  return _.serializers[node.type](node);
},
/** Rewrite MavoScript source into plain JS; on parse failure return it as-is */
rewrite: function rewrite(code) {
  try {
    return _.serialize(_.parse(code));
  } catch (e) {
    // Parsing as MavoScript failed, falling back to plain JS
    return code;
  }
},
/**
 * Compile an expression into a callable taking a `data` object.
 * NOTE: new Function + `with` are deliberate here — expressions are
 * author-written code that needs `data` properties as free variables,
 * with _Trap as the outer scope for function/Math lookups.
 */
compile: function compile(code) {
  code = _.rewrite(code);
  return new Function("data", "with(Mavo.Functions._Trap)\n\t\t\t\twith (data || {}) {\n\t\t\t\t\treturn (" + code + ");\n\t\t\t\t}");
},
// Expression parser (the jsep library)
parse: self.jsep
};
// Logical expressions (&&, ||) serialize/transform exactly like binary ones
_.serializers.LogicalExpression = _.serializers.BinaryExpression;
_.transformations.LogicalExpression = _.transformations.BinaryExpression;
// Register every declared operator; the arity of scalar() decides whether
// it is unary or binary
for (var name in Mavo.Script.operators) {
  var details = Mavo.Script.operators[name];
  if (details.scalar.length < 2) {
    Mavo.Script.addUnaryOperator(name, details);
  } else {
    Mavo.Script.addBinaryOperator(name, details);
  }
}
// Alternative names, exposed as plain aliases on Mavo.Functions
// (space-separated lists register multiple aliases)
var aliases = {
  average: "avg",
  iff: "iff IF",
  multiply: "mult product",
  divide: "div",
  lt: "smaller",
  gt: "larger bigger",
  eq: "equal equality",
  ordinal: "th"
};
var _loop3 = function _loop3(_name) {
  aliases[_name].split(/\s+/g).forEach(function (alias) {
    return Mavo.Functions[alias] = Mavo.Functions[_name];
  });
};
for (var _name in aliases) {
  _loop3(_name);
}
})(Bliss, Mavo.value, Mavo.Functions.util);
(function ($, $$) {
  // Dropbox storage backend: loads/saves Mavo data through the Dropbox HTTP
  // API, authenticating via OAuth through auth.mavo.io.
  var _ = Mavo.Backend.register($.Class({
    extends: Mavo.Backend,
    id: "Dropbox",
    constructor: function constructor() {
      this.permissions.on(["login", "read"]);
      // App key: per-app override via mv-dropbox-key, else Mavo's default app
      this.key = this.mavo.element.getAttribute("mv-dropbox-key") || "2mx6061p054bpbp";
      // Passive login: reuse an existing token, never prompt
      this.login(true);
    },
    update: function update(url, o) {
      this.super.update.call(this, url, o);
      // Normalize share URLs into direct, CORS-enabled file URLs
      this.url = _.fixShareURL(this.url);
    },
    // Upload a file next to the stored data file; resolves to its share URL
    upload: function upload(file, path) {
      var _this68 = this;
      path = this.path.replace(/[^/]+$/, "") + path;
      return this.put(file, path).then(function (fileInfo) {
        return _this68.getURL(path);
      });
    },
    // Create (or fetch) a shared link for a path, normalized for direct access
    getURL: function getURL(path) {
      return this.request("sharing/create_shared_link_with_settings", { path: path }, "POST").then(function (shareInfo) {
        return _.fixShareURL(shareInfo.url);
      });
    },
    /**
     * Saves a file to the backend.
     * @param {Object} file - An object with name & data keys
     * @return {Promise} A promise that resolves when the file is saved.
     */
    put: function put(serialized) {
      var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.path;
      var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
      return this.request("https://content.dropboxapi.com/2/files/upload", serialized, "POST", {
        headers: {
          "Dropbox-API-Arg": JSON.stringify({
            path: path,
            mode: "overwrite"
          }),
          "Content-Type": "application/octet-stream"
        }
      });
    },
    // Extra query parameters for the OAuth authorization redirect
    oAuthParams: function oAuthParams() {
      return "&redirect_uri=" + encodeURIComponent("https://auth.mavo.io") + "&response_type=code";
    },
    // Fetch (and cache) the logged-in user's profile; fires mv-login
    getUser: function getUser() {
      var _this69 = this;
      if (this.user) {
        return Promise.resolve(this.user);
      }
      return this.request("users/get_current_account", "null", "POST").then(function (info) {
        _this69.user = {
          username: info.email,
          name: info.name.display_name,
          avatar: info.profile_photo_url,
          info: info
        };
        $.fire(_this69.mavo.element, "mv-login", { backend: _this69 });
      });
    },
    login: function login(passive) {
      var _this70 = this;
      return this.oAuthenticate(passive).then(function () {
        return _this70.getUser();
      }).then(function (u) {
        if (_this70.user) {
          _this70.permissions.logout = true;
          // Check if can actually edit the file
          _this70.request("sharing/get_shared_link_metadata", {
            "url": _this70.source
          }, "POST").then(function (info) {
            _this70.path = info.path_lower;
            _this70.permissions.on(["edit", "save"]);
          });
        }
      });
    },
    logout: function logout() {
      return this.oAuthLogout();
    },
    static: {
      apiDomain: "https://api.dropboxapi.com/2/",
      oAuth: "https://www.dropbox.com/oauth2/authorize",
      // Does this backend handle the given URL?
      // NOTE(review): the dot in /dropbox.com/ is unescaped, so this also
      // matches hosts like "dropboxXcom" — probably harmless, but confirm.
      test: function test(url) {
        url = new URL(url, Mavo.base);
        return (/dropbox.com/.test(url.host)
        );
      },
      // Transform the dropbox shared URL into something raw and CORS-enabled
      fixShareURL: function fixShareURL(url) {
        url = new URL(url, Mavo.base);
        url.hostname = "dl.dropboxusercontent.com";
        url.search = url.search.replace(/\bdl=0|^$/, "raw=1");
        return url;
      }
    }
  }));
})(Bliss, Bliss.$);
(function ($, $$) {
var _ = Mavo.Backend.register($.Class({
extends: Mavo.Backend,
id: "Github",
constructor: function constructor() {
  this.permissions.on(["login", "read"]);
  // App key: per-app override via mv-github-key, else Mavo's default OAuth app
  this.key = this.mavo.element.getAttribute("mv-github-key") || "7e08e016048000bc594e";
  // Extract info for username, repo, branch, filepath from URL
  var extension = this.format.constructor.extensions[0] || ".json";
  this.defaults = {
    repo: "mv-data",
    filename: "" + this.mavo.id + extension
  };
  this.info = _.parseURL(this.source, this.defaults);
  $.extend(this, this.info);
  // Passive login: reuse an existing token, never prompt
  this.login(true);
},
update: function update(url, o) {
  this.super.update.call(this, url, o);
  // Re-parse the (possibly changed) source URL into username/repo/branch/path
  this.info = _.parseURL(this.source, this.defaults);
  $.extend(this, this.info);
},
/**
 * Fetch data. Authenticated (or explicit raw API) calls go through the
 * GitHub API; otherwise a plain raw.githubusercontent.com GET is used to
 * avoid the unauthenticated rate limit.
 * @param {string} [url] - Optional explicit URL for a raw API call
 */
get: function get(url) {
  if (this.isAuthenticated() || !this.path || url) {
    // Authenticated or raw API call
    var info = url ? _.parseURL(url) : this.info;
    if (info.apiData) {
      // GraphQL
      return this.request(info.apiCall, info.apiData, "POST").then(function (response) {
        if (response.errors && response.errors.length) {
          return Promise.reject(response.errors.map(function (x) {
            return x.message;
          }).join("\n"));
        }
        return response.data;
      });
    }
    return this.request(info.apiCall, null, "GET", {
      headers: {
        "Accept": "application/vnd.github.squirrel-girl-preview"
      }
    }).then(function (response) {
      // File contents arrive base64-encoded from the contents API
      return Promise.resolve(info.repo ? _.atob(response.content) : response);
    });
  } else {
    // Unauthenticated, use simple GET request to avoid rate limit
    url = new URL("https://raw.githubusercontent.com/" + this.username + "/" + this.repo + "/" + (this.branch || "master") + "/" + this.path);
    url.searchParams.set("timestamp", Date.now()); // ensure fresh copy
    return $.fetch(url.href).then(function (xhr) {
      return Promise.resolve(xhr.responseText);
    }, function () {
      // Fetch failures are treated as "no data yet"
      return Promise.resolve(null);
    });
  }
},
/**
 * Upload a binary file next to the data file; resolves to its final URL.
 * @param {File|Blob} file
 * @param {string} [path=this.path]
 */
upload: function upload(file) {
  var _this71 = this;
  var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.path;
  return Mavo.readFile(file).then(function (dataURL) {
    var base64 = dataURL.slice(5); // remove data:
    var media = base64.match(/^\w+\/[\w+]+/)[0];
    base64 = base64.replace(RegExp("^" + media + "(;base64)?,"), "");
    path = _this71.path.replace(/[^/]+$/, "") + path; // make upload path relative to existing path
    return _this71.put(base64, path, { isEncoded: true });
  }).then(function (fileInfo) {
    return _this71.getURL(path, fileInfo.commit.sha);
  });
},
/**
 * Saves a file to the backend.
 * Creates the repo if needed; forks and polls when the user cannot push;
 * updates or creates the file; then opens/updates a pull request for forks.
 * @param {String} serialized - Serialized data
 * @param {String} path - Optional file path
 * @return {Promise} A promise that resolves when the file is saved.
 */
put: function put(serialized) {
  var _this72 = this;
  var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.path;
  var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  if (!path) {
    // Raw API calls are read-only for now
    return;
  }
  var repoCall = "repos/" + this.username + "/" + this.repo;
  var fileCall = repoCall + "/contents/" + path;
  var commitPrefix = this.mavo.element.getAttribute("mv-github-commit-prefix") || "";
  // Create repo if it doesn’t exist
  var repoInfo = this.repoInfo || this.request("user/repos", { name: this.repo }, "POST").then(function (repoInfo) {
    return _this72.repoInfo = repoInfo;
  });
  // The contents API wants base64; skip if the caller already encoded it
  serialized = o.isEncoded ? serialized : _.btoa(serialized);
  return Promise.resolve(repoInfo).then(function (repoInfo) {
    if (!_this72.canPush()) {
      // Does not have permission to commit, create a fork
      return _this72.request(repoCall + "/forks", { name: _this72.repo }, "POST").then(function (forkInfo) {
        fileCall = "repos/" + forkInfo.full_name + "/contents/" + path;
        return _this72.forkInfo = forkInfo;
      }).then(function (forkInfo) {
        // Ensure that fork is created (they take a while)
        // by polling with a cheap HEAD request until it responds
        var timeout;
        var test = function test(resolve, reject) {
          clearTimeout(timeout);
          _this72.request("repos/" + forkInfo.full_name + "/commits", { until: "1970-01-01T00:00:00Z" }, "HEAD").then(function (x) {
            resolve(forkInfo);
          }).catch(function (x) {
            // Try again after 1 second
            timeout = setTimeout(test, 1000);
          });
        };
        return new Promise(test);
      });
    }
    return repoInfo;
  }).then(function (repoInfo) {
    // Need the current file's SHA to update it in place
    return _this72.request(fileCall, {
      ref: _this72.branch
    }).then(function (fileInfo) {
      return _this72.request(fileCall, {
        message: commitPrefix + _this72.mavo._("gh-updated-file", { name: fileInfo.name || "file" }),
        content: serialized,
        branch: _this72.branch,
        sha: fileInfo.sha
      }, "PUT");
    }, function (xhr) {
      if (xhr.status == 404) {
        // File does not exist, create it
        return _this72.request(fileCall, {
          message: commitPrefix + "Created file",
          content: serialized,
          branch: _this72.branch
        }, "PUT");
      }
      return xhr;
    });
  }).then(function (fileInfo) {
    if (_this72.forkInfo) {
      // We saved in a fork, do we have a pull request?
      _this72.request("repos/" + _this72.username + "/" + _this72.repo + "/pulls", {
        head: _this72.user.username + ":" + _this72.branch,
        base: _this72.branch
      }).then(function (prs) {
        _this72.pullRequest(prs[0]);
      });
    }
    return fileInfo;
  });
},
// Drive the "suggest an edit" flow for data saved in a fork.
// `existing` is a pull-request object from the GitHub API; when present we
// offer to revoke it, otherwise we offer to open a new PR from this.branch.
pullRequest: function pullRequest(existing) {
var _this73 = this;
// Preview URL that points this Mavo app's storage at the fork, so
// reviewers can see the suggested data live.
var previewURL = new URL(location);
previewURL.searchParams.set(this.mavo.id + "-storage", "https://github.com/" + this.forkInfo.full_name + "/" + this.path);
var message = this.mavo._("gh-edit-suggestion-saved-in-profile", { previewURL: previewURL });
// Only one suggestion notice should be visible at a time.
if (this.notice) {
this.notice.close();
}
if (existing) {
// We already have a pull request, ask about closing it
this.notice = this.mavo.message(message + "\n\t\t\t\t" + this.mavo._("gh-edit-suggestion-notreviewed") + "\n\t\t\t\t<form onsubmit=\"return false\">\n\t\t\t\t\t<button class=\"mv-danger\">" + this.mavo._("gh-edit-suggestion-revoke") + "</button>\n\t\t\t\t</form>", {
classes: "mv-inline",
dismiss: ["button", "submit"]
});
// `closed` resolves with the <form> when submitted, falsy when dismissed.
this.notice.closed.then(function (form) {
if (!form) {
return;
}
// Close PR
_this73.request("repos/" + _this73.username + "/" + _this73.repo + "/pulls/" + existing.number, {
state: "closed"
}, "POST").then(function (prInfo) {
new Mavo.UI.Message(_this73.mavo, "<a href=\"" + prInfo.html_url + "\">" + _this73.mavo._("gh-edit-suggestion-cancelled") + "</a>", {
dismiss: ["button", "timeout"]
});
// Re-enter with no argument: offers to create a fresh PR.
_this73.pullRequest();
});
});
} else {
// Ask about creating a PR
this.notice = this.mavo.message(message + "\n\t\t\t\t" + this.mavo._("gh-edit-suggestion-instructions") + "\n\t\t\t\t<form onsubmit=\"return false\">\n\t\t\t\t\t<textarea name=\"edits\" class=\"mv-autosize\" placeholder=\"" + this.mavo._("gh-edit-suggestion-reason-placeholder") + "\"></textarea>\n\t\t\t\t\t<button>" + this.mavo._("gh-edit-suggestion-send") + "</button>\n\t\t\t\t</form>", {
classes: "mv-inline",
dismiss: ["button", "submit"]
});
this.notice.closed.then(function (form) {
if (!form) {
return;
}
// We want to send a pull request
_this73.request("repos/" + _this73.username + "/" + _this73.repo + "/pulls", {
title: _this73.mavo._("gh-edit-suggestion-title"),
body: _this73.mavo._("gh-edit-suggestion-body", {
description: form.elements.edits.value,
previewURL: previewURL
}),
head: _this73.user.username + ":" + _this73.branch,
base: _this73.branch
}, "POST").then(function (prInfo) {
new Mavo.UI.Message(_this73.mavo, "<a href=\"" + prInfo.html_url + "\">" + _this73.mavo._("gh-edit-suggestion-sent") + "</a>", {
dismiss: ["button", "timeout"]
});
// Re-enter with the new PR so the "revoke" UI is shown next.
_this73.pullRequest(prInfo);
});
});
}
},
// Authenticate against GitHub. `passive` is forwarded to oAuthenticate
// (presumably: don't open a popup, just reuse a stored token — TODO confirm).
// On success, enables edit/save permissions and caches repo metadata.
login: function login(passive) {
var _this74 = this;
return this.oAuthenticate(passive).then(function () {
return _this74.getUser();
}).catch(function (xhr) {
if (xhr.status == 401) {
// Unauthorized. Access token we have is invalid, discard it
_this74.logout();
}
}).then(function (u) {
if (_this74.user) {
_this74.permissions.on("logout");
if (_this74.info.path) {
_this74.permissions.on(["edit", "save"]);
}
if (_this74.repo) {
return _this74.request("repos/" + _this74.username + "/" + _this74.repo).then(function (repoInfo) {
// Fall back to the repo's default branch when none was configured.
if (_this74.branch === undefined) {
_this74.branch = repoInfo.default_branch;
}
return _this74.repoInfo = repoInfo;
});
}
}
});
},
canPush: function canPush() {
if (this.repoInfo) {
return this.repoInfo.permissions.push;
}
// Repo does not exist so we can't check permissions
// Just check if authenticated user is the same as our URL username
return this.user && this.user.username.toLowerCase() == this.username.toLowerCase();
},
oAuthParams: function oAuthParams() {
return "&scope=repo,gist";
},
logout: function logout() {
var _this75 = this;
return this.oAuthLogout().then(function () {
_this75.user = null;
});
},
getUser: function getUser() {
var _this76 = this;
if (this.user) {
return Promise.resolve(this.user);
}
return this.request("user").then(function (info) {
_this76.user = {
username: info.login,
name: info.name || info.login,
avatar: info.avatar_url,
url: "https://github.com/" + info.login,
info: info
};
$.fire(_this76.mavo.element, "mv-login", { backend: _this76 });
});
},
// Build a public URL for a stored file: prefer the repo's GitHub Pages
// site, falling back to rawgit. `path` defaults to this.path; `sha`
// pins the fallback URL to a specific commit.
getURL: function getURL() {
var _this77 = this;
var path = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.path;
var sha = arguments[1];
// Prefer the fork (when editing via suggestion flow) over the main repo.
var repoInfo = this.forkInfo || this.repoInfo;
var repo = repoInfo.full_name;
path = path.replace(/ /g, "%20");
// Cache the Pages lookup promise on the repo info object.
// The custom Accept header opts into GitHub's Pages API preview.
repoInfo.pagesInfo = repoInfo.pagesInfo || this.request("repos/" + repo + "/pages", {}, "GET", {
headers: {
"Accept": "application/vnd.github.mister-fantastic-preview+json"
}
});
return repoInfo.pagesInfo.then(function (pagesInfo) {
return pagesInfo.html_url + path;
}).catch(function (xhr) {
// No Github Pages, return rawgit URL
if (sha) {
return "https://cdn.rawgit.com/" + repo + "/" + sha + "/" + path;
} else {
return "https://rawgit.com/" + repo + "/" + _this77.branch + "/" + path;
}
});
},
// Class-level members for the GitHub backend.
static: {
apiDomain: "https://api.github.com/",
oAuth: "https://github.com/login/oauth/authorize",
// Does this URL look like it's served by GitHub (web UI or raw CDN)?
test: function test(url) {
url = new URL(url, Mavo.base);
return (/\bgithub.com|raw.githubusercontent.com/.test(url.host)
);
},
/**
* Parse Github URLs, return username, repo, branch, path
*/
parseURL: function parseURL(source) {
var defaults = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var ret = {};
var url = new URL(source, Mavo.base);
var path = url.pathname.slice(1).split("/");
// GitHub URL shape: /<username>/<repo>/...
ret.username = path.shift();
ret.repo = path.shift() || defaults.repo;
if (/raw.githubusercontent.com$/.test(url.host)) {
// Raw CDN URLs carry the branch right after the repo.
ret.branch = path.shift();
} else if (/api.github.com$/.test(url.host)) {
// Raw API call
var apiCall = url.pathname.slice(1) + url.search;
var data = Mavo.Functions.from(source, "#"); // url.* drops line breaks
return {
apiCall: apiCall,
apiData: apiCall == "graphql" ? { query: data } : data
};
} else if (path[0] == "blob") {
// Web UI URLs look like /<user>/<repo>/blob/<branch>/<path>.
path.shift();
ret.branch = path.shift();
}
// Anything with a file extension is treated as the filename.
var lastSegment = path[path.length - 1];
if (/\.\w+$/.test(lastSegment)) {
ret.filename = lastSegment;
path.splice(path.length - 1, 1);
} else {
ret.filename = defaults.filename;
}
ret.filepath = path.join("/") || defaults.filepath || "";
ret.path = (ret.filepath ? ret.filepath + "/" : "") + ret.filename;
ret.apiCall = "repos/" + ret.username + "/" + ret.repo + "/contents/" + ret.path;
return ret;
},
// Fix atob() and btoa() so they can handle Unicode
btoa: function (_btoa) {
function btoa(_x70) {
return _btoa.apply(this, arguments);
}
btoa.toString = function () {
return _btoa.toString();
};
return btoa;
}(function (str) {
// UTF-8 encode before base64 so non-Latin1 strings don't throw.
return btoa(unescape(encodeURIComponent(str)));
}),
atob: function atob(str) {
// Inverse of the btoa wrapper above: base64 decode, then UTF-8 decode.
return decodeURIComponent(escape(window.atob(str)));
}
}
}));
})(Bliss, Bliss.$);
// NOTE(review): leftover debug logging and commented-out instrumentation;
// consider removing before release.
console.log("local");
// Mavo.Performance.time("Mavo.DOMExpression#output");
// Mavo.Performance.time("Mavo.Node#dataChanged");
// document.addEventListener("focus", evt => { console.log(evt.type, evt.target, evt); }, true);
// document.addEventListener("blur", evt => { console.log(evt.type, evt.target, evt, document.activeElement)}, true);
// NOTE(review): two sourceMappingURL directives — only the last one is
// honored; the first looks like a stale artifact of concatenation.
//# sourceMappingURL=maps/mavo-nodeps.js.map
//# sourceMappingURL=maps/mavo.es5.js.map
|
import { assert } from 'ember-debug';
import calculateLocationDisplay from '../system/calculate-location-display';
/**
 * Template AST transform that rejects `@`-prefixed paths (reserved
 * named arguments) in templates compiled from `moduleName`.
 */
export default function assertReservedNamedArguments(env) {
  let { moduleName } = env.meta;

  return {
    name: 'assert-reserved-named-arguments',

    visitors: {
      PathExpression(node) {
        let isNamedArgument = node.original[0] === '@';

        if (isNamedArgument) {
          // `assert` called with only a message always throws in debug builds.
          assert(assertMessage(moduleName, node));
        }
      }
    }
  };
}
// Build the assertion text for a reserved `@name` path, including the
// module name and source location for easier debugging.
function assertMessage(moduleName, node) {
  const source = calculateLocationDisplay(moduleName, node.loc);
  return `'${node.original}' is not a valid path. ${source}`;
}
|
"""Some convenient utils functions."""
import datetime
import os
import socket
import sys
import traceback
import uuid
import pytz
from ndscheduler.corescheduler import constants
def import_from_path(path):
    """Import a module attribute (e.g. a class) from a dotted path string.

    :param str path: dotted path, e.g., ndscheduler.corescheduler.job.Job
    :return: the object the path points at
    :rtype: object
    """
    import importlib

    module_path, _, attr_name = path.rpartition('.')
    # importlib.import_module resolves dotted sub-packages directly,
    # unlike the bare __import__ hack which returns the top-level
    # package and requires walking getattr() down the chain.
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)
def get_current_datetime():
    """Retrieve the current time as a timezone-aware UTC datetime.

    :return: A datetime representing the current time, with tzinfo set
        to UTC.
    :rtype: datetime.datetime
    """
    # datetime.now(tz) is the non-deprecated replacement for
    # utcnow().replace(tzinfo=...) and needs no third-party (pytz) tzinfo;
    # the result still compares equal to pytz.utc-tagged datetimes.
    return datetime.datetime.now(datetime.timezone.utc)
def get_job_name(job):
    """Return the task name of a job.

    By convention the task name is stored as the job's first
    positional argument.

    :param Job job: An apscheduler.job.Job instance.
    :return: task name
    :rtype: str
    """
    name = job.args[0]
    return name
def get_job_args(job):
    """Returns arguments of a job.

    :param Job job: An apscheduler.job.Job instance.
    :return: task arguments
    :rtype: list of str
    """
    # The first constants.JOB_ARGS positional args are scheduler
    # bookkeeping (e.g. the task name); the rest belong to the task.
    return job.args[constants.JOB_ARGS:]
def get_job_kwargs(job):
    """Returns keyword arguments of a job.

    :param Job job: An apscheduler.job.Job instance.
    :return: keyword arguments
    :rtype: dict
    """
    # Unlike positional args, all kwargs are forwarded to the task as-is.
    return job.kwargs
def get_cron_strings(job):
    """Return the cron fields of a job as strings.

    :param Job job: An apscheduler.job.Job instance (cron trigger).
    :return: mapping of cron field name to its string representation
    :rtype: dict
    """
    # Indexes follow apscheduler's CronTrigger field layout.
    field_indexes = (
        ('month', 1),
        ('day', 2),
        ('week', 3),
        ('day_of_week', 4),
        ('hour', 5),
        ('minute', 6),
    )
    fields = job.trigger.fields
    return {name: str(fields[index]) for name, index in field_indexes}
def generate_uuid():
    """Generate a 32-digit hex uuid string.

    Example: d8f376e858a411e4b6ae22001ac68d05

    :return: uuid hex string
    :rtype: str
    """
    # uuid4 is random; .hex is the dash-free representation.
    random_id = uuid.uuid4()
    return random_id.hex
def get_stacktrace():
    """Return the formatted traceback of the exception being handled."""
    exc_info = sys.exc_info()
    return ''.join(traceback.format_exception(*exc_info))
def get_hostname():
    """Return this machine's host name."""
    hostname = socket.gethostname()
    return hostname
def get_pid():
    """Return the ID of the current process."""
    pid = os.getpid()
    return pid
def get_datastore_instance(datastore_class_path, db_config=None, db_tablenames=None):
    """Resolve a datastore class by dotted path and return its instance.

    :param str datastore_class_path: dotted path to the datastore class.
    :param dict db_config: optional database configuration.
    :param dict db_tablenames: optional table-name overrides.
    :return: the datastore instance (via the class's get_instance()).
    """
    datastore_cls = import_from_path(datastore_class_path)
    return datastore_cls.get_instance(db_config, db_tablenames)
|
import struct
import bz2
def write_len(fp, l):
    """Write the integer ``l`` to stream ``fp`` as 4 big-endian bytes.

    Counterpart of read_len()/conv_len() below ('!L' convention).
    """
    packed = struct.pack('!L', l)
    fp.write(packed)
def conv_len(bytes):
    """Convert 4 big-endian bytes to the unsigned integer they encode.

    Follows the same '!L' convention used by the other routines in
    this file.
    """
    (value,) = struct.unpack('!L', bytes)
    return value
def read_len(fp, ignoreEOF=False, verbose=False):
    """Read a 4-byte big-endian length from the stream.

    If fewer than 4 bytes are available and the ``ignoreEOF`` flag is
    set, None is returned instead of raising an IOError.
    """
    lbytes = fp.read(4)
    #if verbose:
    #    print "Raw length bytes: "+str(repr(lbytes))
    if len(lbytes) == 4:
        (value,) = struct.unpack('!L', lbytes)
        return value
    if ignoreEOF:
        return None
    raise IOError("Failed to read length data")  # pragma no cover
# Transforms
# Identifier strings recognized by parse_transform() below.
T_INV = "inv"  # inversion / negation transform
T_AFF = "aff"  # affine transform, spelled "aff(scale, offset)"
class InvTransform:
    """Element-wise inversion: negates booleans and numbers.

    Unsupported element types are passed through unchanged.
    """

    def __init__(self):
        pass

    def apply(self, data):
        """Return a list with every supported element of ``data`` inverted."""
        def afunc(x):
            if type(x) == bool:
                return not x
            if type(x) == float:
                return -x
            if type(x) == int:
                return -x
            try:
                # Python 2 only: `long` does not exist on Python 3, where
                # the bare reference used to raise NameError at runtime.
                if type(x) == long:  # noqa: F821  # pragma: no cover
                    return -x
            except NameError:
                pass
            return x  # pragma: no cover
        # list(...) keeps the eager Python 2 map() behaviour on Python 3,
        # where map() would otherwise return a lazy iterator.
        return list(map(afunc, data))
class AffineTransform:
    """Element-wise affine map: x -> x * scale + offset for numbers.

    Unsupported element types are passed through unchanged.
    """

    def __init__(self, scale, offset):
        self.scale = scale
        self.offset = offset

    def apply(self, data):
        """Return a list with the affine map applied to each number."""
        def sfunc(x):
            # TODO: Are these sufficient?
            if type(x) == float or type(x) == int:
                return x * self.scale + self.offset
            try:
                # Python 2 only: `long` raises NameError on Python 3,
                # where the original expression used to crash for
                # non-numeric elements.
                if type(x) == long:  # noqa: F821  # pragma: no cover
                    return x * self.scale + self.offset
            except NameError:
                pass
            return x  # pragma: no cover
        # list(...) keeps the eager Python 2 map() behaviour on Python 3.
        return list(map(sfunc, data))
def parse_transform(t):
    """Parse a transform spec string into a transform object.

    Recognized forms (whitespace is ignored):
      * "inv"       -> InvTransform()
      * "aff(s, o)" -> AffineTransform(scale=s, offset=o)

    Anything unrecognized (including non-strings) yields None.
    """
    if t is None:
        return None
    if not isinstance(t, str):
        return None
    trans = t.replace(" ", "")
    if trans == T_INV:
        return InvTransform()
    if trans.startswith(T_AFF + "(") and trans.endswith(")"):
        try:
            # Unpacking raises ValueError for the wrong number of parts,
            # float() raises ValueError for malformed numbers; the old
            # bare `except:` also swallowed unrelated errors.
            s, o = (float(part) for part in trans[4:-1].split(","))
            return AffineTransform(s, o)
        except ValueError:
            return None
    return None
|
# Complete the test function to perform a hypothesis test
# on list l under the null that the mean is h
from math import sqrt
def mean(l):
    """Arithmetic mean of the values in l (always floating-point)."""
    total = float(sum(l))
    return total / len(l)
def var(l):
    """Population variance of l (divides by len(l), not len(l) - 1)."""
    m = float(sum(l)) / len(l)  # the mean, inlined
    squared_deviations = [(x - m) ** 2 for x in l]
    return sum(squared_deviations) / len(l)
def factor(l):
    """z critical value for a 95% two-sided confidence interval.

    The sample ``l`` is ignored: 1.96 is the normal-approximation
    constant, so this is only appropriate for reasonably large samples.
    """
    return 1.96
def conf(l):
    """Half-width of the 95% confidence interval for the mean of l."""
    standard_error = sqrt(var(l) / len(l))
    return factor(l) * standard_error
def test(h, mu, ci):
return abs(h-mu) <= ci
# Sample data: ten observed proportions.
l = [0.79, 0.70, 0.73, 0.66, 0.65, 0.70, 0.74, 0.81, 0.71, 0.70]
mu = mean(l)
ci = conf(l)
# Print the 95% confidence interval for the sample mean.
print("{} - {}".format(mu - ci, mu + ci))
|
/*
 * @name Increment / Decrement
 * @description "a++" is equivalent to "a = a + 1". "a--" is equivalent to "a = a - 1".
 */
let a;         // x position of the line sweeping left-to-right (incremented)
let b;         // x position of the line sweeping right-to-left (decremented)
let direction; // flips each time `a` wraps past the canvas width
// p5.js setup: size the canvas, then initialize the sweep state.
function setup() {
  createCanvas(710, 400);
  // Scale the RGB range to the canvas width so stroke(a) fades from
  // black to white as `a` crosses the canvas.
  colorMode(RGB, width);
  frameRate(30);
  a = 0;
  b = width;
  direction = true;
}
// p5.js draw loop: two vertical lines sweep the canvas in opposite
// directions; their brightness gradient flips every time `a` wraps.
function draw() {
  // Top half: line moves left-to-right, wrapping at the right edge.
  a++;
  if (a > width) {
    a = 0;
    direction = !direction;
  }
  stroke(direction ? a : width - a);
  line(a, 0, a, height / 2);

  // Bottom half: line moves right-to-left, wrapping at the left edge.
  b--;
  if (b < 0) {
    b = width;
  }
  stroke(direction ? width - b : b);
  line(b, height / 2 + 1, b, height);
}
|
import Ember from 'ember';
const {computed, get, observer, RSVP, set} = Ember;
import ENV from '../../config/environment';
const {apiURL} = ENV;
// Controller for the product-space visualization page.
export default Ember.Controller.extend({
// Injected services.
i18n: Ember.inject.service(),
featureToggle: Ember.inject.service(),
buildermodSearchService: Ember.inject.service(),
departmentCityFilterService: Ember.inject.service(),
vistkNetworkService: Ember.inject.service(),
// Dates are reflected in the URL's query string.
queryParams: ['startDate', 'endDate'],
// NOTE(review): array literal on the prototype is shared — fine for a
// singleton controller, but worth confirming it is never mutated in place.
categoriesFilterList: [],
elementId: 'product_space',
VCRValue: 1,
setSelectedProductsbyId: function (id) {
var selected_products = {}
selected_products[id] = this.getPrimariesSecondaries2(parseInt(id))
//console.log(selected_products)
this.set("selectedProducts", selected_products)
},
// Deep link into Harvard's Atlas of Economic Complexity for the
// currently selected start year.
linkDatlas: computed('startDate', function () {
var startDate = this.get("startDate");
return `https://atlas.cid.harvard.edu/explore?country=undefined&product=undefined&year=${startDate}&tradeDirection=import&productClass=HS&target=Product&partner=undefined&startYear=undefined`
}),
// Current selection map; reset to the initial selection whenever the
// model array changes. Mutated imperatively via setSelectedProductsbyId.
selectedProducts: computed('model.[]', function () {
return this.get("initialSelectedProducts");
}),
// Look up a network node by id (lodash indexBy keyed on `id`; object
// keys coerce to strings, so numeric and string ids both work).
getProduct: function (id) {
let indexedData = _.indexBy(this.get('networkData'), 'id');
return indexedData[id]
},
product_selected: computed('model', 'center', function () {
var id = this.get("center")
var product = this.getProduct(id)
var product_copy = {}
Object.assign(product_copy, product)
return product_copy
}),
// Table state: top-5 "primary"/"secondary" related products plus totals,
// populated by the filteredDataTable computed below.
product_primaries: [],
product_primaries_total: null,
product_secondaries: [],
product_secondaries_total: null,
// Id of the node currently centered in the product-space network.
center: computed("model", function () {
return this.get("model.entity.id");
}),
// Build the category legend (one entry per product parent group) from the
// product metadata, localized to the current i18n display language.
categoriesObject: computed('model', 'i18n.locale', function() {
var products = this.get('model.metaData.products')
var products_list = []
var nested_data
for(let id of Object.keys(products)){
products_list.push(products[id])
}
// Tag each product with its parent group name, except when the product
// IS its own parent (top-level entries get no `group`).
var updatedData = products_list.map(item => {
if(_.get(item, `parent_name_${this.get('i18n').display}`) === _.get(item, `name_${this.get('i18n').display}`)){
return {
color: _.get(item, "color"),
icon: _.get(item, "icon"),
item: item
};
}
else{
return {
color: _.get(item, "color"),
icon: _.get(item, "icon"),
item: item,
group: _.get(item, `parent_name_${this.get('i18n').display}`),
parent_code: _.get(item, `parent_code`),
};
}
});
// Nest by group when the data is grouped; flat d3.nest otherwise.
if(updatedData[0] !== undefined){
if(updatedData[0].hasOwnProperty("group")){
// NOTE(review): this branch's result is immediately overwritten by the
// assignment below — it looks like dead code; confirm intent.
if(updatedData[0].group == undefined){
nested_data = d3.nest().entries(updatedData);
}
nested_data = d3.nest().key(function(d) { return d.group; }).entries(updatedData);
}
else{
nested_data = d3.nest().entries(updatedData);
}
}
else{
nested_data = []
}
// One legend entry per nest key, with color/icon taken from the entry
// itself or from its first child, falling back to hard-coded defaults.
var categories = nested_data.map(item => {
var color = "#33691e";
var icon = "fas fa-atom";
var icon_color = "#FFFFFF";
if(item.hasOwnProperty("color")){
color = item.color;
}
else{
if(item.hasOwnProperty("values")){
if(item.values.length > 0){
if(item.values[0].hasOwnProperty("color")){
color = item.values[0].color;
}
}
}
}
if(item.hasOwnProperty("icon")){
icon = item.icon;
}
else{
if(item.hasOwnProperty("values")){
if(item.values.length > 0){
if(item.values[0].hasOwnProperty("icon")){
icon = item.values[0].icon;
}
}
}
}
return {
name: item.key,
color: color,
icon: icon,
icon_color: icon_color,
hide: false,
isolate: false
};
});
//console.log(categories)
// Side effect in a computed property: wire up tooltip hover handlers
// once the legend buttons have rendered (100ms later).
Ember.run.later(this , function() {
$('.category-button').on("mouseover", function(e) {
$(this).find("div.tooltip").removeClass("d-none")
})
$('.category-button').on("mouseleave", function(e) {
$(this).find("div.tooltip").addClass("d-none");
})
}, 100);
return categories;
}),
searchFilter: observer('buildermodSearchService.search', function() {
var data = this.get("model.metaData.products");
var selected = this.get("selectedProducts");
let search = _.deburr(this.get('buildermodSearchService.search'));
var self = this;
var elementId = this.get("elementId");
var initialSelectedProducts = this.get("initialSelectedProducts")
if(search === ""){
//var id_principal = this.get("model.entity.id");
//d3.selectAll(".tooltip_network").classed("d-none", true);
//d3.selectAll(`.tooltip_${id_principal}_${elementId}`).classed("d-none", false);
//this.set("selectedProducts", initialSelectedProducts);
this.set('vistkNetworkService.updated', new Date());
}
else {
var regexp = new RegExp(search.replace(/(\S+)/g, function(s) { return "\\b(" + s + ")(.*)"; })
.replace(/\s+/g, ''), "gi");
var result = _.filter(data, (d) => {
let parentName = get(d,`parent_name_${this.get('i18n').display}`);
let longName = get(d,`name_${this.get('i18n').display}`);
let shortName = get(d,`name_short_${this.get('i18n').display}`);
let code = get(d, 'code');
var result_city = _.deburr(`${shortName} ${longName} ${code}`).match(regexp)
if(result_city !== null){
return result_city;
}
return _.deburr(`${parentName} ${code}`).match(regexp);
});
result.map(item => {
//selected.push(String(item.id))
self.set("center", item.id)
self.setSelectedProductsbyId(item.id)
//this.transitionToRoute('product.ringchart', item.id, {queryParams: { endDate: this.get("endDate"), startDate: this.get("startDate"), centerId: this.get("center") }});
Ember.run.later(this , function() {
$(`.d3plus-id-${item.id}`).click(),
2000
});
//self.set('vistkNetworkService.updated', new Date());
//d3.selectAll(`.tooltip_${item.id}_${elementId}`).classed('d-none', false);
});
}
}),
formatNumber: (number, key, i18n) => {
var decimalVars = [
'export_rca',
'eci',
'industry_eci',
'rca',
'complexity',
'distance',
'cog',
'coi',
'industry_coi',
'population',
'yield_ratio',
'yield_index',
'average_livestock_load',
];
var percentVars = [
'share',
'employment_growth'
];
var wageVarsInThousands = [
'wages',
'avg_wages',
'avg_wage',
];
var moneyVars = [
'gdp_pc_real',
'gdp_real',
];
var largeNumbers = [
'export_value',
'import_value',
'monthly_wages',
'average_wages',
'area',
'production_tons',
'land_sown',
'land_harvested',
'num_farms',
'num_livestock',
];
if(_.include(wageVarsInThousands, key)){
return numeral(number).divide(1000).format('0,0');
} else if(_.include(decimalVars, key)){
var result = numeral(number).format('0.00a')
return result;
} else if(key === 'employment'){
return numeral(Math.ceil(number)).format('0,0');
} else if(key === 'num_establishments' || key === 'export_num_plants'){
if(parseInt(number) < 6) {
return i18n.t('graph_builder.table.less_than_5');
}
return numeral(number).format('0,0');
} else if(_.include(percentVars, key)){
return numeral(number).format('0.00%');
} else if(_.include(largeNumbers, key)) {
return numeral(number).format('0,0');
} else if(_.include(moneyVars, key)) {
return numeral(number).format('$0.00a');
} else {
return number;
}
},
observerCenter: observer("center", function () {
var center = this.get("center")
this.setSelectedProductsbyId(center)
this.set('vistkNetworkService.updated', new Date());
}),
// Rows for the side table: the selected product, its primaries and
// secondaries. Also (as a side effect) formats and publishes the
// product_selected / product_primaries / product_secondaries properties.
filteredDataTable: computed("model", 'vistkNetworkService.updated', 'departmentCityFilterService.data', 'endDate', function () {
var selectedProducts = this.get("selectedProducts")
var self = this;
// Flatten the 3-level selection map {center: {primary: [secondaries]}}
// into a single list of ids.
var ids = []
for(let id of Object.keys(selectedProducts)){
ids.push(id)
for(let id2 of Object.keys(selectedProducts[id])){
ids.push(id2)
for(let id3 of selectedProducts[id][id2]){
ids.push(id3)
}
}
}
var productsData = this.get("productsData")
var result = productsData.filter(item => ids.includes(String(item.id)))
var indexed_result = _.indexBy(result, 'id');
var primaries = []
var secondaries = []
for(let id of Object.keys(selectedProducts)){
// Copy before formatting so the raw row is not clobbered.
var product_selected_copy = {}
Object.assign(product_selected_copy, indexed_result[id])
product_selected_copy.export_rca = self.formatNumber(product_selected_copy.export_rca, 'export_rca', this.get("i18n"))
product_selected_copy.export_value = self.formatNumber(product_selected_copy.export_value, 'export_value', this.get("i18n"))
product_selected_copy.complexity = self.formatNumber(product_selected_copy.complexity, 'complexity', this.get("i18n"))
this.set("product_selected", product_selected_copy)
for(let id2 of Object.keys(selectedProducts[id])){
primaries.push(indexed_result[id2])
for(let id3 of selectedProducts[id][id2]){
secondaries.push(indexed_result[id3])
}
}
// Only the first 5 of each list are shown; the rest become a count.
if(primaries.length > 5){
this.set("product_primaries_count", primaries.length - 5)
}
else{
this.set("product_primaries_count", 0)
}
this.set("product_primaries", primaries.splice(0,5))
if(secondaries.length > 5){
this.set("product_secondaries_count", secondaries.length - 5)
}
else{
this.set("product_secondaries_count", 0)
}
this.set("product_secondaries", secondaries.splice(0,5))
}
//console.log(indexed_result)
return result
}),
rangeYears: computed('firstYear', 'lastYear', function(){
var min = this.get("firstYear");
var max = this.get("lastYear");
return [...Array(max - min + 1).keys()].map(i => i + min);
}),
entityType: "product",
source: "products",
// Name of the selected department/city. The extra `get` calls exist only
// to register dependencies on the filter data and search text.
location: computed("departmentCityFilterService.name", function (){
this.get("departmentCityFilterService.data")
this.get('buildermodSearchService.search')
return this.get("departmentCityFilterService.name");
}),
// Id of the selected department/city.
locationId: computed("departmentCityFilterService.id", function (){
return this.get("departmentCityFilterService.id");
}),
// Options for the department selector, each with its chained child
// locations. NOTE(review): setting `selectedProducts` inside a computed
// is a side effect — consider moving it to an observer or action.
departmentsDataSelect: computed("model", function () {
this.set("selectedProducts", this.get("initialSelectedProducts"))
var all_locations = Object.values(this.get("model.metaData.locations"))
var locations = all_locations.filter(item => item.level === "department").map( (item) => {
// Children are the locations whose parent_id is this department.
var chained = all_locations.filter(item2 => item.id === item2.parent_id).map(item => ({'id': item.id, 'text': `${item.name_es} (${item.code})`}))
return {'id': item.id, 'text': `${item.name_es} (${item.code})`, 'chained': chained}
})
return locations
}),
productSpace: computed.alias('model.metaData.productSpace'),
// Product rows for the selected location within [startDate, endDate].
// id == 0 means "whole country" and uses the preloaded column data.
// NOTE(review): the jQuery show/hide calls are DOM side effects inside a
// computed property — they run whenever this recomputes.
productsData: computed('model', 'endDate', 'departmentCityFilterService.data', 'VCRValue', 'categoriesFilterList', function () {
var id = this.get("departmentCityFilterService.id");
var startDate = this.get("startDate");
var endDate = this.get("endDate");
if(id == 0){
$("#spinner_complexmap").addClass("d-none")
$("#complexmap").removeClass("d-none")
$("#complexmaptable").removeClass("d-none")
return this.get("model.products_col").filter(item => item.year >= startDate && item.year <= endDate);
}
var data = this.get("departmentCityFilterService.data");
var data_filtered = data.filter(item => item.year >= startDate && item.year <= endDate);
$("#spinner_complexmap").addClass("d-none")
$("#complexmap").removeClass("d-none")
$("#complexmaptable").removeClass("d-none")
return data_filtered
}),
// [min, max] year range available for the date slider.
dateExtent: computed('model', function() {
//this.set('startDate', this.get('lastYear'));
//this.set('endDate', this.get('lastYear'));
return [this.get('firstYear'), this.get('lastYear')];
}),
// Select-box options for all 4-digit products, localized short names.
productsDataValues: computed('model', function(){
var locations = Object.entries(this.get('model.metaData.products'))
return locations.filter(item => item[1].level === "4digit").map((item) => {
var name = get(item[1], `name_short_${this.get('i18n').display}`)
return {id:item[1].id, text: `${name} (${item[1].code})`}
})
}),
// Localized placeholder for the search box, based on the data source.
placeHolderText: computed('i18n.locale', 'source', function(){
return this.get('i18n').t(`visualization.source.${this.get('source')}`).string
}),
filteredDataAsync: observer("departmentCityFilterService.id", function () {
var id = this.get("departmentCityFilterService.id");
var productsMetadata = this.get("model.metaData.products")
var self = this
var products = $.getJSON(`${apiURL}/data/location/${id}/products?level=4digit`)
var promises = [products]
var result = RSVP.allSettled(promises).then((array) => {
let productsData = array[0].value.data;
let productsDataResponse = _.reduce(productsData, (memo, d) => {
let product = productsMetadata[d.product_id];
product.complexity = _.result(_.find(product.pci_data, { year: d.year }), 'pci');
memo.push(_.merge(d, product));
return memo;
}, []);
self.set("departmentCityFilterService.data", productsDataResponse)
return productsDataResponse
});
}),
filterData: computed('model', 'endDate', function(){
var startDate = this.get("startDate");
var endDate = this.get("endDate");
var partners = this.get("model.partners").filter(item => item.year >= startDate && item.year <= endDate);
return partners
}),
// Year bounds come from the feature-toggle service.
firstYear: computed.alias('featureToggle.first_year'),
lastYear: computed.alias('featureToggle.last_year'),
occupationsData: computed.alias('model.occupationsData'),
modelData: computed.alias('model.entity'),
// Location rows for the selected start year only.
exportDataLocations: computed('model.data', 'startDate', function (){
return this.get("model.locationsData").filter(item => item.year === this.get("startDate"));
}),
// Toggle a legend category's `hide` or `isolate` flag (the flag at `attr`
// on the entry at `index`), enforce the mutual-exclusion rules between
// the two, then recolor the d3 circles to match the surviving categories.
updateCategoriesObject: function (index, attr) {
var temp = this.get('categoriesObject').objectAt(index);
// The new value is the negation of the current one.
let newValue = !_.get(temp, attr);
if(attr === "hide"){
if(newValue === true){
// Hiding a category also clears every isolate flag.
this.get('categoriesObject').map((item, index_item) =>{
var temp = this.get('categoriesObject').objectAt(index_item);
if(index_item === index){
set(temp, "hide", true);
set(temp, "isolate", false);
set(temp, "icon_color", "#292A48");
}
else{
set(temp, "isolate", false);
}
});
}
else{
// Un-hiding restores the icon color and clears isolates.
this.get('categoriesObject').map((item, index_item) =>{
var temp = this.get('categoriesObject').objectAt(index_item);
if(index_item === index){
set(temp, "hide", false);
set(temp, "isolate", false);
set(temp, "icon_color", "#FFFFFF");
}
else{
set(temp, "isolate", false);
}
});
}
}
else if(attr === "isolate"){
if(newValue === true){
// Isolating one category hides all the others.
this.get('categoriesObject').map((item, index_item) =>{
var temp = this.get('categoriesObject').objectAt(index_item);
if(index_item !== index){
set(temp, "isolate", false);
set(temp, "hide", true);
set(temp, "icon_color", "#292A48");
}
else{
set(temp, "isolate", true);
set(temp, "hide", false);
set(temp, "icon_color", "#FFFFFF");
}
});
}
else{
// Clearing an isolate makes everything visible again.
this.get('categoriesObject').map((item, index_item) =>{
var temp = this.get('categoriesObject').objectAt(index_item);
set(temp, "isolate", false);
set(temp, "hide", false);
set(temp, "icon_color", "#FFFFFF");
});
}
}
// Collect the colors that remain visible: a single isolated category
// wins outright; otherwise every non-hidden category participates.
var categoriesFilter = [];
for(let category of this.get('categoriesObject')) {
var isolate = category.isolate;
var hide = category.hide;
if(isolate === true){
categoriesFilter = [category.color];
break;
}
if(hide === false){
categoriesFilter.push(category.color);
}
}
// Blank all circles, then re-fill only those in visible categories.
d3.selectAll("circle").style("fill", "#fff");
categoriesFilter.map(color => {
d3.selectAll("circle").filter(function(d){return d.node.color === color}).style("fill", color);
})
//console.log(categoriesFilter)
//this.set("categoriesFilterList", categoriesFilter);
//this.set("treemapService.filter_update", new Date())
//this.set("treemapService.filter_updated_data", updatedData)
//this.set("updatedData", updatedData);
},
actions: {
setStartYear(){
var year = parseInt($("#selectYear").val());
this.set('startDate', year);
this.set('endDate', year);
},
decreaseYear() {
var firstYear = parseInt(this.get("firstYear"))
var endDate = parseInt(this.get("endDate"))
if(endDate-1 >= firstYear){
this.set("startDate", endDate-1);
this.set("endDate", endDate-1);
$("#selectYear").val(endDate-1);
}
},
increaseYear() {
var lastYear = parseInt(this.get("lastYear"))
var endDate = parseInt(this.get("endDate"))
if(endDate+1 <= lastYear){
this.set("startDate", endDate+1);
this.set("endDate", endDate+1);
$("#selectYear").val(endDate+1);
}
}
}
});
|
from express.properties.scalar import ScalarProperty
class SurfaceEnergy(ScalarProperty):
    """Scalar surface-energy property.

    The value is not computed here: it must be supplied by the caller
    through the required ``value`` keyword argument.
    """

    def __init__(self, name, parser, *args, **kwargs):
        super(SurfaceEnergy, self).__init__(name, parser, *args, **kwargs)
        # Raises KeyError when "value" is missing — callers must provide it.
        self.value = kwargs["value"]
|
#pragma once
#include <unordered_map>
#include <array>
#include "Texture.h"
// A texture subdivided into a grid of named sub-images. Items are
// registered by grid coordinates and can be queried either as raw
// coordinates or as normalized UV rectangles.
class TextureAtlas : public Texture
{
public:
TextureAtlas(const std::string& filename, bool mipmaps = false, bool flipped = true);
~TextureAtlas();
// Register a named item at grid cell (x, y) and cache both its raw
// coordinates and its normalized position rectangle.
inline void addItem(const std::string& name, unsigned int x, unsigned int y)
{
items_coords[name] = { x, y };
items_positions[name] = calculateGridPos(x, y);
}
// Grid coordinates {x, y} of a named item.
// Returns a value-initialized array ({0, 0}) for unknown names — callers
// cannot distinguish that from a real item at (0, 0).
inline const std::array<unsigned int, 2> getItemCoords(const std::string& name) const
{
auto item = items_coords.find(name);
if (item != items_coords.end())
return item->second;
return std::array<unsigned int, 2>();
}
// Normalized {u0, v0, u1, v1} rectangle of a named item; all-zero for
// unknown names (same caveat as above).
inline const std::array<float, 4> getItemPosition(const std::string& name) const
{
auto item = items_positions.find(name);
if (item != items_positions.end())
return item->second;
return std::array<float, 4>();
}
private:
// Convert grid cell (x, y) into a normalized UV rectangle, accounting
// for the per-cell size and inter-cell offset.
// NOTE(review): `size`/`offset` are presumably initialized by the
// constructor from the atlas metadata — confirm; also note row/column:
// x scales size[1]/offset[1], y scales size[0]/offset[0].
inline const std::array<float, 4> calculateGridPos(unsigned int x, unsigned int y)
{
int w = getWidth(),
h = getHeight();
std::array<float, 2> pos = {
size[0] * y,
size[1] * x
};
pos[0] += offset[0] * y;
pos[1] += offset[1] * x;
return {
pos[0] / w,
pos[1] / h,
(pos[0] + size[0]) / w,
(pos[1] + size[1]) / h
};
}
std::array<float, 2> offset;           // gap between adjacent cells, in pixels
std::array<float, 2> size;             // size of one cell, in pixels
std::unordered_map<std::string, std::array<unsigned int, 2>> items_coords;
std::unordered_map<std::string, std::array<float, 4>> items_positions;
};
|
/**
* @license
* Copyright (c) 2014, 2021, Oracle and/or its affiliates.
* Licensed under The Universal Permissive License (UPL), Version 1.0
* as shown at https://oss.oracle.com/licenses/upl/
* @ignore
*/
// AMD module: utilities for resolving JET converter options into
// converter instances. (Transpiled output — the _typeof/_classCallCheck
// helpers below are Babel-generated.)
define(['ojs/ojlogger'],
function(Logger)
{
"use strict";
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Static-only utility class; never meant to be instantiated with state.
var ConverterUtils = function ConverterUtils() {
_classCallCheck(this, ConverterUtils);
};
// Resolve `converterOption` — a converter instance, an {type, options}
// literal, or a registered type name string — into a converter instance.
// Returns null when nothing could be resolved.
ConverterUtils.getConverterInstance = function (converterOption) {
var cTypeStr = '';
var cOptions = {};
var converterInstance = null;
if (converterOption) {
if (_typeof(converterOption) === 'object') {
// TODO: Should we check that it duck types Converter?
if (converterOption.parse && typeof converterOption.parse === 'function' || converterOption.format && typeof converterOption.format === 'function') {
// we are dealing with a converter instance
converterInstance = converterOption;
} else {
// check if there is a type set
cTypeStr = converterOption.type;
cOptions = converterOption.options || {};
}
}
if (!converterInstance) {
// either we have an object literal or just plain string.
cTypeStr = cTypeStr || converterOption;
if (cTypeStr && typeof cTypeStr === 'string') {
// if we are passed a string get registered type.
// Relies on the deprecated oj.Validation registry when available.
if (oj.Validation && oj.Validation.converterFactory) {
var cf = oj.Validation.converterFactory(cTypeStr);
return cf.createConverter(cOptions);
} else {
Logger.error('oj.Validation.converterFactory is not available and it is needed to support the deprecated json format for the converters property. Please include the backward compatibility "ojvalidation-base" module.');
}
}
}
}
return converterInstance;
};
;return ConverterUtils;
});
|
import { camelizeKeys } from './util';
import { mapValues, mapKeys } from 'lodash';
// Fixture exercising every key style camelizeKeys must handle, plus
// nested objects and arrays of objects.
const fixture = {
camelCaseKey: 'lorem',
null_value: null,
snake_case_key: 'ipsum',
'kebab-case-key': 'dolor',
PascalCaseKey: 'sit',
'Strangely-formatted_KeyName': 'Strangely-formatted_KeyValue',
array_with_objects: [
{ snake_case: 'snake_case' },
{ 'kebab-case': 'kebab-case' },
],
camel_case_subgroup: {
number_key: 1,
letter_key: 'a',
array_key: [0, 1, 2],
object_key: {
'kebab-case-key': 'value',
},
},
};
// Key-style conversion cases.
it('keeps camelCase keys intact', () => {
expect(camelizeKeys(fixture)).toHaveProperty('camelCaseKey');
});
it('converts snake_case keys to camelCase', () => {
expect(camelizeKeys(fixture)).toHaveProperty('snakeCaseKey', 'ipsum');
});
it('converts kebab-case keys to camelCase', () => {
expect(camelizeKeys(fixture)).toHaveProperty('kebabCaseKey', 'dolor');
});
it('converts PascalCase keys to camelCase', () => {
expect(camelizeKeys(fixture)).toHaveProperty('pascalCaseKey', 'sit');
});
// Mixed-separator keys collapse to a single camelCase name.
// (Fixed "Stragely" -> "Strangely" typo in the test description.)
it('converts Strangely-formatted_KeyNames to camelCase', () => {
expect(camelizeKeys(fixture)).toHaveProperty(
'strangelyFormattedKeyName',
'Strangely-formatted_KeyValue'
);
});
// Recursive-conversion specs: arrays, nested objects, and deep nesting.
it('converts array items', () => {
  const { arrayWithObjects } = camelizeKeys(fixture);
  expect(arrayWithObjects[0]).toHaveProperty('snakeCase', 'snake_case');
  expect(arrayWithObjects[1]).toHaveProperty('kebabCase', 'kebab-case');
});
it('converts nested keys', () => {
  const { camelCaseSubgroup } = camelizeKeys(fixture);
  expect(camelCaseSubgroup).toHaveProperty('numberKey');
});
it('converts deeply nested keys', () => {
  const { camelCaseSubgroup } = camelizeKeys(fixture);
  expect(camelCaseSubgroup.objectKey).toHaveProperty('kebabCaseKey');
});
// Value-preservation specs: conversion must only touch keys, never values.
describe('values', () => {
  it('keeps strings intact', () => {
    const { camelCaseKey } = camelizeKeys(fixture);
    expect(camelCaseKey).toBe('lorem');
  });
  it('keeps numbers intact', () => {
    expect(camelizeKeys(fixture).camelCaseSubgroup.numberKey).toBe(1);
  });
  it('keeps arrays intact', () => {
    const { arrayWithObjects } = camelizeKeys(fixture);
    expect(Array.isArray(arrayWithObjects)).toBeTruthy();
  });
  it('keeps null values intact', () => {
    const { nullValue } = camelizeKeys(fixture);
    expect(nullValue).toBeNull();
  });
});
|
import React from "react";
const Home = () => {
return (
<>
<h1>This is the home component!</h1>
<p>Also known as the landing page</p>
</>
);
};
export default Home;
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
// Sample entry point: lists all TargetPools in the given project/region.
// NOTE: this file is autogenerated; the [START]/[END] region tags and the
// commented variable docs below are consumed by samples tooling — keep them.
function main(project, region) {
  // [START compute_v1_generated_TargetPools_List_async]
  /**
   * TODO(developer): Uncomment these variables before running the sample.
   */
  /**
   *  A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`. For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`. You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels. To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
   */
  // const filter = 'abc123'
  /**
   *  The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
   */
  // const maxResults = 1234
  /**
   *  Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by `name` or `creationTimestamp desc` is supported.
   */
  // const orderBy = 'abc123'
  /**
   *  Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
   */
  // const pageToken = 'abc123'
  /**
   *  Project ID for this request.
   */
  // const project = 'my-project'
  /**
   *  Name of the region scoping this request.
   */
  // const region = 'us-central1'
  /**
   *  Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
   */
  // const returnPartialSuccess = true
  // Imports the Compute library
  const {TargetPoolsClient} = require('@google-cloud/compute').v1;
  // Instantiates a client
  const computeClient = new TargetPoolsClient();
  async function callList() {
    // Construct request
    const request = {
      project,
      region,
    };
    // Run request
    // listAsync returns an async iterable that transparently pages through
    // results; each `response` below is a single TargetPool resource.
    const iterable = await computeClient.listAsync(request);
    for await (const response of iterable) {
      console.log(response);
    }
  }
  callList();
  // [END compute_v1_generated_TargetPools_List_async]
}
// Surface any unhandled promise rejection as a non-zero exit code so CI
// notices sample failures.
function reportRejection(err) {
  console.error(err.message);
  process.exitCode = 1;
}
process.on('unhandledRejection', reportRejection);
// Forward the CLI arguments (project, region) to the sample entry point.
main(...process.argv.slice(2));
|
import stylelint from 'stylelint';
import ruleName from './rule-name';
// Namespaced message builders for this rule's violation reports.
const messages = {
  unexpectedProp: (physicalProperty, logicalProperty) =>
    `Unexpected "${physicalProperty}" property. Use "${logicalProperty}".`,
  unexpectedValue: (property, physicalValue, logicalValue) =>
    `Unexpected "${physicalValue}" value in "${property}" property. Use "${logicalValue}".`,
};
export default stylelint.utils.ruleMessages(ruleName, messages);
|
from . import sale_currency
from . import pricelist
from . import multico
|
# Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
The module contains the functionality that is used when benchmarking Modin commits.
In the case of using utilities from the main Modin code, there is a chance that when
benchmarking old commits, the utilities changed, which in turn can unexpectedly affect
the performance results, hence some utility functions are duplicated here.
"""
import os
import logging
import modin.pandas as pd
import pandas
import numpy as np
import uuid
from typing import Optional, Union
# Bounds for the random integer data generators below.
RAND_LOW = 0
RAND_HIGH = 100
# Fixed seed so generated benchmark inputs are reproducible across runs.
random_state = np.random.RandomState(seed=42)
try:
    from modin.config import NPartitions

    NPARTITIONS = NPartitions.get()
except ImportError:
    # Older Modin versions expose the partition count directly on the module.
    NPARTITIONS = pd.DEFAULT_NPARTITIONS
try:
    from modin.config import TestDatasetSize, AsvImplementation, Engine

    ASV_USE_IMPL = AsvImplementation.get()
    ASV_DATASET_SIZE = TestDatasetSize.get() or "Small"
    ASV_USE_ENGINE = Engine.get()
except ImportError:
    # The same benchmarking code can be run for different versions of Modin, so in
    # case of an error importing important variables, we'll just use predefined values
    ASV_USE_IMPL = os.environ.get("MODIN_ASV_USE_IMPL", "modin")
    ASV_DATASET_SIZE = os.environ.get("MODIN_TEST_DATASET_SIZE", "Small")
    ASV_USE_ENGINE = os.environ.get("MODIN_ENGINE", "Ray")
# Normalize so the comparisons/asserts below are case-insensitive.
ASV_USE_IMPL = ASV_USE_IMPL.lower()
ASV_DATASET_SIZE = ASV_DATASET_SIZE.lower()
ASV_USE_ENGINE = ASV_USE_ENGINE.lower()
assert ASV_USE_IMPL in ("modin", "pandas")
assert ASV_DATASET_SIZE in ("big", "small")
assert ASV_USE_ENGINE in ("ray", "dask", "python")
# Operand shape pairs for binary-op benchmarks, keyed by dataset size.
BINARY_OP_DATA_SIZE = {
    "big": [
        ((5000, 5000), (5000, 5000)),
        # the case extremely inefficient
        # ((20, 500_000), (10, 1_000_000)),
        ((500_000, 20), (1_000_000, 10)),
    ],
    "small": [
        ((250, 250), (250, 250)),
        ((20, 10_000), (10, 25_000)),
        ((10_000, 20), (25_000, 10)),
    ],
}
# Frame shapes for unary-op benchmarks, keyed by dataset size.
UNARY_OP_DATA_SIZE = {
    "big": [
        (5000, 5000),
        # the case extremely inefficient
        # (10, 1_000_000),
        (1_000_000, 10),
    ],
    "small": [
        (250, 250),
        (10, 10_000),
        (10_000, 10),
    ],
}
# Group counts for groupby benchmarks; the string sentinel is resolved by
# `translator_groupby_ngroups` based on the frame shape.
GROUPBY_NGROUPS = {
    "big": [100, "huge_amount_groups"],
    "small": [5],
}
# Maps the implementation name to its top-level module.
IMPL = {
    "modin": pd,
    "pandas": pandas,
}
def translator_groupby_ngroups(groupby_ngroups: Union[str, int], shape: tuple) -> int:
    """
    Translate a string representation of the number of groups, into a number.

    Parameters
    ----------
    groupby_ngroups : str or int
        Number of groups that will be used in `groupby` operation.
    shape : tuple
        Same as pandas.Dataframe.shape.

    Returns
    -------
    int
    """
    # The "huge_amount_groups" sentinel is only expanded for the big dataset
    # size; any other input is already numeric and passes through unchanged.
    # (The original if/else returned `groupby_ngroups` on both non-sentinel
    # paths, so the branches collapse into a single guard.)
    if ASV_DATASET_SIZE == "big" and groupby_ngroups == "huge_amount_groups":
        # Cap the group count so each group keeps at least two rows.
        return min(shape[0] // 2, 5000)
    return groupby_ngroups
class weakdict(dict):  # noqa: GL08
    # A dict subclass that supports weak references (plain dict instances
    # cannot be weakly referenced); used for values stored in the caches below.
    __slots__ = ("__weakref__",)
# Raw generated column data, keyed by generation parameters.
data_cache = dict()
# Constructed DataFrames (or (df, groupby_columns) tuples), keyed likewise.
dataframes_cache = dict()
def gen_int_data(nrows: int, ncols: int, rand_low: int, rand_high: int) -> dict:
    """
    Generate int data with caching.

    The generated data are saved in the dictionary and on a subsequent call,
    if the keys match, saved data will be returned. Therefore, we need
    to carefully monitor the changing of saved data and make its copy if needed.

    Parameters
    ----------
    nrows : int
        Number of rows.
    ncols : int
        Number of columns.
    rand_low : int
        Low bound for random generator.
    rand_high : int
        High bound for random generator.

    Returns
    -------
    dict
        Number of keys - `ncols`, each of them store np.ndarray of `nrows` length.
    """
    cache_key = ("int", nrows, ncols, rand_low, rand_high)
    if cache_key in data_cache:
        return data_cache[cache_key]
    logging.info(
        "Generating int data {} rows and {} columns [{}-{}]".format(
            nrows, ncols, rand_low, rand_high
        )
    )
    data = weakdict(
        ("col{}".format(i), random_state.randint(rand_low, rand_high, size=nrows))
        for i in range(ncols)
    )
    # Fix: cache and return the *same* (weakly referenceable) mapping; the
    # original returned a plain dict on the first call but the cached weakdict
    # on later calls, so callers saw inconsistent object types.
    data_cache[cache_key] = data
    return data
def gen_str_int_data(nrows: int, ncols: int, rand_low: int, rand_high: int) -> dict:
    """
    Generate int data and string data with caching.

    The generated data are saved in the dictionary and on a subsequent call,
    if the keys match, saved data will be returned. Therefore, we need
    to carefully monitor the changing of saved data and make its copy if needed.

    Parameters
    ----------
    nrows : int
        Number of rows.
    ncols : int
        Number of columns.
    rand_low : int
        Low bound for random generator.
    rand_high : int
        High bound for random generator.

    Returns
    -------
    dict
        Number of keys - `ncols`, each of them store np.ndarray of `nrows` length.
        One of the columns with string values.
    """
    cache_key = ("str_int", nrows, ncols, rand_low, rand_high)
    if cache_key in data_cache:
        return data_cache[cache_key]
    logging.info(
        "Generating str_int data {} rows and {} columns [{}-{}]".format(
            nrows, ncols, rand_low, rand_high
        )
    )
    # Copy so the "int" cache entry is not mutated by the string conversion.
    data = gen_int_data(nrows, ncols, rand_low, rand_high).copy()
    # convert values in an arbitrary column to string type
    key = list(data.keys())[0]
    data[key] = [f"str_{x}" for x in data[key]]
    # Fix: cache and return the same weakdict so the first call and cache hits
    # yield a consistent object type (the original returned a plain dict once).
    data = weakdict(data)
    data_cache[cache_key] = data
    return data
def gen_data(
    data_type: str, nrows: int, ncols: int, rand_low: int, rand_high: int
) -> dict:
    """
    Generate data with caching.

    The generated data are saved in the dictionary and on a subsequent call,
    if the keys match, saved data will be returned. Therefore, we need
    to carefully monitor the changing of saved data and make its copy if needed.

    Parameters
    ----------
    data_type : {"int", "str_int"}
        Type of data generation.
    nrows : int
        Number of rows.
    ncols : int
        Number of columns.
    rand_low : int
        Low bound for random generator.
    rand_high : int
        High bound for random generator.

    Returns
    -------
    dict
        Number of keys - `ncols`, each of them store np.ndarray of `nrows` length.
        When `data_type`=="str_int" some of the columns will be of string type.

    Raises
    ------
    ValueError
        If `data_type` is not one of the supported values.
    """
    if data_type == "int":
        return gen_int_data(nrows, ncols, rand_low, rand_high)
    if data_type == "str_int":
        return gen_str_int_data(nrows, ncols, rand_low, rand_high)
    # Fail loudly with a message instead of `assert False`, which is stripped
    # when Python runs with optimizations enabled (-O).
    raise ValueError(
        f"Unknown data_type: {data_type!r}; expected 'int' or 'str_int'"
    )
def generate_dataframe(
    impl: str,
    data_type: str,
    nrows: int,
    ncols: int,
    rand_low: int,
    rand_high: int,
    groupby_ncols: Optional[int] = None,
    count_groups: Optional[int] = None,
) -> Union[pd.DataFrame, pandas.DataFrame]:
    """
    Generate DataFrame with caching.

    The generated dataframes are saved in the dictionary and on a subsequent call,
    if the keys match, one of the saved dataframes will be returned. Therefore, we need
    to carefully monitor that operations that change the dataframe work with its copy.

    Parameters
    ----------
    impl : str
        Implementation used to create the dataframe;
        supported implemetations: {"modin", "pandas"}.
    data_type : str
        Type of data generation;
        supported types: {"int", "str_int"}.
    nrows : int
        Number of rows.
    ncols : int
        Number of columns.
    rand_low : int
        Low bound for random generator.
    rand_high : int
        High bound for random generator.
    groupby_ncols : int, default: None
        Number of columns for which `groupby` will be called in the future;
        to get more stable performance results, we need to have the same number of values
        in each group every benchmarking time.
    count_groups : int, default: None
        Count of groups in groupby columns.

    Returns
    -------
    modin.pandas.DataFrame or pandas.DataFrame [and list]

    Notes
    -----
    the list of groupby columns names returns when groupby columns are generated
    """
    # Both groupby parameters must be given together (or both omitted).
    assert not (
        (groupby_ncols is None) ^ (count_groups is None)
    ), "You must either specify both parameters 'groupby_ncols' and 'count_groups' or none of them."
    if groupby_ncols and count_groups:
        # Budget the groupby columns out of the requested total width so the
        # resulting frame still has `ncols` columns overall.
        ncols -= groupby_ncols
    cache_key = (
        impl,
        data_type,
        nrows,
        ncols,
        rand_low,
        rand_high,
        groupby_ncols,
        count_groups,
    )
    if cache_key in dataframes_cache:
        # NOTE(review): the cached object itself is returned — callers that
        # mutate the result would poison the cache; confirm callers copy first.
        return dataframes_cache[cache_key]
    logging.info(
        "Allocating {} DataFrame {}: {} rows and {} columns [{}-{}]".format(
            impl, data_type, nrows, ncols, rand_low, rand_high
        )
    )
    data = gen_data(data_type, nrows, ncols, rand_low, rand_high)
    if groupby_ncols and count_groups:
        groupby_columns = [f"groupby_col{x}" for x in range(groupby_ncols)]
        for groupby_col in groupby_columns:
            # Repeat 0..count_groups-1 so every group has equal size.
            # assumes nrows is divisible by count_groups — TODO confirm callers
            # guarantee this (np.tile would otherwise produce a short column).
            data[groupby_col] = np.tile(np.arange(count_groups), nrows // count_groups)
    if impl == "modin":
        df = pd.DataFrame(data)
    elif impl == "pandas":
        df = pandas.DataFrame(data)
    else:
        # Unreachable for the supported implementations listed above.
        assert False
    if groupby_ncols and count_groups:
        # With groupby columns the cached value (and return) is a tuple.
        dataframes_cache[cache_key] = df, groupby_columns
        return df, groupby_columns
    dataframes_cache[cache_key] = df
    return df
def random_string() -> str:
    """
    Create a 36-character random string.

    Returns
    -------
    str
    """
    # A uuid4 renders as 32 hex digits plus 4 hyphens = 36 characters.
    return "{}".format(uuid.uuid4())
def random_columns(df_columns: list, columns_number: int) -> list:
    """
    Pick sublist of random columns from a given sequence.

    Parameters
    ----------
    df_columns : list
        Columns to choose from.
    columns_number : int
        How many columns to pick.

    Returns
    -------
    list
    """
    # Sampling is driven by the module-level seeded random_state, so the
    # selection is reproducible across benchmark runs.
    chosen = random_state.choice(df_columns, size=columns_number)
    return list(chosen)
def random_booleans(number: int) -> list:
    """
    Create random list of booleans with `number` elements.

    Parameters
    ----------
    number : int
        Count of booleans in result list.

    Returns
    -------
    list
    """
    # Reproducible thanks to the module-level seeded random_state.
    flags = random_state.choice([True, False], size=number)
    return list(flags)
def execute(df: Union[pd.DataFrame, pandas.DataFrame]):
    """
    Make sure the calculations are finished.

    Parameters
    ----------
    df : modin.pandas.DataFrame or pandas.DataFrame
    """
    if ASV_USE_IMPL == "modin":
        # Reach into Modin internals to force any lazily queued operations to
        # run; `all(map(...))` is used only to consume the iterator.
        partitions = df._query_compiler._modin_frame._partitions
        all(
            map(
                lambda partition: partition.drain_call_queue() or True,
                partitions.flatten(),
            )
        )
        if ASV_USE_ENGINE == "ray":
            from ray import wait

            # Block until every partition's Ray object is materialized.
            all(map(lambda partition: wait([partition.oid]), partitions.flatten()))
        elif ASV_USE_ENGINE == "dask":
            from dask.distributed import wait

            # Block until every partition's Dask future completes.
            all(map(lambda partition: wait(partition.future), partitions.flatten()))
        elif ASV_USE_ENGINE == "python":
            # The python engine executes synchronously; nothing left to await.
            pass
    elif ASV_USE_IMPL == "pandas":
        # pandas executes eagerly; the work is already done by the time we get here.
        pass
def get_shape_id(shape: tuple) -> str:
    """
    Join shape numbers into a string with `_` delimiters.

    Parameters
    ----------
    shape : tuple
        Same as pandas.Dataframe.shape.

    Returns
    -------
    str
    """
    return "_".join(map(str, shape))
|
from collections import OrderedDict
from functools import partial
from django.forms.fields import Field
def patch_document(function, instance):
    """Attach ``function`` to ``instance`` as a bound-method-like attribute.

    The attribute is named after the function and pre-binds ``instance`` as
    the first argument via ``functools.partial``.
    """
    bound = partial(function, instance)
    setattr(instance, function.__name__, bound)
def get_declared_fields(bases, attrs, with_base_fields=True):
    """
    Create a list of form field instances from the passed in 'attrs', plus any
    similar fields on the base classes (in 'bases'). This is used by both the
    Form and ModelForm metaclasses.

    If 'with_base_fields' is True, all fields from the bases are used.
    Otherwise, only fields in the 'declared_fields' attribute on the bases are
    used. The distinction is useful in ModelForm subclassing.
    Also integrates any additional media definitions.
    """
    # Pull Field instances out of `attrs` (mutating it), keeping declaration
    # order, then order by creation_counter (stable sort preserves ties).
    fields = []
    for field_name, obj in list(attrs.items()):
        if isinstance(obj, Field):
            fields.append((field_name, attrs.pop(field_name)))
    fields.sort(key=lambda item: item[1].creation_counter)
    # If this class is subclassing another Form, add that Form's fields.
    # Bases are walked in *reverse* so earlier bases end up first, preserving
    # the expected field order.
    source_attr = 'base_fields' if with_base_fields else 'declared_fields'
    for base in reversed(bases):
        if hasattr(base, source_attr):
            fields = list(getattr(base, source_attr).items()) + fields
    return OrderedDict(fields)
|
/* Autogenerated with Kurento Idl */
/*
* (C) Copyright 2013-2015 Kurento (http://kurento.org/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Media API for the Kurento Web SDK
*
* @module pointerdetector/complexTypes
*
* @copyright 2013-2015 Kurento (http://kurento.org/)
* @license ALv2
*/
var PointerDetectorWindowMediaParam = require('./PointerDetectorWindowMediaParam');
var WindowParam = require('./WindowParam');

// Re-export the complex types so consumers can require this module directly.
exports.PointerDetectorWindowMediaParam = PointerDetectorWindowMediaParam;
exports.WindowParam = WindowParam;
|
define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source"], function(Tone){
"use strict";
/**
 *  @class Tone.Player is an audio file player with start, loop, and stop functions.
 *
 *  @constructor
 *  @extends {Tone.Source}
 *  @param {string|AudioBuffer} url Either the AudioBuffer or the url from
 *                                  which to load the AudioBuffer
 *  @param {function=} onload The function to invoke when the buffer is loaded.
 *                            Recommended to use Tone.Buffer.onload instead.
 *  @example
 * var player = new Tone.Player("./path/to/sample.mp3").toMaster();
 * Tone.Buffer.onload = function(){
 * 	player.start();
 * }
 */
Tone.Player = function(url){

	var options;
	// A pre-built Tone.Buffer is unwrapped to its AudioBuffer and the defaults
	// are used as-is; otherwise (url, onload) arguments are parsed.
	if (url instanceof Tone.Buffer){
		url = url.get();
		options = Tone.Player.defaults;
	} else {
		options = this.optionsObject(arguments, ["url", "onload"], Tone.Player.defaults);
	}
	Tone.Source.call(this, options);

	/**
	 *  @private
	 *  @type {AudioBufferSourceNode}
	 */
	this._source = null;

	/**
	 *  If the file should play as soon
	 *  as the buffer is loaded.
	 *  @type {boolean}
	 *  @example
	 * //will play as soon as it's loaded
	 * var player = new Tone.Player({
	 * 	"url" : "./path/to/sample.mp3",
	 * 	"autostart" : true,
	 * }).toMaster();
	 */
	this.autostart = options.autostart;

	/**
	 *  the buffer
	 *  @private
	 *  @type {Tone.Buffer}
	 */
	// _onload is bound here so autostart (and the user callback) run once the
	// buffer finishes loading.
	this._buffer = new Tone.Buffer({
		"url" : options.url,
		"onload" : this._onload.bind(this, options.onload),
		"reverse" : options.reverse
	});
	// A raw AudioBuffer argument is loaded synchronously into the buffer.
	if (url instanceof AudioBuffer){
		this._buffer.set(url);
	}

	/**
	 *  if the buffer should loop once it's over
	 *  @type {boolean}
	 *  @private
	 */
	this._loop = options.loop;

	/**
	 *  if 'loop' is true, the loop will start at this position
	 *  @type {Time}
	 *  @private
	 */
	this._loopStart = options.loopStart;

	/**
	 *  if 'loop' is true, the loop will end at this position
	 *  @type {Time}
	 *  @private
	 */
	this._loopEnd = options.loopEnd;

	/**
	 *  the playback rate
	 *  @private
	 *  @type {number}
	 */
	this._playbackRate = options.playbackRate;

	/**
	 *  Enabling retrigger will allow a player to be restarted
	 *  before the the previous 'start' is done playing. Otherwise,
	 *  successive calls to Tone.Player.start will only start
	 *  the sample if it had played all the way through.
	 *  @type {boolean}
	 */
	this.retrigger = options.retrigger;
};
Tone.extend(Tone.Player, Tone.Source);

/**
 *  the default parameters
 *  @static
 *  @const
 *  @type {Object}
 */
Tone.Player.defaults = {
	"onload" : Tone.noOp,
	"playbackRate" : 1,
	"loop" : false,
	"autostart" : false,
	"loopStart" : 0,
	"loopEnd" : 0,
	"retrigger" : false,
	"reverse" : false,
	// NOTE(review): the constructor's Tone.Buffer branch uses this object
	// without copying — treat it as immutable.
};
/**
 *  Load the audio file as an audio buffer.
 *  Decodes the audio asynchronously and invokes
 *  the callback once the audio buffer loads.
 *  Note: this does not need to be called if a url
 *  was passed in to the constructor. Only use this
 *  if you want to manually load a new url.
 * @param {string} url The url of the buffer to load.
 *                     Filetype support depends on the
 *                     browser.
 *  @param  {function=} callback The function to invoke once
 *                               the sample is loaded.
 *  @returns {Tone.Player} this
 */
Tone.Player.prototype.load = function(url, callback){
	// Delegates to Tone.Buffer; _onload also honors the `autostart` option.
	this._buffer.load(url, this._onload.bind(this, callback));
	return this;
};
/**
 *  Internal callback invoked once the buffer has loaded: notifies the
 *  user-supplied callback and honors the `autostart` option.
 *  @private
 */
Tone.Player.prototype._onload = function(callback){
	callback(this);
	if (!this.autostart){
		return;
	}
	this.start();
};
/**
 *  play the buffer between the desired positions
 *
 *  @private
 *  @param  {Time} [startTime=now] when the player should start.
 *  @param  {Time} [offset=0] the offset from the beginning of the sample
 *                                 to start at.
 *  @param  {Time=} duration how long the sample should play. If no duration
 *                                is given, it will default to the full length
 *                                of the sample (minus any offset)
 *  @returns  {Tone.Player} this
 */
Tone.Player.prototype._start = function(startTime, offset, duration){
	if (this._buffer.loaded){
		//if it's a loop the default offset is the loopstart point
		if (this._loop){
			offset = this.defaultArg(offset, this._loopStart);
		} else {
			//otherwise the default offset is 0
			offset = this.defaultArg(offset, 0);
		}
		offset = this.toSeconds(offset);
		// default duration plays to the end of the buffer, measured from offset
		duration = this.defaultArg(duration, this._buffer.duration - offset);
		//the values in seconds
		startTime = this.toSeconds(startTime);
		duration = this.toSeconds(duration);
		//make the source
		this._source = this.context.createBufferSource();
		this._source.buffer = this._buffer.get();
		//set the looping properties
		if (this._loop){
			this._source.loop = this._loop;
			this._source.loopStart = this.toSeconds(this._loopStart);
			this._source.loopEnd = this.toSeconds(this._loopEnd);
		} else {
			//if it's not looping, set the state change at the end of the sample
			this._state.setStateAtTime(Tone.State.Stopped, startTime + duration);
		}
		//and other properties
		this._source.playbackRate.value = this._playbackRate;
		this._source.connect(this.output);
		//start it
		// `duration` is deliberately omitted in the looping branch so playback
		// isn't cut off after a single pass through the buffer.
		if (this._loop){
			this._source.start(startTime, offset);
		} else {
			this._source.start(startTime, offset, duration);
		}
	} else {
		throw Error("tried to start Player before the buffer was loaded");
	}
	return this;
};
/**
 *  Stop playback: halt the active source node (if any) and release it.
 *  @private
 *  @param  {Time} [time=now]
 *  @returns  {Tone.Player} this
 */
Tone.Player.prototype._stop = function(time){
	if (!this._source){
		return this;
	}
	this._source.stop(this.toSeconds(time));
	this._source = null;
	return this;
};
/**
 *  Set the loop start and end. Will only loop if loop is
 *  set to true.
 *  @param {Time} loopStart The loop start time
 *  @param {Time} loopEnd The loop end time
 *  @returns {Tone.Player} this
 *  @example
 * //loop 0.1 seconds of the file.
 * player.setLoopPoints(0.2, 0.3);
 * player.loop = true;
 */
Tone.Player.prototype.setLoopPoints = function(loopStart, loopEnd){
	// Assign through the property setters so an active source is updated too.
	this.loopStart = loopStart;
	this.loopEnd = loopEnd;
	return this;
};
/**
 *  If loop is true, the loop will start at this position.
 *  @memberOf Tone.Player#
 *  @type {Time}
 *  @name loopStart
 */
Object.defineProperty(Tone.Player.prototype, "loopStart", {
	get : function(){
		return this._loopStart;
	},
	set : function(loopStart){
		this._loopStart = loopStart;
		// Forward to the live source node so the change applies mid-playback.
		if (this._source){
			this._source.loopStart = this.toSeconds(loopStart);
		}
	}
});

/**
 *  If loop is true, the loop will end at this position.
 *  @memberOf Tone.Player#
 *  @type {Time}
 *  @name loopEnd
 */
Object.defineProperty(Tone.Player.prototype, "loopEnd", {
	get : function(){
		return this._loopEnd;
	},
	set : function(loopEnd){
		this._loopEnd = loopEnd;
		// Forward to the live source node so the change applies mid-playback.
		if (this._source){
			this._source.loopEnd = this.toSeconds(loopEnd);
		}
	}
});

/**
 *  The audio buffer belonging to the player.
 *  @memberOf Tone.Player#
 *  @type {Tone.Buffer}
 *  @name buffer
 */
Object.defineProperty(Tone.Player.prototype, "buffer", {
	get : function(){
		return this._buffer;
	},
	set : function(buffer){
		// Loads new contents into the existing Tone.Buffer wrapper.
		this._buffer.set(buffer);
	}
});

/**
 *  If the buffer should loop once it's over.
 *  @memberOf Tone.Player#
 *  @type {boolean}
 *  @name loop
 */
Object.defineProperty(Tone.Player.prototype, "loop", {
	get : function(){
		return this._loop;
	},
	set : function(loop){
		this._loop = loop;
		// Forward to the live source node so the change applies mid-playback.
		if (this._source){
			this._source.loop = loop;
		}
	}
});

/**
 *  The playback speed. 1 is normal speed. This is not a signal because
 *  Safari and iOS currently don't support playbackRate as a signal.
 *  @memberOf Tone.Player#
 *  @type {number}
 *  @name playbackRate
 */
Object.defineProperty(Tone.Player.prototype, "playbackRate", {
	get : function(){
		return this._playbackRate;
	},
	set : function(rate){
		this._playbackRate = rate;
		// Forward to the live source node so the change applies mid-playback.
		if (this._source) {
			this._source.playbackRate.value = rate;
		}
	}
});

/**
 *  The direction the buffer should play in
 *  @memberOf Tone.Player#
 *  @type {boolean}
 *  @name reverse
 */
Object.defineProperty(Tone.Player.prototype, "reverse", {
	get : function(){
		return this._buffer.reverse;
	},
	set : function(rev){
		// Reversal is delegated entirely to the underlying Tone.Buffer.
		this._buffer.reverse = rev;
	}
});
/**
 *  Dispose and disconnect: release the source node (if any) and the
 *  underlying buffer after tearing down the base Source.
 *  @return  {Tone.Player} this
 */
Tone.Player.prototype.dispose = function(){
	Tone.Source.prototype.dispose.call(this);
	var source = this._source;
	if (source !== null){
		source.disconnect();
		this._source = null;
	}
	this._buffer.dispose();
	this._buffer = null;
	return this;
};
return Tone.Player;
});
|
# %%
"""
<table class="ee-notebook-buttons" align="left">
<td><a target="_blank" href="https://github.com/giswqs/earthengine-py-notebooks/tree/master/Join/intersect.ipynb"><img width=32px src="https://www.tensorflow.org/images/GitHub-Mark-32px.png" /> View source on GitHub</a></td>
<td><a target="_blank" href="https://nbviewer.jupyter.org/github/giswqs/earthengine-py-notebooks/blob/master/Join/intersect.ipynb"><img width=26px src="https://upload.wikimedia.org/wikipedia/commons/thumb/3/38/Jupyter_logo.svg/883px-Jupyter_logo.svg.png" />Notebook Viewer</a></td>
<td><a target="_blank" href="https://colab.research.google.com/github/giswqs/earthengine-py-notebooks/blob/master/Join/intersect.ipynb"><img src="https://www.tensorflow.org/images/colab_logo_32px.png" /> Run in Google Colab</a></td>
</table>
"""
# %%
"""
## Install Earth Engine API and geemap
Install the [Earth Engine Python API](https://developers.google.com/earth-engine/python_install) and [geemap](https://github.com/giswqs/geemap). The **geemap** Python package is built upon the [ipyleaflet](https://github.com/jupyter-widgets/ipyleaflet) and [folium](https://github.com/python-visualization/folium) packages and implements several methods for interacting with Earth Engine data layers, such as `Map.addLayer()`, `Map.setCenter()`, and `Map.centerObject()`.
The following script checks if the geemap package has been installed. If not, it will install geemap, which automatically installs its [dependencies](https://github.com/giswqs/geemap#dependencies), including earthengine-api, folium, and ipyleaflet.
**Important note**: A key difference between folium and ipyleaflet is that ipyleaflet is built upon ipywidgets and allows bidirectional communication between the front-end and the backend enabling the use of the map to capture user input, while folium is meant for displaying static data only ([source](https://blog.jupyter.org/interactive-gis-in-jupyter-with-ipyleaflet-52f9657fa7a)). Note that [Google Colab](https://colab.research.google.com/) currently does not support ipyleaflet ([source](https://github.com/googlecolab/colabtools/issues/60#issuecomment-596225619)). Therefore, if you are using geemap with Google Colab, you should use [`import geemap.eefolium`](https://github.com/giswqs/geemap/blob/master/geemap/eefolium.py). If you are using geemap with [binder](https://mybinder.org/) or a local Jupyter notebook server, you can use [`import geemap`](https://github.com/giswqs/geemap/blob/master/geemap/geemap.py), which provides more functionalities for capturing user input (e.g., mouse-clicking and moving).
"""
# %%
# Installs geemap package
import subprocess

try:
    import geemap
except ImportError:
    print('geemap package not installed. Installing ...')
    subprocess.check_call(["python", '-m', 'pip', 'install', 'geemap'])

# Checks whether this notebook is running on Google Colab
try:
    import google.colab
    # Colab does not support ipyleaflet, so fall back to the folium backend.
    import geemap.eefolium as emap
except ImportError:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are not
    # swallowed; a missing google.colab raises ImportError.
    import geemap as emap

# Authenticates and initializes Earth Engine
import ee

try:
    ee.Initialize()
except Exception:
    # Credentials are missing or expired; prompt for authentication once.
    ee.Authenticate()
    ee.Initialize()
# %%
"""
## Create an interactive map
The default basemap is `Google Satellite`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/geemap.py#L13) can be added using the `Map.add_basemap()` function.
"""
# %%
# Interactive map centered on the continental US.
Map = emap.Map(center=[40,-100], zoom=4)
Map.add_basemap('ROADMAP') # Add Google Map
# Displaying the Map object renders the widget in the notebook output.
Map
# %%
"""
## Add Earth Engine Python script
"""
# %%
# Add Earth Engine dataset
def intersect(state):
    """Attach the count of joined power plants to a state feature.

    Reads the 'power_plants' match list produced by the saveAll join and
    stores its size as the 'n_power_plants' property.
    """
    plant_count = ee.List(state.get('power_plants')).size()
    return state.set('n_power_plants', plant_count)
# Load the primary 'collection': US state boundaries.
states = ee.FeatureCollection('TIGER/2018/States')
# Load the secondary 'collection': power plants.
powerPlants = ee.FeatureCollection('WRI/GPPD/power_plants')
# Define a spatial filter as geometries that intersect.
spatialFilter = ee.Filter.intersects(**{
    'leftField': '.geo',
    'rightField': '.geo',
    'maxError': 10
})
# Define a save all join.
# Every matching power plant is saved on the state feature under 'power_plants'.
saveAllJoin = ee.Join.saveAll(**{
    'matchesKey': 'power_plants',
})
# Apply the join.
intersectJoined = saveAllJoin.apply(states, powerPlants, spatialFilter)
# Add power plant count per state as a property.
intersectJoined = intersectJoined.map(intersect)
# (Original Earth Engine JavaScript kept for reference; `intersect` above is
# its Python port.)
# intersectJoined = intersectJoined.map(function(state) {
#   # Get "power_plant" intersection list, count how many intersected this state.
#   nPowerPlants = ee.List(state.get('power_plants')).size()
#   # Return the state feature with a new property: power plant count.
#   return state.set('n_power_plants', nPowerPlants)
# })
print(intersectJoined.getInfo())
# # Make a bar chart for the number of power plants per state.
# chart = ui.Chart.feature.byFeature(intersectJoined, 'NAME', 'n_power_plants') \
# .setChartType('ColumnChart') \
# .setSeriesNames({n_power_plants: 'Power plants'}) \
# .setOptions({
# title: 'Power plants per state',
# hAxis: {title: 'State'},
# vAxis: {title: 'Frequency'}})
# # Print the chart to the console.
# print(chart)

# %%
"""
## Display Earth Engine data layers
"""

# %%
Map.addLayerControl() # This line is not needed for ipyleaflet-based Map.
Map
|
// blogslider start
// Single-slide autoplaying carousel for the blog section.
$(".blogslid").slick({
  dots: true,
  arrows: false,
  autoplay: true,
  infinite: false,
  speed: 300,
  slidesToShow: 1,
  slidesToScroll: 1,
  responsive: [
    {
      breakpoint: 1024,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
        infinite: true,
        dots: true,
      },
    },
    {
      breakpoint: 600,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
      },
    },
    {
      breakpoint: 480,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
      },
    },
  ],
});
// blogslider end
// product slider jas start
// Main product image fader, kept in sync with the thumbnail strip below
// via the mutual asNavFor references.
$(".slider-for").slick({
  slidesToShow: 1,
  slidesToScroll: 1,
  arrows: false,
  fade: true,
  asNavFor: ".slider-nav",
});
$(".slider-nav").slick({
  slidesToShow: 3,
  slidesToScroll: 1,
  asNavFor: ".slider-for",
  dots: true,
  centerMode: true,
  focusOnSelect: true,
});
// product slider jas end
// simple slick slider start
$(".regular").slick({
  dots: true,
  infinite: true,
  speed: 300,
  autoplay: true,
  slidesToShow: 3,
  slidesToScroll: 3,
});
// simple slick slider end
// wow animation js
// Initialize scroll-reveal animations once the DOM is ready.
$(function () {
  new WOW().init();
});
// responsive menu js
$(function () {
  $("#menu").slicknav();
});
// Fullscreen overlay navigation: expand the #myNav overlay to cover
// the whole viewport.
function openNav() {
  var overlay = document.getElementById("myNav");
  overlay.style.width = "100%";
}
// Collapse the #myNav overlay back to zero width (hidden).
function closeNav() {
  var overlay = document.getElementById("myNav");
  overlay.style.width = "0%";
}
// Back-to-top button: revealed after scrolling 800px; clicking it
// smooth-scrolls the page back to the top.
var btn = $("#button");
$(window).scroll(function () {
  if ($(window).scrollTop() > 800) {
    btn.addClass("show");
  } else {
    btn.removeClass("show");
  }
});
btn.on("click", function (e) {
  e.preventDefault();
  // "800" (string) is accepted by jQuery as the animation duration in ms.
  $("html, body").animate({ scrollTop: 0 }, "800");
});
// Product quantity plugin factory.
(function () {
  "use strict";
  // Builds a jQuery plugin named `ident` from the constructor-like `func`.
  // The instance is cached on each element under the data key `ident`:
  // single-element sets return the instance, multi-element sets return
  // the jQuery set for chaining, empty sets return undefined.
  window.jQueryPlugin = function (ident, func) {
    return function (arg) {
      if (this.length === 1) {
        if (!this.data(ident)) {
          this.data(ident, func(this, arg));
        }
        return this.data(ident);
      } else if (this.length > 1) {
        this.each(function () {
          var $el = $(this);
          if (!$el.data(ident)) {
            $el.data(ident, func($el, arg));
          }
        });
        return this;
      }
    };
  };
})();
// Quantity stepper: wires the +/- buttons of a [data-quantity] widget
// to its numeric input field.
(function () {
  "use strict";
  function Guantity($root) {
    // Removed the original's unused `element` local and the dead
    // `$root.first("data-quantity")` call — jQuery .first() takes no
    // selector argument, so that expression had no effect.
    var quantity_target = $root.find("[data-quantity-target]");
    var quantity_minus = $root.find("[data-quantity-minus]");
    var quantity_plus = $root.find("[data-quantity-plus]");
    // Current value tracked in a closure; starts from the input's
    // string value and is coerced to a number by --/++.
    var quantity_ = quantity_target.val();
    $(quantity_minus).click(function () {
      quantity_target.val(--quantity_);
    });
    $(quantity_plus).click(function () {
      quantity_target.val(++quantity_);
    });
  }
  $.fn.Guantity = jQueryPlugin("Guantity", Guantity);
  $("[data-quantity]").Guantity();
})();
// slider product
// blogslider start
// Second blog slider variant: autoplaying, non-looping, no dots/arrows
// at desktop width (dots re-enabled at the 1024px breakpoint).
$(".blogslider").slick({
  dots: false,
  arrows: false,
  autoplay: true,
  infinite: false,
  speed: 300,
  slidesToShow: 1,
  slidesToScroll: 1,
  responsive: [
    {
      breakpoint: 1024,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
        infinite: true,
        dots: true,
      },
    },
    {
      breakpoint: 600,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
      },
    },
    {
      breakpoint: 480,
      settings: {
        slidesToShow: 1,
        slidesToScroll: 1,
      },
    },
  ],
});
// blogslider end
|
var assert = require('assert');
var R = require('..');

// Mocha spec for R.substringTo (curried prefix-taking helper).
describe('substringTo', function() {
  it('returns the trailing substring of a string', function() {
    var result = R.substringTo(8, 'abcdefghijklm');
    assert.strictEqual(result, 'abcdefgh');
  });
  it('is automatically curried', function() {
    var takeEight = R.substringTo(8);
    assert.strictEqual(takeEight('abcdefghijklm'), 'abcdefgh');
  });
});
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_DRM_DISPLAY_COMPOSITION_H_
#define ANDROID_DRM_DISPLAY_COMPOSITION_H_
#include "drmcrtc.h"
#include "drmhwcomposer.h"
#include "drmplane.h"
#include <sstream>
#include <vector>
#include <hardware/gralloc.h>
#include <hardware/hardware.h>
#include <hardware/hwcomposer.h>
namespace android {
class Importer;
class Planner;
class SquashState;
// Kind of work a display composition represents.
enum DrmCompositionType {
  DRM_COMPOSITION_TYPE_EMPTY,
  DRM_COMPOSITION_TYPE_FRAME,
  DRM_COMPOSITION_TYPE_DPMS,
  DRM_COMPOSITION_TYPE_MODESET,
};
// Layers queued for one display, plus whether their geometry changed
// since the previous frame (move-only: holds a vector of layers).
struct DrmCompositionDisplayLayersMap {
  int display;
  bool geometry_changed = true;
  std::vector<DrmHwcLayer> layers;
  DrmCompositionDisplayLayersMap() = default;
  DrmCompositionDisplayLayersMap(DrmCompositionDisplayLayersMap &&rhs) =
      default;
};
// Indices into a layer vector describing one composited region.
struct DrmCompositionRegion {
  std::vector<size_t> source_layers;
};
// Binding of source layer indices to one hardware plane on a CRTC.
// Move-only value type used by DrmDisplayComposition below.
class DrmCompositionPlane {
 public:
  // What this plane should do for the frame.
  enum class Type : int32_t {
    kDisable,  // turn the plane off
    kLayer,    // scan out the bound source layer(s)
  };
  DrmCompositionPlane() = default;
  DrmCompositionPlane(DrmCompositionPlane &&rhs) = default;
  DrmCompositionPlane &operator=(DrmCompositionPlane &&other) = default;
  DrmCompositionPlane(Type type, DrmPlane *plane, DrmCrtc *crtc)
      : type_(type), plane_(plane), crtc_(crtc) {
  }
  // Convenience constructor binding a single source layer index.
  DrmCompositionPlane(Type type, DrmPlane *plane, DrmCrtc *crtc,
                      size_t source_layer)
      : type_(type),
        plane_(plane),
        crtc_(crtc),
        source_layers_(1, source_layer) {
  }
  Type type() const {
    return type_;
  }
  DrmPlane *plane() const {
    return plane_;
  }
  void set_plane(DrmPlane *plane) {
    plane_ = plane;
  }
  DrmCrtc *crtc() const {
    return crtc_;
  }
  // Mutable/const access to the indices of the layers this plane shows.
  std::vector<size_t> &source_layers() {
    return source_layers_;
  }
  const std::vector<size_t> &source_layers() const {
    return source_layers_;
  }
 private:
  Type type_ = Type::kDisable;
  DrmPlane *plane_ = NULL;
  DrmCrtc *crtc_ = NULL;
  std::vector<size_t> source_layers_;
};
// One unit of work for a display: a frame (layers + plane assignments),
// a DPMS change, or a mode set, as indicated by type().
// Non-copyable; owns its layers and composition planes.
class DrmDisplayComposition {
 public:
  DrmDisplayComposition() = default;
  DrmDisplayComposition(const DrmDisplayComposition &) = delete;
  ~DrmDisplayComposition();
  // Binds the composition to a device/CRTC and helpers; must be called
  // before the Set*/Plan methods below.
  int Init(DrmDevice *drm, DrmCrtc *crtc, Importer *importer, Planner *planner,
           uint64_t frame_no);
  // Takes ownership of num_layers layers for a frame composition.
  int SetLayers(DrmHwcLayer *layers, size_t num_layers, bool geometry_changed);
  int AddPlaneComposition(DrmCompositionPlane plane);
  // Queues a plane to be turned off this frame.
  int AddPlaneDisable(DrmPlane *plane);
  int SetDpmsMode(uint32_t dpms_mode);
  int SetDisplayMode(const DrmMode &display_mode);
  // Assigns layers to the given candidate planes (delegates to planner_).
  int Plan(std::vector<DrmPlane *> *primary_planes,
           std::vector<DrmPlane *> *overlay_planes);
  std::vector<DrmHwcLayer> &layers() {
    return layers_;
  }
  std::vector<DrmCompositionPlane> &composition_planes() {
    return composition_planes_;
  }
  bool geometry_changed() const {
    return geometry_changed_;
  }
  uint64_t frame_no() const {
    return frame_no_;
  }
  DrmCompositionType type() const {
    return type_;
  }
  uint32_t dpms_mode() const {
    return dpms_mode_;
  }
  const DrmMode &display_mode() const {
    return display_mode_;
  }
  DrmCrtc *crtc() const {
    return crtc_;
  }
  Importer *importer() const {
    return importer_;
  }
  Planner *planner() const {
    return planner_;
  }
  // Transfers ownership of the out-fence fd to the caller.
  int take_out_fence() {
    return out_fence_.Release();
  }
  void set_out_fence(int out_fence) {
    out_fence_.Set(out_fence);
  }
  void Dump(std::ostringstream *out) const;
 private:
  // Presumably enforces legal type transitions; see the .cpp for details.
  bool validate_composition_type(DrmCompositionType desired);
  DrmDevice *drm_ = NULL;
  DrmCrtc *crtc_ = NULL;
  Importer *importer_ = NULL;
  Planner *planner_ = NULL;
  DrmCompositionType type_ = DRM_COMPOSITION_TYPE_EMPTY;
  uint32_t dpms_mode_ = DRM_MODE_DPMS_ON;
  DrmMode display_mode_;
  UniqueFd out_fence_ = -1;
  bool geometry_changed_;
  std::vector<DrmHwcLayer> layers_;
  std::vector<DrmCompositionPlane> composition_planes_;
  uint64_t frame_no_ = 0;
};
} // namespace android
#endif // ANDROID_DRM_DISPLAY_COMPOSITION_H_
|
#ifndef C0P_PARAM_OBJECTS_SURFER__US_0O8__SURFTIMECONST_2O75_GROUP_HOMOGENEOUS_MEMBER_AGENT_BEHAVIOUR_SENSOR_DIRECTION_ACCURATE_PARAMETERS_H
#define C0P_PARAM_OBJECTS_SURFER__US_0O8__SURFTIMECONST_2O75_GROUP_HOMOGENEOUS_MEMBER_AGENT_BEHAVIOUR_SENSOR_DIRECTION_ACCURATE_PARAMETERS_H
#pragma once
// app includes
#include "core/env/objects/object/agent/behaviour/sensor/direction/accurate/prop.h"
#include "param/parameters.h"
namespace c0p {
// Accurate direction sensor parameters for this surfer agent variant.
struct SurferUs0O8Surftimeconst2O75GroupHomogeneousMemberAgentBehaviourSensorDirectionAccurateParameters {
    // Direction sensed — presumably a unit vector {x, y, z}; here +y.
    const TypeSpaceVector direction = {0.0, 1.0, 0.0};
};
}
#endif
|
/*
NSMigrationManager.h
Core Data
Copyright (c) 2004-2016, Apple Inc.
All rights reserved.
*/
#import <Foundation/NSArray.h>
#import <Foundation/NSDictionary.h>
#import <Foundation/NSError.h>
NS_ASSUME_NONNULL_BEGIN
@class NSEntityDescription;
@class NSEntityMapping;
@class NSManagedObjectContext;
@class NSManagedObject;
@class NSManagedObjectModel;
@class NSMappingModel;
@class NSMigrationContext;
API_AVAILABLE(macosx(10.5),ios(3.0))
/* Coordinates a Core Data store migration between a source and destination model. */
@interface NSMigrationManager : NSObject {
}
/* Creates a migration manager instance with the corresponding source and destination models. (All validation of the arguments is performed during migrateStoreFromURL:toURL:) As with the NSPersistentStoreCoordinator, once models are added to the migration manager they are immutable and cannot be altered.*/
- (instancetype)initWithSourceModel:(NSManagedObjectModel *)sourceModel destinationModel:(NSManagedObjectModel *)destinationModel;
/* Migrates of the store at the specified source URL to the store at the destination URL, performing all of the mappings in the mapping model. A store must exist at the source URL; if a store does not exist at the destination URL, one will be created (otherwise the migration will append to the existing store.) Invoking this method will perform compatibility checks on the source and destination models (and the mapping model.) If an error occurs during the validation or migration, this method will return NO.*/
- (BOOL)migrateStoreFromURL:(NSURL *)sourceURL type:(NSString *)sStoreType options:(nullable NSDictionary *)sOptions withMappingModel:(nullable NSMappingModel *)mappings toDestinationURL:(NSURL *)dURL destinationType:(NSString *)dStoreType destinationOptions:(nullable NSDictionary *)dOptions error:(NSError **)error;
/* Tries to use a store specific migration manager to perform the store migration, note that a store specific migration manager class is not guaranteed to perform any of the migration manager delegate callbacks or update values for the observable properties.
 Defaults to YES */
@property () BOOL usesStoreSpecificMigrationManager API_AVAILABLE(macosx(10.7),ios(5.0));
/* Resets the association tables for the migration. (Note this does NOT reset the source or destination contexts).*/
- (void)reset;
/* Accessors for the mapping model, source model, and destination model*/
@property (readonly, strong) NSMappingModel *mappingModel;
@property (readonly, strong) NSManagedObjectModel *sourceModel;
@property (readonly, strong) NSManagedObjectModel *destinationModel;
/* Accessors for the managed object contexts used for reading the source and destination stores. These contexts are created lazily, as part of the initialization of two Core Data stacks (one for reading, the other for writing data.) */
@property (readonly, strong) NSManagedObjectContext *sourceContext;
@property (readonly, strong) NSManagedObjectContext *destinationContext;
/* Returns the NSEntityDescription for the source and destination entities, respectively, of the entity mapping. (Entity mappings do not store the actual description objects, but rather the name and version information of the entity.)*/
- (nullable NSEntityDescription *)sourceEntityForEntityMapping:(NSEntityMapping *)mEntity;
- (nullable NSEntityDescription *)destinationEntityForEntityMapping:(NSEntityMapping *)mEntity;
/* Associates the source instance with the specified destination instance for the given entity mapping. Since the migration is performed as a three-step process (first create the data, then relate the data, then validate the data) it is necessary to be able to associate data between the source and destination stores, in order to allow for relationship creation/fixup after the creation pass. This method is called in the default
 implementation of NSEntityMigrationPolicy's createDestinationInstancesForSourceInstance:entityMapping:manager:error: method.*/
- (void)associateSourceInstance:(NSManagedObject *)sourceInstance withDestinationInstance:(NSManagedObject *)destinationInstance forEntityMapping:(NSEntityMapping *)entityMapping;
/* Returns the managed object instances created in the destination store for the given entity mapping for the specified source instances.*/
- (NSArray<__kindof NSManagedObject *> *)destinationInstancesForEntityMappingNamed:(NSString *)mappingName sourceInstances:(nullable NSArray<__kindof NSManagedObject *> *)sourceInstances;
/* Returns the managed object instances in the source store used to create the specified destination instances for the given entity mapping.*/
- (NSArray<__kindof NSManagedObject *> *)sourceInstancesForEntityMappingNamed:(NSString *)mappingName destinationInstances:(nullable NSArray<__kindof NSManagedObject *> *)destinationInstances;
/* Observable property that can be used to determine progress of the migration process. Returns the current entity mapping being processed. Each entity is processed a total of three times (instance creation, relationship creation, validation)*/
@property (readonly, strong) NSEntityMapping *currentEntityMapping;
/* Observable property that can be used to determine progress of the migration process. Returns the percentage complete of the migration process. The progress value is a number from 0 to 1 indicating percent complete.*/
@property (readonly) float migrationProgress;
/* Returns/sets the user info for the migration manager*/
@property (nullable, nonatomic, strong) NSDictionary *userInfo;
/* Cancels the migration with the specified error. Calling this method causes migrateStoreFromURL:type:options:withMappingModel:toDestinationURL:destinationType:destinationOptions:error: to abort the migration and return the specified error. */
- (void)cancelMigrationWithError:(NSError *)error;
@end
NS_ASSUME_NONNULL_END
|
#!/usr/bin/env python
__version__ = '$Revision: 1.1.1.1 $'
__date__ = '$Date: 2008/07/09 14:41:05 $'
__author__ = '$Author: ttaauu $'
__credit__ = ''
import sys
import pickle
import popen2
import time
import Queue
import signal
# Retry the star-import until make_sched is fully written/readable.
# BUG FIX: the original `except EOFError, ValueError:` is Python 2 syntax
# that catches ONLY EOFError and binds it to the name ValueError; a tuple
# is required to catch both exception types.
while True:
    try:
        from make_sched import *
    except (EOFError, ValueError):
        time.sleep(0.1)
    else:
        break
# Seed per-worker randomness from the GXP execution index (None -> system
# entropy), and build a unique worker id from the hostname.
random.seed(os.environ.get('GXP_EXEC_IDX', None))
hostname = os.environ.get('GXP_HOSTNAME', socket.gethostname()) + randstr(8)
def heprint(str_):
    # Prefix a log line with this worker's unique hostname and emit it
    # via the shared eprint helper (from make_sched).
    eprint('%s: %s\n' % (hostname, str_))
class Worker:
    """Task-execution worker that talks to a master over fds 3 (write)
    and 4 (read) using a line-oriented protocol of pickled Message
    objects wrapped in repr'd one-element tuples.

    NOTE(review): original indentation was lost in transit; the nesting
    below is reconstructed from the control flow — confirm against the
    original file.
    """
    def __init__(self):
        # fd 4: messages from the master; fd 3: messages to the master.
        self.__from_master_fd = os.fdopen(4, 'r')
        self.__to_master_fd = os.fdopen(3, 'w')
        # Incoming messages are drained into a queue by a daemon thread
        # (FdEater comes from make_sched).
        self.__from_master_queue = Queue.Queue()
        self.__from_master_eater = FdEater(self.__from_master_queue, \
                                           self.__from_master_fd)
        self.__from_master_eater.setDaemon(1)
        self.__from_master_eater.start()
        # Task id currently being worked on (None when idle).
        self.tid = None
    def send_msg(self, msg):
        # Stamp the message with our identity and ship it as one line.
        msg.src = hostname
        self.__to_master_fd.write(str((pickle.dumps(msg), )) + '\n')
        self.__to_master_fd.flush()
    def send_avail_msg(self):
        # Announce readiness for a new task.
        self.tid = None
        msg = Message('avail')
        self.send_msg(msg)
    def get_stdin(self):
        # Blocking read of one message directly from the master fd
        # (bypasses the queue).  EOF means the master died.
        line = self.__from_master_fd.readline()
        if line == '':
            raise RuntimeError, 'Master Dead'
        try:
            tuple_ = eval(line)
        except SyntaxError:
            eprint("%s: Master may be dead. Exit.\n" % hostname)
            sys.stderr.flush()
            sys.exit(1)
        assert isinstance(tuple_, tuple), tuple_
        string_ = tuple_[0]
        assert isinstance(string_, str), string_
        msg = pickle.loads(string_)
        return msg
    def is_for_me(self, dst):
        return hostname == dst
    def get_command(self):
        # Wait for a 'task' message addressed to this worker; None in the
        # queue signals the eater thread saw EOF (master dead).
        while True:
            msg = self.__from_master_queue.get()
            if msg == None:
                eprint("%s: Master may be dead. Exit.\n" % hostname)
                sys.exit(1)
            if self.is_for_me(msg.dst):
                if msg.type == 'task':
                    return msg.body, msg.tid, msg.exits_files
                else:
                    eprint("%s: received %s message\n" % (hostname, msg.type))
    def cleanup(self, pid, command):
        # Kill the running child; the file-removal pass is disabled below.
        os.kill(pid, signal.SIGKILL)
        eprint('%s: killed %d\n' % (hostname, pid))
        '''
        nonecho_command = get_nonecho_command(command)
        exits_files = get_exits_files(nonecho_command)
        for exits_file in exits_files:
            os.remove(exits_file)
            eprint('%s: removed %s\n' % (hostname, exits_file))
        '''
    def do_cmd(self, command, tid):
        # Run `command` in a child, polling for completion while also
        # watching the queue for a 'stop' message or master death.
        child = popen2.Popen4(command)
        status = -1
        body = ''
        while True:
            try:
                msg = self.__from_master_queue.get_nowait()
            except Queue.Empty:
                pass
            else:
                if msg == None:
                    eprint("%s: Master may be dead. Exit.\n" % hostname)
                    self.cleanup(child.pid, command)
                    sys.exit(1)
                if self.is_for_me(msg.dst):
                    if msg.type == 'stop':
                        eprint("%s: received stop message\n" % hostname)
                        self.cleanup(child.pid, command)
                        child.wait()
                        break
                    else:
                        raise RuntimeError, msg
            # poll() returns -1 while the child is still running.
            status = child.poll()
            if status != -1:
                body = child.fromchild.read()
                break
            time.sleep(0.5)
        eprint('%s: status=%s\n' % (tid, status))
        nonecho_command = get_nonecho_command(command)
        exits_files = get_exits_files(nonecho_command)
        return '%s: return code %d\n'% (hostname, status) + body, \
               status, exits_files
    def send_status_msg(self, body, status, tid):
        msg = Message('status')
        msg.tid = tid
        msg.body = body
        msg.status = status
        self.send_msg(msg)
    def send_input_req_msg(self, infiles, tid):
        # Ask the master to stage the given input files for task `tid`.
        msg = Message('input_req')
        msg.tid = tid
        msg.body = infiles
        self.send_msg(msg)
    def recv_input_ok_msg(self):
        # Wait for the master's reply to an input request.
        while True:
            msg = self.__from_master_queue.get()
            if msg == None:
                eprint("%s: Master may be dead. Exit.\n" % hostname)
                sys.exit(1)
            if self.is_for_me(msg.dst):
                return msg.type
    def send_output_req_msg(self, outfiles, outdirs, tid):
        # Offer produced output files/dirs back to the master.
        msg = Message('output_req')
        msg.tid = tid
        msg.body = outfiles
        msg.outdir = outdirs
        self.send_msg(msg)
    def recv_output_ok_msg(self):
        # Wait for the master's reply to an output request.
        while True:
            msg = self.__from_master_queue.get()
            if msg == None:
                eprint("%s: Master may be dead. Exit.\n" % hostname)
                sys.exit(1)
            if self.is_for_me(msg.dst):
                return msg.type
    def run(self):
        # Main loop: announce availability, fetch a task, stage inputs,
        # execute, ship outputs, report status — forever.
        while True:
            self.send_avail_msg()
            heprint('send_avail_msg')
            command, tid, file_like = self.get_command()
            self.tid = tid
            infiles = select_input_files(file_like)
            if len(infiles) > 0:
                self.send_input_req_msg(infiles, tid)
                heprint('send_input_req_msg %s' % tid)
                type_ = self.recv_input_ok_msg()
                heprint('recv_input_ok_msg %s' % tid)
                if type_ == 'stop':
                    continue
                assert type_ == 'input_ok', type_
            body, status, outfiles = self.do_cmd(command, tid)
            heprint('do_cmd %s' % tid)
            if len(outfiles) > 0 and status == 0:
                # Directories are reported separately from plain files.
                outdirs = filter(os.path.isdir, outfiles)
                map(outfiles.remove, outdirs)
                self.send_output_req_msg(outfiles, outdirs, tid)
                heprint('send_output_req_msg %s' % tid)
                type_ = self.recv_output_ok_msg()
                heprint('recv_output_ok_msg %s' % tid)
                if type_ == 'stop':
                    continue
                if type_ == 'output_ng':
                    status = -1
                    body = None
                assert type_ == 'output_ok' or type_ == 'output_ng', type_
            self.send_status_msg(body, status, tid)
            heprint('send_status_msg %s' % tid)
def main():
    """Create a Worker and run its loop.

    Returns True if run() ever returns normally.  NOTE(review): run()
    loops forever and exits via sys.exit(), so reaching the return is
    not expected in normal operation.

    The original wrapped run() in `except RuntimeError: raise` followed
    by an unreachable `return False`; the handler was a no-op, so the
    dead code and the pointless try/except are removed — a RuntimeError
    still propagates exactly as before.
    """
    w = Worker()
    w.run()
    return True
if __name__ == '__main__':
    if main():
        sys.exit(0)
    else:
        sys.exit(1)
|
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
const resolve = _path => path.join(__dirname, _path);
const timePath = resolve('index.txt');
const MAX_TIME = 86400000;
const checkTime = () => {
const lastTime = +fs.readFileSync(timePath).toString();
const nowTime = new Date().getTime();
if (lastTime && nowTime - lastTime <= MAX_TIME) {
return;
}
fs.writeFileSync(timePath, nowTime);
const lastV = execSync('npm view jye-react version', { encoding: 'utf8' });
return lastV;
};
module.exports = {
checkTime,
};
|
from generator.basic.IntGenerator import IntGenerator
class LastStatusGenerator(IntGenerator):
    """Generator of "last status" codes: the base LCG integer stream
    shifted into the status range by +3."""

    def get_sequence(self, length, x, y, a, c, m, t0, min=0, max=3):
        """
        Generates sequence of last statuses.
        :param length: length of sequence
        :param x, y, a, c, m, t0: LCG parameters forwarded to IntGenerator
        :param min: lower bound of the base integer range (default 0)
        :param max: upper bound of the base integer range (default 3)
        """
        # The original wrapped this loop in `try: ... except Exception as
        # ex: raise ex`, a redundant re-raise that only truncated the
        # traceback; exceptions now propagate unchanged.
        for i in super(LastStatusGenerator, self).get_sequence(
                length=length, min=min, max=max, x=x, y=y, a=a, c=c, m=m, t0=t0):
            # Shift base values into the last-status code range.
            yield i + 3
|
exports.up = async (knex) => {
await knex.schema
.createTable("users", (users) => {
users.increments("id");
users.string("email", 254).notNullable().unique();
users.string("password", 200).notNullable();
users.string("first_name", 120).notNullable();
users.string("last_name", 120).notNullable();
users.string("phone", 25).notNullable();
users.timestamp("deleted_at");
users.timestamps(false, true);
})
.createTable("glasses", (users) => {
users.increments("id");
users.string("type", 254).notNullable().unique();
users.timestamp("deleted_at");
users.timestamps(false, true);
})
.createTable("cocktails", (cocktails) => {
cocktails.increments("id");
cocktails.string("name", 300).notNullable();
cocktails
.integer("user_id")
.unsigned()
.notNullable()
.references("id")
.inTable("users")
.onUpdate("RESTRICT")
.onDelete("RESTRICT");
cocktails
.integer("glass_type")
.unsigned()
.notNullable()
.references("id")
.inTable("glasses")
.onUpdate("RESTRICT")
.onDelete("RESTRICT");
cocktails.string("photo");
cocktails.string("garnish");
cocktails.timestamp("deleted_at");
cocktails.timestamps(false, true);
})
.createTable("ingredients", (ingredients) => {
ingredients.increments("id");
ingredients.string("name", 300).notNullable();
ingredients.boolean("alcoholic").notNullable();
ingredients
.enum("category", [
"bourbon/whiskey",
"scotch",
"vodka",
"rum",
"gin",
"tequila",
"mezcal",
"cordial",
"bitters",
"other",
])
.notNullable()
.defaultTo("other");
ingredients.timestamp("deleted_at");
ingredients.timestamps(false, true);
})
.createTable("cocktails_ingredients", (cocktails_ingredients) => {
cocktails_ingredients.primary(["cocktail_id", "ingredient_id"]);
cocktails_ingredients
.integer("cocktail_id")
.unsigned()
.notNullable()
.references("id")
.inTable("cocktails")
.onUpdate("RESTRICT")
.onDelete("RESTRICT");
cocktails_ingredients
.integer("ingredient_id")
.unsigned()
.notNullable()
.references("id")
.inTable("ingredients")
.onUpdate("RESTRICT")
.onDelete("RESTRICT");
cocktails_ingredients.string("quantity").notNullable();
cocktails_ingredients.timestamp("deleted_at");
cocktails_ingredients.timestamps(false, true);
})
.createTable("cocktails_steps", (cocktails_steps) => {
cocktails_steps.increments("id");
cocktails_steps.integer("step_number").notNullable();
cocktails_steps.string("step_instructions", 250).notNullable();
cocktails_steps
.integer("cocktail_id")
.unsigned()
.notNullable()
.references("id")
.inTable("cocktails")
.onUpdate("RESTRICT")
.onDelete("RESTRICT");
cocktails_steps.timestamp("deleted_at");
cocktails_steps.timestamps(false, true);
});
};
exports.down = async (knex) => {
await knex.schema
.dropTableIfExists("cocktails_steps")
.dropTableIfExists("cocktails_ingredients")
.dropTableIfExists("ingredients")
.dropTableIfExists("cocktails")
.dropTableIfExists("glasses")
.dropTableIfExists("users");
};
|
var assert = require('assert');
var value = '';
Feature('rating');
// The original repeated the same grab/assert pair five times (ids 5..1);
// a loop removes the duplication while keeping the same check order.
Scenario('should be disabled if "readonly" is specified', async (I) => {
  I.amOnPage('read-only.html');
  // Every star input of the read-only rating widget must be disabled.
  for (var rating = 5; rating >= 1; rating--) {
    value = await I.grabAttributeFrom('[id="root[rating]' + rating + '"]', 'disabled');
    assert.equal(value, 'true');
  }
});
|
import json
from channels.generic.websocket import WebsocketConsumer
from channels.routing import URLRouter
from channels.testing import WebsocketCommunicator
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import path
from jwt import encode as jwt_encode
from django_channels_jwt_auth_middleware.auth import JWTAuthMiddlewareStack
User = get_user_model()
class TestConsumer(WebsocketConsumer):
    """Minimal websocket consumer that echoes back which user (if any)
    the auth middleware attached to the connection scope."""
    def connect(self):
        self.accept()
    # def disconnect(self, code):
    #     self.disconnect()
    def receive(self, text_data, bytes_data=None):
        # Reply with the authenticated user's id, or None for anonymous.
        context = {
            'message': 'testing consumer',
        }
        user = self.scope['user']
        if (user_id := user.id):
            context.update({'user': str(user_id)})
        else:
            context.update({'user': None})
        self.send(text_data=json.dumps(context))
class JWTAuthMiddlewareTestCase(TestCase):
    """Integration tests for JWTAuthMiddlewareStack over a websocket app.

    Fixes: the two URL literals were f-strings with no placeholders
    (now plain strings), and the duplicated application construction is
    extracted into a private helper.
    """

    def setUp(self):
        # A known user plus a JWT signed with the project secret.
        self.user = User.objects.create_user(
            username='example', email='example@example.com', password='Abcd1234')
        self.jwt_token = jwt_encode(
            {'user_id': str(self.user.id)}, settings.SECRET_KEY, algorithm='HS256')

    def _make_application(self):
        # Routed test consumer wrapped by the middleware under test.
        return JWTAuthMiddlewareStack(
            URLRouter([
                path('ws/chat/<str:room_name>/', TestConsumer.as_asgi()),
            ])
        )

    async def test_middleware_without_token(self):
        """A connection without a token is treated as anonymous."""
        application = self._make_application()
        test_url = 'ws/chat/lobby/'
        communicator = WebsocketCommunicator(application, test_url)
        connected, subprotocol = await communicator.connect()
        assert connected
        await communicator.send_to(text_data=json.dumps({'message': 'test'}))
        response = await communicator.receive_from()
        decoded_response = json.loads(response)
        self.assertIsNone(decoded_response['user'])
        await communicator.disconnect()

    async def test_middleware_with_token_query_string_but_no_value(self):
        """An empty ?token= query string is also treated as anonymous."""
        application = self._make_application()
        test_url = 'ws/chat/lobby/?token='
        communicator = WebsocketCommunicator(application, test_url)
        connected, subprotocol = await communicator.connect()
        assert connected
        await communicator.send_to(text_data=json.dumps({'message': 'test'}))
        response = await communicator.receive_from()
        decoded_response = json.loads(response)
        self.assertIsNone(decoded_response['user'])
        await communicator.disconnect()
|
"""The Trakt integration."""
import asyncio
import logging
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
LocalOAuth2Implementation,
OAuth2Session,
async_get_config_entry_implementation,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import api, config_flow
from .api import TraktApi
from .config_flow import OAuth2FlowHandler
from .configuration import build_config_domain_schema, build_config_schema
from .const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
from .utils import nested_get, update_domain_data
LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = build_config_schema()
PLATFORMS = ["sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the TraktTV component from a yaml (not supported)."""
    # Only validates and stores the yaml config; actual setup happens in
    # async_setup_entry below.
    update_domain_data(hass, "configuration", CONFIG_SCHEMA(config).get(DOMAIN, {}))
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up TraktTV from a config entry."""
    # Register the OAuth2 implementation using the credentials stored in
    # the config entry.
    OAuth2FlowHandler.async_register_implementation(
        hass,
        config_entry_oauth2_flow.LocalOAuth2Implementation(
            hass,
            DOMAIN,
            entry.data[CONF_CLIENT_ID],
            entry.data[CONF_CLIENT_SECRET],
            OAUTH2_AUTHORIZE,
            OAUTH2_TOKEN,
        ),
    )
    # Build an authenticated OAuth2 session for the Trakt API client.
    implementation = await async_get_config_entry_implementation(hass, entry)
    session = OAuth2Session(hass, entry, implementation)
    configuration = {"client_id": entry.data[CONF_CLIENT_ID]}
    update_domain_data(hass, "configuration", configuration)
    api = TraktApi(async_get_clientsession(hass), session, entry, hass)
    # Coordinator polls api.retrieve_data; the first refresh must succeed
    # before the entry is considered set up.
    coordinator = DataUpdateCoordinator(
        hass=hass,
        logger=LOGGER,
        name="trakt",
        update_method=api.retrieve_data,
    )
    await coordinator.async_config_entry_first_refresh()
    instances = {"coordinator": coordinator, "api": api}
    update_domain_data(hass, "instances", instances)
    # Forward the entry to each platform (currently just "sensor").
    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    # Unload every platform in parallel; only drop domain data when all
    # platforms unloaded cleanly.
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, platform)
                for platform in PLATFORMS
            ]
        )
    )
    if unload_ok:
        hass.data.pop(DOMAIN)
    return unload_ok
|
# coding: utf-8
"""
Apteco API
An API to allow access to Apteco Marketing Suite resources # noqa: E501
The version of the OpenAPI document: v2
Contact: support@apteco.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class RecordSet(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'type': 'str',
        'key_variable_name': 'str',
        'by_reference': 'bool',
        'path': 'str',
        'transient': 'bool',
        'values': 'str',
        'min_occurs': 'int'
    }
    # Maps python attribute names to the camelCase JSON keys of the API.
    attribute_map = {
        'type': 'type',
        'key_variable_name': 'keyVariableName',
        'by_reference': 'byReference',
        'path': 'path',
        'transient': 'transient',
        'values': 'values',
        'min_occurs': 'minOccurs'
    }
    def __init__(self, type=None, key_variable_name=None, by_reference=None, path=None, transient=None, values=None, min_occurs=None):  # noqa: E501
        """RecordSet - a model defined in OpenAPI"""  # noqa: E501
        # All attributes are optional; only non-None arguments are set
        # (routing through the property setters for validation).
        self._type = None
        self._key_variable_name = None
        self._by_reference = None
        self._path = None
        self._transient = None
        self._values = None
        self._min_occurs = None
        self.discriminator = None
        if type is not None:
            self.type = type
        if key_variable_name is not None:
            self.key_variable_name = key_variable_name
        if by_reference is not None:
            self.by_reference = by_reference
        if path is not None:
            self.path = path
        if transient is not None:
            self.transient = transient
        if values is not None:
            self.values = values
        if min_occurs is not None:
            self.min_occurs = min_occurs
    @property
    def type(self):
        """Gets the type of this RecordSet.  # noqa: E501
        :return: The type of this RecordSet.  # noqa: E501
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """Sets the type of this RecordSet.
        :param type: The type of this RecordSet.  # noqa: E501
        :type: str
        """
        # The only validated attribute: must be one of the known kinds.
        allowed_values = ["URN", "SBM", "FSRN"]  # noqa: E501
        if type not in allowed_values:
            raise ValueError(
                "Invalid value for `type` ({0}), must be one of {1}"  # noqa: E501
                .format(type, allowed_values)
            )
        self._type = type
    @property
    def key_variable_name(self):
        """Gets the key_variable_name of this RecordSet.  # noqa: E501
        :return: The key_variable_name of this RecordSet.  # noqa: E501
        :rtype: str
        """
        return self._key_variable_name
    @key_variable_name.setter
    def key_variable_name(self, key_variable_name):
        """Sets the key_variable_name of this RecordSet.
        :param key_variable_name: The key_variable_name of this RecordSet.  # noqa: E501
        :type: str
        """
        self._key_variable_name = key_variable_name
    @property
    def by_reference(self):
        """Gets the by_reference of this RecordSet.  # noqa: E501
        :return: The by_reference of this RecordSet.  # noqa: E501
        :rtype: bool
        """
        return self._by_reference
    @by_reference.setter
    def by_reference(self, by_reference):
        """Sets the by_reference of this RecordSet.
        :param by_reference: The by_reference of this RecordSet.  # noqa: E501
        :type: bool
        """
        self._by_reference = by_reference
    @property
    def path(self):
        """Gets the path of this RecordSet.  # noqa: E501
        :return: The path of this RecordSet.  # noqa: E501
        :rtype: str
        """
        return self._path
    @path.setter
    def path(self, path):
        """Sets the path of this RecordSet.
        :param path: The path of this RecordSet.  # noqa: E501
        :type: str
        """
        self._path = path
    @property
    def transient(self):
        """Gets the transient of this RecordSet.  # noqa: E501
        :return: The transient of this RecordSet.  # noqa: E501
        :rtype: bool
        """
        return self._transient
    @transient.setter
    def transient(self, transient):
        """Sets the transient of this RecordSet.
        :param transient: The transient of this RecordSet.  # noqa: E501
        :type: bool
        """
        self._transient = transient
    @property
    def values(self):
        """Gets the values of this RecordSet.  # noqa: E501
        :return: The values of this RecordSet.  # noqa: E501
        :rtype: str
        """
        return self._values
    @values.setter
    def values(self, values):
        """Sets the values of this RecordSet.
        :param values: The values of this RecordSet.  # noqa: E501
        :type: str
        """
        self._values = values
    @property
    def min_occurs(self):
        """Gets the min_occurs of this RecordSet.  # noqa: E501
        :return: The min_occurs of this RecordSet.  # noqa: E501
        :rtype: int
        """
        return self._min_occurs
    @min_occurs.setter
    def min_occurs(self, min_occurs):
        """Sets the min_occurs of this RecordSet.
        :param min_occurs: The min_occurs of this RecordSet.  # noqa: E501
        :type: int
        """
        self._min_occurs = min_occurs
    def to_dict(self):
        """Returns the model properties as a dict"""
        # Recursively serializes nested models, lists, and dicts.
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Attribute-wise comparison via __dict__ (generated convention).
        if not isinstance(other, RecordSet):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
var key = require('./');
var expect = require('expect');
describe('weak-key', function() {
  it('generates a key for an object', function() {
    expect(key({})).toBeA('string');
  });

  it('generates the same key for the same object', function() {
    var obj = {};
    var first = key(obj);
    var second = key(obj);
    expect(first).toEqual(second);
  });

  it('generates the same key for the same object for non consecutive calls', function() {
    var obj = {};
    var first = key(obj);
    // Interleave an unrelated object between the two lookups.
    key({});
    expect(key(obj)).toEqual(first);
  });

  it('generates different keys for different objects', function() {
    expect(key({})).toNotEqual(key({}));
  });
});
|
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_BASIC_MEMORY_CHUNK_H_
#define V8_HEAP_BASIC_MEMORY_CHUNK_H_
#include <type_traits>
#include "src/base/atomic-utils.h"
#include "src/common/globals.h"
#include "src/heap/marking.h"
namespace v8 {
namespace internal {
class MemoryChunk;
// Minimal, standard-layout header shared by heap memory chunks. It stores the
// chunk's size, flag bits, marking bitmap pointer, owning heap, a sentinel,
// and the bounds of the allocatable area. MemoryChunk extends it (see the
// forward declaration above).
class BasicMemoryChunk {
 public:
  // Per-chunk state bits stored in |flags_|; manipulated via SetFlag /
  // ClearFlag / SetFlags and queried via IsFlagSet / GetFlags.
  enum Flag {
    NO_FLAGS = 0u,
    IS_EXECUTABLE = 1u << 0,
    POINTERS_TO_HERE_ARE_INTERESTING = 1u << 1,
    POINTERS_FROM_HERE_ARE_INTERESTING = 1u << 2,
    // A page in the from-space or a young large page that was not scavenged
    // yet.
    FROM_PAGE = 1u << 3,
    // A page in the to-space or a young large page that was scavenged.
    TO_PAGE = 1u << 4,
    LARGE_PAGE = 1u << 5,
    EVACUATION_CANDIDATE = 1u << 6,
    NEVER_EVACUATE = 1u << 7,
    // Large objects can have a progress bar in their page header. These object
    // are scanned in increments and will be kept black while being scanned.
    // Even if the mutator writes to them they will be kept black and a white
    // to grey transition is performed in the value.
    HAS_PROGRESS_BAR = 1u << 8,
    // |PAGE_NEW_OLD_PROMOTION|: A page tagged with this flag has been promoted
    // from new to old space during evacuation.
    PAGE_NEW_OLD_PROMOTION = 1u << 9,
    // |PAGE_NEW_NEW_PROMOTION|: A page tagged with this flag has been moved
    // within the new space during evacuation.
    PAGE_NEW_NEW_PROMOTION = 1u << 10,
    // This flag is intended to be used for testing. Works only when both
    // FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
    // are set. It forces the page to become an evacuation candidate at next
    // candidates selection cycle.
    FORCE_EVACUATION_CANDIDATE_FOR_TESTING = 1u << 11,
    // This flag is intended to be used for testing.
    NEVER_ALLOCATE_ON_PAGE = 1u << 12,
    // The memory chunk is already logically freed, however the actual freeing
    // still has to be performed.
    PRE_FREED = 1u << 13,
    // |POOLED|: When actually freeing this chunk, only uncommit and do not
    // give up the reservation as we still reuse the chunk at some point.
    POOLED = 1u << 14,
    // |COMPACTION_WAS_ABORTED|: Indicates that the compaction in this page
    // has been aborted and needs special handling by the sweeper.
    COMPACTION_WAS_ABORTED = 1u << 15,
    // |COMPACTION_WAS_ABORTED_FOR_TESTING|: During stress testing evacuation
    // on pages is sometimes aborted. The flag is used to avoid repeatedly
    // triggering on the same page.
    COMPACTION_WAS_ABORTED_FOR_TESTING = 1u << 16,
    // |SWEEP_TO_ITERATE|: The page requires sweeping using external markbits
    // to iterate the page.
    SWEEP_TO_ITERATE = 1u << 17,
    // |INCREMENTAL_MARKING|: Indicates whether incremental marking is currently
    // enabled.
    INCREMENTAL_MARKING = 1u << 18,
    NEW_SPACE_BELOW_AGE_MARK = 1u << 19,
    // The memory chunk freeing bookkeeping has been performed but the chunk has
    // not yet been freed.
    UNREGISTERED = 1u << 20,
    // The memory chunk belongs to the read-only heap and does not participate
    // in garbage collection. This is used instead of owner for identity
    // checking since read-only chunks have no owner once they are detached.
    READ_ONLY_HEAP = 1u << 21,
  };

  // Chunks are aligned to a page boundary, so the chunk containing any
  // interior address can be recovered by masking (see BaseAddress).
  static const intptr_t kAlignment =
      (static_cast<uintptr_t>(1) << kPageSizeBits);

  static const intptr_t kAlignmentMask = kAlignment - 1;

  BasicMemoryChunk(size_t size, Address area_start, Address area_end);

  // Rounds |a| down to the base address of the chunk that contains it.
  static Address BaseAddress(Address a) { return a & ~kAlignmentMask; }

  // The chunk header is located at the very start of the chunk's memory.
  Address address() const { return reinterpret_cast<Address>(this); }

  size_t size() const { return size_; }
  void set_size(size_t size) { size_ = size; }

  Address area_start() const { return area_start_; }

  Address area_end() const { return area_end_; }
  void set_area_end(Address area_end) { area_end_ = area_end; }

  // Size in bytes of the allocatable area (excludes header/guards).
  size_t area_size() const {
    return static_cast<size_t>(area_end() - area_start());
  }

  // Sets |flag|, either plainly or with an atomic read-modify-write when
  // access_mode is ATOMIC.
  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
  void SetFlag(Flag flag) {
    if (access_mode == AccessMode::NON_ATOMIC) {
      flags_ |= flag;
    } else {
      base::AsAtomicWord::SetBits<uintptr_t>(&flags_, flag, flag);
    }
  }

  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
  bool IsFlagSet(Flag flag) const {
    return (GetFlags<access_mode>() & flag) != 0;
  }

  // NOTE(review): non-atomic only; no ATOMIC overload is provided here.
  void ClearFlag(Flag flag) { flags_ &= ~flag; }

  // Set or clear multiple flags at a time. The flags in the mask are set to
  // the value in "flags", the rest retain the current value in |flags_|.
  void SetFlags(uintptr_t flags, uintptr_t mask) {
    flags_ = (flags_ & ~mask) | (flags & mask);
  }

  // Return all current flags.
  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
  uintptr_t GetFlags() const {
    if (access_mode == AccessMode::NON_ATOMIC) {
      return flags_;
    } else {
      return base::AsAtomicWord::Relaxed_Load(&flags_);
    }
  }

  bool InReadOnlySpace() const { return IsFlagSet(READ_ONLY_HEAP); }

  // TODO(v8:7464): Add methods for down casting to MemoryChunk.

  // Whether |addr| lies inside this chunk's allocatable area.
  bool Contains(Address addr) const {
    return addr >= area_start() && addr < area_end();
  }

  // Checks whether |addr| can be a limit of addresses in this page. It's a
  // limit if it's in the page, or if it's just after the last byte of the page.
  bool ContainsLimit(Address addr) const {
    return addr >= area_start() && addr <= area_end();
  }

  V8_EXPORT_PRIVATE static bool HasHeaderSentinel(Address slot_addr);

  void ReleaseMarkingBitmap();

  // Byte offsets of the header fields below; kept in sync with the real
  // layout by BasicMemoryChunkValidator at the bottom of this file.
  static const intptr_t kSizeOffset = 0;
  static const intptr_t kFlagsOffset = kSizeOffset + kSizetSize;
  static const intptr_t kMarkBitmapOffset = kFlagsOffset + kUIntptrSize;
  static const intptr_t kHeapOffset = kMarkBitmapOffset + kSystemPointerSize;
  static const intptr_t kHeaderSentinelOffset =
      kHeapOffset + kSystemPointerSize;

  static const size_t kHeaderSize =
      kSizeOffset + kSizetSize  // size_t size
      + kUIntptrSize            // uintptr_t flags_
      + kSystemPointerSize      // Bitmap* marking_bitmap_
      + kSystemPointerSize      // Heap* heap_
      + kSystemPointerSize      // Address header_sentinel_
      + kSystemPointerSize      // Address area_start_
      + kSystemPointerSize;     // Address area_end_

 protected:
  // Overall size of the chunk, including the header and guards.
  size_t size_;

  uintptr_t flags_ = NO_FLAGS;

  Bitmap* marking_bitmap_ = nullptr;

  // TODO(v8:7464): Find a way to remove this.
  // This goes against the spirit for the BasicMemoryChunk, but until C++14/17
  // is the default it needs to live here because MemoryChunk is not standard
  // layout under C++11.
  Heap* heap_;

  // This is used to distinguish the memory chunk header from the interior of a
  // large page. The memory chunk header stores here an impossible tagged
  // pointer: the tagger pointer of the page start. A field in a large object is
  // guaranteed to not contain such a pointer.
  Address header_sentinel_;

  // Start and end of allocatable memory on this chunk.
  Address area_start_;
  Address area_end_;

  friend class BasicMemoryChunkValidator;
};
// BasicMemoryChunk must remain standard layout so that offsetof() below is
// well-defined and the header can be inspected by raw-address arithmetic.
STATIC_ASSERT(std::is_standard_layout<BasicMemoryChunk>::value);

// Compile-time validation that the hand-maintained k*Offset constants agree
// with the field layout the compiler actually produced.
class BasicMemoryChunkValidator {
  // Computed offsets should match the compiler generated ones.
  STATIC_ASSERT(BasicMemoryChunk::kSizeOffset ==
                offsetof(BasicMemoryChunk, size_));
  STATIC_ASSERT(BasicMemoryChunk::kFlagsOffset ==
                offsetof(BasicMemoryChunk, flags_));
  STATIC_ASSERT(BasicMemoryChunk::kMarkBitmapOffset ==
                offsetof(BasicMemoryChunk, marking_bitmap_));
  STATIC_ASSERT(BasicMemoryChunk::kHeapOffset ==
                offsetof(BasicMemoryChunk, heap_));
  STATIC_ASSERT(BasicMemoryChunk::kHeaderSentinelOffset ==
                offsetof(BasicMemoryChunk, header_sentinel_));
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_BASIC_MEMORY_CHUNK_H_
|
import sys
import math
from datetime import datetime
from reloadium.vendored.sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exception,
disable_capture_event,
format_timestamp,
json_dumps,
safe_repr,
strip_string,
)
import reloadium.vendored.sentry_sdk.utils
from reloadium.vendored.sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
from reloadium.vendored.sentry_sdk._types import MYPY
if MYPY:
from datetime import timedelta
from types import TracebackType
from typing import Any
from typing import Callable
from typing import ContextManager
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from reloadium.vendored.sentry_sdk._types import NotImplementedType, Event
Span = Dict[str, Any]
ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
Segment = Union[str, int]
if PY2:
# Importing ABCs from collections is deprecated, and will stop working in 3.8
# https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
from collections import Mapping, Sequence, Set
serializable_str_types = string_types
else:
# New in 3.3
# https://docs.python.org/3/library/collections.abc.html
from collections.abc import Mapping, Sequence, Set
# Bytes are technically not strings in Python 3, but we can serialize them
serializable_str_types = (str, bytes)
# Maximum length of JSON-serialized event payloads that can be safely sent
# before the server may reject the event due to its size. This is not intended
# to reflect actual values defined server-side, but rather only be an upper
# bound for events sent by the SDK.
#
# Can be overwritten if wanting to send more bytes, e.g. with a custom server.
# When changing this, keep in mind that events may be a little bit larger than
# this value due to attached metadata, so keep the number conservative.
MAX_EVENT_BYTES = 10 ** 6
# Maximum nesting depth of trimmed ("databag") containers before values are
# replaced by their repr (see _serialize_node_impl).
MAX_DATABAG_DEPTH = 5
# Maximum number of items serialized per container level in a databag.
MAX_DATABAG_BREADTH = 10
# Placeholder emitted when the serializer re-encounters an object that is
# already on the current path (a reference cycle).
CYCLE_MARKER = u"<cyclic>"
# Callables consulted for custom reprs of databag values; registered via
# add_global_repr_processor().
global_repr_processors = []  # type: List[ReprProcessor]
def add_global_repr_processor(processor):
    # type: (ReprProcessor) -> None
    """Register ``processor`` to run against every serialized databag value.

    A processor may return ``NotImplemented`` to decline handling a value.
    """
    global_repr_processors.append(processor)
class Memo(object):
    """Tracks objects on the current serialization path to detect cycles."""

    __slots__ = ("_ids", "_objs")

    def __init__(self):
        # type: () -> None
        self._ids = {}  # type: Dict[int, Any]  # id() -> object on the path
        self._objs = []  # type: List[Any]  # stack of objects being visited

    def memoize(self, obj):
        # type: (Any) -> ContextManager[bool]
        """Push ``obj`` and return a context manager yielding True on a cycle."""
        self._objs.append(obj)
        return self

    def __enter__(self):
        # type: () -> bool
        current = self._objs[-1]
        if id(current) in self._ids:
            return True
        self._ids[id(current)] = current
        return False

    def __exit__(self, ty, value, tb):
        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
        # Pop the object and forget its id so siblings are not flagged.
        self._ids.pop(id(self._objs.pop()), None)
def serialize(event, smart_transaction_trimming=False, **kwargs):
    # type: (Event, bool, **Any) -> Event
    """Walk ``event`` and return a JSON-safe, size-limited copy.

    Databag values (request data, breadcrumbs, extras, frame vars) are
    depth/breadth limited and stringified; cycles become CYCLE_MARKER and
    trimming annotations are collected under the returned event's ``_meta``.
    """
    memo = Memo()
    path = []  # type: List[Segment]
    meta_stack = []  # type: List[Dict[str, Any]]
    span_description_bytes = []  # type: List[int]

    def _annotate(**meta):
        # type: (**Any) -> None
        # Record trimming metadata for the current path in the _meta tree.
        while len(meta_stack) <= len(path):
            try:
                segment = path[len(meta_stack) - 1]
                node = meta_stack[-1].setdefault(text_type(segment), {})
            except IndexError:
                node = {}

            meta_stack.append(node)

        meta_stack[-1].setdefault("", {}).update(meta)

    def _should_repr_strings():
        # type: () -> Optional[bool]
        """
        By default non-serializable objects are going through
        safe_repr(). For certain places in the event (local vars) we
        want to repr() even things that are JSON-serializable to
        make their type more apparent. For example, it's useful to
        see the difference between a unicode-string and a bytestring
        when viewing a stacktrace.

        For container-types we still don't do anything different.
        Generally we just try to make the Sentry UI present exactly
        what a pretty-printed repr would look like.

        :returns: `True` if we are somewhere in frame variables, and `False` if
            we are in a position where we will never encounter frame variables
            when recursing (for example, we're in `event.extra`). `None` if we
            are not (yet) in frame variables, but might encounter them when
            recursing (e.g. we're in `event.exception`)
        """
        try:
            p0 = path[0]
            if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
                return True

            if (
                p0 in ("threads", "exception")
                and path[1] == "values"
                and path[3] == "stacktrace"
                and path[4] == "frames"
                and path[6] == "vars"
            ):
                return True
        except IndexError:
            return None

        return False

    def _is_databag():
        # type: () -> Optional[bool]
        """
        A databag is any value that we need to trim.

        :returns: Works like `_should_repr_strings()`. `True` for "yes",
            `False` for :"no", `None` for "maybe soon".
        """
        try:
            rv = _should_repr_strings()
            if rv in (True, None):
                return rv

            p0 = path[0]
            if p0 == "request" and path[1] == "data":
                return True

            if p0 == "breadcrumbs" and path[1] == "values":
                # Deliberate probe: raises IndexError (-> None) unless we are
                # already inside an individual breadcrumb.
                path[2]
                return True

            if p0 == "extra":
                return True

        except IndexError:
            return None

        return False

    def _serialize_node(
        obj,  # type: Any
        is_databag=None,  # type: Optional[bool]
        should_repr_strings=None,  # type: Optional[bool]
        segment=None,  # type: Optional[Segment]
        remaining_breadth=None,  # type: Optional[int]
        remaining_depth=None,  # type: Optional[int]
    ):
        # type: (...) -> Any
        # Serialize a single node, maintaining `path`/cycle state around the
        # actual work done in _serialize_node_impl.
        if segment is not None:
            path.append(segment)

        try:
            with memo.memoize(obj) as result:
                if result:
                    return CYCLE_MARKER

                return _serialize_node_impl(
                    obj,
                    is_databag=is_databag,
                    should_repr_strings=should_repr_strings,
                    remaining_depth=remaining_depth,
                    remaining_breadth=remaining_breadth,
                )
        except BaseException:
            capture_internal_exception(sys.exc_info())

            if is_databag:
                return u"<failed to serialize, use init(debug=True) to see error logs>"

            return None
        finally:
            if segment is not None:
                path.pop()
                del meta_stack[len(path) + 1 :]

    def _flatten_annotated(obj):
        # type: (Any) -> Any
        # Unwrap AnnotatedValue, recording its metadata at the current path.
        if isinstance(obj, AnnotatedValue):
            _annotate(**obj.metadata)
            obj = obj.value
        return obj

    def _serialize_node_impl(
        obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
    ):
        # type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
        if should_repr_strings is None:
            should_repr_strings = _should_repr_strings()

        if is_databag is None:
            is_databag = _is_databag()

        if is_databag and remaining_depth is None:
            remaining_depth = MAX_DATABAG_DEPTH
        if is_databag and remaining_breadth is None:
            remaining_breadth = MAX_DATABAG_BREADTH

        obj = _flatten_annotated(obj)

        if remaining_depth is not None and remaining_depth <= 0:
            _annotate(rem=[["!limit", "x"]])
            if is_databag:
                return _flatten_annotated(strip_string(safe_repr(obj)))
            return None

        if is_databag and global_repr_processors:
            hints = {"memo": memo, "remaining_depth": remaining_depth}
            for processor in global_repr_processors:
                result = processor(obj, hints)
                if result is not NotImplemented:
                    return _flatten_annotated(result)

        if obj is None or isinstance(obj, (bool, number_types)):
            if should_repr_strings or (
                isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
            ):
                return safe_repr(obj)
            else:
                return obj

        elif isinstance(obj, datetime):
            return (
                text_type(format_timestamp(obj))
                if not should_repr_strings
                else safe_repr(obj)
            )

        elif isinstance(obj, Mapping):
            # Create temporary copy here to avoid calling too much code that
            # might mutate our dictionary while we're still iterating over it.
            obj = dict(iteritems(obj))
            rv_dict = {}  # type: Dict[str, Any]
            i = 0

            for k, v in iteritems(obj):
                if remaining_breadth is not None and i >= remaining_breadth:
                    _annotate(len=len(obj))
                    break

                str_k = text_type(k)
                v = _serialize_node(
                    v,
                    segment=str_k,
                    should_repr_strings=should_repr_strings,
                    is_databag=is_databag,
                    remaining_depth=remaining_depth - 1
                    if remaining_depth is not None
                    else None,
                    remaining_breadth=remaining_breadth,
                )
                rv_dict[str_k] = v
                i += 1

            return rv_dict

        elif not isinstance(obj, serializable_str_types) and isinstance(
            obj, (Set, Sequence)
        ):
            rv_list = []

            for i, v in enumerate(obj):
                if remaining_breadth is not None and i >= remaining_breadth:
                    _annotate(len=len(obj))
                    break

                rv_list.append(
                    _serialize_node(
                        v,
                        segment=i,
                        should_repr_strings=should_repr_strings,
                        is_databag=is_databag,
                        remaining_depth=remaining_depth - 1
                        if remaining_depth is not None
                        else None,
                        remaining_breadth=remaining_breadth,
                    )
                )

            return rv_list

        if should_repr_strings:
            obj = safe_repr(obj)
        else:
            if isinstance(obj, bytes):
                obj = obj.decode("utf-8", "replace")

            if not isinstance(obj, string_types):
                obj = safe_repr(obj)

        # Allow span descriptions to be longer than other strings.
        #
        # For database auto-instrumented spans, the description contains
        # potentially long SQL queries that are most useful when not truncated.
        # Because arbitrarily large events may be discarded by the server as a
        # protection mechanism, we dynamically limit the description length
        # later in _truncate_span_descriptions.
        if (
            smart_transaction_trimming
            and len(path) == 3
            and path[0] == "spans"
            and path[-1] == "description"
        ):
            span_description_bytes.append(len(obj))
            return obj
        return _flatten_annotated(strip_string(obj))

    def _truncate_span_descriptions(serialized_event, event, excess_bytes):
        # type: (Event, Event, int) -> None
        """
        Modifies serialized_event in-place trying to remove excess_bytes from
        span descriptions. The original event is used read-only to access the
        span timestamps (represented as RFC3399-formatted strings in
        serialized_event).

        It uses heuristics to prioritize preserving the description of spans
        that might be the most interesting ones in terms of understanding and
        optimizing performance.
        """
        # When truncating a description, preserve a small prefix.
        min_length = 10

        def shortest_duration_longest_description_first(args):
            # type: (Tuple[int, Span]) -> Tuple[timedelta, int]
            i, serialized_span = args
            span = event["spans"][i]
            now = datetime.utcnow()
            start = span.get("start_timestamp") or now
            end = span.get("timestamp") or now
            duration = end - start
            description = serialized_span.get("description") or ""
            return (duration, -len(description))

        # Note: for simplicity we sort spans by exact duration and description
        # length. If ever needed, we could have a more involved heuristic, e.g.
        # replacing exact durations with "buckets" and/or looking at other span
        # properties.
        path.append("spans")
        for i, span in sorted(
            enumerate(serialized_event.get("spans") or []),
            key=shortest_duration_longest_description_first,
        ):
            description = span.get("description") or ""
            if len(description) <= min_length:
                continue
            excess_bytes -= len(description) - min_length
            path.extend([i, "description"])
            # Note: the last time we call strip_string we could preserve a few
            # more bytes up to a total length of MAX_EVENT_BYTES. Since that's
            # not strictly required, we leave it out for now for simplicity.
            span["description"] = _flatten_annotated(
                strip_string(description, max_length=min_length)
            )
            del path[-2:]
            del meta_stack[len(path) + 1 :]
            if excess_bytes <= 0:
                break
        path.pop()
        del meta_stack[len(path) + 1 :]

    disable_capture_event.set(True)
    try:
        rv = _serialize_node(event, **kwargs)
        if meta_stack and isinstance(rv, dict):
            rv["_meta"] = meta_stack[0]

        sum_span_description_bytes = sum(span_description_bytes)
        if smart_transaction_trimming and sum_span_description_bytes > 0:
            span_count = len(event.get("spans") or [])
            # This is an upper bound of how many bytes all descriptions would
            # consume if the usual string truncation in _serialize_node_impl
            # would have taken place, not accounting for the metadata attached
            # as event["_meta"].
            #
            # BUG FIX: this module only does
            # `import reloadium.vendored.sentry_sdk.utils`, which binds the
            # name `reloadium` (not `sentry_sdk`); the previous reference to
            # bare `sentry_sdk.utils` raised NameError on this code path.
            descriptions_budget_bytes = (
                span_count * reloadium.vendored.sentry_sdk.utils.MAX_STRING_LENGTH
            )

            # If by not truncating descriptions we ended up with more bytes than
            # per the usual string truncation, check if the event is too large
            # and we need to truncate some descriptions.
            #
            # This is guarded with an if statement to avoid JSON-encoding the
            # event unnecessarily.
            if sum_span_description_bytes > descriptions_budget_bytes:
                original_bytes = len(json_dumps(rv))
                excess_bytes = original_bytes - MAX_EVENT_BYTES
                if excess_bytes > 0:
                    # Event is too large, will likely be discarded by the
                    # server. Trim it down before sending.
                    _truncate_span_descriptions(rv, event, excess_bytes)

                    # Span descriptions truncated, set or reset _meta.
                    #
                    # We run the same code earlier because we want to account
                    # for _meta when calculating original_bytes, the number of
                    # bytes in the JSON-encoded event.
                    if meta_stack and isinstance(rv, dict):
                        rv["_meta"] = meta_stack[0]
        return rv
    finally:
        disable_capture_event.set(False)
|
export default function(scrollClass) {
  // Keep <body>'s class list in sync with the scroll position, starting now.
  const body = document.body;
  const sync = updateClass.bind(null, body, scrollClass);
  window.addEventListener('scroll', sync);
  sync();
}
/**
 * Add `className` to `el` while scrolled, remove it when back at the top.
 *
 * Fixes over the previous version:
 * - Removed leftover `console.log` debug output.
 * - Matches whole class tokens instead of substrings (the old regex/replace
 *   approach matched "active" inside "inactive" and left stray spaces).
 *
 * NOTE(review): this reads `el.scrollTop`, which for `document.body` may stay
 * 0 in some browsers (scrolling happens on the document element) — confirm
 * against the targeted browsers.
 *
 * @param {Element} el - element whose className is updated
 * @param {string} className - class to toggle
 */
function updateClass(el, className) {
  const classes = el.className.split(/\s+/).filter(Boolean);
  const present = classes.indexOf(className) !== -1;
  if (el.scrollTop === 0) {
    if (present) {
      el.className = classes.filter((c) => c !== className).join(' ');
    }
  } else if (!present) {
    el.className += ' ' + className;
  }
}
/**
 * Whether `el` currently carries `className` as a whole class token.
 *
 * Fix: the previous `new RegExp(className)` matched substrings, so
 * hasClass(el, 'active') was true for className "inactive". The pattern is
 * now anchored on whitespace/string edges.
 *
 * @param {Element} el
 * @param {string} className
 * @returns {boolean}
 */
function hasClass(el, className) {
  var classRE = new RegExp('(^|\\s)' + className + '(\\s|$)');
  return classRE.test(el.className);
}
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from threading import Thread
from mycroft import dialog
from mycroft.client.speech.listener import RecognizerLoop
from mycroft.configuration import Configuration
from mycroft.enclosure.api import EnclosureAPI
from mycroft.identity import IdentityManager
from mycroft.messagebus.message import Message
from mycroft.util import (
start_message_bus_client
)
from mycroft.util.log import LOG
from mycroft.util.process_utils import ProcessStatus, StatusCallbackMap
def on_ready():
    """Default hook run once the speech client has fully started."""
    LOG.info('Speech client is ready.')
def on_stopping():
    """Default hook run when the speech service begins shutting down."""
    LOG.info('Speech service is shutting down...')
def on_error(e='Unknown'):
    """Default hook run when the speech service fails to launch."""
    LOG.error('Audio service failed to launch ({}).'.format(repr(e)))
class SpeechClient(Thread):
    """Thread hosting the speech recognizer loop and its messagebus glue.

    Bridges events between the internal RecognizerLoop emitter and the main
    Mycroft messagebus, and reports process status via ProcessStatus.
    """

    def __init__(self, on_ready=on_ready, on_error=on_error,
                 on_stopping=on_stopping, watchdog=lambda: None):
        super(SpeechClient, self).__init__()

        callbacks = StatusCallbackMap(on_ready=on_ready,
                                      on_error=on_error,
                                      on_stopping=on_stopping)
        self.status = ProcessStatus('speech', callback_map=callbacks)
        self.status.set_started()

        self.config = Configuration.get()
        self.bus = start_message_bus_client("VOICE")
        self.connect_bus_events()
        self.status.bind(self.bus)

        # Register handlers on internal RecognizerLoop bus
        self.loop = RecognizerLoop(self.bus, watchdog)
        self.connect_loop_events()

    # loop events
    def handle_record_begin(self):
        """Forward internal bus message to external bus."""
        LOG.info("Begin Recording...")
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('recognizer_loop:record_begin', context=context))

    def handle_record_end(self):
        """Forward internal bus message to external bus."""
        LOG.info("End Recording...")
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('recognizer_loop:record_end', context=context))

    def handle_no_internet(self):
        """Notify the enclosure that no internet connection is available."""
        LOG.debug("Notifying enclosure of no internet connection")
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('enclosure.notify.no_internet', context=context))

    def handle_awoken(self):
        """Forward mycroft.awoken to the messagebus."""
        LOG.info("Listener is now Awake: ")
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('mycroft.awoken', context=context))

    def handle_wakeword(self, event):
        """Forward wakeword detections to the messagebus."""
        LOG.info("Wakeword Detected: " + event['utterance'])
        self.bus.emit(Message('recognizer_loop:wakeword', event))

    def handle_hotword(self, event):
        """Forward hotword detections to the messagebus."""
        LOG.info("Hotword Detected: " + event['hotword'])
        self.bus.emit(Message('recognizer_loop:hotword', event))

    def handle_hotword_event(self, event):
        """ hotword configured to emit a bus event
        forward event from internal emitter to mycroft bus"""
        self.bus.emit(Message(event["msg_type"]))

    def handle_utterance(self, event):
        """Forward a recognized utterance to the skills service."""
        LOG.info("Utterance: " + str(event['utterances']))
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio',
                   'destination': ["skills"]}
        if 'ident' in event:
            ident = event.pop('ident')
            context['ident'] = ident
        self.bus.emit(Message('recognizer_loop:utterance', event, context))

    def handle_unknown(self):
        """Report that speech was detected but not recognized."""
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(
            Message('mycroft.speech.recognition.unknown', context=context))

    def handle_speak(self, event):
        """
        Forward speak message to message bus.
        """
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('speak', event, context))

    def handle_complete_intent_failure(self, event):
        """Extreme backup for answering completely unhandled intent requests."""
        LOG.info("Failed to find intent.")
        data = {'utterance': dialog.get('not.loaded')}
        context = {'client_name': 'mycroft_listener',
                   'source': 'audio'}
        self.bus.emit(Message('speak', data, context))

    def handle_sleep(self, event):
        """Put the recognizer loop to sleep."""
        self.loop.sleep()

    def handle_wake_up(self, event):
        """Wake up the recognizer loop."""
        self.loop.awaken()

    def handle_mic_mute(self, event):
        """Mute the listener system."""
        self.loop.mute()

    def handle_mic_unmute(self, event):
        """Unmute the listener system."""
        self.loop.unmute()

    def handle_mic_listen(self, _):
        """Handler for mycroft.mic.listen.

        Starts listening as if wakeword was spoken.
        """
        self.loop.responsive_recognizer.trigger_listen()

    def handle_mic_get_status(self, event):
        """Query microphone mute status."""
        data = {'muted': self.loop.is_muted()}
        self.bus.emit(event.response(data))

    def handle_paired(self, event):
        """Update identity information with pairing data.

        This is done here to make sure it's only done in a single place.
        TODO: Is there a reason this isn't done directly in the pairing skill?
        """
        IdentityManager.update(event.data)

    def handle_audio_start(self, event):
        """Mute recognizer loop."""
        if self.config.get("listener").get("mute_during_output"):
            self.loop.mute()

    def handle_audio_end(self, event):
        """Request unmute, if more sources have requested the mic to be muted
        it will remain muted.
        """
        if self.config.get("listener").get("mute_during_output"):
            self.loop.unmute()  # restore

    def handle_stop(self, event):
        """Handler for mycroft.stop, i.e. button press."""
        self.loop.force_unmute()

    def handle_open(self):
        # TODO: Move this into the Enclosure (not speech client)
        # Reset the UI to indicate ready for speech processing
        # BUG FIX: `bus` was referenced as a bare (undefined) name, raising
        # NameError; use this instance's messagebus connection instead.
        EnclosureAPI(self.bus).reset()

    def connect_loop_events(self):
        """Subscribe to events emitted by the internal RecognizerLoop."""
        self.loop.on('recognizer_loop:utterance', self.handle_utterance)
        self.loop.on('recognizer_loop:speech.recognition.unknown',
                     self.handle_unknown)
        self.loop.on('speak', self.handle_speak)
        self.loop.on('recognizer_loop:record_begin', self.handle_record_begin)
        self.loop.on('recognizer_loop:awoken', self.handle_awoken)
        self.loop.on('recognizer_loop:wakeword', self.handle_wakeword)
        self.loop.on('recognizer_loop:hotword', self.handle_hotword)
        self.loop.on('recognizer_loop:record_end', self.handle_record_end)
        self.loop.on('recognizer_loop:no_internet', self.handle_no_internet)
        self.loop.on('recognizer_loop:hotword_event',
                     self.handle_hotword_event)

    def connect_bus_events(self):
        """Register handlers for events on main Mycroft messagebus."""
        self.bus.on('open', self.handle_open)
        self.bus.on('complete_intent_failure',
                    self.handle_complete_intent_failure)
        self.bus.on('recognizer_loop:sleep', self.handle_sleep)
        self.bus.on('recognizer_loop:wake_up', self.handle_wake_up)
        self.bus.on('mycroft.mic.mute', self.handle_mic_mute)
        self.bus.on('mycroft.mic.unmute', self.handle_mic_unmute)
        self.bus.on('mycroft.mic.get_status', self.handle_mic_get_status)
        self.bus.on('mycroft.mic.listen', self.handle_mic_listen)
        self.bus.on("mycroft.paired", self.handle_paired)
        self.bus.on('recognizer_loop:audio_output_start',
                    self.handle_audio_start)
        self.bus.on('recognizer_loop:audio_output_end', self.handle_audio_end)
        self.bus.on('mycroft.stop', self.handle_stop)

    def run(self):
        """Run the recognizer loop until it exits, reporting status."""
        self.status.set_started()
        try:
            self.status.set_ready()
            self.loop.run()
        except Exception as e:
            self.status.set_error(e)
        self.status.set_stopping()
if __name__ == "__main__":
    # NOTE(review): main() is not defined anywhere in this module or its
    # visible imports — confirm the intended entry point exists before
    # running this file directly.
    main()
|
/* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
/*
* Copyright (c) 2015-2020 Intel, Inc. All rights reserved.
* Copyright (c) 2016 Mellanox Technologies, Inc.
* All rights reserved.
* Copyright (c) 2018 Research Organization for Information Science
* and Technology (RIST). All rights reserved.
*
* $COPYRIGHT$
*
* Additional copyrights may follow
*
* $HEADER$
*/
#include "src/include/pmix_config.h"
#include "include/pmix_common.h"
#include "src/include/pmix_globals.h"
#include "src/class/pmix_list.h"
#include "src/mca/preg/preg.h"
#include "src/util/argv.h"
#include "src/util/error.h"
#include "src/util/pmix_environ.h"
#include "src/server/pmix_server_ops.h"
#include "src/mca/pnet/base/base.h"
static pmix_status_t process_maps(char *nspace, char **nodes, char **procs);
/* NOTE: a tool (e.g., prun) may call this function to
 * harvest local envars for inclusion in a call to
 * PMIx_Spawn, or it might be called in response to
 * a call to PMIx_Allocate_resources.
 *
 * When we are a server, each active pnet plugin is given the chance
 * to contribute to the allocation (stopping on the first hard error).
 * Any PMIX_NODE_MAP/PMIX_PROC_MAP directives found in the supplied
 * info array are parsed and cached via process_maps() before the
 * plugins are invoked. */
pmix_status_t pmix_pnet_base_allocate(char *nspace,
                                      pmix_info_t info[], size_t ninfo,
                                      pmix_list_t *ilist)
{
    pmix_pnet_base_active_module_t *active;
    pmix_status_t rc;
    pmix_namespace_t *nptr, *ns;
    size_t n;
    char **nodes, **procs;

    if (!pmix_pnet_globals.initialized) {
        return PMIX_ERR_INIT;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet:allocate called");

    /* protect against bozo inputs */
    if (NULL == nspace || NULL == ilist) {
        return PMIX_ERR_BAD_PARAM;
    }
    if (PMIX_PEER_IS_SERVER(pmix_globals.mypeer)) {
        PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
        if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
            /* no active plugins - nothing to do */
            PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
            return PMIX_SUCCESS;
        }
        nptr = NULL;
        /* find this nspace - note that it may not have
         * been registered yet */
        PMIX_LIST_FOREACH(ns, &pmix_globals.nspaces, pmix_namespace_t) {
            if (0 == strcmp(ns->nspace, nspace)) {
                nptr = ns;
                break;
            }
        }
        if (NULL == nptr) {
            /* add it */
            nptr = PMIX_NEW(pmix_namespace_t);
            if (NULL == nptr) {
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return PMIX_ERR_NOMEM;
            }
            nptr->nspace = strdup(nspace);
            pmix_list_append(&pmix_globals.nspaces, &nptr->super);
        }
        if (NULL != info) {
            /* check for description of the node and proc maps */
            nodes = NULL;
            procs = NULL;
            for (n=0; n < ninfo; n++) {
                if (PMIX_CHECK_KEY(&info[n], PMIX_NODE_MAP)) {
                    rc = pmix_preg.parse_nodes(info[n].value.data.bo.bytes, &nodes);
                    if (PMIX_SUCCESS != rc) {
                        /* fix: release anything already parsed before
                         * returning (pmix_argv_free is NULL-safe) */
                        pmix_argv_free(nodes);
                        pmix_argv_free(procs);
                        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                        return rc;
                    }
                } else if (PMIX_CHECK_KEY(&info[n], PMIX_PROC_MAP)) {
                    rc = pmix_preg.parse_procs(info[n].value.data.bo.bytes, &procs);
                    if (PMIX_SUCCESS != rc) {
                        /* fix: a previously parsed node map was leaked here */
                        pmix_argv_free(nodes);
                        pmix_argv_free(procs);
                        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                        return rc;
                    }
                }
            }
            rc = PMIX_SUCCESS;
            if (NULL != nodes && NULL != procs) {
                /* assemble the pnet node and proc descriptions
                 * NOTE: this will eventually be folded into the
                 * new shared memory system, but we do it here
                 * as the pnet plugins need the information and
                 * the host will not have registered the clients
                 * and nspace prior to calling allocate
                 */
                rc = process_maps(nspace, nodes, procs);
            }
            /* fix: always release the parsed argvs - previously a lone
             * node map or proc map (only one of the two provided) was
             * never freed */
            pmix_argv_free(nodes);
            pmix_argv_free(procs);
            if (PMIX_SUCCESS != rc) {
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return rc;
            }
        }
        /* process the allocation request - stop on the first hard
         * error; PMIX_ERR_TAKE_NEXT_OPTION simply moves on */
        PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
            if (NULL != active->module->allocate) {
                if (PMIX_SUCCESS == (rc = active->module->allocate(nptr, info, ninfo, ilist))) {
                    continue;
                }
                if (PMIX_ERR_TAKE_NEXT_OPTION != rc) {
                    /* true error */
                    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                    return rc;
                }
            }
        }
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    }
    return PMIX_SUCCESS;
}
/* Can only be called by a server.  Gives every active pnet plugin a
 * chance to configure local network support for the given nspace.
 * The nspace object is created on the fly if the host has not yet
 * registered it.  Returns the first plugin error encountered. */
pmix_status_t pmix_pnet_base_setup_local_network(char *nspace,
                                                 pmix_info_t info[],
                                                 size_t ninfo)
{
    pmix_pnet_base_active_module_t *active;
    pmix_status_t rc;
    pmix_namespace_t *nptr, *ns;
    if (!pmix_pnet_globals.initialized) {
        return PMIX_ERR_INIT;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet: setup_local_network called");
    /* protect against bozo inputs */
    if (NULL == nspace) {
        return PMIX_ERR_BAD_PARAM;
    }
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        /* no active plugins - nothing to do */
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return PMIX_SUCCESS;
    }
    /* find this proc's nspace object */
    nptr = NULL;
    PMIX_LIST_FOREACH(ns, &pmix_globals.nspaces, pmix_namespace_t) {
        if (0 == strcmp(ns->nspace, nspace)) {
            nptr = ns;
            break;
        }
    }
    if (NULL == nptr) {
        /* add it */
        nptr = PMIX_NEW(pmix_namespace_t);
        if (NULL == nptr) {
            PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
            return PMIX_ERR_NOMEM;
        }
        nptr->nspace = strdup(nspace);
        pmix_list_append(&pmix_globals.nspaces, &nptr->super);
    }
    /* stop at the first plugin failure - this is fatal */
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->setup_local_network) {
            if (PMIX_SUCCESS != (rc = active->module->setup_local_network(nptr, info, ninfo))) {
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return rc;
            }
        }
    }
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    return PMIX_SUCCESS;
}
/* Can only be called by a server.  Lets every active pnet plugin add
 * network-related envars to the environment of a child proc about to
 * be forked.  A plugin returning PMIX_ERR_NOT_AVAILABLE is skipped;
 * any other error aborts the scan and is returned. */
pmix_status_t pmix_pnet_base_setup_fork(const pmix_proc_t *proc, char ***env)
{
    pmix_pnet_base_active_module_t *active;
    pmix_status_t rc;
    pmix_namespace_t *nptr, *ns;
    if (!pmix_pnet_globals.initialized) {
        return PMIX_ERR_INIT;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet: setup_fork called");
    /* protect against bozo inputs */
    if (NULL == proc || NULL == env) {
        return PMIX_ERR_BAD_PARAM;
    }
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        /* no active plugins - nothing to do */
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return PMIX_SUCCESS;
    }
    /* find this proc's nspace object */
    nptr = NULL;
    PMIX_LIST_FOREACH(ns, &pmix_globals.nspaces, pmix_namespace_t) {
        if (0 == strcmp(ns->nspace, proc->nspace)) {
            nptr = ns;
            break;
        }
    }
    if (NULL == nptr) {
        /* add it */
        nptr = PMIX_NEW(pmix_namespace_t);
        if (NULL == nptr) {
            PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
            return PMIX_ERR_NOMEM;
        }
        nptr->nspace = strdup(proc->nspace);
        pmix_list_append(&pmix_globals.nspaces, &nptr->super);
    }
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->setup_fork) {
            rc = active->module->setup_fork(nptr, proc, env);
            /* NOT_AVAILABLE means the plugin has nothing to contribute
             * for this proc - keep scanning */
            if (PMIX_SUCCESS != rc && PMIX_ERR_NOT_AVAILABLE != rc) {
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return rc;
            }
        }
    }
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    return PMIX_SUCCESS;
}
/* Notify all active pnet plugins that a local child proc has
 * finalized so they can release any per-proc resources.  Plugin
 * return values (if any) are intentionally ignored - this is a
 * best-effort cleanup notification. */
void pmix_pnet_base_child_finalized(pmix_proc_t *peer)
{
    pmix_pnet_base_active_module_t *active;
    if (!pmix_pnet_globals.initialized) {
        return;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet: child_finalized called");
    /* protect against bozo inputs */
    if (NULL == peer) {
        PMIX_ERROR_LOG(PMIX_ERR_BAD_PARAM);
        return;
    }
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return;
    }
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->child_finalized) {
            active->module->child_finalized(peer);
        }
    }
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    return;
}
/* Notify all active pnet plugins that every local proc of the given
 * nspace has finalized, so nspace-level local resources can be
 * released.  Best-effort: plugin results are not checked. */
void pmix_pnet_base_local_app_finalized(pmix_namespace_t *nptr)
{
    pmix_pnet_base_active_module_t *active;
    if (!pmix_pnet_globals.initialized) {
        return;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet: local_app_finalized called");
    /* protect against bozo inputs */
    if (NULL == nptr) {
        return;
    }
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return;
    }
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->local_app_finalized) {
            active->module->local_app_finalized(nptr);
        }
    }
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    return;
}
/* Deregister an nspace: notify the active plugins, then drop the
 * cached pnet job entry and any per-node local-proc tracking for
 * that nspace.  Silently returns if the nspace is unknown. */
void pmix_pnet_base_deregister_nspace(char *nspace)
{
    pmix_pnet_base_active_module_t *active;
    pmix_namespace_t *nptr, *ns;
    pmix_pnet_job_t *job;
    pmix_pnet_node_t *node;
    if (!pmix_pnet_globals.initialized) {
        return;
    }
    pmix_output_verbose(2, pmix_pnet_base_framework.framework_output,
                        "pnet: deregister_nspace called");
    /* protect against bozo inputs */
    if (NULL == nspace) {
        return;
    }
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return;
    }
    /* find this nspace object */
    nptr = NULL;
    PMIX_LIST_FOREACH(ns, &pmix_globals.nspaces, pmix_namespace_t) {
        if (0 == strcmp(ns->nspace, nspace)) {
            nptr = ns;
            break;
        }
    }
    if (NULL == nptr) {
        /* nothing we can do */
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return;
    }
    /* let the plugins release their per-nspace state first */
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->deregister_nspace) {
            active->module->deregister_nspace(nptr);
        }
    }
    /* drop the cached job entry, if any */
    PMIX_LIST_FOREACH(job, &pmix_pnet_globals.jobs, pmix_pnet_job_t) {
        if (0 == strcmp(nspace, job->nspace)) {
            pmix_list_remove_item(&pmix_pnet_globals.jobs, &job->super);
            PMIX_RELEASE(job);
            break;
        }
    }
    /* scrub the local-proc tracking for this nspace on every node */
    PMIX_LIST_FOREACH(node, &pmix_pnet_globals.nodes, pmix_pnet_node_t) {
        pmix_pnet_local_procs_t *lp;
        PMIX_LIST_FOREACH(lp, &node->local_jobs, pmix_pnet_local_procs_t) {
            if (0 == strcmp(nspace, lp->nspace)) {
                pmix_list_remove_item(&node->local_jobs, &lp->super);
                PMIX_RELEASE(lp);
                break;
            }
        }
    }
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
}
/* Callback handed to each plugin's collect_inventory request.
 * Accumulates the returned inventory into the shared rollup object;
 * once the last outstanding plugin has replied, relays the combined
 * payload (and first recorded error, if any) to the original caller
 * and releases the rollup. */
static void cicbfunc(pmix_status_t status,
                     pmix_list_t *inventory,
                     void *cbdata)
{
    pmix_inventory_rollup_t *rollup = (pmix_inventory_rollup_t*)cbdata;
    pmix_kval_t *kv;
    PMIX_ACQUIRE_THREAD(&rollup->lock);
    /* check if they had an error - only the first error is retained */
    if (PMIX_SUCCESS != status && PMIX_SUCCESS == rollup->status) {
        rollup->status = status;
    }
    /* transfer the inventory */
    if (NULL != inventory) {
        while (NULL != (kv = (pmix_kval_t*)pmix_list_remove_first(inventory))) {
            pmix_list_append(&rollup->payload, &kv->super);
        }
    }
    /* record that we got a reply */
    rollup->replies++;
    /* see if all have replied */
    if (rollup->replies < rollup->requests) {
        /* nope - need to wait */
        PMIX_RELEASE_THREAD(&rollup->lock);
        return;
    }
    /* if we get here, then collection is complete */
    PMIX_RELEASE_THREAD(&rollup->lock);
    if (NULL != rollup->cbfunc) {
        rollup->cbfunc(rollup->status, &rollup->payload, rollup->cbdata);
    }
    PMIX_RELEASE(rollup);
    return;
}
/* Fan a collect-inventory request out to every active pnet plugin.
 * Plugins answering PMIX_OPERATION_IN_PROGRESS will complete
 * asynchronously via cicbfunc; everything else is resolved inline.
 * The caller's cbfunc receives the combined payload once all
 * outstanding plugins have replied. */
void pmix_pnet_base_collect_inventory(pmix_info_t directives[], size_t ndirs,
                                      pmix_inventory_cbfunc_t cbfunc, void *cbdata)
{
    pmix_pnet_base_active_module_t *active;
    pmix_inventory_rollup_t *myrollup;
    pmix_status_t rc;
    /* we cannot block here as each plugin could take some time to
     * complete the request. So instead, we call each active plugin
     * and get their immediate response - if "in progress", then
     * we record that we have to wait for their answer before providing
     * the caller with a response. If "error", then we know we
     * won't be getting a response from them */
    if (!pmix_pnet_globals.initialized) {
        /* need to call them back so they know */
        if (NULL != cbfunc) {
            cbfunc(PMIX_ERR_INIT, NULL, cbdata);
        }
        return;
    }
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        /* fix: guard against a NULL cbfunc here, consistent with
         * every other exit path in this function */
        if (NULL != cbfunc) {
            cbfunc(PMIX_SUCCESS, NULL, cbdata);
        }
        return;
    }
    /* create the rollup object */
    myrollup = PMIX_NEW(pmix_inventory_rollup_t);
    if (NULL == myrollup) {
        /* need to call them back so they know */
        if (NULL != cbfunc) {
            cbfunc(PMIX_ERR_NOMEM, NULL, cbdata);
        }
        return;
    }
    myrollup->cbfunc = cbfunc;
    myrollup->cbdata = cbdata;
    /* hold the lock until all active modules have been called
     * to avoid race condition where replies come in before
     * the requests counter has been fully updated */
    PMIX_ACQUIRE_THREAD(&myrollup->lock);
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->collect_inventory) {
            pmix_output_verbose(5, pmix_pnet_base_framework.framework_output,
                                "COLLECTING %s", active->module->name);
            rc = active->module->collect_inventory(directives, ndirs, cicbfunc, (void*)myrollup);
            /* if they return success, then the values were
             * placed directly on the payload - nothing
             * to wait for here */
            if (PMIX_OPERATION_IN_PROGRESS == rc) {
                myrollup->requests++;
            } else if (PMIX_SUCCESS != rc &&
                       PMIX_ERR_TAKE_NEXT_OPTION != rc &&
                       PMIX_ERR_NOT_SUPPORTED != rc) {
                /* a true error - we need to wait for
                 * all pending requests to complete
                 * and then notify the caller of the error */
                if (PMIX_SUCCESS == myrollup->status) {
                    myrollup->status = rc;
                }
            }
        }
    }
    if (0 == myrollup->requests) {
        /* nothing outstanding - report back immediately */
        PMIX_RELEASE_THREAD(&myrollup->lock);
        if (NULL != cbfunc) {
            cbfunc(myrollup->status, &myrollup->payload, cbdata);
        }
        PMIX_RELEASE(myrollup);
        return;
    }
    PMIX_RELEASE_THREAD(&myrollup->lock);
    return;
}
/* Callback handed to each plugin's deliver_inventory request.
 * Counts replies into the shared rollup object; when the last
 * outstanding plugin has replied, relays the first recorded error
 * (or success) to the original caller and releases the rollup. */
static void dlcbfunc(pmix_status_t status,
                     void *cbdata)
{
    pmix_inventory_rollup_t *rollup = (pmix_inventory_rollup_t*)cbdata;
    PMIX_ACQUIRE_THREAD(&rollup->lock);
    /* check if they had an error - only the first error is retained */
    if (PMIX_SUCCESS != status && PMIX_SUCCESS == rollup->status) {
        rollup->status = status;
    }
    /* record that we got a reply */
    rollup->replies++;
    /* see if all have replied */
    if (rollup->replies < rollup->requests) {
        /* nope - need to wait */
        PMIX_RELEASE_THREAD(&rollup->lock);
        return;
    }
    /* if we get here, then delivery is complete */
    PMIX_RELEASE_THREAD(&rollup->lock);
    if (NULL != rollup->opcbfunc) {
        rollup->opcbfunc(rollup->status, rollup->cbdata);
    }
    PMIX_RELEASE(rollup);
    return;
}
/* Fan an inventory delivery out to every active pnet plugin.
 * Plugins answering PMIX_OPERATION_IN_PROGRESS will complete
 * asynchronously via dlcbfunc; everything else is resolved inline.
 * The caller's cbfunc is invoked once all outstanding plugins have
 * replied. */
void pmix_pnet_base_deliver_inventory(pmix_info_t info[], size_t ninfo,
                                      pmix_info_t directives[], size_t ndirs,
                                      pmix_op_cbfunc_t cbfunc, void *cbdata)
{
    pmix_pnet_base_active_module_t *active;
    pmix_inventory_rollup_t *myrollup;
    pmix_status_t rc;
    /* we cannot block here as each plugin could take some time to
     * complete the request. So instead, we call each active plugin
     * and get their immediate response - if "in progress", then
     * we record that we have to wait for their answer before providing
     * the caller with a response. If "error", then we know we
     * won't be getting a response from them */
    if (!pmix_pnet_globals.initialized) {
        /* need to call them back so they know */
        if (NULL != cbfunc) {
            cbfunc(PMIX_ERR_INIT, cbdata);
        }
        return;
    }
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        /* fix: guard against a NULL cbfunc here, consistent with
         * every other exit path in this function */
        if (NULL != cbfunc) {
            cbfunc(PMIX_SUCCESS, cbdata);
        }
        return;
    }
    /* create the rollup object */
    myrollup = PMIX_NEW(pmix_inventory_rollup_t);
    if (NULL == myrollup) {
        /* need to call them back so they know */
        if (NULL != cbfunc) {
            cbfunc(PMIX_ERR_NOMEM, cbdata);
        }
        return;
    }
    myrollup->opcbfunc = cbfunc;
    myrollup->cbdata = cbdata;
    /* hold the lock until all active modules have been called
     * to avoid race condition where replies come in before
     * the requests counter has been fully updated */
    PMIX_ACQUIRE_THREAD(&myrollup->lock);
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->deliver_inventory) {
            pmix_output_verbose(5, pmix_pnet_base_framework.framework_output,
                                "DELIVERING TO %s", active->module->name);
            rc = active->module->deliver_inventory(info, ninfo, directives, ndirs, dlcbfunc, (void*)myrollup);
            /* if they return success, then the values were
             * immediately archived - nothing to wait for here */
            if (PMIX_OPERATION_IN_PROGRESS == rc) {
                myrollup->requests++;
            } else if (PMIX_SUCCESS != rc &&
                       PMIX_ERR_TAKE_NEXT_OPTION != rc &&
                       PMIX_ERR_NOT_SUPPORTED != rc) {
                /* a true error - we need to wait for
                 * all pending requests to complete
                 * and then notify the caller of the error */
                if (PMIX_SUCCESS == myrollup->status) {
                    myrollup->status = rc;
                }
            }
        }
    }
    if (0 == myrollup->requests) {
        /* nothing outstanding - report back immediately */
        PMIX_RELEASE_THREAD(&myrollup->lock);
        if (NULL != cbfunc) {
            cbfunc(myrollup->status, cbdata);
        }
        PMIX_RELEASE(myrollup);
        return;
    }
    PMIX_RELEASE_THREAD(&myrollup->lock);
    return;
}
/* Register a fabric object: scan the active pnet plugins until one
 * accepts the request.  On immediate success the fabric is tracked
 * locally (so remote queries can be answered) and the result is
 * returned to the caller.  PMIX_SUCCESS from a plugin indicates the
 * request is being processed asynchronously via cbfunc. */
pmix_status_t pmix_pnet_base_register_fabric(pmix_fabric_t *fabric,
                                             const pmix_info_t directives[],
                                             size_t ndirs,
                                             pmix_op_cbfunc_t cbfunc, void *cbdata)
{
    pmix_pnet_base_active_module_t *active;
    pmix_status_t rc;
    pmix_pnet_fabric_t *ft;
    /* ensure our fields of the fabric object are initialized */
    fabric->info = NULL;
    fabric->ninfo = 0;
    fabric->module = NULL;
    PMIX_ACQUIRE_THREAD(&pmix_pnet_globals.lock);
    if (0 == pmix_list_get_size(&pmix_pnet_globals.actives)) {
        PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
        return PMIX_ERR_NOT_SUPPORTED;
    }
    /* scan across active modules until one returns success */
    PMIX_LIST_FOREACH(active, &pmix_pnet_globals.actives, pmix_pnet_base_active_module_t) {
        if (NULL != active->module->register_fabric) {
            rc = active->module->register_fabric(fabric, directives, ndirs, cbfunc, cbdata);
            if (PMIX_OPERATION_SUCCEEDED == rc) {
                /* track this fabric so we can respond to remote requests */
                ft = PMIX_NEW(pmix_pnet_fabric_t);
                ft->index = fabric->index;
                if (NULL != fabric->name) {
                    ft->name = strdup(fabric->name);
                }
                ft->module = active->module;
                pmix_list_append(&pmix_pnet_globals.fabrics, &ft->super);
                /* fix: report the success to the caller - previously we
                 * kept scanning and fell through to PMIX_ERR_NOT_FOUND
                 * even though the registration succeeded */
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return rc;
            } else if (PMIX_ERR_TAKE_NEXT_OPTION != rc) {
                /* a true error, or PMIX_SUCCESS (async in progress) -
                 * just return the result */
                PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
                return rc;
            }
        }
    }
    /* no plugin could support the request - unlock prior to return */
    PMIX_RELEASE_THREAD(&pmix_pnet_globals.lock);
    return PMIX_ERR_NOT_FOUND;
}
/* Refresh a fabric object's contents.  Locates the owning pnet module
 * (directly from the fabric if locally registered, otherwise by
 * searching our tracked fabrics by index or name) and dispatches the
 * update to it. */
pmix_status_t pmix_pnet_base_update_fabric(pmix_fabric_t *fabric)
{
    pmix_pnet_module_t *mod = NULL;
    pmix_pnet_fabric_t *ft;
    pmix_pnet_fabric_t *local;

    /* protect against bozo input */
    if (NULL == fabric) {
        return PMIX_ERR_BAD_PARAM;
    }
    if (NULL != fabric->module) {
        /* locally registered - the module hangs off the fabric object */
        local = (pmix_pnet_fabric_t*)fabric->module;
        mod = (pmix_pnet_module_t*)local->module;
    } else {
        /* this might be a remote request, so look at the list of
         * fabrics we have registered locally and see if one matches
         * by index or by name (last match wins) */
        PMIX_LIST_FOREACH(ft, &pmix_pnet_globals.fabrics, pmix_pnet_fabric_t) {
            if (fabric->index == ft->index ||
                (NULL != fabric->name && NULL != ft->name &&
                 0 == strcmp(ft->name, fabric->name))) {
                mod = ft->module;
            }
        }
    }
    if (NULL == mod) {
        return PMIX_ERR_BAD_PARAM;
    }
    if (NULL == mod->update_fabric) {
        /* nothing to do - treat as success */
        return PMIX_SUCCESS;
    }
    return mod->update_fabric(fabric);
}
/* Deregister a fabric object.  Locates the owning pnet module
 * (directly from the fabric if locally registered, otherwise by
 * searching our tracked fabrics by index or name) and dispatches the
 * deregistration to it. */
pmix_status_t pmix_pnet_base_deregister_fabric(pmix_fabric_t *fabric)
{
    pmix_pnet_module_t *mod = NULL;
    pmix_pnet_fabric_t *ft;
    pmix_pnet_fabric_t *local;

    /* protect against bozo input */
    if (NULL == fabric) {
        return PMIX_ERR_BAD_PARAM;
    }
    if (NULL != fabric->module) {
        /* locally registered - the module hangs off the fabric object */
        local = (pmix_pnet_fabric_t*)fabric->module;
        mod = (pmix_pnet_module_t*)local->module;
    } else {
        /* this might be a remote request, so look at the list of
         * fabrics we have registered locally and see if one matches
         * by index or by name (last match wins) */
        PMIX_LIST_FOREACH(ft, &pmix_pnet_globals.fabrics, pmix_pnet_fabric_t) {
            if (fabric->index == ft->index ||
                (NULL != fabric->name && NULL != ft->name &&
                 0 == strcmp(ft->name, fabric->name))) {
                mod = ft->module;
            }
        }
    }
    if (NULL == mod) {
        return PMIX_ERR_BAD_PARAM;
    }
    if (NULL == mod->deregister_fabric) {
        /* nothing to do - treat as success */
        return PMIX_SUCCESS;
    }
    return mod->deregister_fabric(fabric);
}
/* Cache the node/proc layout for a job.  'nodes' and 'procs' are
 * parallel argv arrays: procs[n] is a comma-delimited list of the
 * ranks hosted on nodes[n].  Creates the pnet job and node tracking
 * objects on demand and replaces any prior rank list for this
 * nspace on each node. */
static pmix_status_t process_maps(char *nspace, char **nodes, char **procs)
{
    char **ranks;
    pmix_status_t rc;
    size_t m, n;
    pmix_pnet_job_t *jptr, *job;
    pmix_pnet_node_t *nd, *ndptr;
    pmix_pnet_local_procs_t *lp;
    bool needcheck;
    /* bozo check - the two arrays must be parallel */
    if (pmix_argv_count(nodes) != pmix_argv_count(procs)) {
        rc = PMIX_ERR_BAD_PARAM;
        PMIX_ERROR_LOG(rc);
        return rc;
    }
    /* see if we already know about this job */
    job = NULL;
    if (0 < pmix_list_get_size(&pmix_pnet_globals.jobs)) {
        PMIX_LIST_FOREACH(jptr, &pmix_pnet_globals.jobs, pmix_pnet_job_t) {
            if (0 == strcmp(nspace, jptr->nspace)) {
                job = jptr;
                break;
            }
        }
    }
    if (NULL == job) {
        job = PMIX_NEW(pmix_pnet_job_t);
        job->nspace = strdup(nspace);
        pmix_list_append(&pmix_pnet_globals.jobs, &job->super);
    }
    /* only bother scanning the global node list if it is non-empty */
    if (0 < pmix_list_get_size(&pmix_pnet_globals.nodes)) {
        needcheck = true;
    } else {
        needcheck = false;
    }
    for (n=0; NULL != nodes[n]; n++) {
        if (needcheck) {
            /* check and see if we already have data for this node */
            nd = NULL;
            PMIX_LIST_FOREACH(ndptr, &pmix_pnet_globals.nodes, pmix_pnet_node_t) {
                if (0 == strcmp(nodes[n], ndptr->name)) {
                    nd = ndptr;
                    break;
                }
            }
            if (NULL == nd) {
                nd = PMIX_NEW(pmix_pnet_node_t);
                nd->name = strdup(nodes[n]);
                pmix_list_append(&pmix_pnet_globals.nodes, &nd->super);
                /* add this node to the job */
                PMIX_RETAIN(nd);
                nd->index = pmix_pointer_array_add(&job->nodes, nd);
            }
            /* NOTE(review): when an already-known node is found, it is
             * NOT attached to this job's node array - confirm that is
             * intentional (a new job reusing an existing node appears
             * to be left out of job->nodes) */
        } else {
            nd = PMIX_NEW(pmix_pnet_node_t);
            nd->name = strdup(nodes[n]);
            pmix_list_append(&pmix_pnet_globals.nodes, &nd->super);
            /* add this node to the job */
            PMIX_RETAIN(nd);
            nd->index = pmix_pointer_array_add(&job->nodes, nd);
        }
        /* check and see if we already have this job on this node */
        PMIX_LIST_FOREACH(lp, &nd->local_jobs, pmix_pnet_local_procs_t) {
            if (0 == strcmp(nspace, lp->nspace)) {
                /* we assume that the input replaces the prior
                 * list of ranks */
                pmix_list_remove_item(&nd->local_jobs, &lp->super);
                PMIX_RELEASE(lp);
                break;
            }
        }
        /* track the local procs */
        lp = PMIX_NEW(pmix_pnet_local_procs_t);
        lp->nspace = strdup(nspace);
        /* separate out the procs - they are a comma-delimited list
         * of rank values */
        ranks = pmix_argv_split(procs[n], ',');
        lp->np = pmix_argv_count(ranks);
        lp->ranks = (pmix_rank_t*)malloc(lp->np * sizeof(pmix_rank_t));
        for (m=0; m < lp->np; m++) {
            lp->ranks[m] = strtoul(ranks[m], NULL, 10);
        }
        pmix_list_append(&nd->local_jobs, &lp->super);
        pmix_argv_free(ranks);
    }
    return PMIX_SUCCESS;
}
|
/*
* Copyright 2015 Anton Tananaev (anton@traccar.org)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
Ext.define('Traccar.Application', {
    extend: 'Ext.app.Application',
    name: 'Traccar',

    requires: [
        'Traccar.Style',
        'Traccar.AttributeFormatter'
    ],

    models: [
        'Server',
        'User',
        'Group',
        'Device',
        'Position',
        'Attribute',
        'Command',
        'Event',
        'Geofence',
        'Notification',
        'AttributeAlias',
        'ReportSummary',
        'ReportTrip',
        'Calendar'
    ],

    stores: [
        'Groups',
        'Devices',
        'AllGroups',
        'AllDevices',
        'Positions',
        'LatestPositions',
        'Users',
        'Attributes',
        'MapTypes',
        'DistanceUnits',
        'SpeedUnits',
        'CoordinateFormats',
        'CommandTypes',
        'TimeUnits',
        'Languages',
        'Events',
        'Geofences',
        'AllGeofences',
        'Notifications',
        'AllNotifications',
        'GeofenceTypes',
        'AttributeAliases',
        'ReportRoute',
        'ReportEvents',
        'ReportTrips',
        'ReportSummary',
        'ReportTypes',
        'ReportEventTypes',
        'ReportChartTypes',
        'Statistics',
        'DeviceImages',
        'Calendars',
        'AllCalendars'
    ],

    controllers: [
        'Root'
    ],

    // True when the viewport is narrow enough to warrant the mobile layout.
    isMobile: function () {
        return window.matchMedia && window.matchMedia('(max-width: 768px)').matches;
    },

    // Resolve the localized label for an event type, falling back to the
    // derived key itself when no translation exists.
    getEventString: function (eventType) {
        var key = 'event' + eventType.charAt(0).toUpperCase() + eventType.slice(1);
        return Strings[key] || key;
    },

    // Switch the root panel between the main view (0) and the reports view (1).
    showReports: function (show) {
        var rootPanel = Ext.getCmp('rootPanel');
        if (rootPanel) {
            rootPanel.setActiveItem(show ? 1 : 0);
        }
    },

    // Store the logged-in user record built from raw JSON data.
    setUser: function (data) {
        var reader = Ext.create('Ext.data.reader.Json', {
            model: 'Traccar.model.User'
        });
        this.user = reader.readRecords(data).getRecords()[0];
    },

    getUser: function () {
        return this.user;
    },

    // Store the server configuration record built from raw JSON data.
    setServer: function (data) {
        var reader = Ext.create('Ext.data.reader.Json', {
            model: 'Traccar.model.Server'
        });
        this.server = reader.readRecords(data).getRecords()[0];
    },

    getServer: function () {
        return this.server;
    },

    // Look up a preference, with server settings taking precedence when
    // 'forceSettings' is enabled, otherwise user settings win.
    // NOTE(review): uses || so falsy stored values (0, '', false) fall
    // through to the next source - confirm that is intended.
    getPreference: function (key, defaultValue) {
        if (this.getServer().get('forceSettings')) {
            return this.getServer().get(key) || this.getUser().get(key) || defaultValue;
        } else {
            return this.getUser().get(key) || this.getServer().get(key) || defaultValue;
        }
    },

    // Same precedence rules as getPreference, but reads from the free-form
    // 'attributes' map of the server/user records.
    getAttributePreference: function (key, defaultValue) {
        // Fix: guard against a missing attributes map - previously this
        // threw a TypeError when the record had no 'attributes' value.
        var serverAttributes = this.getServer().get('attributes') || {};
        var userAttributes = this.getUser().get('attributes') || {};
        if (this.getServer().get('forceSettings')) {
            return serverAttributes[key] || userAttributes[key] || defaultValue;
        } else {
            return userAttributes[key] || serverAttributes[key] || defaultValue;
        }
    },

    // Pick the report route color for a device: the device's own
    // 'web.reportColor' attribute if set, otherwise a deterministic
    // color from the style palette keyed by device id.
    getReportColor: function (deviceId) {
        var index, reportColor, device = Ext.getStore('Devices').getById(deviceId);
        if (device) {
            // Fix: tolerate devices without an attributes map.
            reportColor = (device.get('attributes') || {})['web.reportColor'];
        }
        if (reportColor) {
            return reportColor;
        } else {
            index = 0;
            if (deviceId !== undefined) {
                index = deviceId % Traccar.Style.mapRouteColor.length;
            }
            return Traccar.Style.mapRouteColor[index];
        }
    },

    // Show the most specific error message available from a string or
    // an XHR-style response object.
    showError: function (response) {
        if (Ext.isString(response)) {
            Ext.Msg.alert(Strings.errorTitle, response);
        } else if (response.responseText) {
            Ext.Msg.alert(Strings.errorTitle, response.responseText);
        } else if (response.statusText) {
            Ext.Msg.alert(Strings.errorTitle, response.statusText);
        } else {
            Ext.Msg.alert(Strings.errorTitle, Strings.errorConnection);
        }
    }
});
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
"""
@author:Hadrianl
"""
from pathlib import Path
from vnpy.trader.app import BaseApp
from .engine import VisualEngine, APP_NAME
class VisualizationApp(BaseApp):
    """Vn.py app descriptor exposing the IB visualization engine to the trader UI."""
    app_name = APP_NAME              # unique identifier registered with the main engine
    app_module = __module__          # import path used by the framework for dynamic loading
    app_path = Path(__file__).parent  # root directory for this app's resources
    display_name = "IB可视化"         # label shown in the UI menu (IB visualization)
    engine_class = VisualEngine      # engine class instantiated by the framework
    widget_name = "CandleChartWidget"  # name of the Qt widget class opened for this app
    icon_name = "visulization.ico"   # icon asset (spelling matches the shipped filename)
|
// English locale strings for the online form-editor UI, grouped by screen area.
export default {
  header: {
    editor: 'edit online'
  },
  form: {
    itemsAsset: 'Form Item Asset',
    attribute: 'Form Attribute',
    itemAttribute: 'FormItem Attribute',
    JSON: 'JSON to Form'
  },
  code: {
    copy: 'copy code'
  }
}
|
// Auto-generated documentation search-index entry: [topic href, title, summary].
search_result['239']=["topic_0000000000000075.html","SyncAccessTokenDto Class",""];
|
/*jshint globalstrict:false, strict:false */
/*jshint -W034, -W098, -W016 */
/*eslint no-useless-computed-key: "off"*/
/*global assertTrue */
////////////////////////////////////////////////////////////////////////////////
/// @brief test v8
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
var jsunity = require("jsunity");
var console = require("console");
////////////////////////////////////////////////////////////////////////////////
/// @brief test crash resilience
////////////////////////////////////////////////////////////////////////////////
// Regression suite for historical V8 optimizer segfaults.  The bodies below
// are intentionally "useless" code: the exact mix of computed keys and
// shorthand properties is what triggered the crash, so they must not be
// simplified or refactored.
function V8CrashSuite () {
  'use strict';
  return {
////////////////////////////////////////////////////////////////////////////////
/// @brief set up
////////////////////////////////////////////////////////////////////////////////
    setUp : function () {
    },
////////////////////////////////////////////////////////////////////////////////
/// @brief tear down
////////////////////////////////////////////////////////////////////////////////
    tearDown : function () {
    },
////////////////////////////////////////////////////////////////////////////////
/// @brief https://bugs.chromium.org/p/v8/issues/detail?id=5033
////////////////////////////////////////////////////////////////////////////////
    testTypeFeedbackOracle : function () {
      "use strict";
      // code below is useless, but it triggered a segfault in V8 code optimization
      var test = function () {
        var t = Date.now();
        // object literal mixing a computed key with a shorthand property -
        // the pattern that exercised the TypeFeedbackOracle bug
        var o = {
          ['p'] : 1,
          t
        };
      };
      // enough iterations to force the function through V8's optimizer
      for (var n = 0; n < 100000; n++) {
        test();
      }
      // simply need to survive the above code
      assertTrue(true);
    },
////////////////////////////////////////////////////////////////////////////////
/// @brief https://bugs.chromium.org/p/v8/issues/detail?id=5033
////////////////////////////////////////////////////////////////////////////////
    testTypeFeedbackOracle2 : function () {
      "use strict";
      // code below is useless, but it triggered a segfault in V8 code optimization
      var test = function () {
        var random = 0 | Math.random() * 1000;
        var today = Date.now();
        // dynamic computed key plus two shorthand properties - second
        // variant of the same optimizer crash
        var o = {
          ['prop_' + random] : today,
          random,
          today
        };
      };
      console.time('test');
      // enough iterations to force the function through V8's optimizer
      for (var n = 0; n < 100000; n++) {
        test();
      }
      console.timeEnd('test');
      // simply need to survive the above code
      assertTrue(true);
    }
  };
}
////////////////////////////////////////////////////////////////////////////////
/// @brief executes the test suite
////////////////////////////////////////////////////////////////////////////////
// Run the suite and hand the aggregated result back to the test harness.
jsunity.run(V8CrashSuite);
return jsunity.done();
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""
This module contains the classes required for dialogue management.
- OefSearchDialogue: The dialogue class maintains state of a dialogue of type oef_search and manages it.
- OefSearchDialogues: The dialogues class keeps track of all dialogues of type oef_search.
- SigningDialogue: The dialogue class maintains state of a dialogue of type signing and manages it.
- SigningDialogues: The dialogues class keeps track of all dialogues of type signing.
- TacDialogue: The dialogue class maintains state of a dialogue of type tac and manages it.
- TacDialogues: The dialogues class keeps track of all dialogues of type tac.
"""
from aea.helpers.dialogue.base import Dialogue as BaseDialogue
from aea.helpers.dialogue.base import DialogueLabel as BaseDialogueLabel
from aea.protocols.base import Message
from aea.protocols.state_update.dialogues import (
StateUpdateDialogue as BaseStateUpdateDialogue,
)
from aea.protocols.state_update.dialogues import (
StateUpdateDialogues as BaseStateUpdateDialogues,
)
from aea.skills.base import Model
from packages.fetchai.protocols.oef_search.dialogues import (
OefSearchDialogue as BaseOefSearchDialogue,
)
from packages.fetchai.protocols.oef_search.dialogues import (
OefSearchDialogues as BaseOefSearchDialogues,
)
from packages.fetchai.protocols.tac.dialogues import TacDialogue as BaseTacDialogue
from packages.fetchai.protocols.tac.dialogues import TacDialogues as BaseTacDialogues
# Re-export: the base oef_search dialogue implementation is used unchanged.
OefSearchDialogue = BaseOefSearchDialogue
class OefSearchDialogues(Model, BaseOefSearchDialogues):
    """This class keeps track of all oef_search dialogues."""
    def __init__(self, **kwargs) -> None:
        """
        Initialize dialogues.

        :param agent_address: the address of the agent for whom dialogues are maintained
        :return: None
        """
        Model.__init__(self, **kwargs)
        # Key dialogues by agent address qualified with the skill id so that
        # multiple skills on the same agent do not collide.
        BaseOefSearchDialogues.__init__(
            self, self.context.agent_address + "_" + str(self.context.skill_id)
        )
    @staticmethod
    def role_from_first_message(message: Message) -> BaseDialogue.Role:
        """
        Infer the role of the agent from an incoming/outgoing first message.

        :param message: an incoming/outgoing first message
        :return: the role of the agent (always AGENT for oef_search dialogues)
        """
        return BaseOefSearchDialogue.Role.AGENT
    def create_dialogue(
        self, dialogue_label: BaseDialogueLabel, role: BaseDialogue.Role,
    ) -> OefSearchDialogue:
        """
        Create an instance of oef_search dialogue.

        :param dialogue_label: the identifier of the dialogue
        :param role: the role of the agent this dialogue is maintained for
        :return: the created dialogue
        """
        dialogue = OefSearchDialogue(
            dialogue_label=dialogue_label, agent_address=self.agent_address, role=role
        )
        return dialogue
# Re-export: the base state_update dialogue implementation is used unchanged.
StateUpdateDialogue = BaseStateUpdateDialogue
class StateUpdateDialogues(Model, BaseStateUpdateDialogues):
    """This class keeps track of all state_update dialogues."""
    def __init__(self, **kwargs) -> None:
        """
        Initialize dialogues.

        :param agent_address: the address of the agent for whom dialogues are maintained
        :return: None
        """
        Model.__init__(self, **kwargs)
        # Key dialogues by agent address qualified with the skill id so that
        # multiple skills on the same agent do not collide.
        BaseStateUpdateDialogues.__init__(
            self, self.context.agent_address + "_" + str(self.context.skill_id)
        )
    @staticmethod
    def role_from_first_message(message: Message) -> BaseDialogue.Role:
        """
        Infer the role of the agent from an incoming/outgoing first message.

        :param message: an incoming/outgoing first message
        :return: the role of the agent (always SKILL for state_update dialogues)
        """
        return BaseStateUpdateDialogue.Role.SKILL
    def create_dialogue(
        self, dialogue_label: BaseDialogueLabel, role: BaseDialogue.Role,
    ) -> StateUpdateDialogue:
        """
        Create an instance of state_update dialogue.

        :param dialogue_label: the identifier of the dialogue
        :param role: the role of the agent this dialogue is maintained for
        :return: the created dialogue
        """
        dialogue = StateUpdateDialogue(
            dialogue_label=dialogue_label, agent_address=self.agent_address, role=role
        )
        return dialogue
# Re-export the base dialogue class under this skill's namespace.
TacDialogue = BaseTacDialogue


class TacDialogues(Model, BaseTacDialogues):
    """The dialogues class keeps track of all dialogues."""

    def __init__(self, **kwargs) -> None:
        """
        Initialize dialogues.

        :param kwargs: keyword arguments forwarded to the skill Model base class
        :return: None
        """
        Model.__init__(self, **kwargs)
        # NOTE(review): unlike the other dialogue stores in this file, the
        # address is not suffixed with the skill id — confirm this is intended.
        BaseTacDialogues.__init__(self, self.context.agent_address)

    @staticmethod
    def role_from_first_message(message: Message) -> BaseDialogue.Role:
        """
        Infer the role of the agent from an incoming/outgoing first message.

        :param message: an incoming/outgoing first message
        :return: The role of the agent
        """
        # This agent always joins the competition as a participant.
        return BaseTacDialogue.Role.PARTICIPANT

    def create_dialogue(
        self, dialogue_label: BaseDialogueLabel, role: BaseDialogue.Role,
    ) -> TacDialogue:
        """
        Create an instance of tac dialogue.

        :param dialogue_label: the identifier of the dialogue
        :param role: the role of the agent this dialogue is maintained for
        :return: the created dialogue
        """
        dialogue = TacDialogue(
            dialogue_label=dialogue_label, agent_address=self.agent_address, role=role
        )
        return dialogue
|
// Material-UI style generated icon module: one SVG path wrapped into a
// reusable icon component. The path data should not be edited by hand.
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';

// 'CloudDoneSharp' — cloud with a check mark, sharp variant.
export default createSvgIcon(React.createElement("path", {
  d: "M19.35 10.04C18.67 6.59 15.64 4 12 4 9.11 4 6.6 5.64 5.35 8.04 2.34 8.36 0 10.91 0 14c0 3.31 2.69 6 6 6h13c2.76 0 5-2.24 5-5 0-2.64-2.05-4.78-4.65-4.96zM10 17l-3.5-3.5 1.41-1.41L10 14.18 15.18 9l1.41 1.41L10 17z"
}), 'CloudDoneSharp');
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetAllEntries
# Retrieves all calendar entries from a specified project.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetAllEntries(Choreography):
    """
    Temboo Choreo wrapper: retrieves all calendar entries from a specified
    Basecamp project. (Temboo-generated code.)
    """

    def __init__(self, temboo_session):
        """
        Create a new instance of the GetAllEntries Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(GetAllEntries, self).__init__(temboo_session, '/Library/Basecamp/GetAllEntries')

    def new_input_set(self):
        # Factory for the Choreo-specific input container.
        return GetAllEntriesInputSet()

    def _make_result_set(self, result, path):
        # Internal factory used by the Temboo framework.
        return GetAllEntriesResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Internal factory used by the Temboo framework.
        return GetAllEntriesChoreographyExecution(session, exec_id, path)
class GetAllEntriesInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GetAllEntries
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    All four inputs below are required by the Choreo.
    """

    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((required, string) A valid Basecamp account name. This is the first part of the account's URL.)
        """
        super(GetAllEntriesInputSet, self)._set_input('AccountName', value)

    def set_Password(self, value):
        """
        Set the value of the Password input for this Choreo. ((required, password) The Basecamp account password. Use the value 'X' when specifying an API Key for the Username input.)
        """
        super(GetAllEntriesInputSet, self)._set_input('Password', value)

    def set_ProjectID(self, value):
        """
        Set the value of the ProjectID input for this Choreo. ((required, integer) The ID for the project from which to retrieve all calendar entries.)
        """
        super(GetAllEntriesInputSet, self)._set_input('ProjectID', value)

    def set_Username(self, value):
        """
        Set the value of the Username input for this Choreo. ((required, string) A Basecamp account username or API Key.)
        """
        super(GetAllEntriesInputSet, self)._set_input('Username', value)
class GetAllEntriesResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GetAllEntries Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # Parse a JSON string into Python objects.
        # NOTE(review): the parameter name shadows the built-in `str`; kept
        # unchanged because this is Temboo-generated code and callers may
        # rely on the positional signature.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((xml) The response returned from Basecamp.)
        """
        return self._output.get('Response', None)
class GetAllEntriesChoreographyExecution(ChoreographyExecution):
    """Execution wrapper for the GetAllEntries Choreo. (Temboo-generated.)"""

    def _make_result_set(self, response, path):
        # Internal factory used by the Temboo framework.
        return GetAllEntriesResultSet(response, path)
|
var _ = require('underscore');
var MockFactory = require('../../../helpers/mockFactory');
module.exports = function (LayerModel) {
var layer;
var source;
var engineMock;
beforeEach(function () {
source = MockFactory.createAnalysisModel({ id: 'a0' });
engineMock = MockFactory.createEngine();
layer = new LayerModel({source: source}, { engine: engineMock });
});
var METHODS = [
'isVisible',
'getName'
];
_.each(METHODS, function (method) {
it('should respond to .' + method, function () {
expect(typeof layer[method] === 'function').toBeTruthy();
});
});
it('should have legends', function () {
var legends = [
{ type: 'bubble', title: 'My Bubble Legend' },
{ type: 'category', title: 'My Category Legend' },
{ type: 'choropleth', title: 'My Choropleth Legend' },
{ type: 'custom', title: 'My Custom Legend' }
];
layer = new LayerModel({ legends: legends }, { engine: engineMock });
expect(layer.get('legends')).toBeUndefined();
expect(layer.legends.bubble.get('title')).toEqual('My Bubble Legend');
expect(layer.legends.category.get('title')).toEqual('My Category Legend');
expect(layer.legends.choropleth.get('title')).toEqual('My Choropleth Legend');
expect(layer.legends.custom.get('title')).toEqual('My Custom Legend');
});
describe('source references', function () {
describe('when layer is initialized', function () {
it('should mark source as referenced', function () {
expect(source.isSourceOf(layer)).toBe(true);
});
});
describe('when layer is updated', function () {
it('should unmark source and mark new source as referenced', function () {
var oldSource = source;
var newSource = MockFactory.createAnalysisModel({ id: 'a1' });
expect(oldSource.isSourceOf(layer)).toBe(true);
expect(newSource.isSourceOf(layer)).toBe(false);
layer.setSource(newSource);
expect(oldSource.isSourceOf(layer)).toBe(false);
expect(newSource.isSourceOf(layer)).toBe(true);
});
});
describe('when layer is removed', function () {
it('should unmark source as referenced', function () {
expect(source.isSourceOf(layer)).toBe(true);
layer.remove();
expect(source.isSourceOf(layer)).toBe(false);
});
});
});
};
|
"""Add min and max position cost
Revision ID: e31a94a27efb
Revises: 15bc523ae63e
Create Date: 2019-04-08 14:18:56.081686
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'e31a94a27efb'
down_revision = '15bc523ae63e'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add position-cost bound columns to the table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Nullable: existing rows need no value for the new maximum.
    op.add_column('decidotron_decision_process', sa.Column('max_position_cost', sa.Integer(), nullable=True))
    # server_default='0' lets the NOT NULL column be added to a populated table.
    op.add_column('decidotron_decision_process',
                  sa.Column('min_position_cost', sa.Integer(), server_default='0', nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop both position-cost bound columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('decidotron_decision_process', 'min_position_cost')
    op.drop_column('decidotron_decision_process', 'max_position_cost')
    # ### end Alembic commands ###
|
import React from 'react';
import { Route, Switch } from 'react-router-dom';

import StartRoute from '@/components/ReactRouter/StartRoute';
import AddressRoute from '@/components/ReactRouter/AddressRoute';
import BlockRoute from '@/components/ReactRouter/BlockRoute';
import BlocksRoute from '@/components/ReactRouter/BlocksRoute';
import TransactionRoute from '@/components/ReactRouter/TransactionRoute';

// Route table for the app. <Switch> renders only the first matching <Route>;
// `exact` keeps '/' (and the list pages) from shadowing deeper paths.
const Switcher = () => (
  <Switch>
    {/* Landing page */}
    <Route
      path='/'
      component={StartRoute}
      exact
    />
    {/* Single address details (`:address` URL parameter) */}
    <Route
      path='/address/:address'
      component={AddressRoute}
    />
    {/* Block list */}
    <Route
      path='/blocks'
      component={BlocksRoute}
      exact
    />
    {/* Single block details (`:blockNumber` URL parameter) */}
    <Route
      path='/block/:blockNumber'
      component={BlockRoute}
      exact
    />
    {/* Single transaction details (`:hash` URL parameter) */}
    <Route
      path='/transaction/:hash'
      component={TransactionRoute}
    />
  </Switch>
);

export default Switcher;
|
"use strict";
// NOTE: compiled TypeScript output (see sourceMappingURL below); prefer
// editing the .ts source rather than this file.
Object.defineProperty(exports, "__esModule", { value: true });
exports.TurnMemoryScope = void 0;
/**
 * @module botbuilder-dialogs
 */
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */
const memoryScope_1 = require("./memoryScope");
const scopePath_1 = require("../scopePath");
/**
 * @private
 * Key under which the per-turn memory object is stored in turnState.
 */
const TURN_STATE = 'turn';
/**
 * TurnMemoryScope represents memory scoped to the current turn.
 */
class TurnMemoryScope extends memoryScope_1.MemoryScope {
    /**
     * Initializes a new instance of the [TurnMemoryScope](xref:botbuilder-dialogs.TurnMemoryScope) class.
     */
    constructor() {
        super(scopePath_1.ScopePath.turn, true);
    }
    /**
     * Get the backing memory for this scope.
     * @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for this turn.
     * @returns The memory for the scope.
     */
    getMemory(dc) {
        let memory = dc.context.turnState.get(TURN_STATE);
        // Lazily create the backing object on first access this turn.
        // NOTE: `typeof null === 'object'`, so a null value stored under
        // TURN_STATE would be returned as-is rather than replaced here.
        if (typeof memory != 'object') {
            memory = {};
            dc.context.turnState.set(TURN_STATE, memory);
        }
        return memory;
    }
    /**
     * Changes the backing object for the memory scope.
     * @param dc The [DialogContext](xref:botbuilder-dialogs.DialogContext) for this turn.
     * @param memory Memory object to set for the scope.
     */
    setMemory(dc, memory) {
        // Loose equality is deliberate: rejects both undefined and null.
        if (memory == undefined) {
            throw new Error(`TurnMemoryScope.setMemory: undefined memory object passed in.`);
        }
        dc.context.turnState.set(TURN_STATE, memory);
    }
}
exports.TurnMemoryScope = TurnMemoryScope;
//# sourceMappingURL=turnMemoryScope.js.map
|
// Manual smoke-test / debugging harness for the `st_ex1` package: creates a
// fresh Writable stream and passes it through SetStream.StreamAllOn.WriteAllOn.
const wr = new ( require ( 'stream' ).Writable );
const Ex = require ( 'st_ex1' );
debugger; // deliberate break point before invoking the helper
const waon = Ex.SetStream.StreamAllOn.WriteAllOn;
console.log ( waon.help ); // print whatever the helper publishes as `.help`
waon ( wr );
debugger; // deliberate break point to inspect the stream afterwards
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import six
import requests
from furl import furl
from retry import retry
from retry.api import retry_call
from ratelimiter import RateLimiter
from functools import lru_cache
from rnacentral_pipeline.databases.data import OntologyTerm
BASE = "https://www.ebi.ac.uk/ols/api/ontologies"
@lru_cache(maxsize=500)
@retry(requests.HTTPError, tries=5, delay=1)
@RateLimiter(max_calls=10, period=1)
def query_ols(url):
    """
    GET the given OLS URL and return the decoded JSON payload.

    Decorator order matters: the cache is outermost, so a repeated query for
    the same URL never touches the retry/rate-limit layers or the network.
    Fresh queries are retried up to 5 times on HTTP errors and limited to
    10 calls per 1-second period.

    :param url: URL to fetch; either a plain string or a furl object.
    :return: parsed JSON response.
    """
    # Accept either a furl object or a plain string URL.
    if isinstance(url, furl):
        url = url.url
    response = requests.get(url)
    # Raise requests.HTTPError for 4xx/5xx so the @retry layer kicks in.
    response.raise_for_status()
    return response.json()
def ontology_url(ontology):
    """
    This will fetch the base URL to use with the given ontology name.

    :param ontology: ontology prefix, e.g. "GO" (upper-cased before querying).
    :return: a furl object for the ontology's first configured base URI.
    """
    url = furl(BASE)
    url.path.segments.append(ontology.upper())
    info = query_ols(url.url)
    # OLS may list several base URIs; only the first is used.
    return furl(info["config"]["baseUris"][0])
def term_url(term_id):
    """
    Build the OLS REST URL for looking up ``term_id`` (e.g. ``"GO:0000001"``).

    The ontology's base IRI is fetched, the local part of the id is appended
    to it, and the full IRI is percent-encoded into the OLS terms endpoint.
    """
    prefix, local_id = term_id.split(":", 1)
    base_iri = ontology_url(prefix)
    # The base IRI already ends with the ontology's id prefix; append the rest.
    base_iri.path.segments[-1] += local_id
    encoded_iri = six.moves.urllib.parse.quote_plus(base_iri.url)
    endpoint = furl(BASE)
    for segment in (prefix, "terms", encoded_iri):
        endpoint.path.segments.append(segment)
    return endpoint
@lru_cache()
def term(term_id):
    """
    Fetch information about the given term_id. The term_id's should be in the
    form of: "GO:000001". This will only work for ontologies that are in the
    OLS. Results are cached, so repeated lookups of the same id are cheap.
    """
    ontology_name = term_id.split(":", 1)[0]
    term_info = query_ols(term_url(term_id).url)

    description = term_info["description"]
    definition = " ".join(description or "") if description else None

    # Synonyms prefixed with "INSDC_qualifier:" carry the (single) qualifier;
    # everything else is kept as a plain synonym.
    prefix = "INSDC_qualifier:"
    insdc_qualifier = None
    plain_synonyms = []
    for synonym in term_info.get("synonyms", None) or []:
        if not synonym.startswith(prefix):
            plain_synonyms.append(synonym)
            continue
        if insdc_qualifier:
            raise ValueError("Multiple INSDC qualifiers found")
        insdc_qualifier = synonym[len(prefix):]

    return OntologyTerm(
        ontology=ontology_name,
        ontology_id=term_id,
        name=term_info["label"],
        definition=definition,
        synonyms=plain_synonyms,
        insdc_qualifier=insdc_qualifier,
    )
|
# coding=utf-8
#
"""
Copyright (c) 2020, Alexander Magola. All rights reserved.
license: BSD 3-Clause License, see LICENSE for more details.
"""
class AnyAmountStrsKey(object):
    """ Any amount of string keys"""

    # No instance state: every instance is interchangeable with every other.
    __slots__ = ()

    def __eq__(self, other):
        # All instances are equal to one another; comparisons against
        # unrelated types are delegated back to Python.
        if isinstance(other, AnyAmountStrsKey):
            return True
        return NotImplemented  # pragma: no cover

    def __hash__(self):
        # Hash on the class so all instances collapse onto one dict/set slot.
        return hash(self.__class__)


# Shared sentinel instance for use as a scheme key.
ANYAMOUNTSTRS_KEY = AnyAmountStrsKey()


def addSelectToParams(scheme, paramNames = None):
    """
    Add '.select' variant to param from scheme

    ``scheme`` is updated in place: for each selected parameter name a new
    ``'<name>.select'`` entry is added whose 'vars' map points both the
    'default' key and the ANYAMOUNTSTRS_KEY sentinel at the original param.
    """
    if paramNames is None:
        # Snapshot the keys first: we mutate the dict while iterating.
        paramNames = tuple(scheme.keys())

    for paramName in paramNames:
        baseParam = scheme[paramName]
        scheme['{0}.select'.format(paramName)] = {
            'type' : 'dict',
            'vars' : {
                'default' : baseParam,
                ANYAMOUNTSTRS_KEY : baseParam,
            },
        }
|
import discord
from discord.ext import commands
import logging
import traceback
class Listeners(commands.Cog):
    """Global event listeners: command invocation logging, the default
    command-error handler, and re-running commands when a message is edited."""

    def __init__(self, bot):
        self.bot=bot
        # Reuse discord.py's own logger so bot events share one log stream.
        self.logger=logging.getLogger("discord")

    @commands.Cog.listener(name="on_command")
    async def _log_command_invoke(self, ctx):
        # Log every command start.
        # NOTE(review): ctx.guild is None for DM invocations, which would make
        # `.name` raise AttributeError here — confirm commands are guild-only.
        self.logger.info(
            f"Command \"{ctx.command.name}\" started by {ctx.author.name}#{ctx.author.discriminator} in guild {ctx.guild.name}")

    @commands.Cog.listener(name="on_command_completion")
    async def _log_command_completion(self, ctx):
        # Log successful command completion (same guild caveat as above).
        self.logger.info(
            f"Command \"{ctx.command.name}\" finished successfully. {ctx.author.name}#{ctx.author.discriminator} in guild {ctx.guild.name}")

    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        """
        The default command error handler provided by the bot.

        Skipped when the command or its cog has a local error handler;
        otherwise known error types get a user-facing reply and anything
        unexpected is logged with a full traceback.
        """
        # Commands with their own error handler take precedence.
        if hasattr(ctx.command, 'on_error'):
            return
        # Cogs that override cog_command_error handle their own errors too.
        cog = ctx.cog
        if cog:
            if cog._get_overridden_method(cog.cog_command_error) is not None:
                return
        ignored = (commands.errors.CommandNotFound, )
        # Unwrap e.g. CommandInvokeError to the underlying exception.
        error = getattr(error, 'original', error)
        if isinstance(error, ignored):
            return
        if isinstance(error, commands.DisabledCommand):
            await ctx.reply(f'{ctx.command} has been disabled.')
        elif isinstance(error, commands.NoPrivateMessage):
            try:
                await ctx.author.send(f'{ctx.command} can not be used in Private Messages.')
            except discord.HTTPException:
                # The user may have DMs disabled; nothing more we can do.
                pass
        elif isinstance(error, commands.errors.BadArgument):
            error_embed = discord.Embed(title="Error!", description=str(
                error), colour=discord.Colour.red())
            await ctx.reply(embed=error_embed)
        elif isinstance(error, commands.errors.TooManyArguments):
            # Extra arguments are deliberately tolerated.
            pass
        elif isinstance(error, discord.errors.HTTPException):
            return
            # NOTE(review): unreachable — the `return` above exits first.
            # Confirm whether the reply below was meant to be sent instead.
            await ctx.reply("There was an error, please try again later. If you are trying to message someone, they might have it turned off.")
        elif isinstance(error, discord.errors.NotFound):
            pass
        elif isinstance(error, commands.errors.MissingPermissions):
            perms = ", ".join(error.missing_perms)
            error_embed = discord.Embed(
                title="Error!", description=f"You are missing the following perm(s): `{perms}`", colour=discord.Colour.red())
            await ctx.reply(embed=error_embed)
        elif isinstance(error, commands.errors.BotMissingPermissions):
            perms = ", ".join(error.missing_perms)
            error_embed = discord.Embed(
                title="Error!", description=f"I am missing the following perm(s): `{perms}`", colour=discord.Colour.red())
            await ctx.reply(embed=error_embed)
        elif isinstance(error, discord.errors.Forbidden):
            await ctx.reply("I couldn't do that, sorry. Try checking my perms")
        elif isinstance(error, commands.errors.MissingRequiredArgument):
            # str(error.param) may include an annotation after ':'; keep only
            # the parameter name.
            param = str(error.param).split(":")[0]
            error_embed = discord.Embed(
                title="Error!", description=f"Missing parameter: `{param}`", colour=discord.Colour.red())
            await ctx.reply(embed=error_embed)
        elif isinstance(error, commands.NotOwner):
            embed = discord.Embed(
                title="Error!", description="You need to be owner to execute this command!", colour=discord.Colour.red())
            await ctx.send(embed=embed)
        else:
            # Unknown error: tell the user and log the full traceback.
            error_embed = discord.Embed(
                title="Error!", description=f"```diff\n- {str(error)}```\nIf this keeps happening, please contact `isaa_ctaylor#2494`", colour=discord.Colour.red())
            tb = "".join(traceback.format_exception(
                type(error), error, error.__traceback__))
            self.logger.error(
                f"Command error!\nCommand name: {ctx.command.qualified_name}, Author: {ctx.author.name}#{ctx.author.discriminator}\n{tb}")
            await ctx.reply(embed=error_embed)

    @commands.Cog.listener(name="on_message_edit")
    async def _reinvoke_commands(self, before, after):
        # Re-run command processing only when the text actually changed
        # (edits to embeds/pins also fire this event).
        if after.content != before.content:
            await self.bot.process_commands(after)
def setup(bot):
    """Extension entry point used by discord.py's bot.load_extension."""
    bot.add_cog(Listeners(bot))
|
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-权限中心(BlueKing-IAM) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from backend.apps.subject.audit import BaseSubjectProvider
from backend.audit.audit import NoNeedAuditException, audit_context_getter
from backend.audit.constants import AuditType
from .constants import OperateEnum
class SubjectPolicyGrantOrRevokeAuditProvider(BaseSubjectProvider):
    """Audit provider describing a grant or revoke of a subject's policies."""

    @property
    def type(self):
        """Audit type derived from the recorded operate flag ('' if unknown)."""
        operate = audit_context_getter(self.request, "operate")
        audit_type_by_operate = {
            OperateEnum.REVOKE.value: AuditType.USER_POLICY_UPDATE.value,
            OperateEnum.GRANT.value: AuditType.USER_POLICY_CREATE.value,
        }
        return audit_type_by_operate.get(operate, "")

    @property
    def extra(self):
        """Audit payload: the system id and the serialized policies."""
        system_id = audit_context_getter(self.request, "system_id")
        policies = audit_context_getter(self.request, "policies")
        # No policies involved -> nothing worth auditing.
        if not policies:
            raise NoNeedAuditException
        return {"system_id": system_id, "policies": [policy.dict() for policy in policies]}

    @property
    def system_id(self) -> str:
        """Identifier of the system the audited policies belong to."""
        return audit_context_getter(self.request, "system_id")
|
# Copyright 2018-2019 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bpy
import bmesh
from ..com.gltf2_blender_extras import set_extras
from .gltf2_blender_material import BlenderMaterial
from .gltf2_blender_primitive import BlenderPrimitive
class BlenderMesh():
    """Blender Mesh."""

    def __new__(cls, *args, **kwargs):
        # Namespace-only class: all functionality lives in static methods.
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def create(gltf, mesh_idx, skin_idx):
        """Mesh creation.

        Builds one bmesh out of every primitive of the glTF mesh at
        ``mesh_idx`` (creating Blender materials on demand), converts it to a
        bpy mesh datablock and returns it.
        """
        pymesh = gltf.data.meshes[mesh_idx]
        # Create one bmesh, add all primitives to it, and then convert it to a
        # mesh.
        bme = bmesh.new()
        # List of all the materials this mesh will use. The material each
        # primitive uses is set by giving an index into this list.
        materials = []
        # Process all primitives
        for prim in pymesh.primitives:
            if prim.material is None:
                material_idx = None
            else:
                pymaterial = gltf.data.materials[prim.material]
                vertex_color = None
                # A32NX Disabling vertex color import
                # It just makes things look weird, I think it's only in the gltf to match the standard
                # if 'COLOR_0' in prim.attributes:
                #     vertex_color = 'COLOR_0'
                # Create Blender material if needed
                if vertex_color not in pymaterial.blender_material:
                    BlenderMaterial.create(gltf, prim.material, vertex_color)
                material_name = pymaterial.blender_material[vertex_color]
                material = bpy.data.materials[material_name]
                # Reuse the slot if this material is already on the mesh.
                try:
                    material_idx = materials.index(material.name)
                except ValueError:
                    materials.append(material.name)
                    material_idx = len(materials) - 1
            BlenderPrimitive.add_primitive_to_bmesh(gltf, bme, pymesh, prim, skin_idx, material_idx)
        name = pymesh.name or 'Mesh_' + str(mesh_idx)
        mesh = bpy.data.meshes.new(name)
        BlenderMesh.bmesh_to_mesh(gltf, pymesh, bme, mesh)
        bme.free()
        for name_material in materials:
            mesh.materials.append(bpy.data.materials[name_material])
        mesh.update()
        # Copy glTF extras onto the mesh, excluding 'targetNames'
        # (presumably consumed by shape-key naming elsewhere — TODO confirm).
        set_extras(mesh, pymesh.extras, exclude=['targetNames'])
        # Clear accessor cache after all primitives are done
        gltf.accessor_cache = {}
        return mesh

    @staticmethod
    def bmesh_to_mesh(gltf, pymesh, bme, mesh):
        """Convert ``bme`` into ``mesh``, then rebuild shape keys, smoothing
        flags and (optionally) custom split normals, which bmesh.to_mesh does
        not carry over by itself."""
        bme.to_mesh(mesh)
        # Unfortunately need to do shapekeys/normals/smoothing ourselves.
        # Shapekeys
        if len(bme.verts.layers.shape) != 0:
            # The only way I could find to create a shape key was to temporarily
            # parent mesh to an object and use obj.shape_key_add.
            tmp_ob = None
            try:
                tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
                tmp_ob.shape_key_add(name='Basis')
                mesh.shape_keys.name = mesh.name
                for layer_name in bme.verts.layers.shape.keys():
                    tmp_ob.shape_key_add(name=layer_name)
                    key_block = mesh.shape_keys.key_blocks[layer_name]
                    layer = bme.verts.layers.shape[layer_name]
                    for i, v in enumerate(bme.verts):
                        key_block.data[i].co = v[layer]
            finally:
                # Always drop the temporary object, even if key creation fails.
                if tmp_ob:
                    bpy.data.objects.remove(tmp_ob)
        # Normals
        mesh.update()
        if gltf.import_settings['import_shading'] == "NORMALS":
            mesh.create_normals_split()
        use_smooths = []  # whether to smooth for each poly
        face_idx = 0
        for prim in pymesh.primitives:
            if gltf.import_settings['import_shading'] == "FLAT" or \
                    'NORMAL' not in prim.attributes:
                use_smooths += [False] * prim.num_faces
            elif gltf.import_settings['import_shading'] == "SMOOTH":
                use_smooths += [True] * prim.num_faces
            elif gltf.import_settings['import_shading'] == "NORMALS":
                mesh_loops = mesh.loops
                for fi in range(face_idx, face_idx + prim.num_faces):
                    poly = mesh.polygons[fi]
                    # "Flat normals" are when all the vertices in poly have the
                    # poly's normal. Otherwise, smooth the poly.
                    for loop_idx in range(poly.loop_start, poly.loop_start + poly.loop_total):
                        vi = mesh_loops[loop_idx].vertex_index
                        # 0.9999999 tolerates floating-point error in the dot
                        # product of two unit-length normals.
                        if poly.normal.dot(bme.verts[vi].normal) <= 0.9999999:
                            use_smooths.append(True)
                            break
                    else:
                        use_smooths.append(False)
            else:
                # shouldn't happen
                assert False
            face_idx += prim.num_faces
        mesh.polygons.foreach_set('use_smooth', use_smooths)
        # Custom normals, now that every update is done
        if gltf.import_settings['import_shading'] == "NORMALS":
            custom_normals = [v.normal for v in bme.verts]
            mesh.normals_split_custom_set_from_vertices(custom_normals)
            mesh.use_auto_smooth = True
|
/**
* Directly from fnakstad
* https://github.com/fnakstad/angular-client-side-auth/blob/master/client/js/routingConfig.js
*/
(function (exports) {
    'use strict';

    var config = {
        /* List all the roles you wish to use in the app
         * You have a max of 31 before the bit shift pushes the accompanying integer out of
         * the memory footprint for an integer
         */
        roles: [
            'public',
            'user',
            'admin'
        ],

        /*
         Build out all the access levels you want referencing the roles listed above
         You can use the "*" symbol to represent access to all roles
         */
        accessLevels: {
            'public': '*',
            'anon': ['public'],
            'user': ['user', 'admin'],
            'admin': ['admin']
        }
    };

    /*
     Method to build a distinct bit mask for each role
     It starts off with "1" and shifts the bit to the left for each element in the
     roles array parameter
     */
    function buildRoles(roles) {
        var bitMask = '01';
        var userRoles = {};
        // NOTE: for...in over an array iterates index strings ('0', '1', …);
        // it works here, but a counted loop / for...of is the usual idiom.
        for (var role in roles) {
            var intCode = parseInt(bitMask, 2);
            userRoles[roles[role]] = {
                bitMask: intCode,
                title: roles[role]
            };
            // Shift the single set bit left for the next role.
            bitMask = (intCode << 1).toString(2);
        }
        return userRoles;
    }

    /*
     This method builds access level bit masks based on the accessLevelDeclaration parameter which must
     contain an array for each access level containing the allowed user roles.
     */
    function buildAccessLevels(accessLevelDeclarations, userRoles) {
        var accessLevels = {},
            resultBitMask,
            role;
        for (var level in accessLevelDeclarations) {
            if (typeof accessLevelDeclarations[level] === 'string') {
                if (accessLevelDeclarations[level] === '*') {
                    // '*' grants every registered role: build a mask with one
                    // set bit per role.
                    resultBitMask = '';
                    for (role in userRoles) {
                        resultBitMask += '1';
                    }
                    accessLevels[level] = {
                        bitMask: parseInt(resultBitMask, 2),
                        title: accessLevelDeclarations[level]
                    };
                }
                else {
                    // Any other string is invalid; the level is left undefined.
                    console.log('Access Control Error: Could not parse [' + accessLevelDeclarations[level] + '] as access definition for level [' + level + ']');
                }
            }
            else {
                // Array of role names: OR together the bit masks of the
                // roles that are actually registered.
                resultBitMask = 0;
                for (role in accessLevelDeclarations[level]) {
                    if (userRoles.hasOwnProperty(accessLevelDeclarations[level][role])) {
                        resultBitMask = resultBitMask | userRoles[accessLevelDeclarations[level][role]].bitMask;
                    }
                    else {
                        console.log('Access Control Error: Could not find role [' + accessLevelDeclarations[level][role] + '] in registered roles while building access for [' + level + ']');
                    }
                }
                // NOTE(review): `role` still holds the LAST index from the loop
                // above, so `title` ends up being the last role name in the
                // list — possibly `level` was intended. Kept as in upstream.
                accessLevels[level] = {
                    bitMask: resultBitMask,
                    title: accessLevelDeclarations[level][role]
                };
            }
        }
        return accessLevels;
    }

    exports.userRoles = buildRoles(config.roles);
    exports.accessLevels = buildAccessLevels(config.accessLevels, exports.userRoles);

})(typeof exports === 'undefined' ? this : exports);
|
# DADSA - Assignment 1
# Reece Benson
from classes import Player
from classes import Round
class Season():
    """A season: its players, tournaments, rounds and settings.

    All state is per-instance; callers populate a Season via add_player,
    add_tournament and add_round after constructing it from JSON data.
    """

    def __init__(self, _app, name, j_data):
        """
        :param _app: application object; only its ``debug`` flag is read here
        :param name: display name of the season
        :param j_data: season JSON data; must contain a 'settings' mapping
        """
        # Set our application as a variable
        self._app = _app
        # Set our Season JSON Data in a variable
        self._j_data = j_data
        # State containers are created per instance. (They used to be
        # class-level attributes, which silently shared players, tournaments
        # and rounds between every Season instance.)
        self._players = { }
        self._tournaments = { }
        self._rounds = { }
        self._rounds_raw = { }
        # Set variables
        self._name = name
        self._settings = j_data['settings']
        # Debug
        if(self._app.debug):
            print("[LOAD]: Loaded Season '{0}'".format(name))

    def name(self):
        """Return the season's name."""
        return self._name

    def settings(self):
        """Return the season's settings mapping."""
        return self._settings

    def display(self, detail):
        """Build a printable report for the given detail view.

        Only the "details" view is supported; anything else yields a generic
        error string.
        """
        # What detail are we handling?
        ret = None
        if(detail == "details"):
            # Set our header text
            ret = "Details about '{0}':".format(self.name()) + "\n"
            ret += "---------------------------------------------------------------" + "\n"
            # Add details to the return string
            ret += "There have been {0} genders defined within this season".format(len(self.players())) + "\n"
            for gdr in self.players():
                ret += " -> The gender '{0}' has {1} players stored within it:".format(gdr, len(self.players()[gdr])) + "\n"
                ret += " ALL: " + ", ".join([p.name() for p in self.players()[gdr] ]) + "\n"
            # Add settings
            ret += "\n" + "Settings for this season:" + "\n"
            for setting in self.settings():
                ret += " -> The setting '{0}' is set to '{1}'".format(setting, self.settings()[setting]) + "\n"
            # Show tournaments
            ret += "\n" + "Tournaments in this season:" + "\n"
            for tournament_name in self.tournaments():
                tournament = self.tournament(tournament_name)
                ret += " -> {0} — Difficulty: {1}".format(tournament_name, tournament.difficulty()) + "\n"
                ret += " Prize Money:" + "\n"
                ret += " {0}".format("\n".join([ "\t\t#{0}: {1}".format(i, t) for i, t in enumerate(tournament.prize_money(), 1) ])) + "\n"
        else:
            ret = "An unknown error has been handled..."
        # Returning as a string, so if there was a GUI I could
        # append this to a Label instead of printing to console
        return ret

    def tournaments(self):
        """Return the mapping of tournament name -> tournament object."""
        return self._tournaments

    def tournament(self, name):
        """Return the named tournament, or None when it does not exist."""
        if(name in self.tournaments()):
            return self._tournaments[name]
        else:
            return None

    def add_tournament(self, name, tournament):
        """Register ``tournament`` under ``name`` and return it."""
        self._tournaments.update({ name: tournament })
        # Debug
        if(self._app.debug):
            print("[LOAD]: Loaded Tournament '{0}' for '{1}'".format(name, self.name()))
        return self.tournament(name)

    def players(self):
        """Return the mapping of gender -> list of Player objects."""
        return self._players

    def add_player(self, name, gender):
        """Create a Player with the next index for ``gender`` and store it."""
        if(not gender in self.players()):
            self._players[gender] = [ ]
            # FIXME(review): add_round() expects self._rounds[gender] to be a
            # dict, but this initializes it as a list; whichever runs first
            # wins. Left unchanged pending clarification of intent.
            self._rounds[gender] = [ ]
        # Append our Players to their specific gender category
        self._players[gender].append(Player.Player(name, gender, len(self.players()[gender])))

    def round(self, gender, rnd_name):
        """Return the named round for ``gender``, or None when missing."""
        if(gender in self.rounds()):
            if(rnd_name in self.rounds()[gender]):
                return self.rounds()[gender][rnd_name]
            else:
                return None
        else:
            return None

    def rounds(self):
        """Return the mapping of gender -> rounds."""
        return self._rounds

    def add_round(self, gender, _round):
        """Store ``_round`` under its own name for ``gender`` and return it."""
        if(not gender in self.rounds()):
            self._rounds[gender] = { }
        self._rounds[gender].update({ _round.name(): _round })
        return self._rounds[gender][_round.name()]

    def set_rounds(self):
        """Build self._rounds from the raw round data.

        FIXME(review): ``_round`` is referenced below but never assigned in
        this method, so executing it raises NameError; a Round object
        presumably needs to be constructed per raw round. Left as-is because
        the Round constructor's signature is not visible here.
        """
        for rnd in self._rounds_raw:
            for gdr in self._rounds_raw[rnd]:
                # If the Gender category doesn't exist within the rounds, create it
                if(not gdr in self._rounds):
                    self._rounds[gdr] = [ ]
                # Populate our dictionary with our match data
                for match in self._rounds_raw[rnd][gdr]:
                    _round._matches.append(match)
                # Append our Round
                self._rounds[gdr].append(_round)
|
"""
This animation example shows how perform a radar sweep animation.
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.radar_sweep
"""
import arcade
import math
# Set up the constants
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Radar Sweep Example"

# These constants control the particulars about the radar
CENTER_X = SCREEN_WIDTH // 2
CENTER_Y = SCREEN_HEIGHT // 2
RADIANS_PER_FRAME = 0.02  # sweep speed: radians advanced per scheduled draw
SWEEP_LENGTH = 250        # radius of the radar circle, in pixels


def on_draw(_delta_time):
    """ Use this function to draw everything to the screen. """

    # Move the angle of the sweep.
    on_draw.angle += RADIANS_PER_FRAME

    # Calculate the end point of our radar sweep. Using math.
    # x from sin and y from cos: the sweep starts pointing straight up and
    # rotates clockwise as the angle grows.
    x = SWEEP_LENGTH * math.sin(on_draw.angle) + CENTER_X
    y = SWEEP_LENGTH * math.cos(on_draw.angle) + CENTER_Y

    # Start the render. This must happen before any drawing
    # commands. We do NOT need an stop render command.
    arcade.start_render()

    # Draw the radar line
    arcade.draw_line(CENTER_X, CENTER_Y, x, y, arcade.color.OLIVE, 4)

    # Draw the outline of the radar
    arcade.draw_circle_outline(CENTER_X, CENTER_Y, SWEEP_LENGTH,
                               arcade.color.DARK_GREEN, 10)


# This is a function-specific variable. Before we
# use them in our function, we need to give them initial
# values. The sweep angle persists between calls as an attribute
# on the function object itself.
on_draw.angle = 0  # type: ignore # dynamic attribute on function obj
def main():
    """Open the window, schedule the draw callback, and run the event loop."""
    arcade.open_window(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
    arcade.set_background_color(arcade.color.BLACK)

    # Ask arcade to invoke on_draw at 80 frames per second.
    arcade.schedule(on_draw, 1 / 80)

    arcade.run()

    # Once the event loop returns, tear the window down.
    arcade.close_window()


if __name__ == "__main__":
    main()
|
# Copyright 2019 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sampling algorithms
===================
**Module name:** :mod:`thewalrus.samples`
.. currentmodule:: thewalrus.samples
This submodule provides access to algorithms to sample from the
hafnian or the torontonian of Gaussian quantum states.
Hafnian sampling
----------------
.. autosummary::
generate_hafnian_sample
hafnian_sample_state
hafnian_sample_graph
hafnian_sample_classical_state
hafnian_sample_graph_rank_one
Torontonian sampling
--------------------
.. autosummary::
generate_torontonian_sample
torontonian_sample_state
torontonian_sample_graph
torontonian_sample_classical_state
threshold_detection_prob
Brute force sampling
--------------------
.. autosummary::
photon_number_sampler
Code details
------------
"""
# pylint: disable=too-many-arguments
import dask
import numpy as np
from scipy.special import factorial as fac
from ._hafnian import hafnian, reduction
from ._torontonian import threshold_detection_prob
from .quantum import (
Amat,
Covmat,
Qmat,
gen_Qmat_from_graph,
is_classical_cov,
reduced_gaussian,
density_matrix_element,
)
# Public API of this module, consumed by ``from thewalrus.samples import *``.
__all__ = [
    "generate_hafnian_sample",
    "hafnian_sample_state",
    "hafnian_sample_graph",
    "hafnian_sample_classical_state",
    "hafnian_sample_graph_rank_one",
    "generate_torontonian_sample",
    "torontonian_sample_state",
    "torontonian_sample_graph",
    "torontonian_sample_classical_state",
    "threshold_detection_prob",
    "photon_number_sampler",
]
# ===============================================================================================
# Hafnian sampling
# ===============================================================================================
# pylint: disable=too-many-branches
def generate_hafnian_sample(
    cov, mean=None, hbar=2, cutoff=6, max_photons=30, approx=False, approx_samples=1e5
):  # pylint: disable=too-many-branches
    r"""Returns a single sample from the Hafnian of a Gaussian state.

    Args:
        cov (array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        mean (array): a :math:`2N`` ``np.float64`` vector of means representing the Gaussian
            state.
        hbar (float): (default 2) the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        cutoff (int): the Fock basis truncation.
        max_photons (int): specifies the maximum number of photons that can be counted.
        approx (bool): if ``True``, the approximate hafnian algorithm is used.
            Note that this can only be used for real, non-negative matrices.
        approx_samples: the number of samples used to approximate the hafnian if ``approx=True``.

    Returns:
        np.array[int]: a photon number sample from the Gaussian states.
    """
    N = len(cov) // 2
    result = []
    prev_prob = 1.0
    nmodes = N
    if mean is None:
        local_mu = np.zeros(2 * N)
    else:
        local_mu = mean
    A = Amat(Qmat(cov), hbar=hbar)

    # Sample mode-by-mode: each mode's photon number is drawn conditioned on
    # the outcomes already fixed for the previous modes (chain rule).
    for k in range(nmodes):
        probs1 = np.zeros([cutoff + 1], dtype=np.float64)
        kk = np.arange(k + 1)
        mu_red, V_red = reduced_gaussian(local_mu, cov, kk)

        if approx:
            Q = Qmat(V_red, hbar=hbar)
            A = Amat(Q, hbar=hbar, cov_is_qmat=True)

        # Joint probability of (previous outcomes, i photons in mode k),
        # for every i up to the cutoff.
        for i in range(cutoff):
            indices = result + [i]
            ind2 = indices + indices
            if approx:
                factpref = np.prod(fac(indices))
                mat = reduction(A, ind2)
                probs1[i] = (
                    hafnian(np.abs(mat.real), approx=True, num_samples=approx_samples) / factpref
                )
            else:
                probs1[i] = density_matrix_element(
                    mu_red, V_red, indices, indices, include_prefactor=True, hbar=hbar
                ).real

        if approx:
            probs1 = probs1 / np.sqrt(np.linalg.det(Q).real)

        # Conditional distribution for mode k; clamp tiny negative numerical
        # noise to zero before normalising.
        probs2 = probs1 / prev_prob
        probs3 = np.maximum(
            probs2, np.zeros_like(probs2)
        )  # pylint: disable=assignment-from-no-return
        ssum = np.sum(probs3)
        # Missing probability mass goes into the extra "cutoff" sentinel slot.
        if ssum < 1.0:
            probs3[-1] = 1.0 - ssum

        # The following normalization of probabilities is needed to prevent np.random.choice error
        if ssum > 1.0:
            probs3 = probs3 / ssum

        result.append(np.random.choice(a=range(len(probs3)), p=probs3))
        # Drawing the sentinel slot means the truncation was hit: reject (-1).
        if result[-1] == cutoff:
            return -1
        if np.sum(result) > max_photons:
            return -1

        prev_prob = probs1[result[-1]]
    return result
def _hafnian_sample(args):
    r"""Returns samples from the Hafnian of a Gaussian state.

    Note: this is a wrapper function, instead of using this function
    directly, please use either :func:`hafnian_sample_state` or
    :func:`hafnian_sample_graph`.

    Args:
        args (list): a list containing the following parameters:

            cov (array)
                a :math:`2N\times 2N` ``np.float64`` covariance matrix
                representing an :math:`N` mode quantum state. This can be obtained
                via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
            samples (int)
                the number of samples to return.
            mean (array): a :math:`2N`` ``np.float64`` vector of means representing the Gaussian
                state.
            hbar (float)
                the value of :math:`\hbar` in the commutation relation :math:`[\x,\p]=i\hbar`.
            cutoff (int)
                the Fock basis truncation.
            max_photons (int)
                specifies the maximum number of photons that can be counted.
            approx (bool)
                if ``True``, the approximate hafnian algorithm is used.
                Note that this can only be used for real, non-negative matrices.
            approx_samples (int)
                the number of samples used to approximate the hafnian if ``approx=True``.

    Returns:
        np.array[int]: photon number samples from the Gaussian state

    Raises:
        TypeError: if ``cov`` is not a NumPy array.
        ValueError: if ``cov`` is not square or contains NaNs.
    """
    cov, samples, mean, hbar, cutoff, max_photons, approx, approx_samples = args

    if not isinstance(cov, np.ndarray):
        raise TypeError("Covariance matrix must be a NumPy array.")
    matshape = cov.shape
    if matshape[0] != matshape[1]:
        raise ValueError("Covariance matrix must be square.")
    if np.isnan(cov).any():
        raise ValueError("Covariance matrix must not contain NaNs.")

    samples_array = []
    j = 0
    # Keep drawing until ``samples`` accepted samples have been collected;
    # generate_hafnian_sample returns -1 for rejected (truncation-hit) draws.
    while j < samples:
        result = generate_hafnian_sample(
            cov,
            mean=mean,
            hbar=hbar,
            cutoff=cutoff,
            max_photons=max_photons,
            approx=approx,
            approx_samples=approx_samples,
        )

        if result != -1:
            # if result == -1, then you never get anything beyond cutoff
            samples_array.append(result)
            j = j + 1

    return np.vstack(samples_array)
def hafnian_sample_state(
    cov,
    samples,
    mean=None,
    hbar=2,
    cutoff=5,
    max_photons=30,
    approx=False,
    approx_samples=1e5,
    parallel=False,
):
    r"""Returns samples from the Hafnian of a Gaussian state.

    Args:
        cov (array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        samples (int): the number of samples to return.
        mean (array): a :math:`2N`` ``np.float64`` vector of means representing the Gaussian
            state.
        hbar (float): (default 2) the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        cutoff (int): the Fock basis truncation.
        max_photons (int): specifies the maximum number of photons that can be counted.
        approx (bool): if ``True``, the :func:`~.hafnian_approx` function is used
            to approximate the hafnian. Note that this can only be used for
            real, non-negative matrices.
        approx_samples: the number of samples used to approximate the hafnian if ``approx=True``.
        parallel (bool): if ``True``, uses ``dask`` for parallelization of samples

    Returns:
        np.array[int]: photon number samples from the Gaussian state
    """
    if parallel:
        # One single-sample task per requested sample, executed on dask threads.
        task = [cov, 1, mean, hbar, cutoff, max_photons, approx, approx_samples]
        delayed_runs = [dask.delayed(_hafnian_sample)(task) for _ in range(samples)]
        results = dask.compute(*delayed_runs, scheduler="threads")
        return np.vstack(results)

    return _hafnian_sample([cov, samples, mean, hbar, cutoff, max_photons, approx, approx_samples])
def hafnian_sample_graph(
    A, n_mean, samples=1, cutoff=5, max_photons=30, approx=False, approx_samples=1e5, parallel=False
):
    r"""Returns samples from the Gaussian state specified by the adjacency matrix :math:`A`
    and with total mean photon number :math:`n_{mean}`

    Args:
        A (array): a :math:`N\times N` ``np.float64`` (symmetric) adjacency matrix matrix
        n_mean (float): mean photon number of the Gaussian state
        samples (int): the number of samples to return.
        cutoff (int): the Fock basis truncation.
        max_photons (int): specifies the maximum number of photons that can be counted.
        approx (bool): if ``True``, the approximate hafnian algorithm is used.
            Note that this can only be used for real, non-negative matrices.
        approx_samples: the number of samples used to approximate the hafnian if ``approx=True``.
        parallel (bool): if ``True``, uses ``dask`` for parallelization of samples

    Returns:
        np.array[int]: photon number samples from the Gaussian state
    """
    # Embed the adjacency matrix into a Gaussian state with the requested
    # mean photon number, then sample that state.
    cov = Covmat(gen_Qmat_from_graph(A, n_mean), hbar=2)
    return hafnian_sample_state(
        cov,
        samples,
        mean=None,
        hbar=2,
        cutoff=cutoff,
        max_photons=max_photons,
        approx=approx,
        approx_samples=approx_samples,
        parallel=parallel,
    )
# ===============================================================================================
# Torontonian sampling
# ===============================================================================================
def generate_torontonian_sample(cov, mu=None, hbar=2, max_photons=30):
    r"""Returns a single threshold (Torontonian) sample from a Gaussian state.

    Args:
        cov (array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        mu (array): a :math:`2N` ``np.float64`` displacement vector
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``smeanxp`` method of the Gaussian backend of Strawberry Fields.
        hbar (float): (default 2) the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        max_photons (int): specifies the maximum number of clicks that can be counted.

    Returns:
        np.array[int]: a threshold sample from the Gaussian state.

    Raises:
        ValueError: if the covariance matrix is not square.
    """
    results = []
    n1, n2 = cov.shape
    if mu is None:
        mu = np.zeros(n1, dtype=np.float64)
    if n1 != n2:
        raise ValueError("Covariance matrix must be square.")
    nmodes = n1 // 2
    prev_prob = 1.0
    # Draw click/no-click outcomes mode-by-mode via the chain rule,
    # conditioning each mode on the outcomes already fixed.
    for k in range(nmodes):
        probs = np.zeros([2], dtype=np.float64)
        kk = np.arange(k + 1)
        mu_red, V_red = reduced_gaussian(mu, cov, kk)

        indices0 = results + [0]
        probs[0] = threshold_detection_prob(mu_red, V_red, indices0, hbar=hbar)
        indices1 = results + [1]
        probs[1] = threshold_detection_prob(mu_red, V_red, indices1, hbar=hbar)

        # Guard against tiny imaginary/negative numerical noise.
        probs = np.real_if_close(probs)
        probs = np.maximum(probs, 0)
        local_p = probs / prev_prob
        local_p /= np.sum(local_p)
        result = np.random.choice(range(2), p=local_p)
        results.append(result)

        prev_prob = probs[result]

        # Reject samples with more clicks than allowed.
        if np.sum(results) > max_photons:
            return -1
    return results
def _torontonian_sample(args):
    r"""Returns samples from the Torontonian of a Gaussian state.

    Note: this is a wrapper function, instead of using this function
    directly, please use either :func:`torontonian_sample_state` or
    :func:`torontonian_sample_graph`.

    Args:
        args (list): a list containing the following parameters:

            cov (array)
                a :math:`2N\times 2N` ``np.float64`` covariance matrix
                representing an :math:`N` mode quantum state. This can be obtained
                via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
            samples (int)
                number of samples to generate
            mu (array)
                a :math:`2N` ``np.float64`` displacement vector
                representing an :math:`N` mode quantum state. This can be obtained
                via the ``smeanxp`` method of the Gaussian backend of Strawberry Fields.
            hbar (float)
                the value of :math:`\hbar` in the commutation
                relation :math:`[\x,\p]=i\hbar`.
            max_photons (int)
                specifies the maximum number of clicks that can be counted.

    Returns:
        np.array[int]: threshold samples from the Gaussian state.

    Raises:
        TypeError: if ``cov`` is not a NumPy array.
        ValueError: if ``cov`` is not square or contains NaNs.
    """
    cov, samples, mu, hbar, max_photons = args

    if not isinstance(cov, np.ndarray):
        raise TypeError("Covariance matrix must be a NumPy array.")
    shape = cov.shape
    if shape[0] != shape[1]:
        raise ValueError("Covariance matrix must be square.")
    if np.isnan(cov).any():
        raise ValueError("Covariance matrix must not contain NaNs.")

    # Keep drawing until the requested number of accepted samples is reached;
    # generate_torontonian_sample returns -1 for rejected draws.
    collected = []
    while len(collected) < samples:
        draw = generate_torontonian_sample(cov, mu, hbar=hbar, max_photons=max_photons)
        if draw != -1:
            collected.append(draw)

    return np.vstack(collected)
def torontonian_sample_state(cov, samples, mu=None, hbar=2, max_photons=30, parallel=False):
    r"""Returns samples from the Torontonian of a Gaussian state

    Args:
        cov(array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        samples (int): number of samples to generate
        mu (array): a :math:`2N` ``np.float64`` displacement vector
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``smeanxp`` method of the Gaussian backend of Strawberry Fields.
        hbar (float): (default 2) the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        max_photons (int): specifies the maximum number of clicks that can be counted.
        parallel (bool): if ``True``, uses ``dask`` for parallelization of samples

    Returns:
        np.array[int]: threshold samples from the Gaussian state.

    Raises:
        TypeError: if ``cov`` is not a NumPy array.
    """
    if not isinstance(cov, np.ndarray):
        raise TypeError("Covariance matrix must be a NumPy array.")

    # Default to a zero displacement vector of the right length.
    if mu is None:
        half = cov.shape[0] // 2
        mu = np.zeros(2 * half, dtype=np.float64)

    if parallel:
        # One single-sample task per requested sample, executed on dask threads.
        task = [cov, 1, mu, hbar, max_photons]
        delayed_runs = [dask.delayed(_torontonian_sample)(task) for _ in range(samples)]
        results = dask.compute(*delayed_runs, scheduler="threads")
        return np.vstack(results)

    return _torontonian_sample([cov, samples, mu, hbar, max_photons])
def torontonian_sample_graph(A, n_mean, samples=1, max_photons=30, parallel=False):
    r"""Returns samples from the Torontonian of a Gaussian state specified by the adjacency matrix :math:`A`
    and with total mean photon number :math:`n_{mean}`

    Args:
        A (array): a :math:`N\times N` ``np.float64`` (symmetric) adjacency matrix matrix
        n_mean (float): mean photon number of the Gaussian state
        samples (int): the number of samples to return.
        max_photons (int): specifies the maximum number of clicks that can be counted.
        parallel (bool): if ``True``, uses ``dask`` for parallelization of samples

    Returns:
        np.array[int]: threshold samples from the Torontonian of the Gaussian state
    """
    # Embed the adjacency matrix into a Gaussian state, then threshold-sample it.
    Q = gen_Qmat_from_graph(A, n_mean)
    cov = Covmat(Q, hbar=2)
    return torontonian_sample_state(
        cov, samples, hbar=2, max_photons=max_photons, parallel=parallel
    )
# pylint: disable=unused-argument
def hafnian_sample_classical_state(
    cov, samples, mean=None, hbar=2, atol=1e-08, cutoff=None
):  # add cutoff for consistency pylint: disable=unused-argument
    r"""Returns samples from a Gaussian state that has a positive :math:`P` function.

    Args:
        cov(array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        samples (int): number of samples to generate
        mean (array): vector of means of the gaussian state
        hbar (float): the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        atol (float): absolute tolerance used when checking that the covariance
            matrix is a classical covariance matrix of a Gaussian state.
        cutoff (int): ignored; accepted only for signature consistency with
            :func:`hafnian_sample_state`.

    Returns:
        np.array[int]: photon number samples from the Gaussian state with covariance cov and vector means mean.

    Raises:
        ValueError: if ``cov`` is not classical, or if ``mean`` and ``cov``
            have incompatible shapes.
    """
    if not is_classical_cov(cov, hbar=hbar, atol=atol):
        raise ValueError("Not a classical covariance matrix")

    (n, _) = cov.shape
    if mean is None:
        mean = np.zeros([n])
    elif mean.shape != (n,):
        raise ValueError("mean and cov do not have compatible shapes")

    # Draw phase-space points from the positive P distribution, convert them
    # to coherent amplitudes, then Poisson-sample photon numbers.
    R = np.random.multivariate_normal(mean, cov - 0.5 * hbar * np.identity(n), samples)
    N = n // 2
    alpha = (1.0 / np.sqrt(2 * hbar)) * (R[:, 0:N] + 1j * R[:, N : 2 * N])
    return np.random.poisson(np.abs(alpha) ** 2)
def torontonian_sample_classical_state(cov, samples, mean=None, hbar=2, atol=1e-08):
    r"""Returns threshold samples from a Gaussian state that has a positive P function.

    Args:
        cov(array): a :math:`2N\times 2N` ``np.float64`` covariance matrix
            representing an :math:`N` mode quantum state. This can be obtained
            via the ``scovmavxp`` method of the Gaussian backend of Strawberry Fields.
        samples (int): number of samples to generate
        mean (array): vector of means of the Gaussian state
        hbar (float): the value of :math:`\hbar` in the commutation
            relation :math:`[\x,\p]=i\hbar`.
        atol (float): absolute tolerance used when checking that the covariance
            matrix is a classical covariance matrix of a Gaussian state.

    Returns:
        np.array[int]: threshold samples from the Gaussian state with covariance cov and vector means mean.
    """
    # A mode "clicks" (1) exactly when its photon-number sample is nonzero.
    return np.where(
        hafnian_sample_classical_state(cov, samples, mean=mean, hbar=hbar, atol=atol) > 0, 1, 0
    )
def photon_number_sampler(probabilities, num_samples, out_of_bounds=False):
    """Given a photon-number probability mass function (PMF) it returns samples according to said PMF.

    Args:
        probabilities (array): probability tensor of the modes, has shape ``[cutoff]*num_modes``
        num_samples (int): number of samples requested
        out_of_bounds (boolean): if ``False`` the probability distribution is renormalized. If not ``False``, the value of
            ``out_of_bounds`` is used as a placeholder for samples where more than the cutoff of probabilities are detected.

    Returns:
        (array): Samples, with shape [num_sample, num_modes]
    """
    num_modes = len(probabilities.shape)
    cutoff = probabilities.shape[0]
    total_mass = np.sum(probabilities)

    if out_of_bounds is False:
        # Renormalize and sample directly from the flattened tensor.
        flat_probs = probabilities.flatten() / total_mass
        flat_indices = np.arange(cutoff**num_modes, dtype=int)
        return [
            np.unravel_index(np.random.choice(flat_indices, p=flat_probs), [cutoff] * num_modes)
            for _ in range(num_samples)
        ]

    # Append one extra outcome carrying the probability mass beyond the cutoff.
    sentinel = cutoff**num_modes

    def to_sample(index):
        """Map a flat index to a mode tuple, or to the out-of-bounds placeholder."""
        if index == sentinel:
            return out_of_bounds
        return np.unravel_index(index, [cutoff] * num_modes)

    flat_indices = np.arange(1 + cutoff**num_modes, dtype=int)
    flat_probs = np.append(probabilities.flatten(), 1.0 - total_mass)
    return [to_sample(np.random.choice(flat_indices, p=flat_probs)) for _ in range(num_samples)]
def seed(seed_val=None):
    r"""Seeds the random number generator used in the sampling algorithms.

    This is a thin wrapper around ``numpy.random.seed()``; seeding with a
    specific integer makes the sampling algorithms deterministic.

    Args:
        seed_val (int): Seed for RandomState. Must be convertible to 32 bit unsigned integers.
    """
    np.random.seed(seed_val)
def _hafnian_sample_graph_rank_one(G, n_mean):
r"""Returns a sample from a rank one adjacency matrix `\bm{A} = \bm{G} \bm{G}^T` where :math:`\bm{G}`
is a row vector.
Args:
G (array): factorization of the rank-one matrix A = G @ G.T.
nmean (float): Total mean photon number.
Returns:
(array): sample.
"""
s = np.arcsinh(np.sqrt(n_mean))
q = 1.0 - np.tanh(s) ** 2
total_photon_num = 2 * np.random.negative_binomial(0.5, q, 1)[0]
sample = np.zeros(len(G))
single_ph_ps = np.abs(G) ** 2
single_ph_ps /= np.sum(single_ph_ps)
for _ in range(total_photon_num):
detector = np.random.choice(len(G), p=single_ph_ps)
sample[detector] += 1
return sample
def hafnian_sample_graph_rank_one(G, n_mean, samples=1):
    r"""Returns samples from a rank one adjacency matrix `\bm{A} = \bm{G} \bm{G}^T` where :math:`\bm{G}`
    is a row vector.

    Args:
        G (array): factorization of the rank-one matrix A = G @ G.T.
        n_mean (float): Total mean photon number.
        samples (int): the number of samples to return.

    Returns:
        (array): samples, with shape [samples, len(G)].
    """
    return np.array([_hafnian_sample_graph_rank_one(G, n_mean) for _ in range(samples)])
|
##############################################################################
#
# A simple program to write some data to an Excel file using the XlsxWriter
# Python module.
#
# This program is shown, with explanations, in Tutorial 1 of the XlsxWriter
# documentation.
#
# Copyright 2013-2018, John McNamara, jmcnamara@cpan.org
#
import xlsxwriter

# Create a workbook and add a worksheet.
workbook = xlsxwriter.Workbook('Expenses01.xlsx')
worksheet = workbook.add_worksheet()

# Some data we want to write to the worksheet.
expenses = (
    ['Rent', 1000],
    ['Gas', 100],
    ['Food', 300],
    ['Gym', 50],
)

# Columns are zero indexed: item names in column A, costs in column B.
col = 0

# Write the data out row by row (rows are zero indexed too).
for row, (item, cost) in enumerate(expenses):
    worksheet.write(row, col, item)
    worksheet.write(row, col + 1, cost)

# Write a total using a formula, in the row just below the data.
total_row = len(expenses)
worksheet.write(total_row, 0, 'Total')
worksheet.write(total_row, 1, '=SUM(B1:B4)')

workbook.close()
|
/**
* Copyright 2017-present, BOCOMUI, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
import React from 'react'
import { findDOMNode } from 'react-dom'
import TestUtils from 'react-addons-test-utils'
import { Select, Option } from '../index'
describe('Select', () => {
beforeEach(() => {
document.body.innerHTML = ''
})
it('should value works', () => {
const instance = TestUtils.renderIntoDocument(
<Select value="1" onChange={jest.fn()}>
<Option value="0">苹果</Option>
<Option value="1">三星</Option>
<Option value="2">小米</Option>
</Select>
)
const container = findDOMNode(instance)
TestUtils.Simulate.click(container)
const selected = document.querySelector('.bocomui-select__option--selected')
expect(selected.textContent).toBe('三星')
})
it('should defaultValue works', () => {
const instance = TestUtils.renderIntoDocument(
<Select defaultValue="1">
<Option value="0">苹果</Option>
<Option value="1">三星</Option>
<Option value="2">小米</Option>
</Select>
)
const container = findDOMNode(instance)
TestUtils.Simulate.click(container)
const selected = document.querySelector('.bocomui-select__option--selected')
expect(selected.textContent).toBe('三星')
})
it('should onChange works', () => {
const handleChange = jest.fn()
const instance = TestUtils.renderIntoDocument(
<Select onChange={handleChange}>
<Option value="0">苹果</Option>
<Option value="1">三星</Option>
<Option value="2">小米</Option>
</Select>
)
const container = findDOMNode(instance)
TestUtils.Simulate.click(container)
const options = document.querySelectorAll('.bocomui-select__option')
TestUtils.Simulate.click(options[0])
expect(handleChange).toBeCalledWith('0', {value: '0', children: '苹果'})
})
it('should data works', () => {
const handleChange = jest.fn()
const instance = TestUtils.renderIntoDocument(
<Select data={['a', 'b']} render={item => <Option>{item}</Option>} />
)
const container = findDOMNode(instance)
TestUtils.Simulate.click(container)
const options = document.querySelectorAll('.bocomui-select__option')
expect(options.length).toBe(2)
})
it('should searchable works', () => {
const handleChange = jest.fn()
const instance = TestUtils.renderIntoDocument(
<Select defaultValue="1" searchable>
<Option value="0">苹果</Option>
<Option value="1">三星</Option>
<Option value="2">小米</Option>
</Select>
)
const container = findDOMNode(instance)
TestUtils.Simulate.click(container)
const searchInput = document.querySelector('input')
TestUtils.Simulate.change(searchInput, {
target: {
value: '1'
}
})
expect(document.querySelectorAll('.bocomui-select__option').length).toBe(1)
TestUtils.Simulate.change(searchInput, {
target: {
value: '三'
}
})
expect(document.querySelectorAll('.bocomui-select__option').length).toBe(1)
TestUtils.Simulate.change(searchInput, {
target: {
value: 'x'
}
})
const options = document.querySelectorAll('.bocomui-select__option')
expect(options.length).toBe(1)
expect(options[0].textContent).toBe('无选项')
})
it('should empty children works', () => {
expect(() => {
TestUtils.renderIntoDocument(<Select>{null}</Select>)
}).not.toThrow()
})
})
|
"""
Ephemeris calculations using SunPy coordinate frames
"""
import numpy as np
from packaging import version
import astropy.units as u
from astropy.constants import c as speed_of_light
from astropy.coordinates import (
ICRS,
HeliocentricEclipticIAU76,
SkyCoord,
get_body_barycentric,
get_body_barycentric_posvel,
)
from astropy.coordinates.representation import (
CartesianDifferential,
CartesianRepresentation,
SphericalRepresentation,
)
from sunpy import log
from sunpy.time import parse_time
from sunpy.time.time import _variables_for_parse_time_docstring
from sunpy.util.decorators import add_common_docstring
from .frames import HeliographicStonyhurst
__author__ = "Albert Y. Shih"
__email__ = "ayshih@gmail.com"

# Public API of this module.
__all__ = ['get_body_heliographic_stonyhurst', 'get_earth',
           'get_horizons_coord']
@add_common_docstring(**_variables_for_parse_time_docstring())
def get_body_heliographic_stonyhurst(body, time='now', observer=None, *, include_velocity=False):
    """
    Return a `~sunpy.coordinates.frames.HeliographicStonyhurst` frame for the location of a
    solar-system body at a specified time. The location can be corrected for light travel time
    to an observer.

    Parameters
    ----------
    body : `str`
        The solar-system body for which to calculate positions
    time : {parse_time_types}
        Time to use in a parse_time-compatible format
    observer : `~astropy.coordinates.SkyCoord`
        If None, the returned coordinate is the instantaneous or "true" location.
        If not None, the returned coordinate is the astrometric location (i.e., accounts for light
        travel time to the specified observer)
    include_velocity : `bool`, optional
        If True, include the body's velocity in the output coordinate. Defaults to False.

    Returns
    -------
    out : `~sunpy.coordinates.frames.HeliographicStonyhurst`
        Location of the solar-system body in the `~sunpy.coordinates.HeliographicStonyhurst` frame

    Notes
    -----
    There is no correction for aberration due to observer motion. For a body close to the Sun in
    angular direction relative to the observer, the correction can be negligible because the
    apparent location of the body will shift in tandem with the Sun.

    Examples
    --------
    >>> from sunpy.coordinates.ephemeris import get_body_heliographic_stonyhurst

    Obtain the location of Venus

    >>> get_body_heliographic_stonyhurst('venus', '2012-06-06 04:07:29')
    <HeliographicStonyhurst Coordinate (obstime=2012-06-06T04:07:29.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0.07349535, 0.05223575, 0.72605496)>

    Obtain the location of Venus as seen from Earth when adjusted for light travel time

    >>> earth = get_body_heliographic_stonyhurst('earth', '2012-06-06 04:07:29')
    >>> get_body_heliographic_stonyhurst('venus', '2012-06-06 04:07:29', observer=earth)
    INFO: Apparent body location accounts for 144.07 seconds of light travel time [sunpy.coordinates.ephemeris]
    <HeliographicStonyhurst Coordinate (obstime=2012-06-06T04:07:29.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0.07084926, 0.0520573, 0.72605477)>

    Obtain the location and velocity of Mars

    >>> mars = get_body_heliographic_stonyhurst('mars', '2001-02-03', include_velocity=True)
    >>> mars
    <HeliographicStonyhurst Coordinate (obstime=2001-02-03T00:00:00.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (63.03105777, -5.20656151, 1.6251161)
     (d_lon, d_lat, d_radius) in (arcsec / s, arcsec / s, km / s)
        (-0.02323686, 0.00073376, -1.4798387)>

    Transform that same location and velocity of Mars to a different frame using
    `~astropy.coordinates.SkyCoord`.

    >>> from astropy.coordinates import SkyCoord
    >>> from sunpy.coordinates import Helioprojective
    >>> SkyCoord(mars).transform_to(Helioprojective(observer=earth))
    <SkyCoord (Helioprojective: obstime=2001-02-03T00:00:00.000, rsun=695700.0 km, observer=<HeliographicStonyhurst Coordinate (obstime=2012-06-06T04:07:29.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (6.2686056e-15, -0.00766698, 1.01475668)>): (Tx, Ty, distance) in (arcsec, arcsec, AU)
        (-298654.73268523, -21726.6154073, 1.40134156)
     (d_Tx, d_Ty, d_distance) in (arcsec / s, arcsec / s, km / s)
        (-0.01663438, -0.00058027, -15.08908184)>
    """
    obstime = parse_time(time)

    if observer is None:
        # If there is no observer, there is no adjustment for light travel time
        emitted_time = obstime
    else:
        observer_icrs = SkyCoord(observer).icrs.cartesian

        # This implementation is modeled after Astropy's `_get_apparent_body_position`
        # Fixed-point iteration: refine the light travel time until successive
        # passes agree to within 10 ns.
        light_travel_time = 0.*u.s
        emitted_time = obstime
        delta_light_travel_time = 1.*u.s  # placeholder value
        while np.any(np.fabs(delta_light_travel_time) > 1.0e-8*u.s):
            body_icrs = get_body_barycentric(body, emitted_time)
            distance = (body_icrs - observer_icrs).norm()
            delta_light_travel_time = light_travel_time - distance / speed_of_light
            light_travel_time = distance / speed_of_light
            emitted_time = obstime - light_travel_time

        # Scalar and array light-travel times are formatted differently.
        if light_travel_time.isscalar:
            ltt_string = f"{light_travel_time.to_value('s'):.2f}"
        else:
            ltt_string = f"{light_travel_time.to_value('s')}"
        log.info(f"Apparent body location accounts for {ltt_string} seconds of light travel time")

    if include_velocity:
        pos, vel = get_body_barycentric_posvel(body, emitted_time)
        body_icrs = pos.with_differentials(vel.represent_as(CartesianDifferential))
    else:
        body_icrs = get_body_barycentric(body, emitted_time)

    # The position is evaluated at the (possibly light-travel-corrected)
    # emitted time, but expressed in the frame at the observation time.
    body_hgs = ICRS(body_icrs).transform_to(HeliographicStonyhurst(obstime=obstime))

    return body_hgs
@add_common_docstring(**_variables_for_parse_time_docstring())
def get_earth(time='now', *, include_velocity=False):
    """
    Return a `~astropy.coordinates.SkyCoord` for the location of the Earth at a specified time in
    the `~sunpy.coordinates.frames.HeliographicStonyhurst` frame. The longitude will be zero by
    definition.

    Parameters
    ----------
    time : {parse_time_types}
        Time to use in a parse_time-compatible format
    include_velocity : `bool`, optional
        If True, include the Earth's velocity in the output coordinate. Defaults to False.

    Returns
    -------
    out : `~astropy.coordinates.SkyCoord`
        Location of the Earth in the `~sunpy.coordinates.frames.HeliographicStonyhurst` frame

    Notes
    -----
    The Earth's velocity in the output coordinate will invariably be negligible in the longitude
    direction because the `~sunpy.coordinates.frames.HeliographicStonyhurst` frame rotates in time
    such that the plane of zero longitude (the XZ-plane) tracks Earth.

    Examples
    --------
    >>> from sunpy.coordinates.ephemeris import get_earth
    >>> get_earth('2001-02-03 04:05:06')
    <SkyCoord (HeliographicStonyhurst: obstime=2001-02-03T04:05:06.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0., -6.18656962, 0.98567647)>
    >>> get_earth('2001-02-03 04:05:06', include_velocity=True)
    <SkyCoord (HeliographicStonyhurst: obstime=2001-02-03T04:05:06.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0., -6.18656962, 0.98567647)
     (d_lon, d_lat, d_radius) in (arcsec / s, arcsec / s, km / s)
        (6.42643739e-11, -0.00279484, 0.24968506)>
    >>> get_earth('2001-02-03 04:05:06', include_velocity=True).transform_to('heliocentricinertial')
    <SkyCoord (HeliocentricInertial: obstime=2001-02-03T04:05:06.000): (lon, lat, distance) in (deg, deg, AU)
        (58.41594489, -6.18656962, 0.98567647)
     (d_lon, d_lat, d_distance) in (arcsec / s, arcsec / s, km / s)
        (0.0424104, -0.00279484, 0.2496851)>
    """
    earth = get_body_heliographic_stonyhurst('earth', time=time, include_velocity=include_velocity)

    # Force the longitude to exactly zero (its defining value in this frame)
    # while preserving any attached differentials such as velocity.
    zeroed_lon = SphericalRepresentation(0*u.deg, earth.lat, earth.radius)
    earth = earth.realize_frame(zeroed_lon.with_differentials(earth.spherical.differentials))

    return SkyCoord(earth)
@add_common_docstring(**_variables_for_parse_time_docstring())
def get_horizons_coord(body, time='now', id_type=None, *, include_velocity=False):
    """
    Queries JPL HORIZONS and returns a `~astropy.coordinates.SkyCoord` for the location of a
    solar-system body at a specified time. This location is the instantaneous or "true" location,
    and is not corrected for light travel time or observer motion.

    .. note::
        This function requires the Astroquery package to be installed and
        requires an Internet connection.

    Parameters
    ----------
    body : `str`
        The solar-system body for which to calculate positions. One can also use the search form
        linked below to find valid names or ID numbers.
    id_type : `None`, `str`
        See the astroquery documentation for information on id_types: `astroquery.jplhorizons`.
        If the installed astroquery version is less than 0.4.4, defaults to ``'majorbody'``.
    time : {parse_time_types}, `dict`
        Time to use in a parse_time-compatible format.

        Alternatively, this can be a dictionary defining a range of times and
        dates; the range dictionary has to be of the form
        {{'start': start_time, 'stop': stop_time, 'step':'n[y|d|m|s]'}}.
        ``start_time`` and ``stop_time`` must be in a parse_time-compatible format,
        and are interpreted as UTC time. ``step`` must be a string with either a
        number and interval length (e.g. for every 10 seconds, ``'10s'``), or a
        plain number for a number of evenly spaced intervals. For more information
        see the docstring of `astroquery.jplhorizons.HorizonsClass`.
    include_velocity : `bool`, optional
        If True, include the body's velocity in the output coordinate. Defaults to False.

    Returns
    -------
    `~astropy.coordinates.SkyCoord`
        Location of the solar-system body

    Notes
    -----
    Be aware that there can be discrepancies between the coordinates returned by JPL HORIZONS,
    the coordinates reported in mission data files, and the coordinates returned by
    `~sunpy.coordinates.get_body_heliographic_stonyhurst`.

    References
    ----------
    * `JPL HORIZONS <https://ssd.jpl.nasa.gov/?horizons>`_
    * `JPL HORIZONS form to search bodies <https://ssd.jpl.nasa.gov/horizons.cgi?s_target=1#top>`_
    * `Astroquery <https://astroquery.readthedocs.io/en/latest/>`_

    Examples
    --------
    >>> from sunpy.coordinates.ephemeris import get_horizons_coord

    Query the location of Venus

    >>> get_horizons_coord('Venus barycenter', '2001-02-03 04:05:06')  # doctest: +REMOTE_DATA
    INFO: Obtained JPL HORIZONS location for Venus Barycenter (2) [sunpy.coordinates.ephemeris]
    <SkyCoord (HeliographicStonyhurst: obstime=2001-02-03T04:05:06.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (-33.93155836, -1.64998443, 0.71915147)>

    Query the location of the SDO spacecraft

    >>> get_horizons_coord('SDO', '2011-11-11 11:11:11')  # doctest: +REMOTE_DATA
    INFO: Obtained JPL HORIZONS location for Solar Dynamics Observatory (spac [sunpy.coordinates.ephemeris]
    <SkyCoord (HeliographicStonyhurst: obstime=2011-11-11T11:11:11.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0.01019118, 3.29640728, 0.99011042)>

    Query the location of the SOHO spacecraft via its ID number (-21)

    >>> get_horizons_coord(-21, '2004-05-06 11:22:33')  # doctest: +REMOTE_DATA
    INFO: Obtained JPL HORIZONS location for SOHO (spacecraft) (-21) [sunpy.coordinates.ephemeris]
    <SkyCoord (HeliographicStonyhurst: obstime=2004-05-06T11:22:33.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (0.25234902, -3.55863633, 0.99923086)>

    Query the location and velocity of the asteroid Juno

    >>> get_horizons_coord('Juno', '1995-07-18 07:17', 'smallbody', include_velocity=True)  # doctest: +REMOTE_DATA
    INFO: Obtained JPL HORIZONS location for 3 Juno (A804 RA) [sunpy.coordinates.ephemeris]
    <SkyCoord (HeliographicStonyhurst: obstime=1995-07-18T07:17:00.000, rsun=695700.0 km): (lon, lat, radius) in (deg, deg, AU)
        (-25.16107532, 14.59098438, 3.17667664)
     (d_lon, d_lat, d_radius) in (arcsec / s, arcsec / s, km / s)
        (-0.03306548, 0.00052415, -2.66709222)>

    Query the location of Solar Orbiter at a set of 12 regularly sampled times

    >>> get_horizons_coord('Solar Orbiter',
    ...                    time={{'start': '2020-12-01',
    ...                           'stop': '2020-12-02',
    ...                           'step': '12'}})  # doctest: +REMOTE_DATA
    INFO: Obtained JPL HORIZONS location for Solar Orbiter (spacecraft) (-144 [sunpy.coordinates.ephemeris]
    ...
    """
    # Import here so that astroquery is not a module-level dependency
    import astroquery
    from astroquery.jplhorizons import Horizons

    if id_type is None and version.parse(astroquery.__version__) < version.parse('0.4.4'):
        # For older versions of astroquery retain default behaviour of this function
        # if id_type isn't manually specified.
        id_type = 'majorbody'

    if isinstance(time, dict):
        if set(time.keys()) != set(['start', 'stop', 'step']):
            raise ValueError('time dictionary must have the keys ["start", "stop", "step"]')
        epochs = time
        # HORIZONS expects range endpoints as TDB strings in this exact format.
        jpl_fmt = '%Y-%m-%d %H:%M:%S'
        epochs['start'] = parse_time(epochs['start']).tdb.strftime(jpl_fmt)
        epochs['stop'] = parse_time(epochs['stop']).tdb.strftime(jpl_fmt)
    else:
        obstime = parse_time(time)
        array_time = np.reshape(obstime, (-1,))  # Convert to an array, even if scalar
        epochs = array_time.tdb.jd.tolist()  # Time must be provided in JD TDB

    query = Horizons(id=body, id_type=id_type,
                     location='500@10',  # Heliocentric (mean ecliptic)
                     epochs=epochs)
    try:
        result = query.vectors()
    except Exception as e:  # Catch and re-raise all exceptions, and also provide query URL if generated
        if query.uri is not None:
            log.error(f"See the raw output from the JPL HORIZONS query at {query.uri}")
        raise e
    finally:
        # Always release the HTTP session, even when the query fails.
        query._session.close()
    log.info(f"Obtained JPL HORIZONS location for {result[0]['targetname']}")
    log.debug(f"See the raw output from the JPL HORIZONS query at {query.uri}")

    if isinstance(time, dict):
        obstime = parse_time(result['datetime_jd'], format='jd', scale='tdb')
    else:
        # JPL HORIZONS results are sorted by observation time, so this sorting needs to be undone.
        # Calling argsort() on an array returns the sequence of indices of the unsorted list to put the
        # list in order. Calling argsort() again on the output of argsort() reverses the mapping:
        # the output is the sequence of indices of the sorted list to put that list back in the
        # original unsorted order.
        unsorted_indices = obstime.argsort().argsort()
        result = result[unsorted_indices]

    vector = CartesianRepresentation(result['x'], result['y'], result['z'])
    if include_velocity:
        velocity = CartesianDifferential(result['vx'], result['vy'], result['vz'])
        vector = vector.with_differentials(velocity)
    coord = SkyCoord(vector, frame=HeliocentricEclipticIAU76, obstime=obstime)

    # Transform to Stonyhurst and restore the shape of the input time (scalar in, scalar out).
    return coord.transform_to(HeliographicStonyhurst).reshape(obstime.shape)
|
/*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
// Danish ('da') localization strings for CKEditor's "contextmenu" plugin.
CKEDITOR.plugins.setLang( 'contextmenu', 'da', {
	options: 'Muligheder for hjælpemenu'
});
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import os
import sys
import unittest
import zipfile
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
from glucose import pack
from glucose import util
from glucose import zipfix
class ResetTimestampTest(unittest.TestCase):
    """Tests for zipfix.reset_all_timestamps_in_zip."""

    def test_reset_timestamps(self):
        """Resetting must change only the timestamps, not names or contents."""
        with util.temporary_directory(prefix='glyco-zipfix-') as tempdir:
            # Create an archive
            zipname = os.path.join(tempdir, 'testfile.zip')
            with zipfile.ZipFile(zipname, 'w') as f:
                f.write(os.path.join(DATA_DIR, 'zipfix_test', 'file1.txt'))
                f.write(os.path.join(DATA_DIR, 'zipfix_test', 'file2.txt'))
            # Read original state
            with zipfile.ZipFile(zipname, 'r') as f:
                dt_orig = [info.date_time for info in f.infolist()]
                namelist_orig = f.namelist()
                namelist_orig.sort()
                hashes_orig = [hashlib.sha1(f.read(filename)).hexdigest()
                               for filename in namelist_orig]
            # Reset
            zipfix.reset_all_timestamps_in_zip(zipname)
            # Make sure only timestamps have changed.
            with zipfile.ZipFile(zipname, 'r') as f:
                dt_new = [info.date_time for info in f.infolist()]
                namelist_new = f.namelist()
                namelist_new.sort()
                hashes_new = [hashlib.sha1(f.read(filename)).hexdigest()
                              for filename in namelist_new]
            self.assertEqual(namelist_orig, namelist_new)
            self.assertEqual(hashes_orig, hashes_new)
            self.assertNotEqual(dt_orig, dt_new)
            for dt in dt_new:
                # (1980, 0, 0, ...) is how zipfile decodes an all-zero DOS
                # timestamp field; presumably zipfix zeroes the raw field.
                # TODO(review): confirm against zipfix's implementation.
                self.assertEqual(dt, (1980, 0, 0, 0, 0, 0))


if __name__ == '__main__':
    unittest.main()
|
import pandas as pd
from bs4 import BeautifulSoup
import glob
import ntpath
from bs4.element import Comment
def path_leaf(path):
    """Return the final path component, even when *path* ends in a separator."""
    directory, leaf = ntpath.split(path)
    # A trailing separator leaves an empty leaf; fall back to the directory's basename.
    return leaf if leaf else ntpath.basename(directory)
# Mapping of crawled HTML filenames to their source URLs, sorted for a
# deterministic row order.
csv_file = "/media/rna/yahoo_crawl_data/Yahoo-20190406T235503Z-001/Yahoo/URLtoHTML_yahoo_news.csv"
mapping_file_df = (
    pd.read_csv(csv_file).sort_values(by=["filename", "URL"]).reset_index(drop=True)
)
# Every crawled page found under the crawl directory.
crawl_data_dir = "/media/rna/yahoo_crawl_data/Yahoo-20190406T235503Z-001/Yahoo/yahoo/"
list_of_html_files = glob.glob("{}/*.html".format(crawl_data_dir))
# Credits: https://stackoverflow.com/a/1983219/756986
def tag_visible(element):
    """Return True when a bs4 text node belongs to user-visible page content."""
    if isinstance(element, Comment):
        return False
    # Text living inside these containers is never rendered for the reader.
    hidden_parents = {"style", "script", "head", "title", "meta", "[document]"}
    return element.parent.name not in hidden_parents
def text_from_html(body):
    """Extract the visible text of an HTML document as one space-joined string."""
    soup = BeautifulSoup(body, "html.parser")
    text_nodes = soup.findAll(text=True)
    visible = (node.strip() for node in text_nodes if tag_visible(node))
    return u" ".join(visible)
def is_likely_a_word(string):
    """True when the string is non-empty and purely alphabetic."""
    return bool(string) and all(ch.isalpha() for ch in string)
# Stream every likely word from the crawled pages into one big lowercase list.
# (The unused `big` accumulator was removed, and file handles are now closed
# promptly via context managers instead of leaking from bare open().read().)
with open("./yahoo_big.txt", "w") as fh:
    for html_path in list_of_html_files:
        with open(html_path) as html_file:
            html = html_file.read()
        tokens = text_from_html(html).split()
        for word in filter(is_likely_a_word, tokens):
            fh.write("{}\n".format(word.lower()))
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* @file binding-redirect.ts
* @author tngan
* @desc Binding-level API, declare the functions using Redirect binding
*/
var utility_1 = require("./utility");
var libsaml_1 = require("./libsaml");
var url = require("url");
var urn_1 = require("./urn");
var lodash_1 = require("lodash");
var binding = urn_1.wording.binding;
var urlParams = urn_1.wording.urlParams;
/**
 * @private
 * @desc Helper of generating URL param/value pair
 * @param {string} param key
 * @param {string} value value of key
 * @param {boolean} first when strictly `true`, start the query string with '?'; otherwise append with '&'
 * @return {string}
 */
function pvPair(param, value, first) {
    var separator = first === true ? '?' : '&';
    return separator + param + '=' + value;
}
/**
 * @private
 * @desc Refactored part of URL generation for login/logout requests and responses
 * @param {object} opts
 * @param {string} opts.baseUrl endpoint the query string is appended to
 * @param {string} opts.type selects the query param name (SAMLRequest/SAMLResponse/...)
 * @param {boolean} opts.isSigned whether a detached query-string signature is required
 * @param {string} opts.context raw SAML XML message
 * @param {object} opts.entitySetting signing configuration of the issuing entity
 * @param {string} [opts.relayState] optional relay state to forward
 * @return {string} complete redirect URL
 */
function buildRedirectURL(opts) {
    var baseUrl = opts.baseUrl, type = opts.type, isSigned = opts.isSigned, context = opts.context, entitySetting = opts.entitySetting;
    var _a = opts.relayState, relayState = _a === void 0 ? '' : _a;
    // '?' is only needed when the base URL carries no query string yet.
    var noParams = (url.parse(baseUrl).query || []).length === 0;
    var queryParam = libsaml_1.default.getQueryParamByType(type);
    // In general, this xmlstring is required to do deflate -> base64 -> urlencode
    var samlRequest = encodeURIComponent(utility_1.default.base64Encode(utility_1.default.deflateString(context)));
    if (relayState !== '') {
        relayState = pvPair(urlParams.relayState, encodeURIComponent(relayState));
    }
    if (isSigned) {
        var sigAlg = pvPair(urlParams.sigAlg, encodeURIComponent(entitySetting.requestSignatureAlgorithm));
        // The redirect binding signs the octet string
        // "<param>=<message>[&RelayState=...]&SigAlg=..." — the order matters.
        var octetString = samlRequest + relayState + sigAlg;
        return baseUrl + pvPair(queryParam, octetString, noParams) + pvPair(urlParams.signature, encodeURIComponent(libsaml_1.default.constructMessageSignature(queryParam + '=' + octetString, entitySetting.privateKey, entitySetting.privateKeyPass, null, entitySetting.requestSignatureAlgorithm)));
    }
    return baseUrl + pvPair(queryParam, samlRequest + relayState, noParams);
}
/**
 * @desc Redirect URL for login request
 * @param {object} entity object includes both idp and sp
 * @param {function} customTagReplacement used when developers have their own login request template
 * @return {object} `{ id, context }` where `context` is the redirect URL
 */
function loginRequestRedirectURL(entity, customTagReplacement) {
    var metadata = { idp: entity.idp.entityMeta, sp: entity.sp.entityMeta };
    var spSetting = entity.sp.entitySetting;
    var id = '';
    if (metadata && metadata.idp && metadata.sp) {
        // The request targets the IdP's redirect-binding SSO endpoint.
        var base = metadata.idp.getSingleSignOnService(binding.redirect);
        var rawSamlRequest = void 0;
        if (spSetting.loginRequestTemplate) {
            // Custom template: the caller supplies both the id and the XML.
            var info = customTagReplacement(spSetting.loginRequestTemplate);
            id = lodash_1.get(info, 'id');
            rawSamlRequest = lodash_1.get(info, 'context');
        }
        else {
            // Default template: fill in the standard AuthnRequest tags.
            id = spSetting.generateID();
            rawSamlRequest = libsaml_1.default.replaceTagsByValue(libsaml_1.default.defaultLoginRequestTemplate.context, {
                ID: id,
                Destination: base,
                Issuer: metadata.sp.getEntityID(),
                IssueInstant: new Date().toISOString(),
                NameIDFormat: urn_1.namespace.format[spSetting.loginNameIDFormat] || urn_1.namespace.format.emailAddress,
                AssertionConsumerServiceURL: metadata.sp.getAssertionConsumerService(binding.post),
                EntityID: metadata.sp.getEntityID(),
                AllowCreate: spSetting.allowCreate,
            });
        }
        return {
            id: id,
            context: buildRedirectURL({
                context: rawSamlRequest,
                type: urlParams.samlRequest,
                isSigned: metadata.sp.isAuthnRequestSigned(),
                entitySetting: spSetting,
                baseUrl: base,
                relayState: spSetting.relayState,
            }),
        };
    }
    throw new Error('Missing declaration of metadata');
}
/**
 * @desc Redirect URL for logout request
 * @param {object} user current logged user (e.g. req.user)
 * @param {object} entity object includes both idp and sp
 * @param {string} relayState relay state forwarded with the request
 * @param {function} customTagReplacement used when developers have their own logout request template
 * @return {object} `{ id, context }` where `context` is the redirect URL
 */
function logoutRequestRedirectURL(user, entity, relayState, customTagReplacement) {
    var metadata = { init: entity.init.entityMeta, target: entity.target.entityMeta };
    var initSetting = entity.init.entitySetting;
    var id = '';
    if (metadata && metadata.init && metadata.target) {
        // The request targets the partner entity's redirect-binding SLO endpoint.
        var base = metadata.target.getSingleLogoutService(binding.redirect);
        var rawSamlRequest = '';
        if (initSetting.logoutRequestTemplate) {
            // Custom template: the caller supplies both the id and the XML.
            var info = customTagReplacement(initSetting.logoutRequestTemplate);
            id = lodash_1.get(info, 'id');
            rawSamlRequest = lodash_1.get(info, 'context');
        }
        else {
            // Default template: fill in the standard LogoutRequest tags.
            id = initSetting.generateID();
            rawSamlRequest = libsaml_1.default.replaceTagsByValue(libsaml_1.default.defaultLogoutRequestTemplate.context, {
                ID: id,
                Destination: base,
                EntityID: metadata.init.getEntityID(),
                Issuer: metadata.init.getEntityID(),
                IssueInstant: new Date().toISOString(),
                NameIDFormat: urn_1.namespace.format[initSetting.logoutNameIDFormat] || urn_1.namespace.format.emailAddress,
                NameID: user.logoutNameID,
                SessionIndex: user.sessionIndex,
            });
        }
        return {
            id: id,
            context: buildRedirectURL({
                context: rawSamlRequest,
                relayState: relayState,
                type: urlParams.logoutRequest,
                isSigned: entity.target.entitySetting.wantLogoutRequestSigned,
                entitySetting: initSetting,
                baseUrl: base,
            }),
        };
    }
    throw new Error('Missing declaration of metadata');
}
/**
 * @desc Redirect URL for logout response
 * @param {object} requestInfo corresponding request, used to obtain the id
 * @param {object} entity object includes both idp and sp
 * @param {string} relayState relay state forwarded with the response
 * @param {function} customTagReplacement used when developers have their own logout response template
 * @return {object} `{ id, context }` where `context` is the redirect URL
 */
function logoutResponseRedirectURL(requestInfo, entity, relayState, customTagReplacement) {
    var id = '';
    var metadata = {
        init: entity.init.entityMeta,
        target: entity.target.entityMeta,
    };
    var initSetting = entity.init.entitySetting;
    if (metadata && metadata.init && metadata.target) {
        // The response targets the partner entity's redirect-binding SLO endpoint.
        var base = metadata.target.getSingleLogoutService(binding.redirect);
        var rawSamlResponse = void 0;
        if (initSetting.logoutResponseTemplate) {
            // Custom template: the caller supplies both the id and the XML.
            var template = customTagReplacement(initSetting.logoutResponseTemplate);
            id = lodash_1.get(template, 'id');
            rawSamlResponse = lodash_1.get(template, 'context');
        }
        else {
            id = initSetting.generateID();
            var tvalue = {
                ID: id,
                Destination: base,
                Issuer: metadata.init.getEntityID(),
                EntityID: metadata.init.getEntityID(),
                IssueInstant: new Date().toISOString(),
                StatusCode: urn_1.namespace.statusCode.success,
            };
            // Correlate the response with the originating logout request, if known.
            if (requestInfo && requestInfo.extract && requestInfo.extract.logoutrequest) {
                tvalue.InResponseTo = requestInfo.extract.logoutrequest.id;
            }
            rawSamlResponse = libsaml_1.default.replaceTagsByValue(libsaml_1.default.defaultLogoutResponseTemplate.context, tvalue);
        }
        return {
            id: id,
            context: buildRedirectURL({
                baseUrl: base,
                type: urlParams.logoutResponse,
                isSigned: entity.target.entitySetting.wantLogoutResponseSigned,
                context: rawSamlResponse,
                entitySetting: initSetting,
                relayState: relayState,
            }),
        };
    }
    throw new Error('Missing declaration of metadata');
}
// Public surface of the redirect binding.
var redirectBinding = {
    loginRequestRedirectURL: loginRequestRedirectURL,
    logoutRequestRedirectURL: logoutRequestRedirectURL,
    logoutResponseRedirectURL: logoutResponseRedirectURL,
};
exports.default = redirectBinding;
//# sourceMappingURL=binding-redirect.js.map
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Gaussian mixture model
"""
# Load external dependencies
from setup import *
# Load internal dependencies
from sklearn.mixture import GaussianMixture
def gmm_fit(X, N):
    """Fit one GaussianMixture per component count in *N*, returned in order."""
    fitted = []
    for n_components in N:
        # n_init=100 restarts EM many times to avoid poor local optima.
        fitted.append(GaussianMixture(n_components, n_init=100).fit(X))
    return fitted
def gmm_plot(ax, X, M_best, label=None):
    """
    Plot the data histogram together with the best-fit GMM density.

    Parameters
    ----------
    ax : matplotlib axes to draw on
    X : 1-D array of samples
    M_best : fitted ``sklearn.mixture.GaussianMixture``
    label : optional legend label for the density curves
    """
    # Evaluate the mixture on a dense grid spanning the data (with a margin).
    x = np.linspace(X.min() - .2, X.max() + .2, 1000)
    grid = np.array([x]).T
    # Fix: GaussianMixture.score_samples returns only the per-sample
    # log-density (a single array); unpacking it into two names raised.
    # Component responsibilities come from predict_proba instead.
    logprob = M_best.score_samples(grid)
    responsibilities = M_best.predict_proba(grid)
    pdf = np.exp(logprob)
    # Weight the total density by each component's responsibility.
    pdf_individual = responsibilities * pdf[:, np.newaxis]
    # 'density=True' replaces the removed matplotlib 'normed=True' keyword.
    ax.hist(X, 30, density=True, histtype='stepfilled', alpha=0.5)
    ax.plot(x, pdf, '-k', label=label)
    ax.plot(x, pdf_individual, '--k', label=label)
def gmm_components(AIC, BIC, N):
    """Plot the AIC/BIC information criteria against the number of components."""
    axis = plt.gca()
    for scores, style, name in ((AIC, '-k', 'AIC'), (BIC, '--k', 'BIC')):
        axis.plot(N, scores, style, label=name)
    axis.set_xlabel('No. components')
    axis.set_ylabel('Information criterion')
    axis.legend(loc=2)
def gmm_posterior(ax, X, M_best):
    """
    Plot the cumulative posterior probabilities p(j|lambda) of each component.

    Parameters
    ----------
    ax : matplotlib axes to draw on. (Fix: the argument was previously
        overwritten by ``plt.gca()`` and therefore ignored; it is now honored.)
    X : 1-D array of samples
    M_best : fitted ``sklearn.mixture.GaussianMixture``
    """
    x = np.linspace(X.min() - .2, X.max() + .2, 1000)
    # Posterior responsibilities, stacked cumulatively so the band between
    # consecutive curves shows each component's probability mass.
    p = M_best.predict_proba(np.array([x]).T)
    p = p.cumsum(1).T
    ax.fill_between(x, 0, p[0], color='gray', alpha=0.3)
    ax.set_xlim(X.min() - .2, X.max() + .2)
    ax.set_ylim(0, 1)
    # Raw strings keep the TeX escapes intact.
    ax.set_xlabel(r'$\lambda$')
    ax.set_ylabel(r'$p(j|\lambda)$')
|
from django.shortcuts import render
from django.template.loader import get_template
from django.http import HttpResponse
import datetime
from newapp.models import Course, Question, Answer
import random
from .forms import ContactForm, QuestionForm, Qform, SearchForm, AnswerForm
from django.db.models import Q
# Create your views here.
def index(request):
    """Landing page: show the question form and store valid submissions.

    Removed an unused ``now`` local and commented-out dead code; behavior for
    GET, valid POST, and invalid POST (re-render with the bound form) is
    unchanged.
    """
    if request.method == 'POST':
        # Bind the submitted data so it can be validated.
        form = Qform(request.POST)
        if form.is_valid():
            question = form.cleaned_data['question']
            details = form.cleaned_data['details']
            Question(question=question, details=details).save()
            return HttpResponse('Thanks')
    else:
        # GET (or any other method): present an unbound form.
        form = Qform()
    # Invalid POSTs fall through here with the bound form so errors render.
    return render(request, 'index.html', {'form': form})
def contact(request):
    """Render the contact page with the current timestamp.

    NOTE(review): a second ``contact`` view later in this module shadows this
    definition at import time — confirm which one is intended.
    """
    current = datetime.datetime.now()
    return render(request, 'contact.html', {'current_date': current})
def faq(request):
    """FAQ page with a free-text search over stored answers."""
    now = datetime.datetime.now()
    if request.method == 'POST':
        form = SearchForm(request.POST)
        if form.is_valid():
            # Get user data
            query = form.cleaned_data['search']
            # Can add more refined search algorithm here
            #results = Answer.objects.filter(question__contains = query)
            # Match the query against either the answer text or the question.
            results = Answer.objects.filter(Q(answer_text__contains=query) | Q(question__contains=query))
            # If no results come up, say that
            if len(results) == 0:
                search_status = "Your search returned no results"
            else:
                search_status = "Search results:"
            return render(request, 'faq.html', {'current_date': now, 'all_questions' : results, 'form' : form, 'search_status' : search_status})
    # If accessed without POST, render normal template with all FAQ
    # NOTE(review): an invalid POST also falls through to this branch,
    # silently discarding the search input — confirm intended.
    form = SearchForm()
    all_questions = Answer.objects.all()
    search_status = "All FAQ:"
    return render(request, 'faq.html', {'current_date': now, 'all_questions' : all_questions, 'form' : form, 'search_status' : search_status})
def research(request):
    """Research page: an answer form plus every stored question."""
    context = {
        'form': AnswerForm(),
        'all_questions': Question.objects.all(),
    }
    return render(request, 'research.html', context)
def addquestion(request):
    """Add-question page; stores a valid submission and thanks the user."""
    if request.method == 'POST':
        form = Qform(request.POST)
        if form.is_valid():
            question = form.cleaned_data['question']
            details = form.cleaned_data['details']
            q = Question(question = question, details = details)
            q.save()
            return HttpResponse('Thanks')
        else:
            # NOTE(review): an invalid POST is answered with a fresh blank
            # form, so validation errors are never shown — confirm intended.
            form = Qform()
            return render(request, 'addquestion.html', {'form': form})
    # GET: show a blank form alongside all existing questions and answers.
    form = Qform()
    all_questions = Question.objects.all()
    all_answers= Answer.objects.all()
    return render(request, 'addquestion.html', {'form': form, 'all_questions' : all_questions, 'all_answers' : all_answers})
def about(request):
    """Render the static about page."""
    context = {}
    return render(request, 'about.html', context)
def contact(request):
    """Contact page.

    NOTE(review): this second ``contact`` definition shadows the earlier one
    in this module; only this view is bound at import time.
    """
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            # NOTE(review): the cleaned data is extracted but never used —
            # presumably an email/ticket should be sent here; confirm.
            name = form.cleaned_data['name']
            email = form.cleaned_data['email']
            category = form.cleaned_data['category']
            subject = form.cleaned_data['subject']
            body = form.cleaned_data['body']
    # Always respond with a fresh, unbound form (validation errors discarded).
    form = ContactForm()
    return render(request, 'contact.html',{'form':form})
def course(request):
    """Show one randomly chosen course on the index page."""
    timestamp = datetime.datetime.now()
    all_courses = Course.objects.all()
    # random.choice works on querysets via len() + indexing.
    picked = random.choice(all_courses)
    return render(request, 'index.html', {'course': picked, 'current_date': timestamp})
def department(request, department):
    """Show one randomly chosen course from the given department."""
    timestamp = datetime.datetime.now()
    dept_courses = Course.objects.filter(department=department)
    picked = random.choice(dept_courses)
    return render(request, 'index.html', {'course': picked, 'current_date': timestamp})
def semester(request, semester):
    """Show one randomly chosen course from the given semester.

    Bug fix: the original called ``random.choice(semester)``, which picked a
    random *character* of the semester string instead of a course from the
    filtered queryset, and rendered it under a 'semester' context key that
    does not match the course-rendering template used by the sibling views.
    """
    now = datetime.datetime.now()
    courses = Course.objects.filter(semester=semester)
    random_course = random.choice(courses)
    return render(request, 'index.html', {'course': random_course, 'current_date': now})
# def get_answer(request):
# if request.method == 'POST':
# form = QAForm(request.POST)
# if form.is_valid():
# return HttpResponseRedirect('/thanks/')
# else:
# form = NameForm()
# return render(request, 'index.html', {'form': form})
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from horizon import forms
from horizon.utils import functions as utils
from openstack_dashboard.dashboards.settings.user import forms as user_forms
class UserSettingsView(forms.ModalFormView):
    """Modal form for per-user dashboard settings (language, timezone, page size)."""

    form_class = user_forms.UserSettingsForm
    template_name = 'settings/user/settings.html'

    def get_initial(self):
        """Seed the form from the session, falling back to cookies, then defaults."""
        session = self.request.session
        cookies = self.request.COOKIES
        language = session.get(
            settings.LANGUAGE_COOKIE_NAME,
            cookies.get(settings.LANGUAGE_COOKIE_NAME,
                        self.request.LANGUAGE_CODE))
        timezone = session.get(
            'django_timezone',
            cookies.get('django_timezone', 'UTC'))
        return {
            'language': language,
            'timezone': timezone,
            'pagesize': utils.get_page_size(self.request),
        }

    def form_valid(self, form):
        """Delegate persistence of the cleaned settings to the form itself."""
        return form.handle(self.request, form.cleaned_data)
|
import { makeStyles } from '@material-ui/styles';
// Responsive JSS for the main owner screen: every size-bearing rule is
// declared per breakpoint (md / lg / xl) so the layout scales with viewport.
const useStyles = makeStyles(theme => ({
  root: {
    [theme.breakpoints.up('xl')]: {
      paddingLeft: theme.spacing(5),
      paddingRight: theme.spacing(4),
    },
    [theme.breakpoints.down('lg')]: {
      paddingLeft: theme.spacing(4),
      paddingRight: theme.spacing(3),
    },
    [theme.breakpoints.down('md')]: {
      paddingLeft: theme.spacing(3),
      paddingRight: theme.spacing(2),
    },
    // Tab buttons: left-aligned, bold, no forced minimum width.
    '& .MuiTab-root':{
      paddingLeft: 0,
      paddingRight: 30,
      minWidth: 0,
      fontWeight:'bold'
    },
    '& .MuiTab-wrapper':{
      flexDirection: 'row',
      justifyContent: 'flex-start',
      textTransform: 'none',
      color: '#363636',
      [theme.breakpoints.up('xl')]: {
        fontSize :20
      },
      [theme.breakpoints.down('lg')]: {
        fontSize :14
      },
      [theme.breakpoints.down('md')]: {
        fontSize :10
      },
    },
    // The selected tab is marked with an underline rather than a color change.
    '& .MuiTab-textColorInherit.Mui-selected':{
      textDecoration: 'underline',
      textUnderlinePosition: 'under'
    },
  },
  title:{
    paddingTop: theme.spacing(2),
    paddingBottom: theme.spacing(2)
  },
  titleText: {
    [theme.breakpoints.up('xl')]: {
      fontSize :35
    },
    [theme.breakpoints.down('lg')]: {
      fontSize :25
    },
    [theme.breakpoints.down('md')]: {
      fontSize :18
    },
  },
  modalTitle: {
    [theme.breakpoints.up('xl')]: {
      fontSize :28
    },
    [theme.breakpoints.down('lg')]: {
      fontSize :20
    },
    [theme.breakpoints.down('md')]: {
      fontSize :14
    },
  },
  subTitle: {
    [theme.breakpoints.up('xl')]: {
      fontSize :18
    },
    [theme.breakpoints.down('lg')]: {
      fontSize :13
    },
    [theme.breakpoints.down('md')]: {
      fontSize :9
    },
  },
  tool: {
    [theme.breakpoints.up('xl')]: {
      minHeight: 67
    },
    [theme.breakpoints.down('lg')]: {
      minHeight: 47
    },
    [theme.breakpoints.down('md')]: {
      minHeight: 33
    },
  },
  // Rounded modal surface.
  paper: {
    backgroundColor: theme.palette.background.paper,
    boxShadow: theme.shadows[5],
    borderRadius: 15,
    width: 500
  },
  padding: {
    padding: theme.spacing(2, 4, 3),
  },
  close: {
    cursor: 'pointer',
    color: 'gray'
  },
  // Fixed overlay that roughly centers the busy indicator.
  div_indicator: {
    width: '100%',
    height: '100%',
    display: 'flex',
    position: 'fixed',
    paddingLeft: '35%',
    alignItems: 'center',
    marginTop: '-60px',
    zIndex: 999,
  },
  indicator: {
    color: 'gray'
  },
}));
// Styles for the "add owner" modal/form, scaled per breakpoint (md/lg/xl).
export const AddOwnerStyles = makeStyles(theme => ({
  paper: {
    backgroundColor: theme.palette.background.paper,
    borderRadius: 5,
    padding: theme.spacing(2, 4, 3),
  },
  footer: {
    [theme.breakpoints.up('xl')]: {
      paddingTop: 89,
    },
    [theme.breakpoints.down('lg')]: {
      paddingTop: 62,
    },
    [theme.breakpoints.down('md')]: {
      paddingTop: 43,
    },
    paddingBottom: 30
  },
  root: {
    '& .MuiOutlinedInput-multiline':{
      padding: 0,
      lineHeight: 'normal'
    },
    '& .MuiOutlinedInput-input':{
      [theme.breakpoints.up('xl')]: {
        padding: '17px 25px',
        fontSize: 22,
      },
      [theme.breakpoints.down('lg')]: {
        padding: '12px 18px',
        fontSize: 15,
      },
      [theme.breakpoints.down('md')]: {
        padding: '8px 13px',
        fontSize: 11,
      },
    },
    '& p':{
      marginBottom: 0
    }
  },
  plus:{
    color: '#707070',
    [theme.breakpoints.up('xl')]: {
      width:31 ,
      height: 31,
    },
    [theme.breakpoints.down('lg')]: {
      width:22 ,
      height: 22,
    },
    [theme.breakpoints.down('md')]: {
      width:15 ,
      height: 15,
    },
  },
  // Hidden native file input; the styled img area acts as the trigger.
  input: {
    display: 'none'
  },
  // Dashed drop-target look for the owner image preview.
  img: {
    objectFit:'cover',
    cursor: 'pointer',
    alignItems: 'center',
    justifyContent: 'center',
    display: 'flex',
    border: '1px dashed rgba(112,112,112,0.43)',
    borderRadius: 8,
    [theme.breakpoints.up('xl')]: {
      width: 116,
      height: 92,
      marginTop: 20,
      marginRight: 20
    },
    [theme.breakpoints.down('lg')]: {
      width: 81,
      height: 64,
      marginTop: 14,
      marginRight: 14
    },
    [theme.breakpoints.down('md')]: {
      width: 57,
      height: 45,
      marginTop: 10,
      marginRight: 10
    },
  },
  title:{
    [theme.breakpoints.up('xl')]: {
      fontSize: 18,
    },
    [theme.breakpoints.down('lg')]: {
      fontSize: 13,
    },
    [theme.breakpoints.down('md')]: {
      fontSize: 9,
    },
  },
  error:{
    color: 'red',
    [theme.breakpoints.up('xl')]: {
      fontSize: 18,
    },
    [theme.breakpoints.down('lg')]: {
      fontSize: 13,
    },
    [theme.breakpoints.down('md')]: {
      fontSize: 9,
    },
  },
  // Fixed overlay that roughly centers the busy indicator.
  div_indicator: {
    width: '100%',
    height: '100%',
    display: 'flex',
    position: 'fixed',
    paddingLeft: '50%',
    alignItems: 'center',
    marginTop: '-60px',
    zIndex: 999,
  },
  indicator: {
    color: 'gray'
  },
}));
// Styles for the "edit owner" page, scaled per breakpoint (md/lg/xl).
export const EditOwnerStyles = makeStyles(theme => ({
  root: {
    [theme.breakpoints.up('xl')]: {
      paddingLeft: theme.spacing(5),
      paddingRight: theme.spacing(4),
    },
    [theme.breakpoints.down('lg')]: {
      paddingLeft: theme.spacing(4),
      paddingRight: theme.spacing(3),
    },
    [theme.breakpoints.down('md')]: {
      paddingLeft: theme.spacing(3),
      paddingRight: theme.spacing(2),
    },
    '& .MuiOutlinedInput-multiline':{
      padding: 0,
      lineHeight: 'normal'
    },
    '& .MuiOutlinedInput-input':{
      [theme.breakpoints.up('xl')]: {
        padding: '17px 25px',
        fontSize: 22,
      },
      [theme.breakpoints.down('lg')]: {
        padding: '12px 18px',
        fontSize: 15,
      },
      [theme.breakpoints.down('md')]: {
        padding: '8px 13px',
        fontSize: 11,
      },
    },
    '& p':{
      marginBottom: 0
    }
  },
  title:{
    paddingTop: theme.spacing(2),
    paddingBottom: theme.spacing(2)
  },
  // Card-like body of the edit form.
  body: {
    [theme.breakpoints.up('xl')]: {
      marginTop: 64,
      marginBottom: 64,
      padding: 40,
      borderRadius: 30,
    },
    [theme.breakpoints.down('lg')]: {
      marginTop: 45,
      marginBottom: 45,
      padding: 28,
      borderRadius: 21,
    },
    [theme.breakpoints.down('md')]: {
      marginTop: 32,
      marginBottom: 32,
      padding: 20,
      borderRadius: 15,
    },
    boxShadow: '0 3px 5px 2px rgba(128, 128, 128, .3)',
  },
  item:{
    marginTop: theme.spacing(5),
  },
  paper: {
    backgroundColor: theme.palette.background.paper,
    border: '2px solid #000',
    boxShadow: theme.shadows[5],
    padding: theme.spacing(2, 4, 3),
  },
  plus:{
    color: '#707070',
    [theme.breakpoints.up('xl')]: {
      width:54 ,
      height: 54,
    },
    [theme.breakpoints.down('lg')]: {
      width:38 ,
      height: 38,
    },
    [theme.breakpoints.down('md')]: {
      width:27 ,
      height: 27,
    },
  },
  // Red circular remove button.
  minus:{
    cursor: 'pointer',
    color: 'white',
    backgroundColor: 'red',
    borderRadius: '50%',
    [theme.breakpoints.up('xl')]: {
      width:54 ,
      height: 54,
    },
    [theme.breakpoints.down('lg')]: {
      width:38 ,
      height: 38,
    },
    [theme.breakpoints.down('md')]: {
      width:27 ,
      height: 27,
    },
  },
  size: {
    [theme.breakpoints.up('xl')]: {
      width: 214,
      height: 214,
    },
    [theme.breakpoints.down('lg')]: {
      width: 150,
      height: 150,
    },
    [theme.breakpoints.down('md')]: {
      width: 105,
      height: 105,
    },
  },
  // Hidden native file input; the styled img area acts as the trigger.
  input: {
    display: 'none',
  },
  // Fixed overlay that roughly centers the busy indicator.
  div_indicator: {
    width: '100%',
    height: '100%',
    display: 'flex',
    position: 'fixed',
    paddingLeft: '50%',
    alignItems: 'center',
    marginTop: '-60px',
    zIndex: 999,
  },
  indicator: {
    color: 'gray'
  },
  backTitle:{
    cursor: 'pointer',
    [theme.breakpoints.up('xl')]: {
      fontSize: 18,
    },
    [theme.breakpoints.down('lg')]: {
      fontSize: 13,
    },
    [theme.breakpoints.down('md')]: {
      fontSize: 9,
    },
  },
  itemTitle:{
    [theme.breakpoints.up('xl')]: {
      fontSize: 25,
    },
    [theme.breakpoints.down('lg')]: {
      fontSize: 18,
    },
    [theme.breakpoints.down('md')]: {
      fontSize: 13,
    },
  },
  error:{
    color: 'red',
    [theme.breakpoints.up('xl')]: {
      fontSize: 18,
    },
    [theme.breakpoints.down('lg')]: {
      fontSize: 13,
    },
    [theme.breakpoints.down('md')]: {
      fontSize: 9,
    },
  },
  headerTitle:{
    [theme.breakpoints.up('xl')]: {
      fontSize :35
    },
    [theme.breakpoints.down('lg')]: {
      fontSize :25
    },
    [theme.breakpoints.down('md')]: {
      fontSize :18
    },
  },
  // Dashed drop-target look for the owner image preview.
  img: {
    objectFit:'cover',
    cursor: 'pointer',
    alignItems: 'center',
    justifyContent: 'center',
    display: 'flex',
    border: '1px dashed rgba(112,112,112,0.43)',
    borderRadius: 8,
    [theme.breakpoints.up('xl')]: {
      width: 362,
      height: 278,
      marginTop: 30,
      marginRight: 30
    },
    [theme.breakpoints.down('lg')]: {
      width: 253,
      height: 177,
      marginTop: 21,
      marginRight: 21
    },
    [theme.breakpoints.down('md')]: {
      width: 177,
      height: 124,
      marginTop: 15,
      marginRight: 15
    },
  },
  // White circular avatar-edit badge.
  editAvatar:{
    cursor: 'pointer',
    [theme.breakpoints.up('xl')]: {
      width: 54,
      height: 54,
    },
    [theme.breakpoints.down('lg')]: {
      width: 38,
      height: 38,
    },
    [theme.breakpoints.down('md')]: {
      width: 27,
      height: 27,
    },
    backgroundColor: 'white',
    borderRadius: '50%',
    color: 'gray'
  }
}));
export default useStyles;
|
#ifndef __INTFMGR__
#define __INTFMGR__
#include "dbconnector.h"
#include "producerstatetable.h"
#include "orch.h"
#include <map>
#include <string>
#include <set>
/* Cached state for one sub-interface entry.
 * NOTE(review): field semantics inferred from names; confirm against the
 * implementation file. */
struct SubIntfInfo
{
    std::string vlanId;          // VLAN ID string (presumably parsed from the sub-interface name)
    std::string mtu;             // configured MTU
    std::string adminStatus;     // configured admin status
    std::string currAdminStatus; // admin status currently in effect
};

/* Sub-interface name -> cached sub-interface state. */
typedef std::map<std::string, SubIntfInfo> SubIntfMap;
namespace swss {
/* Interface manager daemon class (declarations only; behavior lives in the
 * implementation file).  Consumes interface configuration tables and
 * publishes processed state via a ProducerStateTable. */
class IntfMgr : public Orch
{
public:
    // cfgDb/appDb/stateDb: database connectors; tableNames: tables this
    // Orch subscribes to.
    IntfMgr(DBConnector *cfgDb, DBConnector *appDb, DBConnector *stateDb, const std::vector<std::string> &tableNames);
    using Orch::doTask;
private:
    // Producer for the application interface table.
    ProducerStateTable m_appIntfTableProducer;
    // Config-DB table handles.
    Table m_cfgIntfTable, m_cfgVlanIntfTable, m_cfgLagIntfTable, m_cfgLoopbackIntfTable;
    // State-DB / App-DB table handles.
    Table m_statePortTable, m_stateLagTable, m_stateVlanTable, m_stateVrfTable, m_stateIntfTable, m_appLagTable;
    Table m_neighTable;
    // Cached sub-interface state keyed by sub-interface name.
    SubIntfMap m_subIntfList;
    std::set<std::string> m_loopbackIntfList;
    // Interfaces whose configuration still needs to be replayed (warm restart).
    std::set<std::string> m_pendingReplayIntfList;
    std::set<std::string> m_ipv6LinkLocalModeList;
    std::string mySwitchType;
    void setIntfIp(const std::string &alias, const std::string &opCmd, const IpPrefix &ipPrefix);
    void setIntfVrf(const std::string &alias, const std::string &vrfName);
    void setIntfMac(const std::string &alias, const std::string &macAddr);
    bool setIntfMpls(const std::string &alias, const std::string &mpls);
    bool doIntfGeneralTask(const std::vector<std::string>& keys, std::vector<FieldValueTuple> data, const std::string& op);
    bool doIntfAddrTask(const std::vector<std::string>& keys, const std::vector<FieldValueTuple>& data, const std::string& op);
    // Orch entry point: dispatches queued table updates.
    void doTask(Consumer &consumer);
    void doPortTableTask(const std::string& key, std::vector<FieldValueTuple> data, std::string op);
    bool isIntfStateOk(const std::string &alias);
    bool isIntfCreated(const std::string &alias);
    bool isIntfChangeVrf(const std::string &alias, const std::string &vrfName);
    int getIntfIpCount(const std::string &alias);
    void buildIntfReplayList(void);
    void setWarmReplayDoneState();
    void addLoopbackIntf(const std::string &alias);
    void delLoopbackIntf(const std::string &alias);
    void flushLoopbackIntfs(void);
    std::string getIntfAdminStatus(const std::string &alias);
    std::string getIntfMtu(const std::string &alias);
    void addHostSubIntf(const std::string&intf, const std::string &subIntf, const std::string &vlan);
    std::string setHostSubIntfMtu(const std::string &alias, const std::string &mtu, const std::string &parent_mtu);
    std::string setHostSubIntfAdminStatus(const std::string &alias, const std::string &admin_status, const std::string &parent_admin_status);
    void removeHostSubIntf(const std::string &subIntf);
    void setSubIntfStateOk(const std::string &alias);
    void removeSubIntfState(const std::string &alias);
    void delIpv6LinkLocalNeigh(const std::string &alias);
    bool setIntfProxyArp(const std::string &alias, const std::string &proxy_arp);
    bool setIntfGratArp(const std::string &alias, const std::string &grat_arp);
    void updateSubIntfAdminStatus(const std::string &alias, const std::string &admin);
    void updateSubIntfMtu(const std::string &alias, const std::string &mtu);
    bool enableIpv6Flag(const std::string&);
    // True once warm-restart replay has completed.
    bool m_replayDone {false};
};
}
#endif
|
/*
* Multi2Sim
* Copyright (C) 2012 Rafael Ubal (ubal@ece.neu.edu)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <stdlib.h>
#include <lib/mhandle/mhandle.h>
#include <lib/util/debug.h>
#include "opengl.h"
#include "opengl-buffers.h"
/* Create a render buffer of 'width' x 'height' 32-bit (GLuint) pixels,
 * zero-initialized.  Aborts via fatal() on allocation failure. */
struct x86_opengl_render_buffer_t *x86_opengl_render_buffer_create(int width, int height)
{
	struct x86_opengl_render_buffer_t *rb;

	/* Allocate container */
	rb = calloc(1, sizeof(struct x86_opengl_render_buffer_t));
	if (!rb)
		fatal("%s: out of memory", __FUNCTION__);

	/* Initialize */
	rb->width = width;
	rb->height = height;

	/* Allocate pixel storage.  Bug fix: the original never checked this
	 * allocation, risking a NULL dereference on first use. */
	rb->buffer = calloc(1, width * height * sizeof(GLuint));
	if (!rb->buffer)
		fatal("%s: out of memory", __FUNCTION__);

	/* Return */
	return rb;
}
/* Release a render buffer and its pixel storage.
 * Bug fix: guard against a NULL 'rb', matching the NULL-tolerant style of
 * x86_opengl_render_buffer_clear(); previously this dereferenced NULL. */
void x86_opengl_render_buffer_free(struct x86_opengl_render_buffer_t *rb)
{
	if (!rb)
		return;
	free(rb->buffer);
	free(rb);
}
/* Fill the render buffer with 'clear_value' via memset.  No-op when 'rb'
 * is NULL.
 * NOTE(review): memset writes the value byte-wise; only byte-uniform
 * patterns (e.g. 0 or 0xFF) yield a uniform GLuint pixel value — confirm
 * callers only pass such values. */
void x86_opengl_render_buffer_clear(struct x86_opengl_render_buffer_t *rb, int clear_value)
{
	if(rb)
		memset(rb->buffer, clear_value, rb->width * rb->height * sizeof(GLuint));
}
int x86_opengl_render_buffer_resize(struct x86_opengl_render_buffer_t *rb, int width, int height)
{
/* Invalid size */
if (width < 1 || height < 1)
fatal("%s: invalid size (width = %d, height = %d)\n",
__FUNCTION__, width, height);
/* If same size, just clear it. */
if (rb->width == width && rb->height == height)
{
/* FIXME, currently set value == 1 */
x86_opengl_render_buffer_clear(rb, 0);
return 0;
}
x86_opengl_debug("\tBuffer resized, W x H = %d x %d\n", width, height);
/* Free previous buffer */
if (rb->buffer)
free(rb->buffer);
/* Allocate new buffer */
rb->buffer = calloc(width * height, sizeof(int));
if (!rb->buffer)
fatal("%s: out of memory", __FUNCTION__);
/* Store new size */
rb->width = width;
rb->height = height;
/* Return */
return 0;
}
/* Create a frame buffer of 'width' x 'height' pixels with
 * COLOR_BUFFER_COUNT color attachments plus one depth and one stencil
 * buffer.  Aborts via fatal() on allocation failure (render buffer
 * creation aborts internally on failure as well). */
struct x86_opengl_frame_buffer_t *x86_opengl_frame_buffer_create(int width, int height)
{
	/* Variables */
	int i;
	struct x86_opengl_frame_buffer_t *fb;

	/* Allocate */
	fb = calloc(1, sizeof(struct x86_opengl_frame_buffer_t));
	if(!fb)
		fatal("%s: out of memory", __FUNCTION__);

	/* Initialization */
	fb->width = width;
	fb->height = height;
	for (i = 0; i < COLOR_BUFFER_COUNT; ++i)
	{
		fb->color_buffer[i] = x86_opengl_render_buffer_create(width, height);
	}
	fb->depth_buffer = x86_opengl_render_buffer_create(width, height);
	fb->stencil_buffer = x86_opengl_render_buffer_create(width, height);

	/* Return */
	return fb;
}
/* Release a frame buffer together with all of its attachment buffers
 * (depth, stencil, and every color buffer), then the container itself. */
void x86_opengl_frame_buffer_free(struct x86_opengl_frame_buffer_t *fb)
{
	int i;

	x86_opengl_render_buffer_free(fb->depth_buffer);
	x86_opengl_render_buffer_free(fb->stencil_buffer);
	for (i = 0; i < COLOR_BUFFER_COUNT; i++)
		x86_opengl_render_buffer_free(fb->color_buffer[i]);
	free(fb);
}
/* Clear the buffers selected by 'mask' (GL_COLOR_BUFFER_BIT,
 * GL_DEPTH_BUFFER_BIT, GL_STENCIL_BUFFER_BIT).  An unrecognized mask bit
 * only produces a debug message.  No-op when 'fb' is NULL. */
void x86_opengl_frame_buffer_clear(struct x86_opengl_frame_buffer_t *fb, GLbitfield mask)
{
	int i;
	int clear_value;

	/* Get current set value */
	/* FIXME */
	clear_value = 0;

	/* Bug fix: the old 'if (fb)' had no braces and guarded only the
	 * mask-validity check, so a NULL 'fb' was still dereferenced by the
	 * clear calls below.  Bail out early instead. */
	if (!fb)
		return;

	/* Validate mask */
	if (mask & ~(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT | GL_ACCUM_BUFFER_BIT))
		x86_opengl_debug("\tInvalid mask!\n");

	/* Clear buffers */
	if ((mask & GL_COLOR_BUFFER_BIT) == GL_COLOR_BUFFER_BIT)
	{
		x86_opengl_debug("\tColor buffer cleared to %d\n", clear_value);
		for (i = 0; i < COLOR_BUFFER_COUNT; ++i)
			x86_opengl_render_buffer_clear(fb->color_buffer[i], clear_value);
	}
	if ((mask & GL_DEPTH_BUFFER_BIT) == GL_DEPTH_BUFFER_BIT)
	{
		x86_opengl_debug("\tDepth buffer cleared to %d\n", clear_value);
		x86_opengl_render_buffer_clear(fb->depth_buffer, clear_value);
	}
	if ((mask & GL_STENCIL_BUFFER_BIT) == GL_STENCIL_BUFFER_BIT) {
		x86_opengl_debug("\tStencil buffer cleared to %d\n", clear_value);
		x86_opengl_render_buffer_clear(fb->stencil_buffer, clear_value);
	}
}
/* Resize every attachment of the frame buffer to 'width' x 'height'.
 * If the size is unchanged the buffers are only cleared.  Aborts via
 * fatal() on invalid size.  Returns 0.
 * NOTE(review): unlike x86_opengl_frame_buffer_clear(), 'fb' is not
 * NULL-checked here — confirm callers never pass NULL. */
int x86_opengl_frame_buffer_resize(struct x86_opengl_frame_buffer_t *fb, int width, int height)
{
	int i;

	/* Invalid size */
	if (width < 1 || height < 1)
		fatal("%s: invalid size (width = %d, height = %d)\n",
			__FUNCTION__, width, height);

	/* If same size, just clear it. */
	if (fb->width == width && fb->height == height)
	{
		/* FIXME */
		x86_opengl_frame_buffer_clear(fb, 0);
		return 0;
	}

	/* Resize buffers */
	for (i = 0; i < COLOR_BUFFER_COUNT; ++i)
	{
		x86_opengl_render_buffer_resize(fb->color_buffer[i], width, height);
	}
	x86_opengl_render_buffer_resize(fb->depth_buffer, width, height);
	x86_opengl_render_buffer_resize(fb->stencil_buffer, width, height);

	/* Store new size */
	fb->width = width;
	fb->height = height;

	/* Return */
	return 0;
}
|
# Codeforces-style "long word" abbreviation: words longer than 10
# characters are printed as first letter + interior length + last letter;
# shorter words are printed unchanged.
word_count = int(input())
words = [input() for _ in range(word_count)]
for word in words:
    if len(word) > 10:
        print('%s%d%s' % (word[0], len(word) - 2, word[-1]))
    else:
        print(word)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 26 12:51:14 2018
@author: gregz
"""
import matplotlib
matplotlib.use('agg')
import argparse as ap
import numpy as np
import os.path as op
from astropy.convolution import Gaussian2DKernel
from astropy.io import fits
from astropy.stats import SigmaClip, biweight_midvariance
from distutils.dir_util import mkpath
from fiber_utils import bspline_x0
from input_utils import setup_logging
from photutils import Background2D, SExtractorBackground, detect_sources
from scipy.interpolate import interp1d
from scipy.signal import savgol_filter
from utils import biweight_location
from wave_utils import get_new_wave
from sklearn.gaussian_process.kernels import Matern, WhiteKernel
from sklearn.gaussian_process.kernels import ConstantKernel
from sklearn.gaussian_process import GaussianProcessRegressor
import matplotlib.pyplot as plt
def setup_my_parser(args=None):
    """Parse command-line options for the reduction script.

    Parameters
    ----------
    args : list of str or None
        Argument list; None means sys.argv[1:].

    Returns
    -------
    argparse.Namespace or None
        Parsed arguments with extra derived attributes (log, multiname,
        amps, lims).  Returns None if any of the required options
        (--date, --observation, --exposure_number, --multiname) is
        missing; the error is logged first.
    """
    parser = ap.ArgumentParser(add_help=True)

    parser.add_argument("-d", "--date",
                        help='''Date, e.g., 20170321, YYYYMMDD''',
                        type=str, default=None)

    parser.add_argument("-o", "--observation",
                        help='''Observation number, "00000007" or "7"''',
                        type=str, default=None)

    parser.add_argument("-e", "--exposure_number",
                        help='''Exposure number, 10''',
                        type=int, default=None)

    parser.add_argument("-m", "--multiname",
                        help='''multi* base name leaving off "_LL.fits"''',
                        type=str, default=None)

    parser.add_argument("-r", "--rootdir",
                        help='''Root Directory for Reductions''',
                        type=str,
                        default='/work/03946/hetdex/maverick/red1/reductions')

    parser.add_argument("-op", "--outpath",
                        help='''Outpath for adjusted reductions''',
                        type=str,
                        default='/work/03946/hetdex/maverick/red1/reductions')

    parser.add_argument("-in", "--instrument",
                        help='''Instrument, e.g., virus''',
                        type=str, default='virus')

    parser.add_argument("-a", "--amps",
                        help='''amplifiers to use''',
                        type=str, default='LL, LU, RU, RL')

    parser.add_argument("-wl", "--wave_lims",
                        help='''wavelength limits of the instrume''',
                        type=str, default='3470, 5530')

    parser.add_argument("-rc", "--recalculate_wavelength",
                        help='''recalculate_wavelength''',
                        action="count", default=0)

    args = parser.parse_args(args=args)
    args.log = setup_logging('great_code_ever')

    # All four of these must be provided explicitly; bail out (None) on the
    # first one that is missing.
    attr = ['date', 'observation', 'exposure_number', 'multiname']
    for att in attr:
        if getattr(args, att) is None:
            args.log.error('Please set "--%s" argument.' % att)
            return None

    # Split comma-separated option strings into clean lists.
    args.multiname = [j.replace(' ', '') for j in args.multiname.split(',')]
    args.amps = [j.replace(' ', '') for j in args.amps.split(',')]
    args.lims = [float(j.replace(' ', '')) for j in args.wave_lims.split(',')]
    return args
def build_filename(rootdir, date, instrument, obs, expn, multiname):
    '''
    Construct the full path to a multi* base file for a single exposure:
    rootdir/date/instrument/<instrument><obs:07d>/exp<expn:02d>/instrument/multiname
    '''
    obs_dir = '%s%07d' % (instrument, int(obs))
    exp_dir = 'exp%02d' % expn
    return op.join(rootdir, date, instrument, obs_dir, exp_dir,
                   instrument, multiname)
def set_multi_extensions(outpath, multiname, amps, nfibs, images=[], names=[]):
    """Write updated extensions back into per-amplifier multi* FITS files.

    For each amplifier, the corresponding nfibs-sized slice of every array
    in ``images`` is stored under the matching extension name in ``names``,
    and the file is rewritten under ``outpath``.  Missing amplifier files
    are skipped (best effort).

    Note: the mutable defaults are never mutated here, so they are safe.
    """
    for i, amp in enumerate(amps):
        fn = multiname + ('_%s.fits' % amp)
        try:
            F = fits.open(fn)
        # Bug fix: was a bare 'except:', which also swallowed
        # KeyboardInterrupt/SystemExit.  Only file-access errors should be
        # treated as "this amplifier is absent".
        except OSError:
            continue
        for name, image in zip(names, images):
            # Slice out this amplifier's fibers; '* 1.' forces a copy.
            F[name].data = np.array(image[(i*nfibs):((i+1)*nfibs)] * 1.,
                                    dtype='float32')
        F.writeto(op.join(outpath, op.basename(fn)), overwrite=True)
def get_multi_extensions(multiname, amps):
    """Read per-amplifier multi* FITS files and stack their extensions.

    Returns
    -------
    x, y : 1-D arrays of fiber IFU positions (hstacked over amplifiers)
    spec, wave, twi, trace, ftf : 2-D arrays (vstacked over amplifiers)

    Missing amplifier files are silently skipped; missing optional
    extensions fall back to placeholders (ones for positions, the science
    spectrum for the twilight spectrum).
    """
    x, y, spec, wave, twi, tr, ftf = ([], [], [], [], [], [], [])
    for amp in amps:
        fn = multiname + ('_%s.fits' % amp)
        # Best effort: skip amplifiers whose file cannot be opened.
        try:
            F = fits.open(fn)
        except:
            continue
        # 'ifupos' may be absent; fall back to dummy positions.
        # NOTE(review): the 112 fallback assumes VIRUS fiber count — confirm
        # for other instruments.
        try:
            x.append(F['ifupos'].data[:, 0])
            y.append(F['ifupos'].data[:, 1])
        except:
            x.append(np.ones((112,)))
            y.append(np.ones((112,)))
        spec.append(F['spectrum'].data)
        # Use the science spectrum if no twilight spectrum was stored.
        try:
            twi.append(F['twi_spectrum'].data)
        except:
            twi.append(F['spectrum'].data)
        wave.append(F['wavelength'].data)
        ftf.append(F['fiber_to_fiber'].data)
        # Upper amplifiers are offset by the frame height so traces from all
        # four amplifiers share one coordinate system.
        if amp in ['LU', 'RL']:
            addtr = F[0].data.shape[0]
        else:
            addtr = 0.
        tr.append(F['trace'].data + addtr)
    spec, wave, twi, trace, ftf = [np.array(np.vstack(j), dtype='float64')
                                   for j in [spec, wave, twi, tr, ftf]]
    x, y = [np.array(np.hstack(j), dtype='float64') for j in [x, y]]
    return x, y, spec, wave, twi, trace, ftf
def rectify(wave, spec, lims, fac=2.5, usesel=True):
    """Resample spectra onto a common linear wavelength grid.

    Each spectrum is divided by its per-pixel wavelength step and
    quadratically interpolated onto ``int(D * fac)`` samples spanning
    ``lims``.  Out-of-range samples are filled with -999.  When ``usesel``
    is True, pixels outside (1e-3, 1e4 * median) are excluded; rows with
    10 or fewer usable pixels come back as all zeros.  Handles both 2-D
    (fibers x pixels) and 1-D input.
    """
    if wave.ndim == 2:
        nrows, npix = wave.shape
        rect_wave = np.linspace(lims[0], lims[1], int(npix * fac))
        rect_spec = np.zeros((nrows, len(rect_wave)))
        for row in np.arange(nrows):
            # Local wavelength step, repeated at the first pixel.
            dstep = np.diff(wave[row])
            dstep = np.hstack([dstep[0], dstep])
            if usesel:
                keep = ((spec[row] > 1e-3) *
                        (spec[row] < np.median(spec[row]) * 1e4))
            else:
                keep = np.ones((len(spec[row]),), dtype=bool)
            if keep.sum() > 10:
                interp = interp1d(wave[row][keep], (spec[row] / dstep)[keep],
                                  kind='quadratic', bounds_error=False,
                                  fill_value=-999.)
                rect_spec[row, :] = interp(rect_wave)
            else:
                rect_spec[row, :] = 0.0
    else:
        npix = len(wave)
        rect_wave = np.linspace(lims[0], lims[1], int(npix * fac))
        dstep = np.diff(wave)
        dstep = np.hstack([dstep[0], dstep])
        interp = interp1d(wave, spec / dstep, kind='quadratic',
                          bounds_error=False, fill_value=-999.)
        rect_spec = interp(rect_wave)
    return rect_wave, rect_spec
def fit_bspline(rect_wave, avg, knots=1032):
    """Least-squares fit of a B-spline basis to a masked average spectrum.

    Only unmasked samples of ``avg`` are used in the solve; the returned
    smooth curve is evaluated everywhere and carries avg's mask.
    """
    B, c = bspline_x0(rect_wave, nknots=knots)
    # Solve on unmasked samples only, then evaluate on the full basis.
    smooth = np.dot(c, np.linalg.lstsq(c[~avg.mask, :], avg[~avg.mask])[0])
    return np.ma.array(smooth, mask=avg.mask)
def get_avg_spec(wave, spec, twi, lims, mask=None):
    """Build a normalization-corrected average spectrum over fibers.

    Rectifies both the science and twilight spectra onto a common grid,
    scales each fiber by its twilight throughput relative to the global
    median, averages the good fibers (biweight), and smooths the average
    with a B-spline.

    Returns
    -------
    rect_wave, rect_spec, y, norm, avg, smooth, fac
        Grid, rectified spectra, masked rectified spectra, normalized
        spectra, biweight average, B-spline smooth of the average, and the
        per-fiber throughput factors.
    """
    if mask is None:
        mask = np.ones((wave.shape[0],), dtype=bool)
    rect_wave, rect_spec = rectify(wave, spec, lims)
    rect_wave, rect_twi = rectify(wave, twi, lims)
    # 0. marks "too few pixels"; -999. marks out-of-range fill (see rectify).
    y = np.ma.array(rect_spec, mask=((rect_spec == 0.) + (rect_spec == -999.)))
    t = np.ma.array(rect_twi, mask=((rect_twi == 0.) + (rect_twi == -999.)))
    # Per-fiber twilight throughput relative to the global median.
    fac = np.ma.median(t, axis=1)[:, np.newaxis] / np.ma.median(t)
    norm = y / fac
    avg = biweight_location(norm[mask], axis=(0,))
    smooth = fit_bspline(rect_wave, avg, knots=wave.shape[1])
    return rect_wave, rect_spec, y, norm, avg, smooth, fac
def fit_continuum(wv, sky, sncut=3., skip=1, fil_len=95, func=np.array):
    """Iteratively estimate a smooth continuum under ``sky``.

    Five rounds of: Savitzky-Golay smooth, flag samples deviating more
    than ``sncut`` robust sigmas (deviation transformed by ``func``),
    dilate the flagged set by ``skip`` samples on each side, replace the
    flagged samples with a linear interpolation of the smooth curve, and
    re-smooth.

    Returns
    -------
    sky_sm : ndarray
        Final smoothed continuum.
    mask : bool ndarray
        True where samples were ever rejected as outliers.
    """
    skym_s = 1. * sky
    sky_sm = savgol_filter(skym_s, fil_len, 1)
    allind = np.arange(len(wv), dtype=int)
    mask = np.zeros(sky.shape, dtype=bool)
    for i in np.arange(5):
        # Robust scatter of the residual.
        mad = np.sqrt(biweight_midvariance(sky-sky_sm))
        outlier = func(sky - sky_sm) > sncut * mad
        sel = np.where(outlier)[0]
        # Grow the rejected set by +/- skip samples.
        for j in np.arange(1, skip+1):
            sel = np.union1d(sel, sel + 1)
            sel = np.union1d(sel, sel - 1)
        sel = np.sort(np.unique(sel))
        # Trim the dilation overflow at the array edges.
        sel = sel[skip:-skip]
        good = np.setdiff1d(allind, sel)
        skym_s = 1.*sky
        # Bridge the rejected samples with the current smooth curve.
        I = interp1d(wv[good], sky_sm[good], kind='linear', bounds_error=False,
                     fill_value="extrapolate")
        skym_s[sel] = I(wv[sel])
        sky_sm = savgol_filter(skym_s, fil_len, 1)
        mask[sel] = True
    return sky_sm, mask
def simple_flat_field(X, func=np.abs, fil_len=21, sncut=1.5, skip=2):
    """Median each fiber's spectrum and subtract a per-amplifier continuum.

    The fiber medians are split into four 112-fiber amplifier groups; a
    smooth continuum is fit to each group with fit_continuum().  Returns
    the continuum-subtracted medians and the continuum itself.
    """
    fibers = np.arange(X.shape[0])
    cont = fibers * 0.
    outlier = fibers * 0.
    med = np.ma.median(X, axis=1)
    for amp in np.arange(4):
        lo = amp * 112
        hi = (amp + 1) * 112
        cont[lo:hi], outlier[lo:hi] = fit_continuum(fibers[lo:hi], med[lo:hi],
                                                    fil_len=fil_len, skip=skip,
                                                    func=func, sncut=sncut)
    return med - cont, cont
def make_frame(xloc, yloc, data, scale=1.,
               seeing_fac=1.5):
    """Render fiber values onto a regular grid with Gaussian weighting.

    Each grid cell is a weight-averaged combination of all finite,
    non-negligible fiber values, with Gaussian weights of width
    ``seeing_fac * scale`` and an empirical 1.9 amplitude factor.

    Returns the meshgrid coordinates and the rendered image.
    """
    seeing = seeing_fac * scale
    nfib = len(data)
    xs = np.arange(xloc.min() - scale,
                   xloc.max() + 1 * scale, scale)
    ys = np.arange(yloc.min() - scale,
                   yloc.max() + 1 * scale, scale)
    xgrid, ygrid = np.meshgrid(xs, ys)
    zimage = xgrid * 0.
    dist = np.zeros((nfib,) + xgrid.shape)
    wght = np.zeros((nfib,) + xgrid.shape)
    for col in np.arange(len(xs)):
        for row in np.arange(len(ys)):
            dist[:, row, col] = np.sqrt((xloc - xgrid[row, col])**2 +
                                        (yloc - ygrid[row, col])**2)
            wght[:, row, col] = np.exp(-1. / 2. *
                                       (dist[:, row, col] / seeing)**2)
    # Only fibers with finite, non-tiny values contribute.
    keep = np.where((np.abs(data) > 1e-5) * np.isfinite(data))[0]
    wsum = wght[keep, :, :].sum(axis=0)
    zimage = ((data[keep, np.newaxis, np.newaxis] * wght[keep]).sum(axis=0) /
              wsum * 1.9)
    return xgrid, ygrid, zimage
def get_sex_background(image, filt_size=21, cols=25):
    """Estimate a 2-D background with photutils' SExtractor estimator.

    Uses a (filt_size, cols) box with 3-sigma clipping and honors
    ``image.mask`` (image must be a masked array).
    """
    # NOTE(review): SigmaClip(iters=...) matches an older astropy API;
    # newer releases renamed it 'maxiters' — confirm the pinned versions.
    sigma_clip = SigmaClip(sigma=3., iters=10)
    bkg_estimator = SExtractorBackground()
    bkg = Background2D(image, (filt_size, cols), filter_size=(1, 1),
                       bkg_estimator=bkg_estimator, sigma_clip=sigma_clip,
                       mask=image.mask, exclude_percentile=100)
    return bkg.background
def setup_GP():
    """Build the Gaussian-process regressor used for fiber smoothing.

    Kernel: constant + Matern(nu=3/2) + white noise; one L-BFGS-B
    hyperparameter optimization pass, no restarts.
    """
    kernel = (ConstantKernel() + Matern(length_scale=2, nu=3/2) +
              WhiteKernel(noise_level=1.))
    G = GaussianProcessRegressor(alpha=1e-10, copy_X_train=True, kernel=kernel,
                                 n_restarts_optimizer=0, normalize_y=False,
                                 optimizer='fmin_l_bfgs_b', random_state=None)
    return G
def fit_GP(wave, spec, mask):
    """Fit a Gaussian process to spec(wave) on the samples where ``mask``
    selects, and return the prediction over all of ``wave`` plus the
    fitted regressor."""
    G = setup_GP()
    G.fit(wave[mask, np.newaxis], spec[mask])
    return G.predict(wave[:, np.newaxis]), G
def safe_division(num, denom, eps=1e-8, fillval=0.0):
    """Element-wise num / denom that never divides by ~0 or non-finite.

    Positions where ``denom`` is non-finite or within ``eps`` of zero get
    ``fillval``.  If ``num`` has one more dimension than ``denom``, the
    denominator is applied along the last axis of ``num``.
    """
    valid = np.isfinite(denom) * (np.abs(denom) > eps)
    result = num * 0.
    if num.ndim == denom.ndim:
        result[valid] = num[valid] / denom[valid]
        result[~valid] = fillval
    else:
        result[:, valid] = num[:, valid] / denom[valid]
        result[:, ~valid] = fillval
    return result
def smooth_fiber(X, mask, nfibs, wave_sel=None):
    """Smooth per-fiber median values across fibers with a GP, per amplifier.

    Parameters
    ----------
    X : masked 2-D array (fibers x samples); wave_sel columns get masked.
    mask : fiber indices to exclude (set to NaN before fitting).
    nfibs : fibers per amplifier; four consecutive groups are fit
        independently.
    wave_sel : optional column indices to mask out first.

    Returns the GP prediction for every fiber.
    """
    if wave_sel is not None:
        X.mask[:, wave_sel] = True
    z = np.ma.median(X, axis=1)
    # Excluded fibers become NaN and are dropped from the fit below.
    z.data[mask] = np.nan
    z = np.array(z)
    x = np.arange(len(z))
    model = z * 0.
    for i in np.arange(4):
        xl = i * nfibs
        xh = (i + 1) * nfibs
        sel = np.isfinite(z[xl:xh])
        G = setup_GP()
        G.fit(x[xl:xh][sel, np.newaxis], z[xl:xh][sel])
        model[xl:xh] = G.predict(x[xl:xh, np.newaxis])
    return model
def make_plot(zimage, xgrid, ygrid, xpos, ypos, good_mask, opath):
    """Save a diagnostic image of the rendered frame to opath/image.png.

    Good fibers are marked with green crosses, rejected fibers with red.
    """
    fig = plt.figure(figsize=(6, 6))
    plt.imshow(zimage, origin='lower', interpolation='none', vmin=-15,
               vmax=25, cmap=plt.get_cmap('gray_r'),
               extent=[xgrid.min(), xgrid.max(), ygrid.min(), ygrid.max()])
    plt.scatter(xpos[good_mask], ypos[good_mask], marker='x', color='g', s=90)
    plt.scatter(xpos[~good_mask], ypos[~good_mask], marker='x', color='r',
                s=90)
    fig.savefig(op.join(opath, 'image.png'))
def mask_sources(xgrid, ygrid, xpos, ypos, zimage, sncut=2.0):
    """Find fibers contaminated by detected sources in the rendered image.

    Detects sources above a robust ``sncut``-sigma threshold (Gaussian
    smoothing kernel, >= 8 connected pixels) and maps each detected pixel
    to its nearest fiber.

    Returns the unique indices of fibers covering detected pixels.
    """
    threshold = (biweight_location(zimage) +
                 sncut * np.sqrt(biweight_midvariance(zimage)))
    kernel = Gaussian2DKernel(2, x_size=5, y_size=5)
    kernel.normalize()
    segm = detect_sources(zimage, threshold, npixels=8, filter_kernel=kernel)
    # Nearest-fiber index for every grid pixel.
    dist = np.sqrt((xgrid - xpos[:, np.newaxis, np.newaxis])**2 +
                   (ygrid - ypos[:, np.newaxis, np.newaxis])**2)
    fiberloc = np.argmin(dist, axis=0)
    return np.unique(fiberloc[segm.array > 0])
def make_avg_spec(wave, spec, binsize=35, knots=None):
    """Build a binned, B-spline-smoothed average spectrum over all fibers.

    All (wavelength, value) samples are sorted by wavelength, split into
    chunks of roughly ``binsize`` samples, and reduced to (mean wavelength,
    biweight value) pairs; a B-spline is then least-squares fit to those
    pairs.

    Returns the binned wavelengths and the smooth spectrum evaluated at
    them.
    """
    if knots is None:
        knots = wave.shape[1]
    ind = np.argsort(wave.ravel())
    N, D = wave.shape
    # Fix: np.array_split expects an integer section count; the old code
    # passed the float N * D / binsize (py2-era '/'), relying on implicit
    # truncation.  Floor division gives the identical count explicitly.
    nsections = (N * D) // binsize
    wchunks = np.array_split(wave.ravel()[ind], nsections)
    schunks = np.array_split(spec.ravel()[ind], nsections)
    nwave = np.array([np.mean(chunk) for chunk in wchunks])
    B, c = bspline_x0(nwave, nknots=knots)
    nspec = np.array([biweight_location(chunk) for chunk in schunks])
    sol = np.linalg.lstsq(c, nspec)[0]
    smooth = np.dot(c, sol)
    return nwave, smooth
def get_sky_residuals(wave, spec, ftf, good_mask):
    """Build a sky model from the good fibers and subtract it everywhere.

    The flat-fielded spectra of ``good_mask`` fibers are combined into an
    average sky (make_avg_spec), interpolated onto each fiber's wavelength
    solution, and rescaled by that fiber's fiber-to-fiber value.

    Returns
    -------
    skysub_new, sky_new, model_new : per-fiber sky-subtracted spectra,
        sky spectra, and the unscaled sky model.
    """
    skysub_new = wave * 0.
    sky_new = wave * 0.
    model_new = wave * 0.
    nwave, smooth = make_avg_spec(wave[good_mask],
                                  safe_division(spec[good_mask],
                                                ftf[good_mask]))
    I = interp1d(nwave, smooth, kind='quadratic', bounds_error=False,
                 fill_value='extrapolate')
    for i in np.arange(wave.shape[0]):
        sky_new[i] = I(wave[i]) * ftf[i]
        skysub_new[i] = spec[i] - sky_new[i]
        model_new[i] = I(wave[i])
    return skysub_new, sky_new, model_new
def get_twi_ftf(wave, twi):
    """Derive fiber-to-fiber throughput from the twilight spectra.

    Starts from each fiber's biweight level relative to the global level,
    then refines twice: build an average twilight spectrum from the current
    correction, take each fiber's ratio to it, and smooth that ratio with a
    coarse B-spline (make_avg_spec with 17 knots).

    Returns the per-fiber, per-pixel fiber-to-fiber array.
    """
    ftf_twi = wave * 0.
    # Mask unilluminated / invalid twilight pixels.
    T = np.ma.array(twi, mask=((twi < 1) + np.isnan(twi)))
    fac = biweight_location(T, axis=(1,))[:, np.newaxis] / biweight_location(T)
    # Initial guess: flat per-fiber scale across all pixels.
    ftf_twi = fac * np.ones((twi.shape[1],))
    # Bug fix: the outer loop previously also used 'i', which the inner
    # fiber loop clobbered — confusing shadowing (behavior preserved by
    # renaming only).
    for iteration in np.arange(2):
        nwave, smooth = make_avg_spec(wave, twi / ftf_twi)
        I = interp1d(nwave, smooth, kind='quadratic', bounds_error=False,
                     fill_value='extrapolate')
        for i in np.arange(wave.shape[0]):
            ftf_twi[i] = twi[i] / I(wave[i])
            # Coarse smoothing of the per-fiber ratio.
            nw, sm = make_avg_spec(wave[i, np.newaxis], ftf_twi[i, np.newaxis],
                                   binsize=25, knots=17)
            J = interp1d(nw, sm, kind='quadratic', bounds_error=False,
                         fill_value='extrapolate')
            ftf_twi[i] = J(wave[i])
    return ftf_twi
# ---------------------------------------------------------------------------
# Main reduction sequence (runs at import time).
# ---------------------------------------------------------------------------
args = setup_my_parser(args=None)
#args = setup_my_parser(args=['-m', 'multi_307_074_076', '-d', '20180624',
#                          '-o', '8', '-e', '1', '-rc', '-r',
#                          '/Users/gregz/cure/panacea/work/03946/hetdex/maverick/red1/reductions',
#                          '-op', '/Users/gregz/cure/reductions'])
#
# Fibers per amplifier depends on the instrument.
if args.instrument == 'virus':
    args.nfibs = 112
if args.instrument == 'lrs2':
    args.nfibs = 140
# [center, half-width] windows (A) of strong sky lines to exclude later.
wave_list = [[3550., 10.], [3735., 10.], [3831., 5.], [3911., 8.],
             [4358, 5.], [4862., 5.], [5085., 5.], [5199., 5.], [5460., 5.]]
for multi in args.multiname:
    args.log.info('Grabbing info for %s' % multi)
    multipath = build_filename(args.rootdir, args.date, args.instrument,
                               args.observation, args.exposure_number, multi)
    xpos, ypos, spec, wave, twi, trace, ftf = get_multi_extensions(multipath,
                                                                   args.amps)
    outpath = build_filename(args.outpath, args.date, args.instrument,
                             args.observation, args.exposure_number, multi)
    outpath = op.dirname(outpath)
    mkpath(outpath)
    # Average spectra for science and twilight frames on a common grid.
    args.log.info('Getting average specrum for %s' % multi)
    returned_list = get_avg_spec(wave, spec, twi, args.lims)
    rect_wave, rect_spec, y, norm, avg, smooth, fac = returned_list
    returned_list = get_avg_spec(wave, twi, twi, args.lims)
    rect_wave, rect_twi, y_twi, norm_twi, avg_twi, smooth_twi, fac = returned_list
    # Flat-field residual image used to locate fibers hit by real sources.
    X = rect_spec / rect_twi * smooth_twi / smooth
    flat_field, cont = simple_flat_field(X)
    xgrid, ygrid, zimage = make_frame(xpos, ypos, flat_field)
    mask = mask_sources(xgrid, ygrid, xpos, ypos, zimage)
    good = np.setdiff1d(np.arange(X.shape[0], dtype=int), mask)
    good_mask = np.zeros((X.shape[0],))
    good_mask[good] = 1.
    good_mask = np.array(good_mask, dtype=bool)
    # First fiber-to-fiber estimate (two halves of the detector).
    ftf = wave * 0.
    for i in np.arange(2):
        xl = i * args.nfibs * 2
        xh = (i+1) * args.nfibs * 2
        args.log.info('Building fiber to fiber for fibers: %03d - %03d' %
                      (xl, xh))
        ftf[xl:xh] = get_twi_ftf(wave[xl:xh], twi[xl:xh])
        ftf[ftf < 0.] = 0.
    # Optional wavelength-solution refinement.
    if args.recalculate_wavelength:
        newwave = wave * 0.
        for i in np.arange(2):
            xl = i * args.nfibs * 2
            xh = (i+1) * args.nfibs * 2
            nwave, ntwi = make_avg_spec(wave[xl:xh],
                                        safe_division(twi, ftf)[xl:xh])
            args.log.info('Working on the wavelength for fibers: %03d - %03d' %
                          (xl, xh))
            newwave[xl:xh] = get_new_wave(wave[xl:xh], trace[xl:xh],
                                          twi[xl:xh], ftf[xl:xh],
                                          good_mask[xl:xh], nwave, ntwi)
        wave0 = wave * 1.
        wave = newwave * 1.
        args.log.info('Max Wave Correction: %0.2f A' % np.max(newwave-wave0))
        args.log.info('Min Wave Correction: %0.2f A' % np.min(newwave-wave0))
    # Indices of rectified-grid samples away from the strong sky lines.
    # NOTE(review): wave_sel is computed but not passed to smooth_fiber
    # below — confirm whether that was intended.
    wave_sel = []
    for wl in wave_list:
        wave_sel.append(np.where(np.abs(rect_wave - wl[0]) < wl[1])[0])
    wave_sel = np.array(np.setdiff1d(np.arange(len(rect_wave)),
                                     np.hstack(wave_sel)), dtype=int)
    # Second fiber-to-fiber pass, now tracking sky-subtraction residuals.
    args.log.info('Building fiber to fiber again')
    ftf = wave * 0.
    Y = wave * 0.
    for i in np.arange(2):
        xl = i * args.nfibs * 2
        xh = (i + 1) * args.nfibs * 2
        ftf[xl:xh] = get_twi_ftf(wave[xl:xh], twi[xl:xh])
        ftf[ftf < 0.] = 0.
        skysub, sky, model = get_sky_residuals(wave[xl:xh], spec[xl:xh],
                                               ftf[xl:xh], good_mask[xl:xh])
        Y[xl:xh] = skysub / model
    # Residual-based correction to the fiber-to-fiber solution.
    args.log.info('Building fiber to fiber for the last time')
    cont = smooth_fiber(Y, mask, args.nfibs)[:, np.newaxis]
    ftf = ftf + cont
    skysub = wave * 0.
    sky = wave * 0.
    for i in np.arange(2):
        xl = i * args.nfibs * 2
        xh = (i + 1) * args.nfibs * 2
        skysub[xl:xh], sky[xl:xh], model = get_sky_residuals(wave[xl:xh],
                                                             spec[xl:xh],
                                                             ftf[xl:xh],
                                                             good_mask[xl:xh])
        Y[xl:xh] = skysub[xl:xh] / model
    # Re-detect sources on the sky-subtracted frame and rebuild good_mask.
    rect_wave, rect_skysub = rectify(wave, skysub, args.lims, usesel=False)
    Z = np.ma.array(rect_skysub, mask=(rect_skysub == -999.))
    ZZ = biweight_location(Z, axis=(1,))
    xgrid, ygrid, zimage = make_frame(xpos, ypos, ZZ)
    mask = mask_sources(xgrid, ygrid, xpos, ypos, zimage)
    good = np.setdiff1d(np.arange(X.shape[0], dtype=int), mask)
    good_mask = np.zeros((X.shape[0],))
    good_mask[good] = 1.
    good_mask = np.array(good_mask, dtype=bool)
    # Mask source fibers and large residuals before background fitting.
    Y = np.ma.array(Y, mask=np.zeros(Y.shape, dtype=bool))
    Y.mask[mask] = True
    Y.mask[np.ma.abs(Y) > 0.25] = True
    S = np.ma.array(skysub, mask=Y.mask)
    args.log.info('Fitting background to sky subtracted image.')
    back = get_sex_background(S, 11, 121)
    args.log.info('Avg Back Model: %0.2f counts' % biweight_location(back))
    args.log.info('Max Back Model: %0.2f counts' % np.max(back))
    args.log.info('Min Back Model: %0.2f counts' % np.min(back))
    skysub = skysub - back
    # Diagnostics and final write-out of the adjusted extensions.
    make_plot(zimage, xgrid, ygrid, xpos, ypos, good_mask, outpath)
    set_multi_extensions(outpath, multipath, args.amps, args.nfibs,
                         images=[ftf, sky, skysub, wave],
                         names=['fiber_to_fiber', 'sky_spectrum',
                                'sky_subtracted', 'wavelength'])
def main():
    """Entry-point placeholder; the processing above runs at import time."""
    pass


# Bug fix: this read 'if __name__ == main():', which CALLED main()
# unconditionally and compared __name__ to its None return value — the
# guard could never be true.  Use the standard idiom instead.
if __name__ == '__main__':
    main()
|
/**
* \file
*
* Copyright (c) 2014 Atmel Corporation. All rights reserved.
*
* \asf_license_start
*
* \page License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The name of Atmel may not be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* 4. This software may only be redistributed and used in connection with an
* Atmel microcontroller product.
*
* THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
* EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* \asf_license_stop
*
*/
/**
* Support and FAQ: visit <a href="http://www.atmel.com/design-support/">Atmel Support</a>
*/
#ifndef _SAMG54_USART_COMPONENT_
#define _SAMG54_USART_COMPONENT_
/* ============================================================================= */
/** SOFTWARE API DEFINITION FOR Universal Synchronous Asynchronous Receiver Transmitter */
/* ============================================================================= */
/** \addtogroup SAMG54_USART Universal Synchronous Asynchronous Receiver Transmitter */
/*@{*/
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
/** \brief Usart hardware registers */
typedef struct {
__O uint32_t US_CR; /**< \brief (Usart Offset: 0x0000) Control Register */
__IO uint32_t US_MR; /**< \brief (Usart Offset: 0x0004) Mode Register */
__O uint32_t US_IER; /**< \brief (Usart Offset: 0x0008) Interrupt Enable Register */
__O uint32_t US_IDR; /**< \brief (Usart Offset: 0x000C) Interrupt Disable Register */
__I uint32_t US_IMR; /**< \brief (Usart Offset: 0x0010) Interrupt Mask Register */
__I uint32_t US_CSR; /**< \brief (Usart Offset: 0x0014) Channel Status Register */
__I uint32_t US_RHR; /**< \brief (Usart Offset: 0x0018) Receive Holding Register */
__O uint32_t US_THR; /**< \brief (Usart Offset: 0x001C) Transmit Holding Register */
__IO uint32_t US_BRGR; /**< \brief (Usart Offset: 0x0020) Baud Rate Generator Register */
__IO uint32_t US_RTOR; /**< \brief (Usart Offset: 0x0024) Receiver Time-out Register */
__IO uint32_t US_TTGR; /**< \brief (Usart Offset: 0x0028) Transmitter Timeguard Register */
__I uint32_t Reserved1[5];
__IO uint32_t US_FIDI; /**< \brief (Usart Offset: 0x0040) FI DI Ratio Register */
__I uint32_t US_NER; /**< \brief (Usart Offset: 0x0044) Number of Errors Register */
__I uint32_t Reserved2[1];
__IO uint32_t US_IF; /**< \brief (Usart Offset: 0x004C) IrDA Filter Register */
__I uint32_t Reserved3[37];
__IO uint32_t US_WPMR; /**< \brief (Usart Offset: 0x00E4) Write Protection Mode Register */
__I uint32_t US_WPSR; /**< \brief (Usart Offset: 0x00E8) Write Protection Status Register */
__I uint32_t Reserved4[5];
__IO uint32_t US_RPR; /**< \brief (Usart Offset: 0x100) Receive Pointer Register */
__IO uint32_t US_RCR; /**< \brief (Usart Offset: 0x104) Receive Counter Register */
__IO uint32_t US_TPR; /**< \brief (Usart Offset: 0x108) Transmit Pointer Register */
__IO uint32_t US_TCR; /**< \brief (Usart Offset: 0x10C) Transmit Counter Register */
__IO uint32_t US_RNPR; /**< \brief (Usart Offset: 0x110) Receive Next Pointer Register */
__IO uint32_t US_RNCR; /**< \brief (Usart Offset: 0x114) Receive Next Counter Register */
__IO uint32_t US_TNPR; /**< \brief (Usart Offset: 0x118) Transmit Next Pointer Register */
__IO uint32_t US_TNCR; /**< \brief (Usart Offset: 0x11C) Transmit Next Counter Register */
__O uint32_t US_PTCR; /**< \brief (Usart Offset: 0x120) Transfer Control Register */
__I uint32_t US_PTSR; /**< \brief (Usart Offset: 0x124) Transfer Status Register */
} Usart;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
/* -------- US_CR : (USART Offset: 0x0000) Control Register -------- */
#define US_CR_RSTRX (0x1u << 2) /**< \brief (US_CR) Reset Receiver */
#define US_CR_RSTTX (0x1u << 3) /**< \brief (US_CR) Reset Transmitter */
#define US_CR_RXEN (0x1u << 4) /**< \brief (US_CR) Receiver Enable */
#define US_CR_RXDIS (0x1u << 5) /**< \brief (US_CR) Receiver Disable */
#define US_CR_TXEN (0x1u << 6) /**< \brief (US_CR) Transmitter Enable */
#define US_CR_TXDIS (0x1u << 7) /**< \brief (US_CR) Transmitter Disable */
#define US_CR_RSTSTA (0x1u << 8) /**< \brief (US_CR) Reset Status Bits */
#define US_CR_STTBRK (0x1u << 9) /**< \brief (US_CR) Start Break */
#define US_CR_STPBRK (0x1u << 10) /**< \brief (US_CR) Stop Break */
#define US_CR_STTTO (0x1u << 11) /**< \brief (US_CR) Start Time-out */
#define US_CR_SENDA (0x1u << 12) /**< \brief (US_CR) Send Address */
#define US_CR_RSTIT (0x1u << 13) /**< \brief (US_CR) Reset Iterations */
#define US_CR_RSTNACK (0x1u << 14) /**< \brief (US_CR) Reset Non Acknowledge */
#define US_CR_RETTO (0x1u << 15) /**< \brief (US_CR) Rearm Time-out */
#define US_CR_RTSEN (0x1u << 18) /**< \brief (US_CR) Request to Send Enable */
#define US_CR_RTSDIS (0x1u << 19) /**< \brief (US_CR) Request to Send Disable */
#define US_CR_FCS (0x1u << 18) /**< \brief (US_CR) Force SPI Chip Select */
#define US_CR_RCS (0x1u << 19) /**< \brief (US_CR) Release SPI Chip Select */
/* -------- US_MR : (USART Offset: 0x0004) Mode Register -------- */
#define US_MR_USART_MODE_Pos 0
#define US_MR_USART_MODE_Msk (0xfu << US_MR_USART_MODE_Pos) /**< \brief (US_MR) USART Mode of Operation */
#define US_MR_USART_MODE_NORMAL (0x0u << 0) /**< \brief (US_MR) Normal mode */
#define US_MR_USART_MODE_RS485 (0x1u << 0) /**< \brief (US_MR) RS485 */
#define US_MR_USART_MODE_HW_HANDSHAKING (0x2u << 0) /**< \brief (US_MR) Hardware Handshaking */
#define US_MR_USART_MODE_IS07816_T_0 (0x4u << 0) /**< \brief (US_MR) IS07816 Protocol: T = 0 */
#define US_MR_USART_MODE_IS07816_T_1 (0x6u << 0) /**< \brief (US_MR) IS07816 Protocol: T = 1 */
#define US_MR_USART_MODE_IRDA (0x8u << 0) /**< \brief (US_MR) IrDA */
#define US_MR_USART_MODE_SPI_MASTER (0xEu << 0) /**< \brief (US_MR) SPI master */
#define US_MR_USART_MODE_SPI_SLAVE (0xFu << 0) /**< \brief (US_MR) SPI Slave */
#define US_MR_USCLKS_Pos 4
#define US_MR_USCLKS_Msk (0x3u << US_MR_USCLKS_Pos) /**< \brief (US_MR) Clock Selection */
#define US_MR_USCLKS_MCK (0x0u << 4) /**< \brief (US_MR) Peripheral clock is selected */
#define US_MR_USCLKS_DIV (0x1u << 4) /**< \brief (US_MR) Peripheral clock divided (DIV=8) is selected */
#define US_MR_USCLKS_SCK (0x3u << 4) /**< \brief (US_MR) Serial clock SCK is selected */
#define US_MR_CHRL_Pos 6
#define US_MR_CHRL_Msk (0x3u << US_MR_CHRL_Pos) /**< \brief (US_MR) Character Length */
#define US_MR_CHRL_5_BIT (0x0u << 6) /**< \brief (US_MR) Character length is 5 bits */
#define US_MR_CHRL_6_BIT (0x1u << 6) /**< \brief (US_MR) Character length is 6 bits */
#define US_MR_CHRL_7_BIT (0x2u << 6) /**< \brief (US_MR) Character length is 7 bits */
#define US_MR_CHRL_8_BIT (0x3u << 6) /**< \brief (US_MR) Character length is 8 bits */
#define US_MR_SYNC (0x1u << 8) /**< \brief (US_MR) Synchronous Mode Select */
#define US_MR_PAR_Pos 9
#define US_MR_PAR_Msk (0x7u << US_MR_PAR_Pos) /**< \brief (US_MR) Parity Type */
#define US_MR_PAR_EVEN (0x0u << 9) /**< \brief (US_MR) Even parity */
#define US_MR_PAR_ODD (0x1u << 9) /**< \brief (US_MR) Odd parity */
#define US_MR_PAR_SPACE (0x2u << 9) /**< \brief (US_MR) Parity forced to 0 (Space) */
#define US_MR_PAR_MARK (0x3u << 9) /**< \brief (US_MR) Parity forced to 1 (Mark) */
#define US_MR_PAR_NO (0x4u << 9) /**< \brief (US_MR) No parity */
#define US_MR_PAR_MULTIDROP (0x6u << 9) /**< \brief (US_MR) Multidrop mode */
#define US_MR_NBSTOP_Pos 12
#define US_MR_NBSTOP_Msk (0x3u << US_MR_NBSTOP_Pos) /**< \brief (US_MR) Number of Stop Bits */
#define US_MR_NBSTOP_1_BIT (0x0u << 12) /**< \brief (US_MR) 1 stop bit */
#define US_MR_NBSTOP_1_5_BIT (0x1u << 12) /**< \brief (US_MR) 1.5 stop bit (SYNC = 0) or reserved (SYNC = 1) */
#define US_MR_NBSTOP_2_BIT (0x2u << 12) /**< \brief (US_MR) 2 stop bits */
#define US_MR_CHMODE_Pos 14
#define US_MR_CHMODE_Msk (0x3u << US_MR_CHMODE_Pos) /**< \brief (US_MR) Channel Mode */
#define US_MR_CHMODE_NORMAL (0x0u << 14) /**< \brief (US_MR) Normal mode */
#define US_MR_CHMODE_AUTOMATIC (0x1u << 14) /**< \brief (US_MR) Automatic Echo. Receiver input is connected to the TXD pin. */
#define US_MR_CHMODE_LOCAL_LOOPBACK (0x2u << 14) /**< \brief (US_MR) Local Loopback. Transmitter output is connected to the Receiver Input. */
#define US_MR_CHMODE_REMOTE_LOOPBACK (0x3u << 14) /**< \brief (US_MR) Remote Loopback. RXD pin is internally connected to the TXD pin. */
#define US_MR_MSBF (0x1u << 16) /**< \brief (US_MR) Bit Order */
#define US_MR_MODE9 (0x1u << 17) /**< \brief (US_MR) 9-bit Character Length */
#define US_MR_CLKO (0x1u << 18) /**< \brief (US_MR) Clock Output Select */
#define US_MR_OVER (0x1u << 19) /**< \brief (US_MR) Oversampling Mode */
#define US_MR_INACK (0x1u << 20) /**< \brief (US_MR) Inhibit Non Acknowledge */
#define US_MR_DSNACK (0x1u << 21) /**< \brief (US_MR) Disable Successive NACK */
#define US_MR_INVDATA (0x1u << 23) /**< \brief (US_MR) Inverted Data */
#define US_MR_MAX_ITERATION_Pos 24
#define US_MR_MAX_ITERATION_Msk (0x7u << US_MR_MAX_ITERATION_Pos) /**< \brief (US_MR) Maximum Number of Automatic Iteration */
#define US_MR_MAX_ITERATION(value) ((US_MR_MAX_ITERATION_Msk & ((value) << US_MR_MAX_ITERATION_Pos)))
#define US_MR_FILTER (0x1u << 28) /**< \brief (US_MR) Receive Line Filter */
#define US_MR_CPHA (0x1u << 8) /**< \brief (US_MR) SPI Clock Phase */
#define US_MR_CPOL (0x1u << 16) /**< \brief (US_MR) SPI Clock Polarity */
#define US_MR_WRDBT (0x1u << 20) /**< \brief (US_MR) Wait Read Data Before Transfer */
/* -------- US_IER : (USART Offset: 0x0008) Interrupt Enable Register -------- */
#define US_IER_RXRDY (0x1u << 0) /**< \brief (US_IER) RXRDY Interrupt Enable */
#define US_IER_TXRDY (0x1u << 1) /**< \brief (US_IER) TXRDY Interrupt Enable */
#define US_IER_RXBRK (0x1u << 2) /**< \brief (US_IER) Receiver Break Interrupt Enable */
#define US_IER_ENDRX (0x1u << 3) /**< \brief (US_IER) End of Receive Transfer Interrupt Enable (available in all USART modes of operation) */
#define US_IER_ENDTX (0x1u << 4) /**< \brief (US_IER) End of Transmit Interrupt Enable (available in all USART modes of operation) */
#define US_IER_OVRE (0x1u << 5) /**< \brief (US_IER) Overrun Error Interrupt Enable */
#define US_IER_FRAME (0x1u << 6) /**< \brief (US_IER) Framing Error Interrupt Enable */
#define US_IER_PARE (0x1u << 7) /**< \brief (US_IER) Parity Error Interrupt Enable */
#define US_IER_TIMEOUT (0x1u << 8) /**< \brief (US_IER) Time-out Interrupt Enable */
#define US_IER_TXEMPTY (0x1u << 9) /**< \brief (US_IER) TXEMPTY Interrupt Enable */
#define US_IER_ITER (0x1u << 10) /**< \brief (US_IER) Max number of Repetitions Reached Interrupt Enable */
#define US_IER_TXBUFE (0x1u << 11) /**< \brief (US_IER) Buffer Empty Interrupt Enable (available in all USART modes of operation) */
#define US_IER_RXBUFF (0x1u << 12) /**< \brief (US_IER) Buffer Full Interrupt Enable (available in all USART modes of operation) */
#define US_IER_NACK (0x1u << 13) /**< \brief (US_IER) Non Acknowledge Interrupt Enable */
#define US_IER_CTSIC (0x1u << 19) /**< \brief (US_IER) Clear to Send Input Change Interrupt Enable */
#define US_IER_UNRE (0x1u << 10) /**< \brief (US_IER) SPI Underrun Error Interrupt Enable */
/* -------- US_IDR : (USART Offset: 0x000C) Interrupt Disable Register -------- */
#define US_IDR_RXRDY (0x1u << 0) /**< \brief (US_IDR) RXRDY Interrupt Disable */
#define US_IDR_TXRDY (0x1u << 1) /**< \brief (US_IDR) TXRDY Interrupt Disable */
#define US_IDR_RXBRK (0x1u << 2) /**< \brief (US_IDR) Receiver Break Interrupt Disable */
#define US_IDR_ENDRX (0x1u << 3) /**< \brief (US_IDR) End of Receive Transfer Interrupt Disable (available in all USART modes of operation) */
#define US_IDR_ENDTX (0x1u << 4) /**< \brief (US_IDR) End of Transmit Interrupt Disable (available in all USART modes of operation) */
#define US_IDR_OVRE (0x1u << 5) /**< \brief (US_IDR) Overrun Error Interrupt Disable */
#define US_IDR_FRAME (0x1u << 6) /**< \brief (US_IDR) Framing Error Interrupt Disable */
#define US_IDR_PARE (0x1u << 7) /**< \brief (US_IDR) Parity Error Interrupt Disable */
#define US_IDR_TIMEOUT (0x1u << 8) /**< \brief (US_IDR) Time-out Interrupt Disable */
#define US_IDR_TXEMPTY (0x1u << 9) /**< \brief (US_IDR) TXEMPTY Interrupt Disable */
#define US_IDR_ITER (0x1u << 10) /**< \brief (US_IDR) Max Number of Repetitions Reached Interrupt Disable */
#define US_IDR_TXBUFE (0x1u << 11) /**< \brief (US_IDR) Buffer Empty Interrupt Disable (available in all USART modes of operation) */
#define US_IDR_RXBUFF (0x1u << 12) /**< \brief (US_IDR) Buffer Full Interrupt Disable (available in all USART modes of operation) */
#define US_IDR_NACK (0x1u << 13) /**< \brief (US_IDR) Non Acknowledge Interrupt Disable */
#define US_IDR_CTSIC (0x1u << 19) /**< \brief (US_IDR) Clear to Send Input Change Interrupt Disable */
#define US_IDR_UNRE (0x1u << 10) /**< \brief (US_IDR) SPI Underrun Error Interrupt Disable */
/* -------- US_IMR : (USART Offset: 0x0010) Interrupt Mask Register -------- */
#define US_IMR_RXRDY (0x1u << 0) /**< \brief (US_IMR) RXRDY Interrupt Mask */
#define US_IMR_TXRDY (0x1u << 1) /**< \brief (US_IMR) TXRDY Interrupt Mask */
#define US_IMR_RXBRK (0x1u << 2) /**< \brief (US_IMR) Receiver Break Interrupt Mask */
#define US_IMR_ENDRX (0x1u << 3) /**< \brief (US_IMR) End of Receive Transfer Interrupt Mask (available in all USART modes of operation) */
#define US_IMR_ENDTX (0x1u << 4) /**< \brief (US_IMR) End of Transmit Interrupt Mask (available in all USART modes of operation) */
#define US_IMR_OVRE (0x1u << 5) /**< \brief (US_IMR) Overrun Error Interrupt Mask */
#define US_IMR_FRAME (0x1u << 6) /**< \brief (US_IMR) Framing Error Interrupt Mask */
#define US_IMR_PARE (0x1u << 7) /**< \brief (US_IMR) Parity Error Interrupt Mask */
#define US_IMR_TIMEOUT (0x1u << 8) /**< \brief (US_IMR) Time-out Interrupt Mask */
#define US_IMR_TXEMPTY (0x1u << 9) /**< \brief (US_IMR) TXEMPTY Interrupt Mask */
#define US_IMR_ITER (0x1u << 10) /**< \brief (US_IMR) Max Number of Repetitions Reached Interrupt Mask */
#define US_IMR_TXBUFE (0x1u << 11) /**< \brief (US_IMR) Buffer Empty Interrupt Mask (available in all USART modes of operation) */
#define US_IMR_RXBUFF (0x1u << 12) /**< \brief (US_IMR) Buffer Full Interrupt Mask (available in all USART modes of operation) */
#define US_IMR_NACK (0x1u << 13) /**< \brief (US_IMR) Non Acknowledge Interrupt Mask */
#define US_IMR_CTSIC (0x1u << 19) /**< \brief (US_IMR) Clear to Send Input Change Interrupt Mask */
#define US_IMR_UNRE (0x1u << 10) /**< \brief (US_IMR) SPI Underrun Error Interrupt Mask */
/* -------- US_CSR : (USART Offset: 0x0014) Channel Status Register -------- */
#define US_CSR_RXRDY (0x1u << 0) /**< \brief (US_CSR) Receiver Ready */
#define US_CSR_TXRDY (0x1u << 1) /**< \brief (US_CSR) Transmitter Ready */
#define US_CSR_RXBRK (0x1u << 2) /**< \brief (US_CSR) Break Received/End of Break */
#define US_CSR_ENDRX (0x1u << 3) /**< \brief (US_CSR) End of Receiver Transfer */
#define US_CSR_ENDTX (0x1u << 4) /**< \brief (US_CSR) End of Transmitter Transfer */
#define US_CSR_OVRE (0x1u << 5) /**< \brief (US_CSR) Overrun Error */
#define US_CSR_FRAME (0x1u << 6) /**< \brief (US_CSR) Framing Error */
#define US_CSR_PARE (0x1u << 7) /**< \brief (US_CSR) Parity Error */
#define US_CSR_TIMEOUT (0x1u << 8) /**< \brief (US_CSR) Receiver Time-out */
#define US_CSR_TXEMPTY (0x1u << 9) /**< \brief (US_CSR) Transmitter Empty */
#define US_CSR_ITER (0x1u << 10) /**< \brief (US_CSR) Max Number of Repetitions Reached */
#define US_CSR_TXBUFE (0x1u << 11) /**< \brief (US_CSR) Transmission Buffer Empty */
#define US_CSR_RXBUFF (0x1u << 12) /**< \brief (US_CSR) Reception Buffer Full */
#define US_CSR_NACK (0x1u << 13) /**< \brief (US_CSR) Non Acknowledge Interrupt */
#define US_CSR_CTSIC (0x1u << 19) /**< \brief (US_CSR) Clear to Send Input Change Flag */
#define US_CSR_CTS (0x1u << 23) /**< \brief (US_CSR) Image of CTS Input */
#define US_CSR_UNRE (0x1u << 10) /**< \brief (US_CSR) Underrun Error */
/* -------- US_RHR : (USART Offset: 0x0018) Receive Holding Register -------- */
#define US_RHR_RXCHR_Pos 0
#define US_RHR_RXCHR_Msk (0x1ffu << US_RHR_RXCHR_Pos) /**< \brief (US_RHR) Received Character */
#define US_RHR_RXSYNH (0x1u << 15) /**< \brief (US_RHR) Received Sync */
/* -------- US_THR : (USART Offset: 0x001C) Transmit Holding Register -------- */
#define US_THR_TXCHR_Pos 0
#define US_THR_TXCHR_Msk (0x1ffu << US_THR_TXCHR_Pos) /**< \brief (US_THR) Character to be Transmitted */
#define US_THR_TXCHR(value) ((US_THR_TXCHR_Msk & ((value) << US_THR_TXCHR_Pos)))
#define US_THR_TXSYNH (0x1u << 15) /**< \brief (US_THR) Sync Field to be Transmitted */
/* -------- US_BRGR : (USART Offset: 0x0020) Baud Rate Generator Register -------- */
#define US_BRGR_CD_Pos 0
#define US_BRGR_CD_Msk (0xffffu << US_BRGR_CD_Pos) /**< \brief (US_BRGR) Clock Divider */
#define US_BRGR_CD(value) ((US_BRGR_CD_Msk & ((value) << US_BRGR_CD_Pos)))
#define US_BRGR_FP_Pos 16
#define US_BRGR_FP_Msk (0x7u << US_BRGR_FP_Pos) /**< \brief (US_BRGR) Fractional Part */
#define US_BRGR_FP(value) ((US_BRGR_FP_Msk & ((value) << US_BRGR_FP_Pos)))
/* -------- US_RTOR : (USART Offset: 0x0024) Receiver Time-out Register -------- */
#define US_RTOR_TO_Pos 0
#define US_RTOR_TO_Msk (0xffffu << US_RTOR_TO_Pos) /**< \brief (US_RTOR) Time-out Value */
#define US_RTOR_TO(value) ((US_RTOR_TO_Msk & ((value) << US_RTOR_TO_Pos)))
/* -------- US_TTGR : (USART Offset: 0x0028) Transmitter Timeguard Register -------- */
#define US_TTGR_TG_Pos 0
#define US_TTGR_TG_Msk (0xffu << US_TTGR_TG_Pos) /**< \brief (US_TTGR) Timeguard Value */
#define US_TTGR_TG(value) ((US_TTGR_TG_Msk & ((value) << US_TTGR_TG_Pos)))
/* -------- US_FIDI : (USART Offset: 0x0040) FI DI Ratio Register -------- */
#define US_FIDI_FI_DI_RATIO_Pos 0
#define US_FIDI_FI_DI_RATIO_Msk (0x7ffu << US_FIDI_FI_DI_RATIO_Pos) /**< \brief (US_FIDI) FI Over DI Ratio Value */
#define US_FIDI_FI_DI_RATIO(value) ((US_FIDI_FI_DI_RATIO_Msk & ((value) << US_FIDI_FI_DI_RATIO_Pos)))
/* -------- US_NER : (USART Offset: 0x0044) Number of Errors Register -------- */
#define US_NER_NB_ERRORS_Pos 0
#define US_NER_NB_ERRORS_Msk (0xffu << US_NER_NB_ERRORS_Pos) /**< \brief (US_NER) Number of Errors */
/* -------- US_IF : (USART Offset: 0x004C) IrDA Filter Register -------- */
#define US_IF_IRDA_FILTER_Pos 0
#define US_IF_IRDA_FILTER_Msk (0xffu << US_IF_IRDA_FILTER_Pos) /**< \brief (US_IF) IrDA Filter */
#define US_IF_IRDA_FILTER(value) ((US_IF_IRDA_FILTER_Msk & ((value) << US_IF_IRDA_FILTER_Pos)))
/* -------- US_WPMR : (USART Offset: 0x00E4) Write Protection Mode Register -------- */
#define US_WPMR_WPEN (0x1u << 0) /**< \brief (US_WPMR) Write Protection Enable */
#define US_WPMR_WPKEY_Pos 8
#define US_WPMR_WPKEY_Msk (0xffffffu << US_WPMR_WPKEY_Pos) /**< \brief (US_WPMR) Write Protection Key */
#define US_WPMR_WPKEY_PASSWD (0x555341u << 8) /**< \brief (US_WPMR) Writing any other value in this field aborts the write operation of the WPEN bit. Always reads as 0. */
/* -------- US_WPSR : (USART Offset: 0x00E8) Write Protection Status Register -------- */
#define US_WPSR_WPVS (0x1u << 0) /**< \brief (US_WPSR) Write Protection Violation Status */
#define US_WPSR_WPVSRC_Pos 8
#define US_WPSR_WPVSRC_Msk (0xffffu << US_WPSR_WPVSRC_Pos) /**< \brief (US_WPSR) Write Protection Violation Source */
/* -------- US_RPR : (USART Offset: 0x100) Receive Pointer Register -------- */
#define US_RPR_RXPTR_Pos 0
#define US_RPR_RXPTR_Msk (0xffffffffu << US_RPR_RXPTR_Pos) /**< \brief (US_RPR) Receive Pointer Register */
#define US_RPR_RXPTR(value) ((US_RPR_RXPTR_Msk & ((value) << US_RPR_RXPTR_Pos)))
/* -------- US_RCR : (USART Offset: 0x104) Receive Counter Register -------- */
#define US_RCR_RXCTR_Pos 0
#define US_RCR_RXCTR_Msk (0xffffu << US_RCR_RXCTR_Pos) /**< \brief (US_RCR) Receive Counter Register */
#define US_RCR_RXCTR(value) ((US_RCR_RXCTR_Msk & ((value) << US_RCR_RXCTR_Pos)))
/* -------- US_TPR : (USART Offset: 0x108) Transmit Pointer Register -------- */
#define US_TPR_TXPTR_Pos 0
#define US_TPR_TXPTR_Msk (0xffffffffu << US_TPR_TXPTR_Pos) /**< \brief (US_TPR) Transmit Pointer Register */
#define US_TPR_TXPTR(value) ((US_TPR_TXPTR_Msk & ((value) << US_TPR_TXPTR_Pos)))
/* -------- US_TCR : (USART Offset: 0x10C) Transmit Counter Register -------- */
#define US_TCR_TXCTR_Pos 0
#define US_TCR_TXCTR_Msk (0xffffu << US_TCR_TXCTR_Pos) /**< \brief (US_TCR) Transmit Counter Register */
#define US_TCR_TXCTR(value) ((US_TCR_TXCTR_Msk & ((value) << US_TCR_TXCTR_Pos)))
/* -------- US_RNPR : (USART Offset: 0x110) Receive Next Pointer Register -------- */
#define US_RNPR_RXNPTR_Pos 0
#define US_RNPR_RXNPTR_Msk (0xffffffffu << US_RNPR_RXNPTR_Pos) /**< \brief (US_RNPR) Receive Next Pointer */
#define US_RNPR_RXNPTR(value) ((US_RNPR_RXNPTR_Msk & ((value) << US_RNPR_RXNPTR_Pos)))
/* -------- US_RNCR : (USART Offset: 0x114) Receive Next Counter Register -------- */
#define US_RNCR_RXNCTR_Pos 0
#define US_RNCR_RXNCTR_Msk (0xffffu << US_RNCR_RXNCTR_Pos) /**< \brief (US_RNCR) Receive Next Counter */
#define US_RNCR_RXNCTR(value) ((US_RNCR_RXNCTR_Msk & ((value) << US_RNCR_RXNCTR_Pos)))
/* -------- US_TNPR : (USART Offset: 0x118) Transmit Next Pointer Register -------- */
#define US_TNPR_TXNPTR_Pos 0
#define US_TNPR_TXNPTR_Msk (0xffffffffu << US_TNPR_TXNPTR_Pos) /**< \brief (US_TNPR) Transmit Next Pointer */
#define US_TNPR_TXNPTR(value) ((US_TNPR_TXNPTR_Msk & ((value) << US_TNPR_TXNPTR_Pos)))
/* -------- US_TNCR : (USART Offset: 0x11C) Transmit Next Counter Register -------- */
#define US_TNCR_TXNCTR_Pos 0
#define US_TNCR_TXNCTR_Msk (0xffffu << US_TNCR_TXNCTR_Pos) /**< \brief (US_TNCR) Transmit Counter Next */
#define US_TNCR_TXNCTR(value) ((US_TNCR_TXNCTR_Msk & ((value) << US_TNCR_TXNCTR_Pos)))
/* -------- US_PTCR : (USART Offset: 0x120) Transfer Control Register -------- */
#define US_PTCR_RXTEN (0x1u << 0) /**< \brief (US_PTCR) Receiver Transfer Enable */
#define US_PTCR_RXTDIS (0x1u << 1) /**< \brief (US_PTCR) Receiver Transfer Disable */
#define US_PTCR_TXTEN (0x1u << 8) /**< \brief (US_PTCR) Transmitter Transfer Enable */
#define US_PTCR_TXTDIS (0x1u << 9) /**< \brief (US_PTCR) Transmitter Transfer Disable */
#define US_PTCR_RXCBEN (0x1u << 16) /**< \brief (US_PTCR) Receiver Circular Buffer Enable */
#define US_PTCR_RXCBDIS (0x1u << 17) /**< \brief (US_PTCR) Receiver Circular Buffer Disable */
#define US_PTCR_TXCBEN (0x1u << 18) /**< \brief (US_PTCR) Transmitter Circular Buffer Enable */
#define US_PTCR_TXCBDIS (0x1u << 19) /**< \brief (US_PTCR) Transmitter Circular Buffer Disable */
#define US_PTCR_ERRCLR (0x1u << 24) /**< \brief (US_PTCR) Transfer Bus Error Clear */
/* -------- US_PTSR : (USART Offset: 0x124) Transfer Status Register -------- */
#define US_PTSR_RXTEN (0x1u << 0) /**< \brief (US_PTSR) Receiver Transfer Enable */
#define US_PTSR_TXTEN (0x1u << 8) /**< \brief (US_PTSR) Transmitter Transfer Enable */
#define US_PTSR_RXCBEN (0x1u << 16) /**< \brief (US_PTSR) Receiver Circular Buffer Enable */
#define US_PTSR_TXCBEN (0x1u << 18) /**< \brief (US_PTSR) Transmitter Circular Buffer Enable */
#define US_PTSR_ERR (0x1u << 24) /**< \brief (US_PTSR) Transfer Bus Error */
/*@}*/
#endif /* _SAMG54_USART_COMPONENT_ */
|
/**
* Copyright (c) 2015 - 2019, Nordic Semiconductor ASA
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form, except as embedded into a Nordic
* Semiconductor ASA integrated circuit in a product or a software update for
* such product, must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* 3. Neither the name of Nordic Semiconductor ASA nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* 4. This software, with or without modification, must only be used with a
* Nordic Semiconductor ASA integrated circuit.
*
* 5. Any software provided in binary form under this license must not be reverse
* engineered, decompiled, modified and/or disassembled.
*
* THIS SOFTWARE IS PROVIDED BY NORDIC SEMICONDUCTOR ASA "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NORDIC SEMICONDUCTOR ASA OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "sdk_common.h"
#if NRF_MODULE_ENABLED(APP_UART)
#include "app_uart.h"
#include "app_fifo.h"
#include "nrf_drv_uart.h"
#include "nrf_assert.h"
static nrf_drv_uart_t app_uart_inst = NRF_DRV_UART_INSTANCE(APP_UART_DRIVER_INSTANCE);
/**
 * @brief Returns the number of bytes currently held in @p p_fifo.
 *
 * read_pos is sampled into a local before write_pos is read, preserving
 * the original's read ordering (relevant if an interrupt advances the
 * FIFO between the two reads).
 */
static __INLINE uint32_t fifo_length(app_fifo_t * const p_fifo)
{
    uint32_t const read_pos = p_fifo->read_pos;
    return p_fifo->write_pos - read_pos;
}
#define FIFO_LENGTH(F) fifo_length(&F) /**< Macro to calculate length of a FIFO. */

static app_uart_event_handler_t m_event_handler; /**< Event handler function. */
static uint8_t tx_buffer[1];                     /**< One-byte staging buffer handed to the UART driver for transmission. */
static uint8_t rx_buffer[1];                     /**< One-byte staging buffer handed to the UART driver for reception. */
static bool m_rx_ovf;                            /**< Set when the RX FIFO overflowed; reception is restarted from app_uart_get(). */
static app_fifo_t m_rx_fifo; /**< RX FIFO buffer for storing data received on the UART until the application fetches them using app_uart_get(). */
static app_fifo_t m_tx_fifo; /**< TX FIFO buffer for storing data to be transmitted on the UART when TXD is ready. Data is put to the buffer on using app_uart_put(). */
/**
 * @brief Event handler called by the UART driver (interrupt context).
 *
 * Moves each received byte into the RX FIFO, feeds the next pending byte
 * from the TX FIFO back to the driver after a completed transmission, and
 * forwards FIFO/communication errors and data-ready/TX-empty notifications
 * to the application's @ref m_event_handler.
 *
 * @param p_event   Event reported by the UART driver.
 * @param p_context Driver context pointer (unused here).
 */
static void uart_event_handler(nrf_drv_uart_event_t * p_event, void* p_context)
{
    app_uart_evt_t app_uart_event;
    uint32_t err_code;

    switch (p_event->type)
    {
        case NRF_DRV_UART_EVT_RX_DONE:
            // If 0, then this is a RXTO event with no new bytes.
            if(p_event->data.rxtx.bytes == 0)
            {
                // A new start RX is needed to continue to receive data
                (void)nrf_drv_uart_rx(&app_uart_inst, rx_buffer, 1);
                break;
            }

            // Write received byte to FIFO.
            err_code = app_fifo_put(&m_rx_fifo, p_event->data.rxtx.p_data[0]);
            if (err_code != NRF_SUCCESS)
            {
                app_uart_event.evt_type = APP_UART_FIFO_ERROR;
                app_uart_event.data.error_code = err_code;
                m_event_handler(&app_uart_event);
            }
            // Notify that there are data available.
            else if (FIFO_LENGTH(m_rx_fifo) != 0)
            {
                app_uart_event.evt_type = APP_UART_DATA_READY;
                m_event_handler(&app_uart_event);
            }

            // Start a new single-byte RX only while the FIFO has room.
            if (FIFO_LENGTH(m_rx_fifo) <= m_rx_fifo.buf_size_mask)
            {
                (void)nrf_drv_uart_rx(&app_uart_inst, rx_buffer, 1);
            }
            else
            {
                // Overflow in RX FIFO: no new RX is scheduled here; reception
                // resumes from app_uart_get() once a byte has been drained.
                m_rx_ovf = true;
            }
            break;

        case NRF_DRV_UART_EVT_ERROR:
            // Report the communication error mask and re-arm reception.
            app_uart_event.evt_type = APP_UART_COMMUNICATION_ERROR;
            app_uart_event.data.error_communication = p_event->data.error.error_mask;
            (void)nrf_drv_uart_rx(&app_uart_inst, rx_buffer, 1);
            m_event_handler(&app_uart_event);
            break;

        case NRF_DRV_UART_EVT_TX_DONE:
            // Get next byte from FIFO; keep transmitting while data remains.
            if (app_fifo_get(&m_tx_fifo, tx_buffer) == NRF_SUCCESS)
            {
                (void)nrf_drv_uart_tx(&app_uart_inst, tx_buffer, 1);
            }
            else
            {
                // Last byte from FIFO transmitted, notify the application.
                app_uart_event.evt_type = APP_UART_TX_EMPTY;
                m_event_handler(&app_uart_event);
            }
            break;

        default:
            break;
    }
}
/**
 * @brief Initializes the app_uart module on top of the nrf_drv_uart driver.
 *
 * Initialization order matters: both FIFOs are set up before the driver is
 * initialized (the driver's event handler uses them), and reception is
 * started only as the final step.
 *
 * @param p_comm_params Pin, baud rate, parity and flow-control settings.
 * @param p_buffers     RX/TX FIFO backing buffers; must not be NULL.
 * @param event_handler Application callback for data-ready/TX-empty/error events.
 * @param irq_priority  Interrupt priority for the UART driver.
 *
 * @return NRF_SUCCESS on success, NRF_ERROR_INVALID_PARAM if p_buffers is
 *         NULL, or an error code propagated from FIFO/driver initialization.
 */
uint32_t app_uart_init(const app_uart_comm_params_t * p_comm_params,
                       app_uart_buffers_t * p_buffers,
                       app_uart_event_handler_t event_handler,
                       app_irq_priority_t irq_priority)
{
    uint32_t err_code;

    // NOTE(review): the handler is stored before p_buffers is validated, so a
    // failed call still overwrites any previously registered handler.
    m_event_handler = event_handler;

    if (p_buffers == NULL)
    {
        return NRF_ERROR_INVALID_PARAM;
    }

    // Configure buffer RX buffer.
    err_code = app_fifo_init(&m_rx_fifo, p_buffers->rx_buf, p_buffers->rx_buf_size);
    VERIFY_SUCCESS(err_code);

    // Configure buffer TX buffer.
    err_code = app_fifo_init(&m_tx_fifo, p_buffers->tx_buf, p_buffers->tx_buf_size);
    VERIFY_SUCCESS(err_code);

    // Translate the app-level communication parameters into a driver config.
    nrf_drv_uart_config_t config = NRF_DRV_UART_DEFAULT_CONFIG;
    config.baudrate = (nrf_uart_baudrate_t)p_comm_params->baud_rate;
    config.hwfc = (p_comm_params->flow_control == APP_UART_FLOW_CONTROL_DISABLED) ?
            NRF_UART_HWFC_DISABLED : NRF_UART_HWFC_ENABLED;
    config.interrupt_priority = irq_priority;
    config.parity = p_comm_params->use_parity ? NRF_UART_PARITY_INCLUDED : NRF_UART_PARITY_EXCLUDED;
    config.pselcts = p_comm_params->cts_pin_no;
    config.pselrts = p_comm_params->rts_pin_no;
    config.pselrxd = p_comm_params->rx_pin_no;
    config.pseltxd = p_comm_params->tx_pin_no;

    err_code = nrf_drv_uart_init(&app_uart_inst, &config, uart_event_handler);
    VERIFY_SUCCESS(err_code);
    m_rx_ovf = false;

    // Turn on receiver if RX pin is connected
    if (p_comm_params->rx_pin_no != UART_PIN_DISCONNECTED)
    {
        return nrf_drv_uart_rx(&app_uart_inst, rx_buffer,1);
    }
    else
    {
        return NRF_SUCCESS;
    }
}
/**
 * @brief Discards all pending data in both the RX and the TX FIFO.
 *
 * @return NRF_SUCCESS, or the first non-success status reported by
 *         app_fifo_flush().
 */
uint32_t app_uart_flush(void)
{
    uint32_t status = app_fifo_flush(&m_rx_fifo);
    VERIFY_SUCCESS(status);

    status = app_fifo_flush(&m_tx_fifo);
    VERIFY_SUCCESS(status);

    return NRF_SUCCESS;
}
/**
 * @brief Fetches one byte from the RX FIFO.
 *
 * If reception was previously paused because the FIFO overflowed
 * (@ref m_rx_ovf), a new single-byte RX is re-armed here, since the
 * interrupt handler deliberately did not schedule one.
 *
 * @param p_byte Out-parameter receiving the byte; must be non-NULL (ASSERT).
 *
 * @return The status returned by app_fifo_get() for the read attempt.
 */
uint32_t app_uart_get(uint8_t * p_byte)
{
    ASSERT(p_byte);
    // Snapshot the overflow flag before draining the FIFO; presumably this
    // ordering matters relative to the RX interrupt — TODO confirm.
    bool rx_ovf = m_rx_ovf;

    ret_code_t err_code = app_fifo_get(&m_rx_fifo, p_byte);

    // If FIFO was full new request to receive one byte was not scheduled. Must be done here.
    if (rx_ovf)
    {
        m_rx_ovf = false;
        uint32_t uart_err_code = nrf_drv_uart_rx(&app_uart_inst, rx_buffer, 1);

        // RX resume should never fail.
        APP_ERROR_CHECK(uart_err_code);
    }

    return err_code;
}
/**
 * @brief Queues one byte for transmission on the UART.
 *
 * The byte goes into the TX FIFO; if the driver is idle, transmission is
 * kicked off immediately, otherwise the TX-done interrupt will pick the
 * byte up when preceding data has been sent.
 *
 * @param byte Byte to transmit.
 *
 * @return NRF_SUCCESS, an error from app_fifo_put() (e.g. FIFO full), or
 *         an error from nrf_drv_uart_tx() when starting a new transfer.
 */
uint32_t app_uart_put(uint8_t byte)
{
    uint32_t err_code;
    err_code = app_fifo_put(&m_tx_fifo, byte);
    if (err_code == NRF_SUCCESS)
    {
        // The new byte has been added to FIFO. It will be picked up from there
        // (in 'uart_event_handler') when all preceding bytes are transmitted.
        // But if UART is not transmitting anything at the moment, we must start
        // a new transmission here.
        if (!nrf_drv_uart_tx_in_progress(&app_uart_inst))
        {
            // This operation should be almost always successful, since we've
            // just added a byte to FIFO, but if some bigger delay occurred
            // (some heavy interrupt handler routine has been executed) since
            // that time, FIFO might be empty already.
            if (app_fifo_get(&m_tx_fifo, tx_buffer) == NRF_SUCCESS)
            {
                err_code = nrf_drv_uart_tx(&app_uart_inst, tx_buffer, 1);
            }
        }
    }
    return err_code;
}
/**
 * @brief Closes the UART module by uninitializing the underlying driver.
 *
 * The RX/TX FIFO contents are left untouched; call app_uart_flush() first
 * if pending data should be discarded.
 *
 * @return Always NRF_SUCCESS.
 */
uint32_t app_uart_close(void)
{
    nrf_drv_uart_uninit(&app_uart_inst);
    return NRF_SUCCESS;
}
#endif //NRF_MODULE_ENABLED(APP_UART)
|
/**
* @license AngularJS v1.2.10
* (c) 2010-2014 Google, Inc. http://angularjs.org
* License: MIT
*/
(function(window, angular, undefined) {'use strict';
/**
* @ngdoc overview
* @name ngRoute
* @description
*
* # ngRoute
*
* The `ngRoute` module provides routing and deeplinking services and directives for angular apps.
*
* ## Example
* See {@link ngRoute.$route#example $route} for an example of configuring and using `ngRoute`.
*
* {@installModule route}
*
* <div doc-module-components="ngRoute"></div>
*/
/* global -ngRouteModule */
var ngRouteModule = angular.module('ngRoute', ['ng']).
provider('$route', $RouteProvider);
/**
* @ngdoc object
* @name ngRoute.$routeProvider
* @function
*
* @description
*
* Used for configuring routes.
*
* ## Example
* See {@link ngRoute.$route#example $route} for an example of configuring and using `ngRoute`.
*
* ## Dependencies
* Requires the {@link ngRoute `ngRoute`} module to be installed.
*/
function $RouteProvider(){
/**
 * Creates an object whose prototype is `parent`, then copies the own
 * properties of `extra` onto the new object and returns it.
 */
function inherit(parent, extra) {
  var Surrogate = function() {};
  angular.extend(Surrogate, {prototype: parent});
  var child = new Surrogate();
  return angular.extend(child, extra);
}
var routes = {};
/**
* @ngdoc method
* @name ngRoute.$routeProvider#when
* @methodOf ngRoute.$routeProvider
*
* @param {string} path Route path (matched against `$location.path`). If `$location.path`
* contains redundant trailing slash or is missing one, the route will still match and the
* `$location.path` will be updated to add or drop the trailing slash to exactly match the
* route definition.
*
* * `path` can contain named groups starting with a colon: e.g. `:name`. All characters up
* to the next slash are matched and stored in `$routeParams` under the given `name`
* when the route matches.
* * `path` can contain named groups starting with a colon and ending with a star:
* e.g.`:name*`. All characters are eagerly stored in `$routeParams` under the given `name`
* when the route matches.
* * `path` can contain optional named groups with a question mark: e.g.`:name?`.
*
* For example, routes like `/color/:color/largecode/:largecode*\/edit` will match
* `/color/brown/largecode/code/with/slashs/edit` and extract:
*
* * `color: brown`
* * `largecode: code/with/slashs`.
*
*
* @param {Object} route Mapping information to be assigned to `$route.current` on route
* match.
*
* Object properties:
*
* - `controller` – `{(string|function()=}` – Controller fn that should be associated with
* newly created scope or the name of a {@link angular.Module#controller registered
* controller} if passed as a string.
* - `controllerAs` – `{string=}` – A controller alias name. If present the controller will be
* published to scope under the `controllerAs` name.
* - `template` – `{string=|function()=}` – html template as a string or a function that
* returns an html template as a string which should be used by {@link
* ngRoute.directive:ngView ngView} or {@link ng.directive:ngInclude ngInclude} directives.
* This property takes precedence over `templateUrl`.
*
* If `template` is a function, it will be called with the following parameters:
*
* - `{Array.<Object>}` - route parameters extracted from the current
* `$location.path()` by applying the current route
*
* - `templateUrl` – `{string=|function()=}` – path or function that returns a path to an html
* template that should be used by {@link ngRoute.directive:ngView ngView}.
*
* If `templateUrl` is a function, it will be called with the following parameters:
*
* - `{Array.<Object>}` - route parameters extracted from the current
* `$location.path()` by applying the current route
*
* - `resolve` - `{Object.<string, function>=}` - An optional map of dependencies which should
* be injected into the controller. If any of these dependencies are promises, the router
* will wait for them all to be resolved or one to be rejected before the controller is
* instantiated.
* If all the promises are resolved successfully, the values of the resolved promises are
* injected and {@link ngRoute.$route#$routeChangeSuccess $routeChangeSuccess} event is
* fired. If any of the promises are rejected the
* {@link ngRoute.$route#$routeChangeError $routeChangeError} event is fired. The map object
* is:
*
* - `key` – `{string}`: a name of a dependency to be injected into the controller.
* - `factory` - `{string|function}`: If `string` then it is an alias for a service.
* Otherwise if function, then it is {@link api/AUTO.$injector#invoke injected}
* and the return value is treated as the dependency. If the result is a promise, it is
* resolved before its value is injected into the controller. Be aware that
* `ngRoute.$routeParams` will still refer to the previous route within these resolve
* functions. Use `$route.current.params` to access the new route parameters, instead.
*
* - `redirectTo` – {(string|function())=} – value to update
* {@link ng.$location $location} path with and trigger route redirection.
*
* If `redirectTo` is a function, it will be called with the following parameters:
*
* - `{Object.<string>}` - route parameters extracted from the current
* `$location.path()` by applying the current route templateUrl.
* - `{string}` - current `$location.path()`
* - `{Object}` - current `$location.search()`
*
* The custom `redirectTo` function is expected to return a string which will be used
* to update `$location.path()` and `$location.search()`.
*
* - `[reloadOnSearch=true]` - {boolean=} - reload route when only `$location.search()`
* or `$location.hash()` changes.
*
* If the option is set to `false` and url in the browser changes, then
* `$routeUpdate` event is broadcasted on the root scope.
*
* - `[caseInsensitiveMatch=false]` - {boolean=} - match routes without being case sensitive
*
* If the option is set to `true`, then the particular route can be matched without being
* case sensitive
*
* @returns {Object} self
*
* @description
* Adds a new route definition to the `$route` service.
*/
this.when = function(path, route) {
  // Build the route definition: defaults first, then the caller's route
  // object, then the compiled path matcher (skipped for a null path).
  var definition = angular.extend(
    {reloadOnSearch: true},
    route,
    path && pathRegExp(path, route)
  );
  routes[path] = definition;

  // Register a twin route so the trailing-slash variant (or the
  // slash-less variant) of `path` redirects back to `path` itself.
  if (path) {
    var endsWithSlash = path[path.length - 1] == '/';
    var redirectPath = endsWithSlash ? path.substr(0, path.length - 1) : path + '/';
    routes[redirectPath] = angular.extend(
      {redirectTo: path},
      pathRegExp(redirectPath, route)
    );
  }

  return this;
};
/**
* @param path {string} path
* @param opts {Object} options
* @return {?Object}
*
* @description
* Normalizes the given path, returning a regular expression
* and the original path.
*
* Inspired by pathRexp in visionmedia/express/lib/utils.js.
*/
/**
 * Compiles a route path such as `/user/:id`, `/a/:b?` or `/f/:rest*` into
 * `{originalPath, regexp, keys}`, where `keys` lists the named parameters
 * in order with their optionality, and `regexp` anchors the whole path.
 *
 * @param path {string} route path with optional `:name`, `:name?`, `:name*` groups
 * @param opts {Object} options; `caseInsensitiveMatch` adds the `i` flag
 * @return {?Object} matcher descriptor
 */
function pathRegExp(path, opts) {
  var insensitive = opts.caseInsensitiveMatch;
  var ret = {originalPath: path, regexp: path};
  var keys = ret.keys = [];

  // Escape parentheses and dots that may legitimately appear in a path.
  var pattern = path.replace(/([().])/g, '\\$1');

  // Turn each named group into a capture; `?` groups swallow their leading
  // slash inside the optional part, `*` groups match across slashes.
  pattern = pattern.replace(/(\/)?:(\w+)([\?\*])?/g, function(_, slash, key, option) {
    var optional = option === '?' ? option : null;
    var star = option === '*' ? option : null;
    keys.push({name: key, optional: !!optional});
    slash = slash || '';
    var group = star && '(.+?)' || '([^/]+)';
    var head = optional ? '(?:' + slash : slash + '(?:';
    return head + group + (optional || '') + ')' + (optional || '');
  });

  // Escape remaining slashes, dollars and stars, then anchor the pattern.
  pattern = pattern.replace(/([\/$\*])/g, '\\$1');
  ret.regexp = new RegExp('^' + pattern + '$', insensitive ? 'i' : '');
  return ret;
}
/**
* @ngdoc method
* @name ngRoute.$routeProvider#otherwise
* @methodOf ngRoute.$routeProvider
*
* @description
* Sets route definition that will be used on route change when no other route definition
* is matched.
*
* @param {Object} params Mapping information to be assigned to `$route.current`.
* @returns {Object} self
*/
this.otherwise = function(params) {
this.when(null, params);
return this;
};
this.$get = ['$rootScope',
'$location',
'$routeParams',
'$q',
'$injector',
'$http',
'$templateCache',
'$sce',
function($rootScope, $location, $routeParams, $q, $injector, $http, $templateCache, $sce) {
/**
* @ngdoc object
* @name ngRoute.$route
* @requires $location
* @requires $routeParams
*
* @property {Object} current Reference to the current route definition.
* The route definition contains:
*
* - `controller`: The controller constructor as define in route definition.
* - `locals`: A map of locals which is used by {@link ng.$controller $controller} service for
* controller instantiation. The `locals` contain
* the resolved values of the `resolve` map. Additionally the `locals` also contain:
*
* - `$scope` - The current route scope.
* - `$template` - The current route template HTML.
*
* @property {Array.<Object>} routes Array of all configured routes.
*
* @description
* `$route` is used for deep-linking URLs to controllers and views (HTML partials).
* It watches `$location.url()` and tries to map the path to an existing route definition.
*
* Requires the {@link ngRoute `ngRoute`} module to be installed.
*
* You can define routes through {@link ngRoute.$routeProvider $routeProvider}'s API.
*
* The `$route` service is typically used in conjunction with the
* {@link ngRoute.directive:ngView `ngView`} directive and the
* {@link ngRoute.$routeParams `$routeParams`} service.
*
* @example
This example shows how changing the URL hash causes the `$route` to match a route against the
URL, and the `ngView` pulls in the partial.
Note that this example is using {@link ng.directive:script inlined templates}
to get it working on jsfiddle as well.
<example module="ngViewExample" deps="angular-route.js">
<file name="index.html">
<div ng-controller="MainCntl">
Choose:
<a href="Book/Moby">Moby</a> |
<a href="Book/Moby/ch/1">Moby: Ch1</a> |
<a href="Book/Gatsby">Gatsby</a> |
<a href="Book/Gatsby/ch/4?key=value">Gatsby: Ch4</a> |
<a href="Book/Scarlet">Scarlet Letter</a><br/>
<div ng-view></div>
<hr />
<pre>$location.path() = {{$location.path()}}</pre>
<pre>$route.current.templateUrl = {{$route.current.templateUrl}}</pre>
<pre>$route.current.params = {{$route.current.params}}</pre>
<pre>$route.current.scope.name = {{$route.current.scope.name}}</pre>
<pre>$routeParams = {{$routeParams}}</pre>
</div>
</file>
<file name="book.html">
controller: {{name}}<br />
Book Id: {{params.bookId}}<br />
</file>
<file name="chapter.html">
controller: {{name}}<br />
Book Id: {{params.bookId}}<br />
Chapter Id: {{params.chapterId}}
</file>
<file name="script.js">
angular.module('ngViewExample', ['ngRoute'])
.config(function($routeProvider, $locationProvider) {
$routeProvider.when('/Book/:bookId', {
templateUrl: 'book.html',
controller: BookCntl,
resolve: {
// I will cause a 1 second delay
delay: function($q, $timeout) {
var delay = $q.defer();
$timeout(delay.resolve, 1000);
return delay.promise;
}
}
});
$routeProvider.when('/Book/:bookId/ch/:chapterId', {
templateUrl: 'chapter.html',
controller: ChapterCntl
});
// configure html5 to get links working on jsfiddle
$locationProvider.html5Mode(true);
});
function MainCntl($scope, $route, $routeParams, $location) {
$scope.$route = $route;
$scope.$location = $location;
$scope.$routeParams = $routeParams;
}
function BookCntl($scope, $routeParams) {
$scope.name = "BookCntl";
$scope.params = $routeParams;
}
function ChapterCntl($scope, $routeParams) {
$scope.name = "ChapterCntl";
$scope.params = $routeParams;
}
</file>
<file name="scenario.js">
it('should load and compile correct template', function() {
element('a:contains("Moby: Ch1")').click();
var content = element('.doc-example-live [ng-view]').text();
expect(content).toMatch(/controller\: ChapterCntl/);
expect(content).toMatch(/Book Id\: Moby/);
expect(content).toMatch(/Chapter Id\: 1/);
element('a:contains("Scarlet")').click();
sleep(2); // promises are not part of scenario waiting
content = element('.doc-example-live [ng-view]').text();
expect(content).toMatch(/controller\: BookCntl/);
expect(content).toMatch(/Book Id\: Scarlet/);
});
</file>
</example>
*/
/**
* @ngdoc event
* @name ngRoute.$route#$routeChangeStart
* @eventOf ngRoute.$route
* @eventType broadcast on root scope
* @description
* Broadcasted before a route change. At this point the route services starts
* resolving all of the dependencies needed for the route change to occur.
* Typically this involves fetching the view template as well as any dependencies
* defined in `resolve` route property. Once all of the dependencies are resolved
* `$routeChangeSuccess` is fired.
*
* @param {Object} angularEvent Synthetic event object.
* @param {Route} next Future route information.
* @param {Route} current Current route information.
*/
/**
* @ngdoc event
* @name ngRoute.$route#$routeChangeSuccess
* @eventOf ngRoute.$route
* @eventType broadcast on root scope
* @description
* Broadcasted after a route dependencies are resolved.
* {@link ngRoute.directive:ngView ngView} listens for the directive
* to instantiate the controller and render the view.
*
* @param {Object} angularEvent Synthetic event object.
* @param {Route} current Current route information.
* @param {Route|Undefined} previous Previous route information, or undefined if current is
* first route entered.
*/
/**
* @ngdoc event
* @name ngRoute.$route#$routeChangeError
* @eventOf ngRoute.$route
* @eventType broadcast on root scope
* @description
* Broadcasted if any of the resolve promises are rejected.
*
* @param {Object} angularEvent Synthetic event object
* @param {Route} current Current route information.
* @param {Route} previous Previous route information.
* @param {Route} rejection Rejection of the promise. Usually the error of the failed promise.
*/
/**
* @ngdoc event
* @name ngRoute.$route#$routeUpdate
* @eventOf ngRoute.$route
* @eventType broadcast on root scope
* @description
*
* The `reloadOnSearch` property has been set to false, and we are reusing the same
* instance of the Controller.
*/
var forceReload = false,
$route = {
routes: routes,
/**
* @ngdoc method
* @name ngRoute.$route#reload
* @methodOf ngRoute.$route
*
* @description
* Causes `$route` service to reload the current route even if
* {@link ng.$location $location} hasn't changed.
*
* As a result of that, {@link ngRoute.directive:ngView ngView}
* creates new scope, reinstantiates the controller.
*/
reload: function() {
forceReload = true;
$rootScope.$evalAsync(updateRoute);
}
};
$rootScope.$on('$locationChangeSuccess', updateRoute);
return $route;
/////////////////////////////////////////////////////
/**
* @param on {string} current url
* @param route {Object} route regexp to match the url against
* @return {?Object}
*
* @description
* Check if the route matches the current url.
*
* Inspired by match in
* visionmedia/express/lib/router/router.js.
*/
function switchRouteMatcher(on, route) {
var keys = route.keys,
params = {};
if (!route.regexp) return null;
var m = route.regexp.exec(on);
if (!m) return null;
for (var i = 1, len = m.length; i < len; ++i) {
var key = keys[i - 1];
var val = 'string' == typeof m[i]
? decodeURIComponent(m[i])
: m[i];
if (key && val) {
params[key.name] = val;
}
}
return params;
}
function updateRoute() {
var next = parseRoute(),
last = $route.current;
if (next && last && next.$$route === last.$$route
&& angular.equals(next.pathParams, last.pathParams)
&& !next.reloadOnSearch && !forceReload) {
last.params = next.params;
angular.copy(last.params, $routeParams);
$rootScope.$broadcast('$routeUpdate', last);
} else if (next || last) {
forceReload = false;
$rootScope.$broadcast('$routeChangeStart', next, last);
$route.current = next;
if (next) {
if (next.redirectTo) {
if (angular.isString(next.redirectTo)) {
$location.path(interpolate(next.redirectTo, next.params)).search(next.params)
.replace();
} else {
$location.url(next.redirectTo(next.pathParams, $location.path(), $location.search()))
.replace();
}
}
}
$q.when(next).
then(function() {
if (next) {
var locals = angular.extend({}, next.resolve),
template, templateUrl;
angular.forEach(locals, function(value, key) {
locals[key] = angular.isString(value) ?
$injector.get(value) : $injector.invoke(value);
});
if (angular.isDefined(template = next.template)) {
if (angular.isFunction(template)) {
template = template(next.params);
}
} else if (angular.isDefined(templateUrl = next.templateUrl)) {
if (angular.isFunction(templateUrl)) {
templateUrl = templateUrl(next.params);
}
templateUrl = $sce.getTrustedResourceUrl(templateUrl);
if (angular.isDefined(templateUrl)) {
next.loadedTemplateUrl = templateUrl;
template = $http.get(templateUrl, {cache: $templateCache}).
then(function(response) { return response.data; });
}
}
if (angular.isDefined(template)) {
locals['$template'] = template;
}
return $q.all(locals);
}
}).
// after route change
then(function(locals) {
if (next == $route.current) {
if (next) {
next.locals = locals;
angular.copy(next.params, $routeParams);
}
$rootScope.$broadcast('$routeChangeSuccess', next, last);
}
}, function(error) {
if (next == $route.current) {
$rootScope.$broadcast('$routeChangeError', next, last, error);
}
});
}
}
/**
* @returns the current active route, by matching it against the URL
*/
function parseRoute() {
// Match a route
var params, match;
angular.forEach(routes, function(route, path) {
if (!match && (params = switchRouteMatcher($location.path(), route))) {
match = inherit(route, {
params: angular.extend({}, $location.search(), params),
pathParams: params});
match.$$route = route;
}
});
// No route matched; fallback to "otherwise" route
return match || routes[null] && inherit(routes[null], {params: {}, pathParams:{}});
}
/**
* @returns interpolation of the redirect path with the parameters
*/
function interpolate(string, params) {
var result = [];
angular.forEach((string||'').split(':'), function(segment, i) {
if (i === 0) {
result.push(segment);
} else {
var segmentMatch = segment.match(/(\w+)(.*)/);
var key = segmentMatch[1];
result.push(params[key]);
result.push(segmentMatch[2] || '');
delete params[key];
}
});
return result.join('');
}
}];
}
ngRouteModule.provider('$routeParams', $RouteParamsProvider);
/**
* @ngdoc object
* @name ngRoute.$routeParams
* @requires $route
*
* @description
* The `$routeParams` service allows you to retrieve the current set of route parameters.
*
* Requires the {@link ngRoute `ngRoute`} module to be installed.
*
* The route parameters are a combination of {@link ng.$location `$location`}'s
* {@link ng.$location#methods_search `search()`} and {@link ng.$location#methods_path `path()`}.
* The `path` parameters are extracted when the {@link ngRoute.$route `$route`} path is matched.
*
* In case of parameter name collision, `path` params take precedence over `search` params.
*
* The service guarantees that the identity of the `$routeParams` object will remain unchanged
* (but its properties will likely change) even when a route change occurs.
*
* Note that the `$routeParams` are only updated *after* a route change completes successfully.
* This means that you cannot rely on `$routeParams` being correct in route resolve functions.
* Instead you can use `$route.current.params` to access the new route's parameters.
*
* @example
* <pre>
* // Given:
* // URL: http://server.com/index.html#/Chapter/1/Section/2?search=moby
* // Route: /Chapter/:chapterId/Section/:sectionId
* //
* // Then
* $routeParams ==> {chapterId:1, sectionId:2, search:'moby'}
* </pre>
*/
function $RouteParamsProvider() {
this.$get = function() { return {}; };
}
ngRouteModule.directive('ngView', ngViewFactory);
ngRouteModule.directive('ngView', ngViewFillContentFactory);
/**
* @ngdoc directive
* @name ngRoute.directive:ngView
* @restrict ECA
*
* @description
* # Overview
* `ngView` is a directive that complements the {@link ngRoute.$route $route} service by
* including the rendered template of the current route into the main layout (`index.html`) file.
* Every time the current route changes, the included view changes with it according to the
* configuration of the `$route` service.
*
* Requires the {@link ngRoute `ngRoute`} module to be installed.
*
* @animations
* enter - animation is used to bring new content into the browser.
* leave - animation is used to animate existing content away.
*
* The enter and leave animation occur concurrently.
*
* @scope
* @priority 400
* @param {string=} onload Expression to evaluate whenever the view updates.
*
* @param {string=} autoscroll Whether `ngView` should call {@link ng.$anchorScroll
* $anchorScroll} to scroll the viewport after the view is updated.
*
* - If the attribute is not set, disable scrolling.
* - If the attribute is set without value, enable scrolling.
* - Otherwise enable scrolling only if the `autoscroll` attribute value evaluated
* as an expression yields a truthy value.
* @example
<example module="ngViewExample" deps="angular-route.js" animations="true">
<file name="index.html">
<div ng-controller="MainCntl as main">
Choose:
<a href="Book/Moby">Moby</a> |
<a href="Book/Moby/ch/1">Moby: Ch1</a> |
<a href="Book/Gatsby">Gatsby</a> |
<a href="Book/Gatsby/ch/4?key=value">Gatsby: Ch4</a> |
<a href="Book/Scarlet">Scarlet Letter</a><br/>
<div class="view-animate-container">
<div ng-view class="view-animate"></div>
</div>
<hr />
<pre>$location.path() = {{main.$location.path()}}</pre>
<pre>$route.current.templateUrl = {{main.$route.current.templateUrl}}</pre>
<pre>$route.current.params = {{main.$route.current.params}}</pre>
<pre>$route.current.scope.name = {{main.$route.current.scope.name}}</pre>
<pre>$routeParams = {{main.$routeParams}}</pre>
</div>
</file>
<file name="book.html">
<div>
controller: {{book.name}}<br />
Book Id: {{book.params.bookId}}<br />
</div>
</file>
<file name="chapter.html">
<div>
controller: {{chapter.name}}<br />
Book Id: {{chapter.params.bookId}}<br />
Chapter Id: {{chapter.params.chapterId}}
</div>
</file>
<file name="animations.css">
.view-animate-container {
position:relative;
height:100px!important;
position:relative;
background:white;
border:1px solid black;
height:40px;
overflow:hidden;
}
.view-animate {
padding:10px;
}
.view-animate.ng-enter, .view-animate.ng-leave {
-webkit-transition:all cubic-bezier(0.250, 0.460, 0.450, 0.940) 1.5s;
transition:all cubic-bezier(0.250, 0.460, 0.450, 0.940) 1.5s;
display:block;
width:100%;
border-left:1px solid black;
position:absolute;
top:0;
left:0;
right:0;
bottom:0;
padding:10px;
}
.view-animate.ng-enter {
left:100%;
}
.view-animate.ng-enter.ng-enter-active {
left:0;
}
.view-animate.ng-leave.ng-leave-active {
left:-100%;
}
</file>
<file name="script.js">
angular.module('ngViewExample', ['ngRoute', 'ngAnimate'],
function($routeProvider, $locationProvider) {
$routeProvider.when('/Book/:bookId', {
templateUrl: 'book.html',
controller: BookCntl,
controllerAs: 'book'
});
$routeProvider.when('/Book/:bookId/ch/:chapterId', {
templateUrl: 'chapter.html',
controller: ChapterCntl,
controllerAs: 'chapter'
});
// configure html5 to get links working on jsfiddle
$locationProvider.html5Mode(true);
});
function MainCntl($route, $routeParams, $location) {
this.$route = $route;
this.$location = $location;
this.$routeParams = $routeParams;
}
function BookCntl($routeParams) {
this.name = "BookCntl";
this.params = $routeParams;
}
function ChapterCntl($routeParams) {
this.name = "ChapterCntl";
this.params = $routeParams;
}
</file>
<file name="scenario.js">
it('should load and compile correct template', function() {
element('a:contains("Moby: Ch1")').click();
var content = element('.doc-example-live [ng-view]').text();
expect(content).toMatch(/controller\: ChapterCntl/);
expect(content).toMatch(/Book Id\: Moby/);
expect(content).toMatch(/Chapter Id\: 1/);
element('a:contains("Scarlet")').click();
content = element('.doc-example-live [ng-view]').text();
expect(content).toMatch(/controller\: BookCntl/);
expect(content).toMatch(/Book Id\: Scarlet/);
});
</file>
</example>
*/
/**
* @ngdoc event
* @name ngRoute.directive:ngView#$viewContentLoaded
* @eventOf ngRoute.directive:ngView
* @eventType emit on the current ngView scope
* @description
* Emitted every time the ngView content is reloaded.
*/
ngViewFactory.$inject = ['$route', '$anchorScroll', '$animate'];
function ngViewFactory( $route, $anchorScroll, $animate) {
return {
restrict: 'ECA',
terminal: true,
priority: 400,
transclude: 'element',
link: function(scope, $element, attr, ctrl, $transclude) {
var currentScope,
currentElement,
autoScrollExp = attr.autoscroll,
onloadExp = attr.onload || '';
scope.$on('$routeChangeSuccess', update);
update();
function cleanupLastView() {
if (currentScope) {
currentScope.$destroy();
currentScope = null;
}
if(currentElement) {
$animate.leave(currentElement);
currentElement = null;
}
}
function update() {
var locals = $route.current && $route.current.locals,
template = locals && locals.$template;
if (angular.isDefined(template)) {
var newScope = scope.$new();
var current = $route.current;
// Note: This will also link all children of ng-view that were contained in the original
// html. If that content contains controllers, ... they could pollute/change the scope.
// However, using ng-view on an element with additional content does not make sense...
// Note: We can't remove them in the cloneAttchFn of $transclude as that
// function is called before linking the content, which would apply child
// directives to non existing elements.
var clone = $transclude(newScope, function(clone) {
$animate.enter(clone, null, currentElement || $element, function onNgViewEnter () {
if (angular.isDefined(autoScrollExp)
&& (!autoScrollExp || scope.$eval(autoScrollExp))) {
$anchorScroll();
}
});
cleanupLastView();
});
currentElement = clone;
currentScope = current.scope = newScope;
currentScope.$emit('$viewContentLoaded');
currentScope.$eval(onloadExp);
} else {
cleanupLastView();
}
}
}
};
}
// This directive is called during the $transclude call of the first `ngView` directive.
// It will replace and compile the content of the element with the loaded template.
// We need this directive so that the element content is already filled when
// the link function of another directive on the same element as ngView
// is called.
ngViewFillContentFactory.$inject = ['$compile', '$controller', '$route'];
function ngViewFillContentFactory($compile, $controller, $route) {
return {
restrict: 'ECA',
priority: -400,
link: function(scope, $element) {
var current = $route.current,
locals = current.locals;
$element.html(locals.$template);
var link = $compile($element.contents());
if (current.controller) {
locals.$scope = scope;
var controller = $controller(current.controller, locals);
if (current.controllerAs) {
scope[current.controllerAs] = controller;
}
$element.data('$ngControllerController', controller);
$element.children().data('$ngControllerController', controller);
}
link(scope);
}
};
}
})(window, window.angular);
|
/*!
* OpenUI5
* (c) Copyright 2009-2020 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
sap.ui.define(["sap/ui/base/ManagedObject","sap/ui/dom/units/Rem","sap/base/Log"],function(e,t,n){"use strict";function i(e){if(e===null||e===undefined){return e}if(e===0||e==="0"){return 0}var i=e.match(/^(\d+(\.\d+)?)(px|rem)$/),o;if(i){if(i[3]==="px"){o=parseFloat(i[1])}else{o=t.toPx(parseFloat(i[1]))}}else{n.error("Css size '"+e+"' is not supported for some features in GridContainer. Only 'px' and 'rem' are supported.");o=NaN}return Math.ceil(o)}var o=e.extend("sap.f.GridContainerSettings",{metadata:{library:"sap.f",properties:{columns:{type:"int"},columnSize:{type:"sap.ui.core.CSSSize",defaultValue:"80px"},minColumnSize:{type:"sap.ui.core.CSSSize"},maxColumnSize:{type:"sap.ui.core.CSSSize"},rowSize:{type:"sap.ui.core.CSSSize",defaultValue:"80px"},gap:{type:"sap.ui.core.CSSSize",defaultValue:"16px"}}}});o.prototype.getColumnSizeInPx=function(){return i(this.getColumnSize())};o.prototype.getMinColumnSizeInPx=function(){return i(this.getMinColumnSize())};o.prototype.getRowSizeInPx=function(){return i(this.getRowSize())};o.prototype.getGapInPx=function(){return i(this.getGap())};o.prototype.getComputedColumnsCount=function(e){if(this.getColumns()){return this.getColumns()}var t=this.getGapInPx(),n=this.getColumnSizeInPx();return Math.floor((e+t)/(n+t))};o.prototype.calculateRowsForItem=function(e){var t=this.getGapInPx(),n=this.getRowSizeInPx();return Math.ceil((e+t)/(n+t))};o.prototype.calculateColumnsForItem=function(e){var t=this.getGapInPx(),n=this.getColumnSizeInPx();return Math.ceil((e+t)/(n+t))};return o});
|
"""SQL io tests
The SQL tests are broken down into different classes:
- `PandasSQLTest`: base class with common methods for all test classes
- Tests for the public API (only tests with sqlite3)
- `_TestSQLApi` base class
- `TestSQLApi`: test the public API with sqlalchemy engine
- `TestSQLiteFallbackApi`: test the public API with a sqlite DBAPI
connection
- Tests for the different SQL flavors (flavor specific type conversions)
- Tests for the sqlalchemy mode: `_TestSQLAlchemy` is the base class with
common methods, `_TestSQLAlchemyConn` tests the API with a SQLAlchemy
Connection object. The different tested flavors (sqlite3, MySQL,
PostgreSQL) derive from the base class
- Tests for the fallback mode (`TestSQLiteFallback`)
"""
import csv
from datetime import date, datetime, time
from io import StringIO
import sqlite3
import warnings
import numpy as np
import pytest
from pandas.core.dtypes.common import is_datetime64_dtype, is_datetime64tz_dtype
import pandas as pd
from pandas import (
DataFrame,
Index,
MultiIndex,
Series,
Timestamp,
concat,
date_range,
isna,
to_datetime,
to_timedelta,
)
import pandas._testing as tm
import pandas.io.sql as sql
from pandas.io.sql import read_sql_query, read_sql_table
try:
import sqlalchemy
import sqlalchemy.schema
import sqlalchemy.sql.sqltypes as sqltypes
from sqlalchemy.ext import declarative
from sqlalchemy.orm import session as sa_session
SQLALCHEMY_INSTALLED = True
except ImportError:
SQLALCHEMY_INSTALLED = False
SQL_STRINGS = {
"create_iris": {
"sqlite": """CREATE TABLE iris (
"SepalLength" REAL,
"SepalWidth" REAL,
"PetalLength" REAL,
"PetalWidth" REAL,
"Name" TEXT
)""",
"mysql": """CREATE TABLE iris (
`SepalLength` DOUBLE,
`SepalWidth` DOUBLE,
`PetalLength` DOUBLE,
`PetalWidth` DOUBLE,
`Name` VARCHAR(200)
)""",
"postgresql": """CREATE TABLE iris (
"SepalLength" DOUBLE PRECISION,
"SepalWidth" DOUBLE PRECISION,
"PetalLength" DOUBLE PRECISION,
"PetalWidth" DOUBLE PRECISION,
"Name" VARCHAR(200)
)""",
},
"insert_iris": {
"sqlite": """INSERT INTO iris VALUES(?, ?, ?, ?, ?)""",
"mysql": """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");""",
"postgresql": """INSERT INTO iris VALUES(%s, %s, %s, %s, %s);""",
},
"create_test_types": {
"sqlite": """CREATE TABLE types_test_data (
"TextCol" TEXT,
"DateCol" TEXT,
"IntDateCol" INTEGER,
"IntDateOnlyCol" INTEGER,
"FloatCol" REAL,
"IntCol" INTEGER,
"BoolCol" INTEGER,
"IntColWithNull" INTEGER,
"BoolColWithNull" INTEGER
)""",
"mysql": """CREATE TABLE types_test_data (
`TextCol` TEXT,
`DateCol` DATETIME,
`IntDateCol` INTEGER,
`IntDateOnlyCol` INTEGER,
`FloatCol` DOUBLE,
`IntCol` INTEGER,
`BoolCol` BOOLEAN,
`IntColWithNull` INTEGER,
`BoolColWithNull` BOOLEAN
)""",
"postgresql": """CREATE TABLE types_test_data (
"TextCol" TEXT,
"DateCol" TIMESTAMP,
"DateColWithTz" TIMESTAMP WITH TIME ZONE,
"IntDateCol" INTEGER,
"IntDateOnlyCol" INTEGER,
"FloatCol" DOUBLE PRECISION,
"IntCol" INTEGER,
"BoolCol" BOOLEAN,
"IntColWithNull" INTEGER,
"BoolColWithNull" BOOLEAN
)""",
},
"insert_test_types": {
"sqlite": {
"query": """
INSERT INTO types_test_data
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
"fields": (
"TextCol",
"DateCol",
"IntDateCol",
"IntDateOnlyCol",
"FloatCol",
"IntCol",
"BoolCol",
"IntColWithNull",
"BoolColWithNull",
),
},
"mysql": {
"query": """
INSERT INTO types_test_data
VALUES("%s", %s, %s, %s, %s, %s, %s, %s, %s)
""",
"fields": (
"TextCol",
"DateCol",
"IntDateCol",
"IntDateOnlyCol",
"FloatCol",
"IntCol",
"BoolCol",
"IntColWithNull",
"BoolColWithNull",
),
},
"postgresql": {
"query": """
INSERT INTO types_test_data
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
""",
"fields": (
"TextCol",
"DateCol",
"DateColWithTz",
"IntDateCol",
"IntDateOnlyCol",
"FloatCol",
"IntCol",
"BoolCol",
"IntColWithNull",
"BoolColWithNull",
),
},
},
"read_parameters": {
"sqlite": "SELECT * FROM iris WHERE Name=? AND SepalLength=?",
"mysql": 'SELECT * FROM iris WHERE `Name`="%s" AND `SepalLength`=%s',
"postgresql": 'SELECT * FROM iris WHERE "Name"=%s AND "SepalLength"=%s',
},
"read_named_parameters": {
"sqlite": """
SELECT * FROM iris WHERE Name=:name AND SepalLength=:length
""",
"mysql": """
SELECT * FROM iris WHERE
`Name`="%(name)s" AND `SepalLength`=%(length)s
""",
"postgresql": """
SELECT * FROM iris WHERE
"Name"=%(name)s AND "SepalLength"=%(length)s
""",
},
"read_no_parameters_with_percent": {
"sqlite": "SELECT * FROM iris WHERE Name LIKE '%'",
"mysql": "SELECT * FROM iris WHERE `Name` LIKE '%'",
"postgresql": "SELECT * FROM iris WHERE \"Name\" LIKE '%'",
},
"create_view": {
"sqlite": """
CREATE VIEW iris_view AS
SELECT * FROM iris
"""
},
}
class MixInBase:
def teardown_method(self, method):
# if setup fails, there may not be a connection to close.
if hasattr(self, "conn"):
for tbl in self._get_all_tables():
self.drop_table(tbl)
self._close_conn()
class MySQLMixIn(MixInBase):
def drop_table(self, table_name):
cur = self.conn.cursor()
cur.execute(f"DROP TABLE IF EXISTS {sql._get_valid_mysql_name(table_name)}")
self.conn.commit()
def _get_all_tables(self):
cur = self.conn.cursor()
cur.execute("SHOW TABLES")
return [table[0] for table in cur.fetchall()]
def _close_conn(self):
from pymysql.err import Error
try:
self.conn.close()
except Error:
pass
class SQLiteMixIn(MixInBase):
def drop_table(self, table_name):
self.conn.execute(
f"DROP TABLE IF EXISTS {sql._get_valid_sqlite_name(table_name)}"
)
self.conn.commit()
def _get_all_tables(self):
c = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
return [table[0] for table in c.fetchall()]
def _close_conn(self):
self.conn.close()
class SQLAlchemyMixIn(MixInBase):
def drop_table(self, table_name):
sql.SQLDatabase(self.conn).drop_table(table_name)
def _get_all_tables(self):
meta = sqlalchemy.schema.MetaData(bind=self.conn)
meta.reflect()
table_list = meta.tables.keys()
return table_list
def _close_conn(self):
pass
class PandasSQLTest:
"""
Base class with common private methods for SQLAlchemy and fallback cases.
"""
def _get_exec(self):
if hasattr(self.conn, "execute"):
return self.conn
else:
return self.conn.cursor()
@pytest.fixture(params=[("data", "iris.csv")])
def load_iris_data(self, datapath, request):
import io
iris_csv_file = datapath(*request.param)
if not hasattr(self, "conn"):
self.setup_connect()
self.drop_table("iris")
self._get_exec().execute(SQL_STRINGS["create_iris"][self.flavor])
with io.open(iris_csv_file, mode="r", newline=None) as iris_csv:
r = csv.reader(iris_csv)
next(r) # skip header row
ins = SQL_STRINGS["insert_iris"][self.flavor]
for row in r:
self._get_exec().execute(ins, row)
def _load_iris_view(self):
self.drop_table("iris_view")
self._get_exec().execute(SQL_STRINGS["create_view"][self.flavor])
def _check_iris_loaded_frame(self, iris_frame):
pytype = iris_frame.dtypes[0].type
row = iris_frame.iloc[0]
assert issubclass(pytype, np.floating)
tm.equalContents(row.values, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"])
def _load_test1_data(self):
columns = ["index", "A", "B", "C", "D"]
data = [
(
"2000-01-03 00:00:00",
0.980268513777,
3.68573087906,
-0.364216805298,
-1.15973806169,
),
(
"2000-01-04 00:00:00",
1.04791624281,
-0.0412318367011,
-0.16181208307,
0.212549316967,
),
(
"2000-01-05 00:00:00",
0.498580885705,
0.731167677815,
-0.537677223318,
1.34627041952,
),
(
"2000-01-06 00:00:00",
1.12020151869,
1.56762092543,
0.00364077397681,
0.67525259227,
),
]
self.test_frame1 = DataFrame(data, columns=columns)
def _load_test2_data(self):
df = DataFrame(
dict(
A=[4, 1, 3, 6],
B=["asd", "gsq", "ylt", "jkl"],
C=[1.1, 3.1, 6.9, 5.3],
D=[False, True, True, False],
E=["1990-11-22", "1991-10-26", "1993-11-26", "1995-12-12"],
)
)
df["E"] = to_datetime(df["E"])
self.test_frame2 = df
def _load_test3_data(self):
columns = ["index", "A", "B"]
data = [
("2000-01-03 00:00:00", 2 ** 31 - 1, -1.987670),
("2000-01-04 00:00:00", -29, -0.0412318367011),
("2000-01-05 00:00:00", 20000, 0.731167677815),
("2000-01-06 00:00:00", -290867, 1.56762092543),
]
self.test_frame3 = DataFrame(data, columns=columns)
def _load_raw_sql(self):
self.drop_table("types_test_data")
self._get_exec().execute(SQL_STRINGS["create_test_types"][self.flavor])
ins = SQL_STRINGS["insert_test_types"][self.flavor]
data = [
{
"TextCol": "first",
"DateCol": "2000-01-03 00:00:00",
"DateColWithTz": "2000-01-01 00:00:00-08:00",
"IntDateCol": 535852800,
"IntDateOnlyCol": 20101010,
"FloatCol": 10.10,
"IntCol": 1,
"BoolCol": False,
"IntColWithNull": 1,
"BoolColWithNull": False,
},
{
"TextCol": "first",
"DateCol": "2000-01-04 00:00:00",
"DateColWithTz": "2000-06-01 00:00:00-07:00",
"IntDateCol": 1356998400,
"IntDateOnlyCol": 20101212,
"FloatCol": 10.10,
"IntCol": 1,
"BoolCol": False,
"IntColWithNull": None,
"BoolColWithNull": None,
},
]
for d in data:
self._get_exec().execute(
ins["query"], [d[field] for field in ins["fields"]]
)
def _count_rows(self, table_name):
result = (
self._get_exec()
.execute(f"SELECT count(*) AS count_1 FROM {table_name}")
.fetchone()
)
return result[0]
def _read_sql_iris(self):
iris_frame = self.pandasSQL.read_query("SELECT * FROM iris")
self._check_iris_loaded_frame(iris_frame)
def _read_sql_iris_parameter(self):
query = SQL_STRINGS["read_parameters"][self.flavor]
params = ["Iris-setosa", 5.1]
iris_frame = self.pandasSQL.read_query(query, params=params)
self._check_iris_loaded_frame(iris_frame)
def _read_sql_iris_named_parameter(self):
query = SQL_STRINGS["read_named_parameters"][self.flavor]
params = {"name": "Iris-setosa", "length": 5.1}
iris_frame = self.pandasSQL.read_query(query, params=params)
self._check_iris_loaded_frame(iris_frame)
def _read_sql_iris_no_parameter_with_percent(self):
query = SQL_STRINGS["read_no_parameters_with_percent"][self.flavor]
iris_frame = self.pandasSQL.read_query(query, params=None)
self._check_iris_loaded_frame(iris_frame)
def _to_sql(self, method=None):
    # Write test_frame1 and verify the table exists with the expected row
    # count; ``method`` is forwarded to to_sql (None, 'multi', or a callable).
    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=method)
    assert self.pandasSQL.has_table("test_frame1")
    num_entries = len(self.test_frame1)
    num_rows = self._count_rows("test_frame1")
    assert num_rows == num_entries
    # Nuke table
    self.drop_table("test_frame1")

def _to_sql_empty(self):
    # Writing an empty slice should succeed (creates the table, no rows).
    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1.iloc[:0], "test_frame1")

def _to_sql_fail(self):
    # if_exists='fail' must raise ValueError when the table already exists.
    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail")
    assert self.pandasSQL.has_table("test_frame1")
    msg = "Table 'test_frame1' already exists"
    with pytest.raises(ValueError, match=msg):
        self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail")
    self.drop_table("test_frame1")

def _to_sql_replace(self):
    # if_exists='replace' drops and recreates: row count equals one frame.
    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail")
    # Add to table again
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="replace")
    assert self.pandasSQL.has_table("test_frame1")
    num_entries = len(self.test_frame1)
    num_rows = self._count_rows("test_frame1")
    assert num_rows == num_entries
    self.drop_table("test_frame1")

def _to_sql_append(self):
    # if_exists='append' keeps existing rows: row count doubles.
    # Nuke table just in case
    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="fail")
    # Add to table again
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", if_exists="append")
    assert self.pandasSQL.has_table("test_frame1")
    num_entries = 2 * len(self.test_frame1)
    num_rows = self._count_rows("test_frame1")
    assert num_rows == num_entries
    self.drop_table("test_frame1")
def _to_sql_method_callable(self):
    # to_sql(method=callable): the callable must be invoked to perform the
    # actual insert; ``check`` records that it really ran.
    check = []  # used to double check function below is really being used

    def sample(pd_table, conn, keys, data_iter):
        check.append(1)
        data = [dict(zip(keys, row)) for row in data_iter]
        conn.execute(pd_table.table.insert(), data)

    self.drop_table("test_frame1")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame1", method=sample)
    assert self.pandasSQL.has_table("test_frame1")
    assert check == [1]
    num_entries = len(self.test_frame1)
    num_rows = self._count_rows("test_frame1")
    assert num_rows == num_entries
    # Nuke table
    self.drop_table("test_frame1")

def _roundtrip(self):
    # Write a frame, read it back, and compare after restoring the index
    # (the saved index comes back as a plain 'level_0' column).
    self.drop_table("test_frame_roundtrip")
    self.pandasSQL.to_sql(self.test_frame1, "test_frame_roundtrip")
    result = self.pandasSQL.read_query("SELECT * FROM test_frame_roundtrip")
    result.set_index("level_0", inplace=True)
    # result.index.astype(int)
    result.index.name = None
    tm.assert_frame_equal(result, self.test_frame1)

def _execute_sql(self):
    # Raw execute() should yield rows matching the first iris record.
    # drop_sql = "DROP TABLE IF EXISTS test"  # should already be done
    iris_results = self.pandasSQL.execute("SELECT * FROM iris")
    row = iris_results.fetchone()
    tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"])
def _to_sql_save_index(self):
    # Writing a frame with a named index should create an index on that
    # column in the database.
    df = DataFrame.from_records(
        [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A"]
    )
    self.pandasSQL.to_sql(df, "test_to_sql_saves_index")
    ix_cols = self._get_index_columns("test_to_sql_saves_index")
    assert ix_cols == [["A"]]

def _transaction_test(self):
    # Verify run_transaction() semantics: rollback on exception, commit on
    # clean exit.
    with self.pandasSQL.run_transaction() as trans:
        trans.execute("CREATE TABLE test_trans (A INT, B TEXT)")

    class DummyException(Exception):
        pass

    # Make sure when transaction is rolled back, no rows get inserted
    ins_sql = "INSERT INTO test_trans (A,B) VALUES (1, 'blah')"
    try:
        with self.pandasSQL.run_transaction() as trans:
            trans.execute(ins_sql)
            raise DummyException("error")
    except DummyException:
        # ignore raised exception
        pass
    res = self.pandasSQL.read_query("SELECT * FROM test_trans")
    assert len(res) == 0
    # Make sure when transaction is committed, rows do get inserted
    with self.pandasSQL.run_transaction() as trans:
        trans.execute(ins_sql)
    res2 = self.pandasSQL.read_query("SELECT * FROM test_trans")
    assert len(res2) == 1
# -----------------------------------------------------------------------------
# -- Testing the public API
class _TestSQLApi(PandasSQLTest):
    """
    Base class to test the public API.
    From this two classes are derived to run these tests for both the
    sqlalchemy mode (`TestSQLApi`) and the fallback mode
    (`TestSQLiteFallbackApi`). These tests are run with sqlite3. Specific
    tests for the different sql flavours are included in `_TestSQLAlchemy`.
    Notes:
    flavor can always be passed even in SQLAlchemy mode,
    should be correctly ignored.
    we don't use drop_table because that isn't part of the public api
    """

    flavor = "sqlite"
    # Set by concrete subclasses: "sqlalchemy" or "fallback".
    mode: str

    def setup_connect(self):
        # The connection factory is supplied by the mixin/subclass.
        self.conn = self.connect()

    @pytest.fixture(autouse=True)
    def setup_method(self, load_iris_data):
        # Runs before every test; load_iris_data is a fixture defined elsewhere.
        self.load_test_data_and_sql()

    def load_test_data_and_sql(self):
        # Populate the iris view and the numbered test tables for each test.
        self._load_iris_view()
        self._load_test1_data()
        self._load_test2_data()
        self._load_test3_data()
        self._load_raw_sql()
def test_read_sql_iris(self):
    # read_sql_query against a table.
    iris_frame = sql.read_sql_query("SELECT * FROM iris", self.conn)
    self._check_iris_loaded_frame(iris_frame)

def test_read_sql_view(self):
    # read_sql_query against a view created by _load_iris_view.
    iris_frame = sql.read_sql_query("SELECT * FROM iris_view", self.conn)
    self._check_iris_loaded_frame(iris_frame)

def test_to_sql(self):
    sql.to_sql(self.test_frame1, "test_frame1", self.conn)
    assert sql.has_table("test_frame1", self.conn)

def test_to_sql_fail(self):
    # Second write with if_exists='fail' must raise.
    sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail")
    assert sql.has_table("test_frame2", self.conn)
    msg = "Table 'test_frame2' already exists"
    with pytest.raises(ValueError, match=msg):
        sql.to_sql(self.test_frame1, "test_frame2", self.conn, if_exists="fail")

def test_to_sql_replace(self):
    # 'replace' drops and recreates: only one frame's worth of rows remain.
    sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="fail")
    # Add to table again
    sql.to_sql(self.test_frame1, "test_frame3", self.conn, if_exists="replace")
    assert sql.has_table("test_frame3", self.conn)
    num_entries = len(self.test_frame1)
    num_rows = self._count_rows("test_frame3")
    assert num_rows == num_entries

def test_to_sql_append(self):
    # 'append' keeps the existing rows: count doubles.
    sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="fail")
    # Add to table again
    sql.to_sql(self.test_frame1, "test_frame4", self.conn, if_exists="append")
    assert sql.has_table("test_frame4", self.conn)
    num_entries = 2 * len(self.test_frame1)
    num_rows = self._count_rows("test_frame4")
    assert num_rows == num_entries

def test_to_sql_type_mapping(self):
    # Types written by to_sql must round-trip through read_sql unchanged.
    sql.to_sql(self.test_frame3, "test_frame5", self.conn, index=False)
    result = sql.read_sql("SELECT * FROM test_frame5", self.conn)
    tm.assert_frame_equal(self.test_frame3, result)

def test_to_sql_series(self):
    # A Series is accepted by to_sql and comes back as a one-column frame.
    s = Series(np.arange(5, dtype="int64"), name="series")
    sql.to_sql(s, "test_series", self.conn, index=False)
    s2 = sql.read_sql_query("SELECT * FROM test_series", self.conn)
    tm.assert_frame_equal(s.to_frame(), s2)
def test_roundtrip(self):
    # Write then read back; the stored index returns as column 'level_0'.
    sql.to_sql(self.test_frame1, "test_frame_roundtrip", con=self.conn)
    result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn)
    # HACK!
    result.index = self.test_frame1.index
    result.set_index("level_0", inplace=True)
    # NOTE(review): the astype result below is discarded (Index.astype is
    # not in-place) — presumably intended as reassignment; confirm before
    # relying on the index dtype here.
    result.index.astype(int)
    result.index.name = None
    tm.assert_frame_equal(result, self.test_frame1)

def test_roundtrip_chunksize(self):
    # Chunked writes must produce the same table as a single write.
    sql.to_sql(
        self.test_frame1,
        "test_frame_roundtrip",
        con=self.conn,
        index=False,
        chunksize=2,
    )
    result = sql.read_sql_query("SELECT * FROM test_frame_roundtrip", con=self.conn)
    tm.assert_frame_equal(result, self.test_frame1)

def test_execute_sql(self):
    # sql.execute returns a cursor-like object supporting fetchone().
    # drop_sql = "DROP TABLE IF EXISTS test"  # should already be done
    iris_results = sql.execute("SELECT * FROM iris", con=self.conn)
    row = iris_results.fetchone()
    tm.equalContents(row, [5.1, 3.5, 1.4, 0.2, "Iris-setosa"])
def test_date_parsing(self):
    # Test date parsing in read_sql
    # No Parsing
    df = sql.read_sql_query("SELECT * FROM types_test_data", self.conn)
    assert not issubclass(df.DateCol.dtype.type, np.datetime64)

    # parse_dates as a list of column names
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data", self.conn, parse_dates=["DateCol"]
    )
    assert issubclass(df.DateCol.dtype.type, np.datetime64)
    assert df.DateCol.tolist() == [
        pd.Timestamp(2000, 1, 3, 0, 0, 0),
        pd.Timestamp(2000, 1, 4, 0, 0, 0),
    ]

    # parse_dates as a {column: strftime format} mapping
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data",
        self.conn,
        parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"},
    )
    assert issubclass(df.DateCol.dtype.type, np.datetime64)
    assert df.DateCol.tolist() == [
        pd.Timestamp(2000, 1, 3, 0, 0, 0),
        pd.Timestamp(2000, 1, 4, 0, 0, 0),
    ]

    # integer column parsed as epoch (default unit)
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data", self.conn, parse_dates=["IntDateCol"]
    )
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)
    assert df.IntDateCol.tolist() == [
        pd.Timestamp(1986, 12, 25, 0, 0, 0),
        pd.Timestamp(2013, 1, 1, 0, 0, 0),
    ]

    # integer column parsed with an explicit epoch unit
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data", self.conn, parse_dates={"IntDateCol": "s"}
    )
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)
    assert df.IntDateCol.tolist() == [
        pd.Timestamp(1986, 12, 25, 0, 0, 0),
        pd.Timestamp(2013, 1, 1, 0, 0, 0),
    ]

    # integer column parsed with a strftime format
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data",
        self.conn,
        parse_dates={"IntDateOnlyCol": "%Y%m%d"},
    )
    assert issubclass(df.IntDateOnlyCol.dtype.type, np.datetime64)
    assert df.IntDateOnlyCol.tolist() == [
        pd.Timestamp("2010-10-10"),
        pd.Timestamp("2010-12-12"),
    ]
def test_date_and_index(self):
    # Test case where same column appears in parse_date and index_col
    df = sql.read_sql_query(
        "SELECT * FROM types_test_data",
        self.conn,
        index_col="DateCol",
        parse_dates=["DateCol", "IntDateCol"],
    )
    assert issubclass(df.index.dtype.type, np.datetime64)
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

def test_timedelta(self):
    # see #6921 — timedeltas are written as int64 nanoseconds, with a
    # UserWarning about the lossy conversion.
    df = to_timedelta(Series(["00:00:01", "00:00:03"], name="foo")).to_frame()
    with tm.assert_produces_warning(UserWarning):
        df.to_sql("test_timedelta", self.conn)
    result = sql.read_sql_query("SELECT * FROM test_timedelta", self.conn)
    tm.assert_series_equal(result["foo"], df["foo"].astype("int64"))

def test_complex_raises(self):
    # Complex dtypes have no SQL representation and must be rejected.
    df = DataFrame({"a": [1 + 1j, 2j]})
    msg = "Complex datatypes not supported"
    with pytest.raises(ValueError, match=msg):
        df.to_sql("test_complex", self.conn)
@pytest.mark.parametrize(
    "index_name,index_label,expected",
    [
        # no index name, defaults to 'index'
        (None, None, "index"),
        # specifying index_label
        (None, "other_label", "other_label"),
        # using the index name
        ("index_name", None, "index_name"),
        # has index name, but specifying index_label
        ("index_name", "other_label", "other_label"),
        # index name is integer
        (0, None, "0"),
        # index name is None but index label is integer
        (None, 0, "0"),
    ],
)
def test_to_sql_index_label(self, index_name, index_label, expected):
    # The stored index column name follows index_label, then the index
    # name, then the literal default 'index'.
    temp_frame = DataFrame({"col1": range(4)})
    temp_frame.index.name = index_name
    query = "SELECT * FROM test_index_label"
    sql.to_sql(temp_frame, "test_index_label", self.conn, index_label=index_label)
    frame = sql.read_sql_query(query, self.conn)
    assert frame.columns[0] == expected
def test_to_sql_index_label_multiindex(self):
    # Same naming rules as test_to_sql_index_label, but for a 2-level
    # MultiIndex; a scalar index_label for 2 levels must raise.
    temp_frame = DataFrame(
        {"col1": range(4)},
        index=MultiIndex.from_product([("A0", "A1"), ("B0", "B1")]),
    )

    # no index name, defaults to 'level_0' and 'level_1'
    sql.to_sql(temp_frame, "test_index_label", self.conn)
    frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn)
    assert frame.columns[0] == "level_0"
    assert frame.columns[1] == "level_1"

    # specifying index_label
    sql.to_sql(
        temp_frame,
        "test_index_label",
        self.conn,
        if_exists="replace",
        index_label=["A", "B"],
    )
    frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn)
    assert frame.columns[:2].tolist() == ["A", "B"]

    # using the index name
    temp_frame.index.names = ["A", "B"]
    sql.to_sql(temp_frame, "test_index_label", self.conn, if_exists="replace")
    frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn)
    assert frame.columns[:2].tolist() == ["A", "B"]

    # has index name, but specifying index_label
    sql.to_sql(
        temp_frame,
        "test_index_label",
        self.conn,
        if_exists="replace",
        index_label=["C", "D"],
    )
    frame = sql.read_sql_query("SELECT * FROM test_index_label", self.conn)
    assert frame.columns[:2].tolist() == ["C", "D"]

    msg = "Length of 'index_label' should match number of levels, which is 2"
    with pytest.raises(ValueError, match=msg):
        sql.to_sql(
            temp_frame,
            "test_index_label",
            self.conn,
            if_exists="replace",
            index_label="C",
        )
def test_multiindex_roundtrip(self):
    # A 2-level index survives to_sql + read_sql_query(index_col=...).
    df = DataFrame.from_records(
        [(1, 2.1, "line1"), (2, 1.5, "line2")],
        columns=["A", "B", "C"],
        index=["A", "B"],
    )
    df.to_sql("test_multiindex_roundtrip", self.conn)
    result = sql.read_sql_query(
        "SELECT * FROM test_multiindex_roundtrip", self.conn, index_col=["A", "B"]
    )
    tm.assert_frame_equal(df, result, check_index_type=True)

def test_integer_col_names(self):
    # Integer column names must not break table creation.
    df = DataFrame([[1, 2], [3, 4]], columns=[0, 1])
    sql.to_sql(df, "test_frame_integer_col_names", self.conn, if_exists="replace")

def test_get_schema(self):
    create_sql = sql.get_schema(self.test_frame1, "test", con=self.conn)
    assert "CREATE" in create_sql

def test_get_schema_dtypes(self):
    # dtype override accepts a SQLAlchemy type or a string, per mode.
    float_frame = DataFrame({"a": [1.1, 1.2], "b": [2.1, 2.2]})
    dtype = sqlalchemy.Integer if self.mode == "sqlalchemy" else "INTEGER"
    create_sql = sql.get_schema(
        float_frame, "test", con=self.conn, dtype={"b": dtype}
    )
    assert "CREATE" in create_sql
    assert "INTEGER" in create_sql

def test_get_schema_keys(self):
    # keys= produces a PRIMARY KEY constraint, single- or multi-column.
    frame = DataFrame({"Col1": [1.1, 1.2], "Col2": [2.1, 2.2]})
    create_sql = sql.get_schema(frame, "test", con=self.conn, keys="Col1")
    constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("Col1")'
    assert constraint_sentence in create_sql
    # multiple columns as key (GH10385)
    create_sql = sql.get_schema(
        self.test_frame1, "test", con=self.conn, keys=["A", "B"]
    )
    constraint_sentence = 'CONSTRAINT test_pk PRIMARY KEY ("A", "B")'
    assert constraint_sentence in create_sql
def test_chunksize_read(self):
    # Chunked reads (22 rows, chunksize 5 -> 5,5,5,5,2) must concatenate
    # back to the same frame as a single read.
    df = DataFrame(np.random.randn(22, 5), columns=list("abcde"))
    df.to_sql("test_chunksize", self.conn, index=False)

    # reading the query in one time
    res1 = sql.read_sql_query("select * from test_chunksize", self.conn)

    # reading the query in chunks with read_sql_query
    res2 = DataFrame()
    i = 0
    sizes = [5, 5, 5, 5, 2]
    for chunk in sql.read_sql_query(
        "select * from test_chunksize", self.conn, chunksize=5
    ):
        res2 = concat([res2, chunk], ignore_index=True)
        assert len(chunk) == sizes[i]
        i += 1
    tm.assert_frame_equal(res1, res2)

    # reading the query in chunks with read_sql_query
    if self.mode == "sqlalchemy":
        # read_sql_table only exists in sqlalchemy mode
        res3 = DataFrame()
        i = 0
        sizes = [5, 5, 5, 5, 2]
        for chunk in sql.read_sql_table("test_chunksize", self.conn, chunksize=5):
            res3 = concat([res3, chunk], ignore_index=True)
            assert len(chunk) == sizes[i]
            i += 1
        tm.assert_frame_equal(res1, res3)
def test_categorical(self):
    # GH8624
    # test that categorical gets written correctly as dense column
    df = DataFrame(
        {
            "person_id": [1, 2, 3],
            "person_name": ["John P. Doe", "Jane Dove", "John P. Doe"],
        }
    )
    df2 = df.copy()
    df2["person_name"] = df2["person_name"].astype("category")
    df2.to_sql("test_categorical", self.conn, index=False)
    res = sql.read_sql_query("SELECT * FROM test_categorical", self.conn)
    # comes back as plain object dtype, equal to the non-categorical frame
    tm.assert_frame_equal(res, df)

def test_unicode_column_name(self):
    # GH 11431 — non-ASCII column names must be writable
    df = DataFrame([[1, 2], [3, 4]], columns=["\xe9", "b"])
    df.to_sql("test_unicode", self.conn, index=False)

def test_escaped_table_name(self):
    # GH 13206 — table names containing dashes need quoting/escaping
    df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]})
    df.to_sql("d1187b08-4943-4c8d-a7f6", self.conn, index=False)
    res = sql.read_sql_query("SELECT * FROM `d1187b08-4943-4c8d-a7f6`", self.conn)
    tm.assert_frame_equal(res, df)
@pytest.mark.single
@pytest.mark.skipif(not SQLALCHEMY_INSTALLED, reason="SQLAlchemy not installed")
class TestSQLApi(SQLAlchemyMixIn, _TestSQLApi):
    """
    Test the public API as it would be used directly
    Tests for `read_sql_table` are included here, as this is specific for the
    sqlalchemy mode.
    """

    flavor = "sqlite"
    mode = "sqlalchemy"

    def connect(self):
        # In-memory SQLite engine; state is per-engine and discarded.
        return sqlalchemy.create_engine("sqlite:///:memory:")

    def test_read_table_columns(self):
        # test columns argument in read_table
        sql.to_sql(self.test_frame1, "test_frame", self.conn)
        cols = ["A", "B"]
        result = sql.read_sql_table("test_frame", self.conn, columns=cols)
        assert result.columns.tolist() == cols

    def test_read_table_index_col(self):
        # test columns argument in read_table
        sql.to_sql(self.test_frame1, "test_frame", self.conn)
        result = sql.read_sql_table("test_frame", self.conn, index_col="index")
        assert result.index.names == ["index"]
        result = sql.read_sql_table("test_frame", self.conn, index_col=["A", "B"])
        assert result.index.names == ["A", "B"]
        result = sql.read_sql_table(
            "test_frame", self.conn, index_col=["A", "B"], columns=["C", "D"]
        )
        assert result.index.names == ["A", "B"]
        assert result.columns.tolist() == ["C", "D"]

    def test_read_sql_delegate(self):
        # read_sql dispatches to read_sql_query for SQL text and to
        # read_sql_table for a bare table name (sqlalchemy mode only).
        iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn)
        iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn)
        tm.assert_frame_equal(iris_frame1, iris_frame2)
        iris_frame1 = sql.read_sql_table("iris", self.conn)
        iris_frame2 = sql.read_sql("iris", self.conn)
        tm.assert_frame_equal(iris_frame1, iris_frame2)
def test_not_reflect_all_tables(self):
    # Reflecting only the requested table means a broken, unrelated table
    # must not trigger warnings.
    # create invalid table
    qry = """CREATE TABLE invalid (x INTEGER, y UNKNOWN);"""
    self.conn.execute(qry)
    qry = """CREATE TABLE other_table (x INTEGER, y INTEGER);"""
    self.conn.execute(qry)
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        sql.read_sql_table("other_table", self.conn)
        sql.read_sql_query("SELECT * FROM other_table", self.conn)
        # Verify some things
        assert len(w) == 0

def test_warning_case_insensitive_table_name(self):
    # see gh-7815
    #
    # We can't test that this warning is triggered, a the database
    # configuration would have to be altered. But here we test that
    # the warning is certainly NOT triggered in a normal case.
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # This should not trigger a Warning
        self.test_frame1.to_sql("CaseSensitive", self.conn)
        # Verify some things
        assert len(w) == 0
def _get_index_columns(self, tbl_name):
    """Return the list of indexed column-name lists for ``tbl_name``.

    Parameters
    ----------
    tbl_name : str
        Name of the table to inspect.
    """
    from sqlalchemy.engine import reflection

    insp = reflection.Inspector.from_engine(self.conn)
    # Use the caller-supplied table name; it was previously hard-coded to
    # "test_index_saved", silently ignoring ``tbl_name`` (the caller
    # _to_sql_save_index passes "test_to_sql_saves_index").
    ixs = insp.get_indexes(tbl_name)
    ixs = [i["column_names"] for i in ixs]
    return ixs
def test_sqlalchemy_type_mapping(self):
    # Test Timestamp objects (no datetime64 because of timezone) (GH9085)
    df = DataFrame(
        {"time": to_datetime(["201412120154", "201412110254"], utc=True)}
    )
    db = sql.SQLDatabase(self.conn)
    table = sql.SQLTable("test_type", db, frame=df)
    # GH 9086: TIMESTAMP is the suggested type for datetimes with timezones
    assert isinstance(table.table.c["time"].type, sqltypes.TIMESTAMP)

def test_database_uri_string(self):
    # Test read_sql and .to_sql method with a database URI (GH10654)
    test_frame1 = self.test_frame1
    # db_uri = 'sqlite:///:memory:' # raises
    # sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) near
    # "iris": syntax error [SQL: 'iris']
    with tm.ensure_clean() as name:
        db_uri = "sqlite:///" + name
        table = "iris"
        test_frame1.to_sql(table, db_uri, if_exists="replace", index=False)
        test_frame2 = sql.read_sql(table, db_uri)
        test_frame3 = sql.read_sql_table(table, db_uri)
        query = "SELECT * FROM iris"
        test_frame4 = sql.read_sql_query(query, db_uri)
    tm.assert_frame_equal(test_frame1, test_frame2)
    tm.assert_frame_equal(test_frame1, test_frame3)
    tm.assert_frame_equal(test_frame1, test_frame4)

    # using driver that will not be installed on Travis to trigger error
    # in sqlalchemy.create_engine -> test passing of this error to user
    try:
        # the rest of this test depends on pg8000's being absent
        import pg8000  # noqa

        pytest.skip("pg8000 is installed")
    except ImportError:
        pass
    db_uri = "postgresql+pg8000://user:pass@host/dbname"
    with pytest.raises(ImportError, match="pg8000"):
        sql.read_sql("select * from table", db_uri)
def _make_iris_table_metadata(self):
    """Build a SQLAlchemy ``Table`` describing the iris test table."""
    sa = sqlalchemy
    metadata = sa.MetaData()
    columns = [
        sa.Column("SepalLength", sa.REAL),
        sa.Column("SepalWidth", sa.REAL),
        sa.Column("PetalLength", sa.REAL),
        sa.Column("PetalWidth", sa.REAL),
        sa.Column("Name", sa.TEXT),
    ]
    return sa.Table("iris", metadata, *columns)
def test_query_by_text_obj(self):
    # WIP : GH10846 — read_sql accepts a sqlalchemy.text() object with
    # bound parameters.
    name_text = sqlalchemy.text("select * from iris where name=:name")
    iris_df = sql.read_sql(name_text, self.conn, params={"name": "Iris-versicolor"})
    all_names = set(iris_df["Name"])
    assert all_names == {"Iris-versicolor"}

def test_query_by_select_obj(self):
    # WIP : GH10846 — read_sql accepts a sqlalchemy select() construct
    # with a bindparam.
    iris = self._make_iris_table_metadata()
    name_select = sqlalchemy.select([iris]).where(
        iris.c.Name == sqlalchemy.bindparam("name")
    )
    iris_df = sql.read_sql(name_select, self.conn, params={"name": "Iris-setosa"})
    all_names = set(iris_df["Name"])
    assert all_names == {"Iris-setosa"}
class _EngineToConnMixin:
    """
    A mixin that causes setup_connect to create a conn rather than an engine.
    """

    @pytest.fixture(autouse=True)
    def setup_method(self, load_iris_data):
        super().load_test_data_and_sql()
        # Swap self.conn from an engine to a live connection wrapped in a
        # transaction; everything is rolled back after the test (yield).
        engine = self.conn
        conn = engine.connect()
        self.__tx = conn.begin()
        self.pandasSQL = sql.SQLDatabase(conn)
        self.__engine = engine
        self.conn = conn
        yield
        # Teardown: roll back, close, and restore the engine-based state.
        self.__tx.rollback()
        self.conn.close()
        self.conn = self.__engine
        self.pandasSQL = sql.SQLDatabase(self.__engine)


@pytest.mark.single
class TestSQLApiConn(_EngineToConnMixin, TestSQLApi):
    # Re-runs the full TestSQLApi suite against a Connection instead of an
    # Engine.
    pass
@pytest.mark.single
class TestSQLiteFallbackApi(SQLiteMixIn, _TestSQLApi):
    """
    Test the public sqlite connection fallback API
    """

    flavor = "sqlite"
    mode = "fallback"

    def connect(self, database=":memory:"):
        # Raw DB-API connection (no SQLAlchemy).
        return sqlite3.connect(database)

    def test_sql_open_close(self):
        # Test if the IO in the database still work if the connection closed
        # between the writing and reading (as in many real situations).
        with tm.ensure_clean() as name:
            conn = self.connect(name)
            sql.to_sql(self.test_frame3, "test_frame3_legacy", conn, index=False)
            conn.close()
            conn = self.connect(name)
            result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn)
            conn.close()
        tm.assert_frame_equal(self.test_frame3, result)

    @pytest.mark.skipif(SQLALCHEMY_INSTALLED, reason="SQLAlchemy is installed")
    def test_con_string_import_error(self):
        # A URI string requires sqlalchemy; without it, ImportError.
        conn = "mysql://root@localhost/pandas_nosetest"
        msg = "Using URI string without sqlalchemy installed"
        with pytest.raises(ImportError, match=msg):
            sql.read_sql("SELECT * FROM iris", conn)

    def test_read_sql_delegate(self):
        # In fallback mode a bare table name is not supported and is
        # executed as SQL, producing a syntax error.
        iris_frame1 = sql.read_sql_query("SELECT * FROM iris", self.conn)
        iris_frame2 = sql.read_sql("SELECT * FROM iris", self.conn)
        tm.assert_frame_equal(iris_frame1, iris_frame2)
        msg = "Execution failed on sql 'iris': near \"iris\": syntax error"
        with pytest.raises(sql.DatabaseError, match=msg):
            sql.read_sql("iris", self.conn)
def test_safe_names_warning(self):
    # GH 6798
    df = DataFrame([[1, 2], [3, 4]], columns=["a", "b "])  # has a space
    # warns on create table with spaces in names
    with tm.assert_produces_warning():
        sql.to_sql(df, "test_frame3_legacy", self.conn, index=False)

def test_get_schema2(self):
    # without providing a connection object (available for backwards comp)
    create_sql = sql.get_schema(self.test_frame1, "test")
    assert "CREATE" in create_sql
def _get_sqlite_column_type(self, schema, column):
    """Extract the declared SQL type of ``column`` from a CREATE TABLE
    schema string (one column declaration per line)."""
    for declaration in schema.split("\n"):
        parts = declaration.split()
        if parts[0].strip('""') == column:
            return parts[1]
    raise ValueError(f"Column {column} not found")
def test_sqlite_type_mapping(self):
    # Test Timestamp objects (no datetime64 because of timezone) (GH9085)
    df = DataFrame(
        {"time": to_datetime(["201412120154", "201412110254"], utc=True)}
    )
    db = sql.SQLiteDatabase(self.conn)
    table = sql.SQLiteTable("test_type", db, frame=df)
    schema = table.sql_schema()
    # tz-aware datetimes map to the TIMESTAMP column type
    assert self._get_sqlite_column_type(schema, "time") == "TIMESTAMP"
# -----------------------------------------------------------------------------
# -- Database flavor specific tests
class _TestSQLAlchemy(SQLAlchemyMixIn, PandasSQLTest):
    """
    Base class for testing the sqlalchemy backend.
    Subclasses for specific database types are created below. Tests that
    deviate for each flavor are overwritten there.
    """

    # Set by concrete subclasses (e.g. "sqlite", "mysql", "postgresql").
    flavor: str

    @pytest.fixture(autouse=True, scope="class")
    def setup_class(cls):
        # One-time per-class setup: check deps, then verify connectivity.
        cls.setup_import()
        cls.setup_driver()
        conn = cls.connect()
        conn.connect()

    def load_test_data_and_sql(self):
        self._load_raw_sql()
        self._load_test1_data()

    @pytest.fixture(autouse=True)
    def setup_method(self, load_iris_data):
        self.load_test_data_and_sql()

    @classmethod
    def setup_import(cls):
        # Skip this test if SQLAlchemy not available
        if not SQLALCHEMY_INSTALLED:
            pytest.skip("SQLAlchemy not installed")

    @classmethod
    def setup_driver(cls):
        # Subclasses import/configure their DB driver here.
        raise NotImplementedError()

    @classmethod
    def connect(cls):
        # Subclasses return a SQLAlchemy engine/connection here.
        raise NotImplementedError()

    def setup_connect(self):
        try:
            self.conn = self.connect()
            self.pandasSQL = sql.SQLDatabase(self.conn)
            # to test if connection can be made:
            self.conn.connect()
        except sqlalchemy.exc.OperationalError:
            pytest.skip(f"Can't connect to {self.flavor} server")
# The following tests delegate to the shared PandasSQLTest helpers so each
# backend flavor runs the same scenarios.
def test_read_sql(self):
    self._read_sql_iris()

def test_read_sql_parameter(self):
    self._read_sql_iris_parameter()

def test_read_sql_named_parameter(self):
    self._read_sql_iris_named_parameter()

def test_to_sql(self):
    self._to_sql()

def test_to_sql_empty(self):
    self._to_sql_empty()

def test_to_sql_fail(self):
    self._to_sql_fail()

def test_to_sql_replace(self):
    self._to_sql_replace()

def test_to_sql_append(self):
    self._to_sql_append()

def test_to_sql_method_multi(self):
    self._to_sql(method="multi")

def test_to_sql_method_callable(self):
    self._to_sql_method_callable()
def test_create_table(self):
    # to_sql on a fresh connection creates the table.
    temp_conn = self.connect()
    temp_frame = DataFrame(
        {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}
    )
    pandasSQL = sql.SQLDatabase(temp_conn)
    pandasSQL.to_sql(temp_frame, "temp_frame")
    assert temp_conn.has_table("temp_frame")

def test_drop_table(self):
    # drop_table removes a table previously created via to_sql.
    temp_conn = self.connect()
    temp_frame = DataFrame(
        {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}
    )
    pandasSQL = sql.SQLDatabase(temp_conn)
    pandasSQL.to_sql(temp_frame, "temp_frame")
    assert temp_conn.has_table("temp_frame")
    pandasSQL.drop_table("temp_frame")
    assert not temp_conn.has_table("temp_frame")

def test_roundtrip(self):
    self._roundtrip()

def test_execute_sql(self):
    self._execute_sql()

def test_read_table(self):
    iris_frame = sql.read_sql_table("iris", con=self.conn)
    self._check_iris_loaded_frame(iris_frame)

def test_read_table_columns(self):
    # Duplicate column names in ``columns`` are passed through.
    iris_frame = sql.read_sql_table(
        "iris", con=self.conn, columns=["SepalLength", "SepalLength"]
    )
    tm.equalContents(iris_frame.columns.values, ["SepalLength", "SepalLength"])

def test_read_table_absent_raises(self):
    msg = "Table this_doesnt_exist not found"
    with pytest.raises(ValueError, match=msg):
        sql.read_sql_table("this_doesnt_exist", con=self.conn)
def test_default_type_conversion(self):
    """Check the dtypes produced by read_sql_table for the types test table."""
    df = sql.read_sql_table("types_test_data", self.conn)

    assert issubclass(df.FloatCol.dtype.type, np.floating)
    assert issubclass(df.IntCol.dtype.type, np.integer)
    assert issubclass(df.BoolCol.dtype.type, np.bool_)

    # Int column with NA values stays as float
    assert issubclass(df.IntColWithNull.dtype.type, np.floating)
    # Bool column with NA values becomes object. Use the builtin ``object``:
    # ``np.object`` was a deprecated alias of it (emits DeprecationWarning
    # since NumPy 1.20, removed in 1.24), so the assertion is unchanged in
    # meaning but no longer warns or breaks on modern NumPy.
    assert issubclass(df.BoolColWithNull.dtype.type, object)
def test_bigint(self):
    # int64 should be converted to BigInteger, GH7433
    df = DataFrame(data={"i64": [2 ** 62]})
    df.to_sql("test_bigint", self.conn, index=False)
    result = sql.read_sql_table("test_bigint", self.conn)
    tm.assert_frame_equal(df, result)

def test_default_date_load(self):
    df = sql.read_sql_table("types_test_data", self.conn)
    # IMPORTANT - sqlite has no native date type, so shouldn't parse, but
    # MySQL SHOULD be converted.
    assert issubclass(df.DateCol.dtype.type, np.datetime64)
def test_datetime_with_timezone(self):
    # edge case that converts postgresql datetime with time zone types
    # to datetime64[ns,psycopg2.tz.FixedOffsetTimezone..], which is ok
    # but should be more natural, so coerce to datetime64[ns] for now

    def check(col):
        # check that a column is either datetime64[ns]
        # or datetime64[ns, UTC]
        if is_datetime64_dtype(col.dtype):
            # "2000-01-01 00:00:00-08:00" should convert to
            # "2000-01-01 08:00:00"
            assert col[0] == Timestamp("2000-01-01 08:00:00")
            # "2000-06-01 00:00:00-07:00" should convert to
            # "2000-06-01 07:00:00"
            assert col[1] == Timestamp("2000-06-01 07:00:00")
        elif is_datetime64tz_dtype(col.dtype):
            assert str(col.dt.tz) == "UTC"
            # "2000-01-01 00:00:00-08:00" should convert to
            # "2000-01-01 08:00:00"
            # "2000-06-01 00:00:00-07:00" should convert to
            # "2000-06-01 07:00:00"
            # GH 6415
            expected_data = [
                Timestamp("2000-01-01 08:00:00", tz="UTC"),
                Timestamp("2000-06-01 07:00:00", tz="UTC"),
            ]
            expected = Series(expected_data, name=col.name)
            tm.assert_series_equal(col, expected)
        else:
            raise AssertionError(
                f"DateCol loaded with incorrect type -> {col.dtype}"
            )

    # GH11216
    df = pd.read_sql_query("select * from types_test_data", self.conn)
    if not hasattr(df, "DateColWithTz"):
        pytest.skip("no column with datetime with time zone")

    # this is parsed on Travis (linux), but not on macosx for some reason
    # even with the same versions of psycopg2 & sqlalchemy, possibly a
    # Postgresql server version difference
    col = df.DateColWithTz
    assert is_datetime64tz_dtype(col.dtype)

    # explicit parse_dates should also give tz-aware UTC values
    df = pd.read_sql_query(
        "select * from types_test_data", self.conn, parse_dates=["DateColWithTz"]
    )
    if not hasattr(df, "DateColWithTz"):
        pytest.skip("no column with datetime with time zone")
    col = df.DateColWithTz
    assert is_datetime64tz_dtype(col.dtype)
    assert str(col.dt.tz) == "UTC"
    check(df.DateColWithTz)

    # chunked read must concatenate to the same tz-aware result
    df = pd.concat(
        list(
            pd.read_sql_query(
                "select * from types_test_data", self.conn, chunksize=1
            )
        ),
        ignore_index=True,
    )
    col = df.DateColWithTz
    assert is_datetime64tz_dtype(col.dtype)
    assert str(col.dt.tz) == "UTC"
    expected = sql.read_sql_table("types_test_data", self.conn)
    col = expected.DateColWithTz
    assert is_datetime64tz_dtype(col.dtype)
    tm.assert_series_equal(df.DateColWithTz, expected.DateColWithTz)

    # xref #7139
    # this might or might not be converted depending on the postgres driver
    df = sql.read_sql_table("types_test_data", self.conn)
    check(df.DateColWithTz)
def test_datetime_with_timezone_roundtrip(self):
    # GH 9086
    # Write datetimetz data to a db and read it back
    # For dbs that support timestamps with timezones, should get back UTC
    # otherwise naive data should be returned
    expected = DataFrame(
        {"A": date_range("2013-01-01 09:00:00", periods=3, tz="US/Pacific")}
    )
    expected.to_sql("test_datetime_tz", self.conn, index=False)

    if self.flavor == "postgresql":
        # SQLAlchemy "timezones" (i.e. offsets) are coerced to UTC
        expected["A"] = expected["A"].dt.tz_convert("UTC")
    else:
        # Otherwise, timestamps are returned as local, naive
        expected["A"] = expected["A"].dt.tz_localize(None)

    result = sql.read_sql_table("test_datetime_tz", self.conn)
    tm.assert_frame_equal(result, expected)

    result = sql.read_sql_query("SELECT * FROM test_datetime_tz", self.conn)
    if self.flavor == "sqlite":
        # read_sql_query does not return datetime type like read_sql_table
        assert isinstance(result.loc[0, "A"], str)
        result["A"] = to_datetime(result["A"])
    tm.assert_frame_equal(result, expected)

def test_out_of_bounds_datetime(self):
    # GH 26761 — dates beyond Timestamp bounds come back as NaT
    data = pd.DataFrame({"date": datetime(9999, 1, 1)}, index=[0])
    data.to_sql("test_datetime_obb", self.conn, index=False)
    result = sql.read_sql_table("test_datetime_obb", self.conn)
    expected = pd.DataFrame([pd.NaT], columns=["date"])
    tm.assert_frame_equal(result, expected)
def test_naive_datetimeindex_roundtrip(self):
    # GH 23510
    # Ensure that a naive DatetimeIndex isn't converted to UTC
    dates = date_range("2018-01-01", periods=5, freq="6H")._with_freq(None)
    expected = DataFrame({"nums": range(5)}, index=dates)
    expected.to_sql("foo_table", self.conn, index_label="info_date")
    result = sql.read_sql_table("foo_table", self.conn, index_col="info_date")
    # result index with gain a name from a set_index operation; expected
    tm.assert_frame_equal(result, expected, check_names=False)

def test_date_parsing(self):
    # read_sql_table honors parse_dates in all its accepted forms.
    # No Parsing
    df = sql.read_sql_table("types_test_data", self.conn)
    expected_type = object if self.flavor == "sqlite" else np.datetime64
    assert issubclass(df.DateCol.dtype.type, expected_type)

    # list of column names
    df = sql.read_sql_table("types_test_data", self.conn, parse_dates=["DateCol"])
    assert issubclass(df.DateCol.dtype.type, np.datetime64)

    # {column: format string}
    df = sql.read_sql_table(
        "types_test_data", self.conn, parse_dates={"DateCol": "%Y-%m-%d %H:%M:%S"}
    )
    assert issubclass(df.DateCol.dtype.type, np.datetime64)

    # {column: kwargs dict for to_datetime}
    df = sql.read_sql_table(
        "types_test_data",
        self.conn,
        parse_dates={"DateCol": {"format": "%Y-%m-%d %H:%M:%S"}},
    )
    assert issubclass(df.DateCol.dtype.type, np.datetime64)

    # integer epoch column, default unit
    df = sql.read_sql_table(
        "types_test_data", self.conn, parse_dates=["IntDateCol"]
    )
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

    # integer epoch column, explicit unit string
    df = sql.read_sql_table(
        "types_test_data", self.conn, parse_dates={"IntDateCol": "s"}
    )
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)

    # integer epoch column, unit given via kwargs dict
    df = sql.read_sql_table(
        "types_test_data", self.conn, parse_dates={"IntDateCol": {"unit": "s"}}
    )
    assert issubclass(df.IntDateCol.dtype.type, np.datetime64)
def test_datetime(self):
    """A datetime64 column round-trips via read_sql_table and read_sql_query."""
    df = DataFrame(
        {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)}
    )
    df.to_sql("test_datetime", self.conn)

    # with read_table -> type information from schema used
    result = sql.read_sql_table("test_datetime", self.conn)
    result = result.drop("index", axis=1)
    tm.assert_frame_equal(result, df)

    # with read_sql -> no type information -> sqlite has no native
    # datetime type, so values come back as strings and must be parsed
    result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn)
    result = result.drop("index", axis=1)
    if self.flavor == "sqlite":
        assert isinstance(result.loc[0, "A"], str)
        result["A"] = to_datetime(result["A"])
    # previously duplicated in both if/else branches; hoisted out
    tm.assert_frame_equal(result, df)
def test_datetime_NaT(self):
    """A datetime64 column containing NaT round-trips through SQL."""
    df = DataFrame(
        {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)}
    )
    df.loc[1, "A"] = np.nan
    df.to_sql("test_datetime", self.conn, index=False)

    # with read_table -> type information from schema used
    result = sql.read_sql_table("test_datetime", self.conn)
    tm.assert_frame_equal(result, df)

    # with read_sql -> no type information -> sqlite has no native
    # datetime type; parse strings back, coercing the NULL to NaT
    result = sql.read_sql_query("SELECT * FROM test_datetime", self.conn)
    if self.flavor == "sqlite":
        assert isinstance(result.loc[0, "A"], str)
        result["A"] = to_datetime(result["A"], errors="coerce")
    # previously duplicated in both if/else branches; hoisted out
    tm.assert_frame_equal(result, df)
def test_datetime_date(self):
    # test support for datetime.date
    df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"])
    df.to_sql("test_date", self.conn, index=False)
    res = read_sql_table("test_date", self.conn)
    result = res["a"]
    expected = to_datetime(df["a"])
    # comes back as datetime64 (dates are upcast to timestamps on read)
    tm.assert_series_equal(result, expected)
def test_datetime_time(self):
    """datetime.time values round-trip; GH8341 adapter interplay with sqlite."""
    # test support for datetime.time
    df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"])
    df.to_sql("test_time", self.conn, index=False)
    res = read_sql_table("test_time", self.conn)
    tm.assert_frame_equal(res, df)

    # GH8341
    # first, use the fallback to have the sqlite adapter put in place
    sqlite_conn = TestSQLiteFallback.connect()
    sql.to_sql(df, "test_time2", sqlite_conn, index=False)
    res = sql.read_sql_query("SELECT * FROM test_time2", sqlite_conn)
    ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f"))
    tm.assert_frame_equal(ref, res)  # check if adapter is in place

    # then test if sqlalchemy is unaffected by the sqlite adapter
    sql.to_sql(df, "test_time3", self.conn, index=False)
    if self.flavor == "sqlite":
        # sqlite stores times as strings when queried without type info
        res = sql.read_sql_query("SELECT * FROM test_time3", self.conn)
        ref = df.applymap(lambda _: _.strftime("%H:%M:%S.%f"))
        tm.assert_frame_equal(ref, res)
    res = sql.read_sql_table("test_time3", self.conn)
    tm.assert_frame_equal(df, res)
def test_mixed_dtype_insert(self):
    # see GH6509
    # int32 value that does not fit in float32 plus a float32 column;
    # the round-trip must not lose precision on the int side
    s1 = Series(2 ** 25 + 1, dtype=np.int32)
    s2 = Series(0.0, dtype=np.float32)
    df = DataFrame({"s1": s1, "s2": s2})

    # write and read again
    df.to_sql("test_read_write", self.conn, index=False)
    df2 = sql.read_sql_table("test_read_write", self.conn)

    # dtypes may widen on the way back, but values must be exact
    tm.assert_frame_equal(df, df2, check_dtype=False, check_exact=True)
def test_nan_numeric(self):
    # NaNs in numeric float column
    df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]})
    df.to_sql("test_nan", self.conn, index=False)

    # with read_table
    result = sql.read_sql_table("test_nan", self.conn)
    tm.assert_frame_equal(result, df)

    # with read_sql
    result = sql.read_sql_query("SELECT * FROM test_nan", self.conn)
    tm.assert_frame_equal(result, df)
def test_nan_fullcolumn(self):
    # full NaN column (numeric float column)
    df = DataFrame({"A": [0, 1, 2], "B": [np.nan, np.nan, np.nan]})
    df.to_sql("test_nan", self.conn, index=False)

    # with read_table -> schema says float, NaNs survive as NaN
    result = sql.read_sql_table("test_nan", self.conn)
    tm.assert_frame_equal(result, df)

    # with read_sql -> no type info from table -> stays None
    df["B"] = df["B"].astype("object")
    df["B"] = None
    result = sql.read_sql_query("SELECT * FROM test_nan", self.conn)
    tm.assert_frame_equal(result, df)
def test_nan_string(self):
    # NaNs in string column
    df = DataFrame({"A": [0, 1, 2], "B": ["a", "b", np.nan]})
    df.to_sql("test_nan", self.conn, index=False)

    # NaNs are coming back as None (SQL NULL in an object column)
    df.loc[2, "B"] = None

    # with read_table
    result = sql.read_sql_table("test_nan", self.conn)
    tm.assert_frame_equal(result, df)

    # with read_sql
    result = sql.read_sql_query("SELECT * FROM test_nan", self.conn)
    tm.assert_frame_equal(result, df)
def _get_index_columns(self, tbl_name):
    """Return a list of column-name lists, one per index on ``tbl_name``."""
    from sqlalchemy.engine import reflection

    # NOTE(review): Inspector.from_engine is deprecated in SQLAlchemy 1.4+
    # in favor of sqlalchemy.inspect(engine) — confirm the pinned version.
    insp = reflection.Inspector.from_engine(self.conn)
    ixs = insp.get_indexes(tbl_name)
    ixs = [i["column_names"] for i in ixs]
    return ixs
def test_to_sql_save_index(self):
    # delegate to the shared mixin implementation
    self._to_sql_save_index()
def test_transactions(self):
    # delegate to the shared mixin implementation
    self._transaction_test()
def test_get_schema_create_table(self):
    # Use a dataframe without a bool column, since MySQL converts bool to
    # TINYINT (which read_sql_table returns as an int and causes a dtype
    # mismatch)
    self._load_test3_data()
    tbl = "test_get_schema_create_table"
    create_sql = sql.get_schema(self.test_frame3, tbl, con=self.conn)
    # empty frame with the same columns/dtypes as the source
    blank_test_df = self.test_frame3.iloc[:0]

    self.drop_table(tbl)
    self.conn.execute(create_sql)
    returned_df = sql.read_sql_table(tbl, self.conn)
    tm.assert_frame_equal(returned_df, blank_test_df, check_index_type=False)
    self.drop_table(tbl)
def test_dtype(self):
    """``dtype`` argument of to_sql: per-column dict, validation, single type."""
    cols = ["A", "B"]
    data = [(0.8, True), (0.9, None)]
    df = DataFrame(data, columns=cols)
    df.to_sql("dtype_test", self.conn)
    df.to_sql("dtype_test2", self.conn, dtype={"B": sqlalchemy.TEXT})
    meta = sqlalchemy.schema.MetaData(bind=self.conn)
    meta.reflect()
    sqltype = meta.tables["dtype_test2"].columns["B"].type
    assert isinstance(sqltype, sqlalchemy.TEXT)

    # non-SQLAlchemy types must be rejected
    msg = "The type of B is not a SQLAlchemy type"
    with pytest.raises(ValueError, match=msg):
        df.to_sql("error", self.conn, dtype={"B": str})

    # GH9083 — parametrized SQLAlchemy type instances are accepted
    df.to_sql("dtype_test3", self.conn, dtype={"B": sqlalchemy.String(10)})
    meta.reflect()
    sqltype = meta.tables["dtype_test3"].columns["B"].type
    assert isinstance(sqltype, sqlalchemy.String)
    assert sqltype.length == 10

    # single dtype applies to every column
    df.to_sql("single_dtype_test", self.conn, dtype=sqlalchemy.TEXT)
    meta = sqlalchemy.schema.MetaData(bind=self.conn)
    meta.reflect()
    sqltypea = meta.tables["single_dtype_test"].columns["A"].type
    sqltypeb = meta.tables["single_dtype_test"].columns["B"].type
    assert isinstance(sqltypea, sqlalchemy.TEXT)
    assert isinstance(sqltypeb, sqlalchemy.TEXT)
def test_notna_dtype(self):
    """Object columns that are only object because of NA still get typed SQL columns."""
    cols = {
        "Bool": Series([True, None]),
        "Date": Series([datetime(2012, 5, 1), None]),
        "Int": Series([1, None], dtype="object"),
        "Float": Series([1.1, None]),
    }
    df = DataFrame(cols)

    tbl = "notna_dtype_test"
    df.to_sql(tbl, self.conn)
    # smoke check: reading back must not raise (previously an unused
    # local silenced with "# noqa")
    sql.read_sql_table(tbl, self.conn)
    meta = sqlalchemy.schema.MetaData(bind=self.conn)
    meta.reflect()
    if self.flavor == "mysql":
        # MySQL has no native BOOLEAN; bools map to an integer type
        my_type = sqltypes.Integer
    else:
        my_type = sqltypes.Boolean

    col_dict = meta.tables[tbl].columns

    assert isinstance(col_dict["Bool"].type, my_type)
    assert isinstance(col_dict["Date"].type, sqltypes.DateTime)
    assert isinstance(col_dict["Int"].type, sqltypes.Integer)
    assert isinstance(col_dict["Float"].type, sqltypes.Float)
def test_double_precision(self):
    """float32/float64 map to FLOAT/DOUBLE and survive a round-trip."""
    V = 1.23456789101112131415

    df = DataFrame(
        {
            "f32": Series([V], dtype="float32"),
            "f64": Series([V], dtype="float64"),
            "f64_as_f32": Series([V], dtype="float64"),
            "i32": Series([5], dtype="int32"),
            "i64": Series([5], dtype="int64"),
        }
    )

    df.to_sql(
        "test_dtypes",
        self.conn,
        index=False,
        if_exists="replace",
        # force a float64 column into a single-precision SQL column
        dtype={"f64_as_f32": sqlalchemy.Float(precision=23)},
    )
    res = sql.read_sql_table("test_dtypes", self.conn)

    # check precision of float64
    assert np.round(df["f64"].iloc[0], 14) == np.round(res["f64"].iloc[0], 14)

    # check sql types
    meta = sqlalchemy.schema.MetaData(bind=self.conn)
    meta.reflect()
    col_dict = meta.tables["test_dtypes"].columns
    assert str(col_dict["f32"].type) == str(col_dict["f64_as_f32"].type)
    assert isinstance(col_dict["f32"].type, sqltypes.Float)
    assert isinstance(col_dict["f64"].type, sqltypes.Float)
    assert isinstance(col_dict["i32"].type, sqltypes.Integer)
    assert isinstance(col_dict["i64"].type, sqltypes.BigInteger)
def test_connectable_issue_example(self):
    # This tests the example raised in issue
    # https://github.com/pandas-dev/pandas/issues/10104

    def foo(connection):
        # read within an externally managed transaction
        query = "SELECT test_foo_data FROM test_foo_data"
        return sql.read_sql_query(query, con=connection)

    def bar(connection, data):
        # write within the same externally managed transaction
        data.to_sql(name="test_foo_data", con=connection, if_exists="append")

    def main(connectable):
        with connectable.connect() as conn:
            with conn.begin():
                foo_data = conn.run_callable(foo)
                conn.run_callable(bar, foo_data)

    DataFrame({"test_foo_data": [0, 1, 2]}).to_sql("test_foo_data", self.conn)
    main(self.conn)
def test_temporary_table(self):
    """read_sql_query can select from a TEMPORARY table inside one session."""
    test_data = "Hello, World!"
    expected = DataFrame({"spam": [test_data]})
    Base = declarative.declarative_base()

    class Temporary(Base):
        # TEMPORARY tables are connection-scoped, so everything below
        # must happen on the same session connection
        __tablename__ = "temp_test"
        __table_args__ = {"prefixes": ["TEMPORARY"]}
        id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
        spam = sqlalchemy.Column(sqlalchemy.Unicode(30), nullable=False)

    Session = sa_session.sessionmaker(bind=self.conn)
    session = Session()
    with session.transaction:
        conn = session.connection()
        Temporary.__table__.create(conn)
        session.add(Temporary(spam=test_data))
        session.flush()
        # NOTE(review): select([...]) is the legacy SQLAlchemy 1.x calling
        # convention; 2.0 takes positional columns — confirm pinned version.
        df = sql.read_sql_query(sql=sqlalchemy.select([Temporary.spam]), con=conn)

    tm.assert_frame_equal(df, expected)
class _TestSQLAlchemyConn(_EngineToConnMixin, _TestSQLAlchemy):
    # Variant of _TestSQLAlchemy that runs against a Connection instead of
    # an Engine (via _EngineToConnMixin).

    def test_transactions(self):
        # override: nested-transaction rollback does not work on a plain
        # Connection, so skip the shared _transaction_test
        pytest.skip("Nested transactions rollbacks don't work with Pandas")
class _TestSQLiteAlchemy:
    """
    Test the sqlalchemy backend against an in-memory sqlite database.
    """

    flavor = "sqlite"

    @classmethod
    def connect(cls):
        # fresh in-memory database per engine
        return sqlalchemy.create_engine("sqlite:///:memory:")

    @classmethod
    def setup_driver(cls):
        # sqlite3 is built-in
        cls.driver = None

    def test_default_type_conversion(self):
        df = sql.read_sql_table("types_test_data", self.conn)

        assert issubclass(df.FloatCol.dtype.type, np.floating)
        assert issubclass(df.IntCol.dtype.type, np.integer)

        # sqlite has no boolean type, so integer type is returned
        assert issubclass(df.BoolCol.dtype.type, np.integer)

        # Int column with NA values stays as float
        assert issubclass(df.IntColWithNull.dtype.type, np.floating)

        # Non-native Bool column with NA values stays as float
        assert issubclass(df.BoolColWithNull.dtype.type, np.floating)

    def test_default_date_load(self):
        df = sql.read_sql_table("types_test_data", self.conn)

        # IMPORTANT - sqlite has no native date type, so shouldn't parse, but
        assert not issubclass(df.DateCol.dtype.type, np.datetime64)

    def test_bigint_warning(self):
        # test no warning for BIGINT (to support int64) is raised (GH7433)
        df = DataFrame({"a": [1, 2]}, dtype="int64")
        df.to_sql("test_bigintwarning", self.conn, index=False)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            sql.read_sql_table("test_bigintwarning", self.conn)
            assert len(w) == 0
class _TestMySQLAlchemy:
    """
    Test the sqlalchemy backend against an MySQL database.
    """

    flavor = "mysql"

    @classmethod
    def connect(cls):
        url = "mysql+{driver}://root@localhost/pandas_nosetest"
        return sqlalchemy.create_engine(
            url.format(driver=cls.driver), connect_args=cls.connect_args
        )

    @classmethod
    def setup_driver(cls):
        pymysql = pytest.importorskip("pymysql")
        cls.driver = "pymysql"
        # MULTI_STATEMENTS is needed for the stored-procedure test below
        cls.connect_args = {"client_flag": pymysql.constants.CLIENT.MULTI_STATEMENTS}

    def test_default_type_conversion(self):
        df = sql.read_sql_table("types_test_data", self.conn)

        assert issubclass(df.FloatCol.dtype.type, np.floating)
        assert issubclass(df.IntCol.dtype.type, np.integer)

        # MySQL has no real BOOL type (it's an alias for TINYINT)
        assert issubclass(df.BoolCol.dtype.type, np.integer)

        # Int column with NA values stays as float
        assert issubclass(df.IntColWithNull.dtype.type, np.floating)

        # Bool column with NA = int column with NA values => becomes float
        assert issubclass(df.BoolColWithNull.dtype.type, np.floating)

    def test_read_procedure(self):
        import pymysql

        # see GH7324. Although it is more an api test, it is added to the
        # mysql tests as sqlite does not have stored procedures
        df = DataFrame({"a": [1, 2, 3], "b": [0.1, 0.2, 0.3]})
        df.to_sql("test_procedure", self.conn, index=False)

        proc = """DROP PROCEDURE IF EXISTS get_testdb;
CREATE PROCEDURE get_testdb ()
BEGIN
SELECT * FROM test_procedure;
END"""

        connection = self.conn.connect()
        trans = connection.begin()
        try:
            # return value unused; we only need the procedure created
            r1 = connection.execute(proc)  # noqa
            trans.commit()
        except pymysql.Error:
            trans.rollback()
            raise

        res1 = sql.read_sql_query("CALL get_testdb();", self.conn)
        tm.assert_frame_equal(df, res1)

        # test delegation to read_sql_query
        res2 = sql.read_sql("CALL get_testdb();", self.conn)
        tm.assert_frame_equal(df, res2)
class _TestPostgreSQLAlchemy:
    """
    Test the sqlalchemy backend against an PostgreSQL database.
    """

    flavor = "postgresql"

    @classmethod
    def connect(cls):
        url = "postgresql+{driver}://postgres@localhost/pandas_nosetest"
        return sqlalchemy.create_engine(url.format(driver=cls.driver))

    @classmethod
    def setup_driver(cls):
        pytest.importorskip("psycopg2")
        cls.driver = "psycopg2"

    def test_schema_support(self):
        """Read/write against explicit schemas and if_exists options."""
        # only test this for postgresql (schema's not supported in
        # mysql/sqlite)
        df = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]})

        # create a schema
        self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;")
        self.conn.execute("CREATE SCHEMA other;")

        # write dataframe to different schema's
        df.to_sql("test_schema_public", self.conn, index=False)
        df.to_sql(
            "test_schema_public_explicit", self.conn, index=False, schema="public"
        )
        df.to_sql("test_schema_other", self.conn, index=False, schema="other")

        # read dataframes back in
        res1 = sql.read_sql_table("test_schema_public", self.conn)
        tm.assert_frame_equal(df, res1)
        res2 = sql.read_sql_table("test_schema_public_explicit", self.conn)
        tm.assert_frame_equal(df, res2)
        res3 = sql.read_sql_table(
            "test_schema_public_explicit", self.conn, schema="public"
        )
        tm.assert_frame_equal(df, res3)
        res4 = sql.read_sql_table("test_schema_other", self.conn, schema="other")
        tm.assert_frame_equal(df, res4)
        # table only exists in "other", so reading from "public" must fail
        msg = "Table test_schema_other not found"
        with pytest.raises(ValueError, match=msg):
            sql.read_sql_table("test_schema_other", self.conn, schema="public")

        # different if_exists options

        # create a schema
        self.conn.execute("DROP SCHEMA IF EXISTS other CASCADE;")
        self.conn.execute("CREATE SCHEMA other;")

        # write dataframe with different if_exists options
        df.to_sql("test_schema_other", self.conn, schema="other", index=False)
        df.to_sql(
            "test_schema_other",
            self.conn,
            schema="other",
            index=False,
            if_exists="replace",
        )
        df.to_sql(
            "test_schema_other",
            self.conn,
            schema="other",
            index=False,
            if_exists="append",
        )
        res = sql.read_sql_table("test_schema_other", self.conn, schema="other")
        # replace then append => two copies of df
        tm.assert_frame_equal(concat([df, df], ignore_index=True), res)

        # specifying schema in user-provided meta

        # The schema won't be applied on another Connection
        # because of transactional schemas
        if isinstance(self.conn, sqlalchemy.engine.Engine):
            engine2 = self.connect()
            meta = sqlalchemy.MetaData(engine2, schema="other")
            pdsql = sql.SQLDatabase(engine2, meta=meta)
            pdsql.to_sql(df, "test_schema_other2", index=False)
            pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="replace")
            pdsql.to_sql(df, "test_schema_other2", index=False, if_exists="append")
            res1 = sql.read_sql_table("test_schema_other2", self.conn, schema="other")
            res2 = pdsql.read_table("test_schema_other2")
            tm.assert_frame_equal(res1, res2)

    def test_copy_from_callable_insertion_method(self):
        # GH 8953
        # Example in io.rst found under _io.sql.method
        # not available in sqlite, mysql
        def psql_insert_copy(table, conn, keys, data_iter):
            # gets a DBAPI connection that can provide a cursor
            dbapi_conn = conn.connection
            with dbapi_conn.cursor() as cur:
                s_buf = StringIO()
                writer = csv.writer(s_buf)
                writer.writerows(data_iter)
                s_buf.seek(0)

                columns = ", ".join(f'"{k}"' for k in keys)
                if table.schema:
                    table_name = f"{table.schema}.{table.name}"
                else:
                    table_name = table.name

                sql_query = f"COPY {table_name} ({columns}) FROM STDIN WITH CSV"
                cur.copy_expert(sql=sql_query, file=s_buf)

        expected = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]})
        expected.to_sql(
            "test_copy_insert", self.conn, index=False, method=psql_insert_copy
        )
        result = sql.read_sql_table("test_copy_insert", self.conn)
        tm.assert_frame_equal(result, expected)
@pytest.mark.single
@pytest.mark.db
class TestMySQLAlchemy(_TestMySQLAlchemy, _TestSQLAlchemy):
    # Concrete runner: MySQL flavor through a SQLAlchemy Engine.
    pass
@pytest.mark.single
@pytest.mark.db
class TestMySQLAlchemyConn(_TestMySQLAlchemy, _TestSQLAlchemyConn):
    # Concrete runner: MySQL flavor through a SQLAlchemy Connection.
    pass
@pytest.mark.single
@pytest.mark.db
class TestPostgreSQLAlchemy(_TestPostgreSQLAlchemy, _TestSQLAlchemy):
    # Concrete runner: PostgreSQL flavor through a SQLAlchemy Engine.
    pass
@pytest.mark.single
@pytest.mark.db
class TestPostgreSQLAlchemyConn(_TestPostgreSQLAlchemy, _TestSQLAlchemyConn):
    # Concrete runner: PostgreSQL flavor through a SQLAlchemy Connection.
    pass
@pytest.mark.single
class TestSQLiteAlchemy(_TestSQLiteAlchemy, _TestSQLAlchemy):
    # Concrete runner: in-memory sqlite through a SQLAlchemy Engine
    # (no @pytest.mark.db — needs no external database).
    pass
@pytest.mark.single
class TestSQLiteAlchemyConn(_TestSQLiteAlchemy, _TestSQLAlchemyConn):
    # Concrete runner: in-memory sqlite through a SQLAlchemy Connection.
    pass
# -----------------------------------------------------------------------------
# -- Test Sqlite / MySQL fallback
@pytest.mark.single
class TestSQLiteFallback(SQLiteMixIn, PandasSQLTest):
    """
    Test the fallback mode against an in-memory sqlite database.
    """

    flavor = "sqlite"

    @classmethod
    def connect(cls):
        return sqlite3.connect(":memory:")

    def setup_connect(self):
        self.conn = self.connect()

    def load_test_data_and_sql(self):
        # fallback mode uses SQLiteDatabase directly (no SQLAlchemy)
        self.pandasSQL = sql.SQLiteDatabase(self.conn)
        self._load_test1_data()

    @pytest.fixture(autouse=True)
    def setup_method(self, load_iris_data):
        self.load_test_data_and_sql()

    def test_read_sql(self):
        self._read_sql_iris()

    def test_read_sql_parameter(self):
        self._read_sql_iris_parameter()

    def test_read_sql_named_parameter(self):
        self._read_sql_iris_named_parameter()

    def test_to_sql(self):
        self._to_sql()

    def test_to_sql_empty(self):
        self._to_sql_empty()

    def test_to_sql_fail(self):
        self._to_sql_fail()

    def test_to_sql_replace(self):
        self._to_sql_replace()

    def test_to_sql_append(self):
        self._to_sql_append()

    def test_to_sql_method_multi(self):
        # GH 29921
        self._to_sql(method="multi")

    def test_create_and_drop_table(self):
        temp_frame = DataFrame(
            {"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}
        )

        self.pandasSQL.to_sql(temp_frame, "drop_test_frame")

        assert self.pandasSQL.has_table("drop_test_frame")

        self.pandasSQL.drop_table("drop_test_frame")

        assert not self.pandasSQL.has_table("drop_test_frame")

    def test_roundtrip(self):
        self._roundtrip()

    def test_execute_sql(self):
        self._execute_sql()

    def test_datetime_date(self):
        # test support for datetime.date
        df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"])
        df.to_sql("test_date", self.conn, index=False)
        res = read_sql_query("SELECT * FROM test_date", self.conn)
        if self.flavor == "sqlite":
            # comes back as strings
            tm.assert_frame_equal(res, df.astype(str))
        elif self.flavor == "mysql":
            tm.assert_frame_equal(res, df)

    def test_datetime_time(self):
        # test support for datetime.time, GH #8341
        df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"])
        df.to_sql("test_time", self.conn, index=False)
        res = read_sql_query("SELECT * FROM test_time", self.conn)
        if self.flavor == "sqlite":
            # comes back as strings
            expected = df.applymap(lambda _: _.strftime("%H:%M:%S.%f"))
            tm.assert_frame_equal(res, expected)

    def _get_index_columns(self, tbl_name):
        """Return column-name lists for each index via sqlite PRAGMAs."""
        ixs = sql.read_sql_query(
            "SELECT * FROM sqlite_master WHERE type = 'index' "
            + f"AND tbl_name = '{tbl_name}'",
            self.conn,
        )
        ix_cols = []
        for ix_name in ixs.name:
            ix_info = sql.read_sql_query(f"PRAGMA index_info({ix_name})", self.conn)
            ix_cols.append(ix_info.name.tolist())
        return ix_cols

    def test_to_sql_save_index(self):
        self._to_sql_save_index()

    def test_transactions(self):
        self._transaction_test()

    def _get_sqlite_column_type(self, table, column):
        """Return the declared SQL type of ``column`` from PRAGMA table_info."""
        recs = self.conn.execute(f"PRAGMA table_info({table})")
        for cid, name, ctype, not_null, default, pk in recs:
            if name == column:
                return ctype
        raise ValueError(f"Table {table}, column {column} not found")

    def test_dtype(self):
        if self.flavor == "mysql":
            pytest.skip("Not applicable to MySQL legacy")
        cols = ["A", "B"]
        data = [(0.8, True), (0.9, None)]
        df = DataFrame(data, columns=cols)
        df.to_sql("dtype_test", self.conn)
        df.to_sql("dtype_test2", self.conn, dtype={"B": "STRING"})

        # sqlite stores Boolean values as INTEGER
        assert self._get_sqlite_column_type("dtype_test", "B") == "INTEGER"

        assert self._get_sqlite_column_type("dtype_test2", "B") == "STRING"
        msg = r"B \(<class 'bool'>\) not a string"
        with pytest.raises(ValueError, match=msg):
            df.to_sql("error", self.conn, dtype={"B": bool})

        # single dtype
        df.to_sql("single_dtype_test", self.conn, dtype="STRING")
        assert self._get_sqlite_column_type("single_dtype_test", "A") == "STRING"
        assert self._get_sqlite_column_type("single_dtype_test", "B") == "STRING"

    def test_notna_dtype(self):
        if self.flavor == "mysql":
            pytest.skip("Not applicable to MySQL legacy")

        cols = {
            "Bool": Series([True, None]),
            "Date": Series([datetime(2012, 5, 1), None]),
            "Int": Series([1, None], dtype="object"),
            "Float": Series([1.1, None]),
        }
        df = DataFrame(cols)

        tbl = "notna_dtype_test"
        df.to_sql(tbl, self.conn)

        assert self._get_sqlite_column_type(tbl, "Bool") == "INTEGER"
        assert self._get_sqlite_column_type(tbl, "Date") == "TIMESTAMP"
        assert self._get_sqlite_column_type(tbl, "Int") == "INTEGER"
        assert self._get_sqlite_column_type(tbl, "Float") == "REAL"

    def test_illegal_names(self):
        # For sqlite, these should work fine
        df = DataFrame([[1, 2], [3, 4]], columns=["a", "b"])

        msg = "Empty table or column name specified"
        with pytest.raises(ValueError, match=msg):
            df.to_sql("", self.conn)

        for ndx, weird_name in enumerate(
            [
                "test_weird_name]",
                "test_weird_name[",
                "test_weird_name`",
                'test_weird_name"',
                "test_weird_name'",
                "_b.test_weird_name_01-30",
                '"_b.test_weird_name_01-30"',
                "99beginswithnumber",
                "12345",
                "\xe9",
            ]
        ):
            # weird table names must be quoted/escaped, not rejected
            df.to_sql(weird_name, self.conn)
            sql.table_exists(weird_name, self.conn)

            df2 = DataFrame([[1, 2], [3, 4]], columns=["a", weird_name])
            c_tbl = f"test_weird_col_name{ndx:d}"
            df2.to_sql(c_tbl, self.conn)
            sql.table_exists(c_tbl, self.conn)
# -----------------------------------------------------------------------------
# -- Old tests from 0.13.1 (before refactor using sqlalchemy)
def date_format(dt):
    """Returns date in YYYYMMDD format."""
    return f"{dt:%Y%m%d}"
_formatters = {
datetime: "'{}'".format,
str: "'{}'".format,
np.str_: "'{}'".format,
bytes: "'{}'".format,
float: "{:.8f}".format,
int: "{:d}".format,
type(None): lambda x: "NULL",
np.float64: "{:.10f}".format,
bool: "'{!s}'".format,
}
def format_query(sql, *args):
    """
    Substitute ``args`` into a ``%s``-style query string as SQL literals.

    Floats that are NA are rendered as SQL NULL; every other argument is
    formatted by the callable registered for its exact type in
    ``_formatters``.  Raises ``KeyError`` for unregistered argument types.
    """
    processed_args = []
    for arg in args:
        if isinstance(arg, float) and isna(arg):
            # NaN has no SQL float-literal representation
            arg = None

        formatter = _formatters[type(arg)]
        processed_args.append(formatter(arg))

    return sql % tuple(processed_args)
def tquery(query, con=None, cur=None):
    """Replace removed sql.tquery function"""
    rows = sql.execute(query, con=con, cur=cur).fetchall()
    return None if rows is None else list(rows)
@pytest.mark.single
class TestXSQLite(SQLiteMixIn):
    """Legacy (pre-SQLAlchemy-refactor) tests against raw sqlite3."""

    @pytest.fixture(autouse=True)
    def setup_method(self, request, datapath):
        self.method = request.function
        self.conn = sqlite3.connect(":memory:")

        # In some test cases we may close db connection
        # Re-open conn here so we can perform cleanup in teardown
        # NOTE(review): the pre-yield connection is never explicitly
        # closed here; in-memory sqlite makes this harmless in practice.
        yield
        self.method = request.function
        self.conn = sqlite3.connect(":memory:")

    def test_basic(self):
        frame = tm.makeTimeDataFrame()
        self._check_roundtrip(frame)

    def test_write_row_by_row(self):
        frame = tm.makeTimeDataFrame()
        frame.iloc[0, 0] = np.nan
        create_sql = sql.get_schema(frame, "test")
        cur = self.conn.cursor()
        cur.execute(create_sql)

        cur = self.conn.cursor()

        ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
        for idx, row in frame.iterrows():
            fmt_sql = format_query(ins, *row)
            tquery(fmt_sql, cur=cur)

        self.conn.commit()

        result = sql.read_sql("select * from test", con=self.conn)
        result.index = frame.index
        tm.assert_frame_equal(result, frame, check_less_precise=True)

    def test_execute(self):
        frame = tm.makeTimeDataFrame()
        create_sql = sql.get_schema(frame, "test")
        cur = self.conn.cursor()
        cur.execute(create_sql)
        ins = "INSERT INTO test VALUES (?, ?, ?, ?)"

        row = frame.iloc[0]
        sql.execute(ins, self.conn, params=tuple(row))
        self.conn.commit()

        result = sql.read_sql("select * from test", self.conn)
        result.index = frame.index[:1]
        tm.assert_frame_equal(result, frame[:1])

    def test_schema(self):
        # get_schema maps the datetime index column to DATETIME and
        # honors the keys argument for PRIMARY KEY generation
        frame = tm.makeTimeDataFrame()
        create_sql = sql.get_schema(frame, "test")
        # fix: loop variable renamed from ambiguous "l" (flake8 E741)
        for line in create_sql.splitlines():
            tokens = line.split(" ")
            if len(tokens) == 2 and tokens[0] == "A":
                assert tokens[1] == "DATETIME"

        frame = tm.makeTimeDataFrame()
        create_sql = sql.get_schema(frame, "test", keys=["A", "B"])
        # fix: removed a dead "lines = create_sql.splitlines()" assignment
        assert 'PRIMARY KEY ("A", "B")' in create_sql
        cur = self.conn.cursor()
        cur.execute(create_sql)

    def test_execute_fail(self):
        create_sql = """
        CREATE TABLE test
        (
        a TEXT,
        b TEXT,
        c REAL,
        PRIMARY KEY (a, b)
        );
        """
        cur = self.conn.cursor()
        cur.execute(create_sql)

        sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
        sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn)

        # duplicate primary key must raise
        with pytest.raises(Exception):
            sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn)

    def test_execute_closed_connection(self):
        create_sql = """
        CREATE TABLE test
        (
        a TEXT,
        b TEXT,
        c REAL,
        PRIMARY KEY (a, b)
        );
        """
        cur = self.conn.cursor()
        cur.execute(create_sql)

        sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
        self.conn.close()

        # querying a closed connection must raise
        with pytest.raises(Exception):
            tquery("select * from test", con=self.conn)

    def test_na_roundtrip(self):
        # placeholder kept for parity with the legacy suite
        pass

    def _check_roundtrip(self, frame):
        sql.to_sql(frame, name="test_table", con=self.conn, index=False)
        result = sql.read_sql("select * from test_table", self.conn)

        # HACK! Change this once indexes are handled properly.
        result.index = frame.index

        expected = frame
        tm.assert_frame_equal(result, expected)

        frame["txt"] = ["a"] * len(frame)
        frame2 = frame.copy()
        new_idx = Index(np.arange(len(frame2))) + 10
        frame2["Idx"] = new_idx.copy()
        sql.to_sql(frame2, name="test_table2", con=self.conn, index=False)
        result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx")
        expected = frame.copy()
        expected.index = new_idx
        expected.index.name = "Idx"
        tm.assert_frame_equal(expected, result)

    def test_keyword_as_column_names(self):
        # SQL keyword ("From") used as a column name must be quoted
        df = DataFrame({"From": np.ones(5)})
        sql.to_sql(df, con=self.conn, name="testkeywords", index=False)

    def test_onecolumn_of_integer(self):
        # GH 3628
        # a column_of_integers dataframe should transfer well to sql
        mono_df = DataFrame([1, 2], columns=["c0"])
        sql.to_sql(mono_df, con=self.conn, name="mono_df", index=False)
        # computing the sum via sql
        con_x = self.conn
        the_sum = sum(my_c0[0] for my_c0 in con_x.execute("select * from mono_df"))
        # it should not fail, and gives 3 ( Issue #3628 )
        assert the_sum == 3

        result = sql.read_sql("select * from mono_df", con_x)
        tm.assert_frame_equal(result, mono_df)

    def test_if_exists(self):
        df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]})
        df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]})
        table_name = "table_if_exists"
        sql_select = f"SELECT * FROM {table_name}"

        def clean_up(test_table_to_drop):
            """
            Drops tables created from individual tests
            so no dependencies arise from sequential tests
            """
            self.drop_table(test_table_to_drop)

        msg = "'notvalidvalue' is not valid for if_exists"
        with pytest.raises(ValueError, match=msg):
            sql.to_sql(
                frame=df_if_exists_1,
                con=self.conn,
                name=table_name,
                if_exists="notvalidvalue",
            )
        clean_up(table_name)

        # test if_exists='fail'
        sql.to_sql(
            frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail"
        )
        msg = "Table 'table_if_exists' already exists"
        with pytest.raises(ValueError, match=msg):
            sql.to_sql(
                frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail"
            )
        # test if_exists='replace'
        sql.to_sql(
            frame=df_if_exists_1,
            con=self.conn,
            name=table_name,
            if_exists="replace",
            index=False,
        )
        assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")]
        sql.to_sql(
            frame=df_if_exists_2,
            con=self.conn,
            name=table_name,
            if_exists="replace",
            index=False,
        )
        assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")]
        clean_up(table_name)

        # test if_exists='append'
        # first write uses 'fail' to (re)create the table after clean_up
        sql.to_sql(
            frame=df_if_exists_1,
            con=self.conn,
            name=table_name,
            if_exists="fail",
            index=False,
        )
        assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")]
        sql.to_sql(
            frame=df_if_exists_2,
            con=self.conn,
            name=table_name,
            if_exists="append",
            index=False,
        )
        assert tquery(sql_select, con=self.conn) == [
            (1, "A"),
            (2, "B"),
            (3, "C"),
            (4, "D"),
            (5, "E"),
        ]
        clean_up(table_name)
@pytest.mark.single
@pytest.mark.db
@pytest.mark.skip(
reason="gh-13611: there is no support for MySQL if SQLAlchemy is not installed"
)
class TestXMySQL(MySQLMixIn):
@pytest.fixture(autouse=True, scope="class")
def setup_class(cls):
    # Connectivity probe only: the connections are used to fail fast with a
    # helpful message and are otherwise discarded (not stored on the class).
    pymysql = pytest.importorskip("pymysql")
    pymysql.connect(host="localhost", user="root", passwd="", db="pandas_nosetest")
    try:
        pymysql.connect(read_default_group="pandas")
    except pymysql.ProgrammingError as err:
        raise RuntimeError(
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf."
        ) from err
    except pymysql.Error as err:
        raise RuntimeError(
            "Cannot connect to database. "
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf."
        ) from err
@pytest.fixture(autouse=True)
def setup_method(self, request, datapath):
    # Per-test connectivity probe; duplicates setup_class so each test gets
    # the same fail-fast diagnostics.  self.conn is presumably provided by
    # MySQLMixIn elsewhere — TODO confirm.
    pymysql = pytest.importorskip("pymysql")
    pymysql.connect(host="localhost", user="root", passwd="", db="pandas_nosetest")
    try:
        pymysql.connect(read_default_group="pandas")
    except pymysql.ProgrammingError as err:
        raise RuntimeError(
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf."
        ) from err
    except pymysql.Error as err:
        raise RuntimeError(
            "Cannot connect to database. "
            "Create a group of connection parameters under the heading "
            "[pandas] in your system's mysql default file, "
            "typically located at ~/.my.cnf or /etc/.my.cnf."
        ) from err

    self.method = request.function
def test_basic(self):
    # simple round-trip through the shared helper
    frame = tm.makeTimeDataFrame()
    self._check_roundtrip(frame)
def test_write_row_by_row(self):
    """Insert one formatted row at a time and read the table back."""
    frame = tm.makeTimeDataFrame()
    frame.iloc[0, 0] = np.nan
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = sql.get_schema(frame, "test")
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
    ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"
    for idx, row in frame.iterrows():
        fmt_sql = format_query(ins, *row)
        tquery(fmt_sql, cur=cur)

    self.conn.commit()

    result = sql.read_sql("select * from test", con=self.conn)
    result.index = frame.index
    tm.assert_frame_equal(result, frame, check_less_precise=True)

    # GH#32571 result comes back rounded to 6 digits in some builds;
    # no obvious pattern
def test_chunksize_read_type(self):
    """read_sql_query with chunksize yields DataFrame chunks of that size."""
    frame = tm.makeTimeDataFrame()
    frame.index.name = "index"
    drop_sql = "DROP TABLE IF EXISTS test"
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    sql.to_sql(frame, name="test", con=self.conn)
    query = "select * from test"
    chunksize = 5
    chunk_gen = pd.read_sql_query(
        sql=query, con=self.conn, chunksize=chunksize, index_col="index"
    )
    chunk_df = next(chunk_gen)
    tm.assert_frame_equal(frame[:chunksize], chunk_df)
def test_execute(self):
    """sql.execute inserts a parameterized row that reads back intact."""
    frame = tm.makeTimeDataFrame()
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = sql.get_schema(frame, "test")
    cur = self.conn.cursor()
    with warnings.catch_warnings():
        # MySQL warns when dropping a table that does not exist
        warnings.filterwarnings("ignore", "Unknown table.*")
        cur.execute(drop_sql)
    cur.execute(create_sql)
    ins = "INSERT INTO test VALUES (%s, %s, %s, %s)"

    row = frame.iloc[0].values.tolist()
    sql.execute(ins, self.conn, params=tuple(row))
    self.conn.commit()

    result = sql.read_sql("select * from test", self.conn)
    result.index = frame.index[:1]
    tm.assert_frame_equal(result, frame[:1])
def test_schema(self):
    """get_schema should emit DATETIME for column "A" and honor ``keys``.

    Fixes: the loop variable was named ``l`` (ambiguous, E741) and
    ``create_sql.splitlines()`` was recomputed into an unused local.
    """
    frame = tm.makeTimeDataFrame()
    create_sql = sql.get_schema(frame, "test")
    for line in create_sql.splitlines():
        tokens = line.split(" ")
        if len(tokens) == 2 and tokens[0] == "A":
            assert tokens[1] == "DATETIME"
    frame = tm.makeTimeDataFrame()
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = sql.get_schema(frame, "test", keys=["A", "B"])
    # the composite primary key must appear in the generated DDL
    assert "PRIMARY KEY (`A`, `B`)" in create_sql
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
def test_execute_fail(self):
    """Inserting a duplicate primary key through sql.execute must raise."""
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a(5), b(5))
    );
    """
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
    sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn)
    # ("foo", "bar") already exists -> duplicate-key error is expected
    with pytest.raises(Exception):
        sql.execute('INSERT INTO test VALUES("foo", "bar", 7)', self.conn)
def test_execute_closed_connection(self, request, datapath):
    """Querying through a closed connection must raise."""
    drop_sql = "DROP TABLE IF EXISTS test"
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a(5), b(5))
    );
    """
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
    self.conn.close()
    with pytest.raises(Exception):
        tquery("select * from test", con=self.conn)
    # Initialize connection again (needed for tearDown)
    self.setup_method(request, datapath)
def test_na_roundtrip(self):
    # Deliberate no-op: presumably overrides a base-class NA round-trip
    # test that does not apply to this backend — TODO confirm against the
    # parent test class.
    pass
def _check_roundtrip(self, frame):
    """Write ``frame`` to the DB and assert it reads back equal.

    Performs two passes: a plain round trip through ``test_table``, then a
    second one through ``test_table2`` with an added text column and an
    explicit "Idx" column used as ``index_col`` on read.
    """
    drop_sql = "DROP TABLE IF EXISTS test_table"
    cur = self.conn.cursor()
    with warnings.catch_warnings():
        # MySQL warns when dropping a non-existent table; ignore it
        warnings.filterwarnings("ignore", "Unknown table.*")
        cur.execute(drop_sql)
    sql.to_sql(frame, name="test_table", con=self.conn, index=False)
    result = sql.read_sql("select * from test_table", self.conn)
    # HACK! Change this once indexes are handled properly.
    result.index = frame.index
    result.index.name = frame.index.name
    expected = frame
    tm.assert_frame_equal(result, expected)
    frame["txt"] = ["a"] * len(frame)
    frame2 = frame.copy()
    index = Index(np.arange(len(frame2))) + 10
    frame2["Idx"] = index
    drop_sql = "DROP TABLE IF EXISTS test_table2"
    cur = self.conn.cursor()
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", "Unknown table.*")
        cur.execute(drop_sql)
    sql.to_sql(frame2, name="test_table2", con=self.conn, index=False)
    result = sql.read_sql("select * from test_table2", self.conn, index_col="Idx")
    expected = frame.copy()
    # HACK! Change this once indexes are handled properly.
    expected.index = index
    expected.index.names = result.index.names
    tm.assert_frame_equal(expected, result)
def test_keyword_as_column_names(self):
    """A SQL keyword ("From") used as a column name must not break to_sql."""
    df = DataFrame({"From": np.ones(5)})
    sql.to_sql(
        df, con=self.conn, name="testkeywords", if_exists="replace", index=False
    )
def test_if_exists(self):
    """Exercise every ``if_exists`` mode of to_sql: invalid, fail, replace, append."""
    df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]})
    df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]})
    table_name = "table_if_exists"
    sql_select = f"SELECT * FROM {table_name}"

    def clean_up(test_table_to_drop):
        """
        Drops tables created from individual tests
        so no dependencies arise from sequential tests
        """
        self.drop_table(test_table_to_drop)

    # test if invalid value for if_exists raises appropriate error
    with pytest.raises(ValueError, match="<insert message here>"):
        sql.to_sql(
            frame=df_if_exists_1,
            con=self.conn,
            name=table_name,
            if_exists="notvalidvalue",
        )
    clean_up(table_name)
    # test if_exists='fail'
    sql.to_sql(
        frame=df_if_exists_1,
        con=self.conn,
        name=table_name,
        if_exists="fail",
        index=False,
    )
    # a second write with 'fail' against the now-existing table must raise
    with pytest.raises(ValueError, match="<insert message here>"):
        sql.to_sql(
            frame=df_if_exists_1, con=self.conn, name=table_name, if_exists="fail"
        )
    # test if_exists='replace'
    sql.to_sql(
        frame=df_if_exists_1,
        con=self.conn,
        name=table_name,
        if_exists="replace",
        index=False,
    )
    assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")]
    sql.to_sql(
        frame=df_if_exists_2,
        con=self.conn,
        name=table_name,
        if_exists="replace",
        index=False,
    )
    assert tquery(sql_select, con=self.conn) == [(3, "C"), (4, "D"), (5, "E")]
    clean_up(table_name)
    # test if_exists='append'
    # (first write uses 'fail' to create a fresh table after clean_up)
    sql.to_sql(
        frame=df_if_exists_1,
        con=self.conn,
        name=table_name,
        if_exists="fail",
        index=False,
    )
    assert tquery(sql_select, con=self.conn) == [(1, "A"), (2, "B")]
    sql.to_sql(
        frame=df_if_exists_2,
        con=self.conn,
        name=table_name,
        if_exists="append",
        index=False,
    )
    assert tquery(sql_select, con=self.conn) == [
        (1, "A"),
        (2, "B"),
        (3, "C"),
        (4, "D"),
        (5, "E"),
    ]
    clean_up(table_name)
|
/**
 * Base car model: a brand, two paint colors and two numeric option lists.
 */
export class Car {
  brand;
  color;
  roofColor;
  options1;
  options2;

  /**
   * @param {string} brand
   * @param {number} color
   * @param {number} roofColor
   * @param {Array<number>} options1
   * @param {Array<number>} options2
   */
  constructor(brand, color, roofColor, options1, options2) {
    // Single bulk assignment instead of five individual statements.
    Object.assign(this, { brand, color, roofColor, options1, options2 });
  }
}
/** Volvo-branded car specialization. */
export class VolvoCar extends Car {
  volvoWheels;
  /**
   * @param {number} volvoWheels
   * NOTE(review): super() is called with no arguments, so all inherited Car
   * fields (brand, color, ...) are left undefined — confirm this is what the
   * code generator intends.
   */
  constructor(volvoWheels) {
    super();
    this.volvoWheels = volvoWheels;
  }
}
/** Ford-branded car specialization. */
export class FordCar extends Car {
  fordRoof;
  /**
   * @param {string} fordRoof
   * NOTE(review): super() passes no arguments; inherited Car fields stay
   * undefined (same pattern as VolvoCar).
   */
  constructor(fordRoof) {
    super();
    this.fordRoof = fordRoof;
  }
}
/** Numeric color codes shared with the server-side enum. */
export const Color = {
  Black: 10,
  Red: 100,
  Blue: 101,
}
/** Polestar car: a further specialization of VolvoCar. */
export class PolestarCar extends VolvoCar {
  polestarEngine;
  /**
   * @param {string} polestarEngine
   * NOTE(review): super() passes no arguments, so volvoWheels and all Car
   * fields are left undefined.
   */
  constructor(polestarEngine) {
    super();
    this.polestarEngine = polestarEngine;
  }
}
export class Api {
url= "";
init= {};
revive = (key, value) => {
if (value !== null && value !== undefined) {
if(/\d{4}-(?:0[1-9]|1[0-2])-(?:0[1-9]|[1-2]\d|3[0-1])T(?:[0-1]\d|2[0-3]):[0-5]\d:[0-5]\d(?:\.\d+|)(?:Z|(?:\+|-)(?:\d{2}):?(?:\d{2}))/.exec(value)) {
return new Date(value);
}
}
return value;
};
post = (url, data) => {
return new Promise( (resolve, reject) => {
let formData = new FormData();
Object.keys(data).forEach((key) => {
formData.append(key, data[key])
});
fetch(url, {
...this.init,
method: 'post',
body: formData,
}).then((resp) => {
if(resp.ok) {
resp.text().then((txt)=> {
const output = JSON.parse(txt, this.revive);
resolve(output);
});
}
else {
reject({status: resp.status, statusText: resp.statusText, reason: null});
}
}).catch((e) => {
reject({status: null, statusText: null, reason: '' + e});
})
});
};
get = (url, data) => {
return new Promise( (resolve, reject) => {
const u = new URL(url);
Object.keys(data).forEach((key) => {
u.searchParams.append(key, data[key])
});
fetch(u.toString(), {
...this.init,
method: 'get'
}).then((resp) => {
if(resp.ok) {
resp.text().then((txt)=> {
const output = JSON.parse(txt, this.revive);
resolve(output);
});
}
else {
reject({status: resp.status, statusText: resp.statusText, reason: null});
}
}).catch((e) => {
reject({status: null, statusText: null, reason: '' + e});
})
});
};
/**
* @param {Array<Car>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<Car>>}
*/
arrayOfCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/arrayOfCars`, data);
return this.post(`${this.url}/arrayOfCars`, data);
}
/**
* @param {Array<PolestarCar>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<PolestarCar>>}
*/
arrayOfPolestarCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/arrayOfPolestarCars`, data);
return this.post(`${this.url}/arrayOfPolestarCars`, data);
}
/**
* @param {Array<VolvoCar>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<VolvoCar>>}
*/
arrayOfVolvoCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/arrayOfVolvoCars`, data);
return this.post(`${this.url}/arrayOfVolvoCars`, data);
}
/**
* @param {Car} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Car>}
*/
car = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/car`, data);
return this.post(`${this.url}/car`, data);
}
/**
* @param {FordCar} input
* @param {string} method 'get' or 'post'
* @returns {Promise<FordCar>}
*/
fordCar = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/fordCar`, data);
return this.post(`${this.url}/fordCar`, data);
}
/**
* @param {Array<Car>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<Car>>}
*/
iEnumerableOfCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/iEnumerableOfCars`, data);
return this.post(`${this.url}/iEnumerableOfCars`, data);
}
/**
* @param {Array<PolestarCar>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<PolestarCar>>}
*/
iEnumerableOfPolestarCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/iEnumerableOfPolestarCars`, data);
return this.post(`${this.url}/iEnumerableOfPolestarCars`, data);
}
/**
* @param {Array<VolvoCar>} input
* @param {string} method 'get' or 'post'
* @returns {Promise<Array<VolvoCar>>}
*/
iEnumerableOfVolvoCars = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/iEnumerableOfVolvoCars`, data);
return this.post(`${this.url}/iEnumerableOfVolvoCars`, data);
}
/**
* @param {PolestarCar} input
* @param {string} method 'get' or 'post'
* @returns {Promise<PolestarCar>}
*/
polestarCar = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/polestarCar`, data);
return this.post(`${this.url}/polestarCar`, data);
}
/**
* @param {VolvoCar} input
* @param {string} method 'get' or 'post'
* @returns {Promise<VolvoCar>}
*/
volvoCar = (input, method = 'post') => {
const data = {};
data.input = JSON.stringify(input);
if(method === 'get') return this.get(`${this.url}/volvoCar`, data);
return this.post(`${this.url}/volvoCar`, data);
}
}
// Shared singleton client; configure `api.url` / `api.init` before use.
export const api = new Api();
export default api;
|
from numpy import around
def range_step(min_: int, max_: int, size: int):
    """Return the step width that splits [min_, max_] into `size` equal parts."""
    span = max_ - min_
    return span / size
def round(iterable_, decimals: int):
    """Element-wise rounding of `iterable_` to `decimals` places via numpy.

    NOTE(review): this intentionally(?) shadows the built-in ``round`` within
    this module — confirm callers expect that.
    """
    return around(iterable_, decimals)
|
//
// NSArray+PrestoData.h
//
// Copyright (c) 2015 Daniel Hall
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#import <Foundation/Foundation.h>
/** A category containing all the methods used by Presto Data for searching, parsing, and modifying array elements */
@interface NSArray (PrestoData)
/** A replacement for the default [NSObject description] property / method. This property includes descriptions of values unique to PrestoData, like pd_innerValue and also prints the description of the elements in the correct order using the PrestoData orderedKeys property. */
@property (nonatomic, readonly) NSString *pd_description;
/** An equality checking method specifically to compare NSArrays that contain PrestoData dictionaries. Use instead of the normal [NSObject isEqual:] method
 *
 * @param array Another array that contains PrestoData dictionaries
 * @return YES if the arrays contain the same elements, otherwise NO
 */
- (BOOL)pd_isEqualToArray:(NSArray *)array;
/** A copy method that produces deep copies specifically used to copy NSArrays that contain PrestoData dictionaries. Use instead of the normal [NSObject copy] method
 *
 * @return A deep copy of this array containing copies of its contents instead of pointers to the original contents
 */
- (instancetype)pd_copy;
/**---------------------------------------------------------------------------------------
 * @name Creating an Array from JSON
 * ---------------------------------------------------------------------------------------
 */
/** Returns an NSArray from UTF8-encoded JSON data using the default attribute name for inner values: "innerValue"
 *
 * @param jsonData An NSData instance that contains a UTF8-encoded JSON string
 * @return An NSArray instance if the JSON represented an array, otherwise nil
 */
+ (instancetype)pd_arrayFromJSONData:(NSData *)jsonData;
/** Returns an NSArray from UTF8-encoded JSON data using the specified attribute name for inner values. In other words, if you will be converting JSON to XML later, and want a specific attribute to be used as the "inner value" of the XML element instead of an XML attribute, specify the name of that JSON attribute here.
 *
 * @param jsonData An NSData instance that contains a UTF8-encoded JSON string
 * @param key The name of the JSON attribute which will signify that the value should be mapped to the inner value of each element dictionary in the resulting array, and in the event of conversion to XML.
 * @return An NSArray instance if the JSON represented an array, otherwise nil
 */
+ (instancetype)pd_arrayFromJSONData:(NSData *)jsonData keyForInnerValue:(NSString *)key;
/**---------------------------------------------------------------------------------------
 * @name Changing Element Attributes
 * ---------------------------------------------------------------------------------------
 */
/** Sets the value for a specific attribute on all elements contained in this array. Returns the modified array so methods can be chained.
 *
 * @param value The new value to store. Valid types are NSString and NSNumber
 * @param attribute The name of the attribute the value will be set for. If the attribute already exists, its current value will be overwritten. If an attribute with this name does not already exist, it will be created.
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_setValue:(id)value forAttribute:(NSString *)attribute;
/** Deletes the specified attribute from all elements in the array
 *
 * @param attribute The name of the attribute to delete
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_deleteAttribute:(NSString *)attribute;
/** Sets the inner value for all elements contained in this array. Returns the modified array so methods can be chained.
 *
 * Inner values do not generally need to be used when parsing dictionaries into and out of JSON strings. The inner value exists in PrestoData dictionaries for compatibility with XML, which may contain a value inside the element directly, rather than as part of a named attribute or a sub-element
 *
 * @param value The new value to store. Valid types are NSString and NSNumber.
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_setInnerValue:(id)value;
/**---------------------------------------------------------------------------------------
 * @name Adding and Removing Elements
 * ---------------------------------------------------------------------------------------
 */
/** Adds a child element to each existing element in the array, mapped to the specified element name
 *
 * @param element A mutable dictionary that will be added as a child element to each element in the array
 * @param name The key that the child element will be mapped to inside each existing element in the array
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_addElement:(NSMutableDictionary *)element withName:(NSString *)name;
/** Removes any child element with a matching name from each element inside this array
 *
 * @param elementName The name of the element that should be removed
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_removeElementNamed:(NSString *)elementName;
/** Provides the same functionality as pd_removeElementNamed: but with an object reference instead of a name
 *
 * @param element A reference to the element that should be removed
 * @return The modified NSArray that results from this operation
 */
- (instancetype)pd_removeElement:(NSMutableDictionary *)element;
/** Removes this array from the dictionary it is a child element of */
- (void)pd_removeFromParentDictionary;
/**---------------------------------------------------------------------------------------
 * @name Filtering Elements
 * ---------------------------------------------------------------------------------------
 */
/** Returns an array of all child or descendant elements inside this array which match the specified XPath query
 *
 * @param xPathString A string containing an XPath 1.0-style query. See documentation here: http://www.w3schools.com/xpath/xpath_syntax.asp
 * @return The array of matching elements
 */
- (NSArray *)pd_filterWithXPath:(NSString *)xPathString;
/** Returns all child elements inside this array which have the specified name. Primarily used in XPath operations
 *
 * @param name The element name to search this array for
 * @return The array of matching children
 */
- (NSArray *)pd_childrenNamed:(NSString *)name;
/** Returns all child and descendant elements inside this array which have the specified name. Primarily used in XPath operations
 *
 * @param name The element name to search this array and its elements for
 * @return The array of matching descendants
 */
- (NSArray *)pd_descendantsNamed:(NSString *)name;
/**---------------------------------------------------------------------------------------
 * @name Converting to JSON and XML
 * ---------------------------------------------------------------------------------------
 */
/** Returns a JSON string representation of this array and the elements inside it, using the default key name that will be used for any inner values
 *
 * @return A JSON string representation of this array and the elements inside it
 */
- (NSString *)pd_jsonString;
/** Returns a JSON string representation of this array and the elements inside it, using the specified key name that will be used for any inner values
 *
 * @param keyForInnerValue The key name that will be used to map any inner values to JSON attributes.
 * @return A JSON string representation of this array and the elements inside it
 */
- (NSString *)pd_jsonStringWithInnerValueKey:(NSString *)keyForInnerValue;
/** Returns an XML string representation of this array and the elements inside it
 *
 * @return An XML string representation of this array and the elements inside it
 */
- (NSString *)pd_xmlString;
@end
|
// Problem description
// Complete an alphabetic name using a joystick. The name starts as all 'A's.
// e.g. a three-letter name starts as "AAA", four letters as "AAAA".
// Joystick directions:
// ▲ - next letter
// ▼ - previous letter (moving down from 'A' wraps to 'Z')
// ◀ - move the cursor left (from the first position it wraps to the last letter)
// ▶ - move the cursor right
// For example, "JAZ" can be made as follows:
// - At the first position, push up 9 times to make 'J'.
// - Push left once to move the cursor to the last letter.
// - At the last position, push down once to make 'Z'.
// So "JAZ" takes 11 moves, which is the minimum.
// Given the target name `name`, write solution() returning the minimum number
// of joystick moves.
// Constraints
// name consists only of uppercase letters.
// name has length 1 to 20.
// Examples
// name return
// "JEROEN" 56
// "JAN" 23
function solution(name) {
    // joyPad[char] = minimum up/down presses to reach `char` from 'A'
    const joyPad = {};
    "ABCDEFGHIJKLMN".split('')
        .forEach((char, idx) => joyPad[char] = idx);       // go up: A=0 .. N=13
    "OPQRSTUVWXYZ".split('')
        .reverse()
        .forEach((char, idx) => joyPad[char] = idx + 1);   // go down: Z=1 .. O=12
    // per-position vertical cost; horizontal cost is accumulated greedily below
    const state = Array.from(name).map((char) => joyPad[char]);
    let result = state.reduce((ac, c) => ac + c);
    let flag = true;   // true: consume positions from the front, false: from the back
    let idx = 0;       // cursor steps taken so far
    while (state.length > 0) {
        const current = flag ?
            state.shift() :
            state.pop();
        if (current !== 0) {
            if (state.length > 0) {
                // move the cursor onto the next remaining position
                result++;
                idx++;
            }
        } else {
            // hit a run of 'A's (zero vertical cost); measure its length
            let count = 1;
            for (; ;) {
                const pick = flag ? state[0] : state[state.length - 1];
                if (pick !== 0) break;
                flag ? state.shift() : state.pop();
                count++;
            }
            if (state.length === 0 && idx > 0) {
                // only 'A's remained — no need to walk across them
                break;
            }
            if (flag && count >= idx && idx !== 0) {
                // the 'A' run is at least as long as the path already walked:
                // presumably cheaper to double back and finish from the right
                // end — TODO confirm this greedy choice covers all cases
                flag = false;
                result += idx - 1;
                idx += idx;
                continue;
            }
            // walking straight across the run is cheaper
            result += count;
            idx += count;
        }
    }
    return result;
}
|
#!/usr/bin/env python
#
# Pucktada Treeratpituk (https://pucktada.github.io/)
# License: MIT
# 2017-05-01
#
# A recurrent neural network model (LSTM) for thai word segmentation
import logging
import re
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as layers
import tensorflow.contrib.rnn as rnn
#from . import char_dictionary
def load_settings(sess):
    """Read model hyper-parameters and inference tensors from a frozen graph.

    Assumes the graph was imported under the 'prefix' name scope (see
    load_graph). Returns (model_settings, model_vars) dicts.
    """
    model_settings = dict()
    model_vars = dict()
    graph = tf.get_default_graph()
    #for v in sess.graph.get_operations():
    #    print('P:', v.name)
    configs = ['cell_sizes', 'look_ahead', 'num_layers', 'input_classes', 'label_classes', 'learning_rate', 'l2_regularization', 'cell_type'] #, 'direction']
    for c in configs:
        # each setting was saved as a constant tensor named 'prefix/<name>:0'
        name = 'prefix/%s:0' % c
        model_settings[c] = sess.run(graph.get_tensor_by_name(name))
    # tensors needed to run inference
    model_vars['inputs'] = graph.get_tensor_by_name('prefix/placeholder/inputs:0')
    model_vars['fw_state'] = graph.get_tensor_by_name('prefix/placeholder/fw_state:0')
    model_vars['seq_lengths'] = graph.get_tensor_by_name('prefix/placeholder/seq_lengths:0')
    model_vars['keep_prob'] = graph.get_tensor_by_name('prefix/placeholder/keep_prob:0')
    model_vars['probs'] = graph.get_tensor_by_name('prefix/probs:0')
    return model_settings, model_vars
def load_graph(model_file):
    """ loading necessary configuration of the network from the meta file &
    the checkpoint file together with variables that are needed for the inferences
    """
    # We load the protobuf file from the disk and parse it to retrieve the
    # unserialized graph_def
    with tf.gfile.GFile(model_file, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
    # Then, we can use again a convenient built-in function to import a graph_def into the
    # current default Graph
    with tf.Graph().as_default() as graph:
        # all imported ops are namespaced under 'prefix' (see load_settings)
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name='prefix',
            op_dict=None,
            producer_op_list=None)
    return graph
def load_model2(sess, meta_file, checkpoint_file):
    """ loading necessary configuration of the network from the meta file &
    the checkpoint file together with variables that are needed for the inferences
    """
    saver = tf.train.import_meta_graph(meta_file, clear_devices=True)
    saver.restore(sess, checkpoint_file)
    # constants and placeholders were registered in collections by CkModel
    configs = tf.get_collection('configs')
    pvars = tf.get_collection('placeholders')
    model_settings = dict()
    for c in configs:
        name = c.name.split(':')[0]
        model_settings[name] = sess.run(c)
    model_vars = dict()
    for p in pvars:
        # p.name looks like '<scope>/<name>:<index>'; only <name> is used
        scope, name, _ = re.split('[:/]', p.name)
        model_vars[name] = p
    model_vars['probs'] = tf.get_collection('probs')[0]
    return model_settings, model_vars
class CkModel:
    """ cutkum model: LSTM recurrent neural network model """
    def __init__(self, model_settings):
        """Store hyper-parameters and register them as graph constants so a
        restored graph can recover them (see load_settings/load_model2)."""
        logging.info('...init WordSegmentor')
        self.num_layers = model_settings["num_layers"]
        self.cell_sizes = model_settings["cell_sizes"]  # list of cell_size, same length as num_layers
        self.total_cells = sum(self.cell_sizes)
        # starting offset of each layer's slice inside the packed state tensor
        self.cell_start = [sum(self.cell_sizes[:i]) for i in range(self.num_layers)]
        # keep number of look_ahead (not used in the training, but so that people know how to use the model)
        self.look_ahead = model_settings['look_ahead']
        #self.num_unroll = model_settings["num_unroll"]
        self.input_classes = model_settings['input_classes']
        self.label_classes = model_settings['label_classes']
        self.learning_rate = model_settings['learning_rate']
        self.l2_regularization = model_settings['l2_regularization']  # 0.1
        self.cell_type = model_settings['cell_type']
        #self.direction = model_settings['direction']
        #self.states = None
        tf.add_to_collection('configs', tf.constant(self.cell_sizes, name="cell_sizes"))
        tf.add_to_collection('configs', tf.constant(self.look_ahead, name="look_ahead"))
        #tf.add_to_collection('configs', tf.constant(self.num_unroll, name="num_unroll"))
        tf.add_to_collection('configs', tf.constant(self.num_layers, name="num_layers"))
        tf.add_to_collection('configs', tf.constant(self.input_classes, name="input_classes"))
        tf.add_to_collection('configs', tf.constant(self.label_classes, name="label_classes"))
        tf.add_to_collection('configs', tf.constant(self.learning_rate, name="learning_rate"))
        tf.add_to_collection('configs', tf.constant(self.l2_regularization, name="l2_regularization"))
        tf.add_to_collection('configs', tf.constant(self.cell_type, name="cell_type"))
        #tf.add_to_collection('configs', tf.constant(self.direction, name="direction"))
        self.global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
        self.increment_global_step_op = tf.assign(self.global_step, self.global_step+1)
    def _create_placeholders(self):
        """Create input/output/state placeholders under the 'placeholder' scope."""
        logging.info('...create placeholder')
        with tf.name_scope("placeholder"):
            # (time, batch, in)
            self.inputs = tf.placeholder(tf.float32, (None, None, self.input_classes), name="inputs")
            # (time, batch, out)
            self.outputs = tf.placeholder(tf.float32, (None, None, self.label_classes), name="outputs")
            # [batch]
            self.seq_lengths = tf.placeholder(tf.int32, [None], name="seq_lengths")
            # LSTM - [2, None, sum(cell_sizes)]  (c and h stacked)
            # GRU, RNN - [1, None, sum(cell_sizes)]
            if (self.cell_type == 'lstm'):
                self.fw_state = tf.placeholder(tf.float32, [2, None, self.total_cells], name="fw_state")
            else:  # gru, rnn
                self.fw_state = tf.placeholder(tf.float32, [1, None, self.total_cells], name="fw_state")
            self.keep_prob = tf.placeholder(tf.float32, name="keep_prob")
        # NOTE(review): fw_state is not added to the 'placeholders' collection,
        # although load_settings fetches it by name — confirm intended.
        tf.add_to_collection('placeholders', self.inputs)
        tf.add_to_collection('placeholders', self.outputs)
        tf.add_to_collection('placeholders', self.seq_lengths)
        tf.add_to_collection('placeholders', self.keep_prob)
    #
    def init_fw_states(self, batch_size):
        """Return an all-zero packed forward state for `batch_size` sequences."""
        if (self.cell_type == 'lstm'):
            return np.zeros(shape=[2, batch_size, self.total_cells])
        else:  # GRU, RNN
            return np.zeros(shape=[1, batch_size, self.total_cells])
    # state tuple to tensor
    def flatten_fw_states(self, fw_state_tuple):
        """Pack a per-layer RNN state tuple into a single ndarray."""
        if (self.cell_type == 'lstm'):
            # fw_state_tuple is tuple of LSTMStateTuple of lengths 'num_layers'
            # states = [2, batch_size, self.total_cells]
            c_tensor = np.concatenate([fw_state_tuple[i].c for i in range(self.num_layers)], axis=1)
            h_tensor = np.concatenate([fw_state_tuple[i].h for i in range(self.num_layers)], axis=1)
            state = np.stack([c_tensor, h_tensor])
        else:  # GRU, RNN
            # fw_state_tuple is tuple of ndarray of lengths 'num_layers'
            c_tensor = np.concatenate([fw_state_tuple[i] for i in range(self.num_layers)], axis=1)
            state = np.expand_dims(c_tensor, axis=0)
        return state  #.eval()
    # state tensor to tuple
    def unstack_fw_states(self, fw_state):
        """Split a packed state tensor back into the per-layer tuple form."""
        if (self.cell_type == 'lstm'):
            # states = [2, batch_size, self.total_cells]
            fw_state_tuple = tuple(
                [tf.contrib.rnn.LSTMStateTuple(
                    fw_state[0, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]],
                    fw_state[1, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]])
                 for i in range(self.num_layers)])
        else:  # GRU, RNN
            # states = [1, batch_size, self.total_cells]
            fw_state_tuple = tuple(
                [fw_state[0, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]]
                 for i in range(self.num_layers)])
        return fw_state_tuple
    def _inference(self):
        """Build the multi-layer RNN, the output projection and `probs`."""
        logging.info('...create inference')
        fw_state_tuple = self.unstack_fw_states(self.fw_state)
        fw_cells = list()
        for i in range(0, self.num_layers):
            if (self.cell_type == 'lstm'):
                cell = rnn.LSTMCell(num_units=self.cell_sizes[i], state_is_tuple=True)
            elif (self.cell_type == 'gru'):
                # change to GRU
                cell = rnn.GRUCell(num_units=self.cell_sizes[i])
            else:
                cell = rnn.BasicRNNCell(num_units=self.cell_sizes[i])
            cell = rnn.DropoutWrapper(cell, output_keep_prob=self.keep_prob)
            fw_cells.append(cell)
        self.fw_cells = rnn.MultiRNNCell(fw_cells, state_is_tuple=True)
        rnn_outputs, states = tf.nn.dynamic_rnn(
            self.fw_cells,
            self.inputs,
            initial_state=fw_state_tuple,
            sequence_length=self.seq_lengths,
            dtype=tf.float32, time_major=True)
        # project output from rnn output size to OUTPUT_SIZE. Sometimes it is worth adding
        # an extra layer here.
        self.projection = lambda x: layers.linear(x,
            num_outputs=self.label_classes, activation_fn=tf.nn.sigmoid)
        self.logits = tf.map_fn(self.projection, rnn_outputs, name="logits")
        self.probs = tf.nn.softmax(self.logits, name="probs")
        self.states = states
        tf.add_to_collection('probs', self.probs)
    def _create_loss(self):
        """Masked softmax cross-entropy loss plus L2 weight penalty, and accuracy."""
        logging.info('...create loss')
        with tf.name_scope("loss"):
            # shape=[Time * Batch, label_classes]
            outputs_flat = tf.reshape(self.outputs, [-1, self.label_classes])
            logits_flat = tf.reshape(self.logits, [-1, self.label_classes])
            # calculate the losses shape=[Time * Batch]
            # pre-tensorflow 1.5
            #losses = tf.nn.softmax_cross_entropy_with_logits_v2(labels=outputs_flat, logits=logits_flat)
            losses = tf.nn.softmax_cross_entropy_with_logits(labels=outputs_flat, logits=logits_flat)
            # create mask [Time * Batch] where 0: padded, 1: not-padded
            mask = outputs_flat[:,0]
            mask = tf.abs(tf.subtract(mask, tf.ones_like(mask)))
            # mask the losses
            masked_losses = mask * losses
            l2_reg = self.l2_regularization
            l2 = l2_reg * sum(tf.nn.l2_loss(tf_var) for tf_var in tf.trainable_variables()
                              if not ("noreg" in tf_var.name or "Bias" in tf_var.name))
            self.losses = masked_losses + l2
            self.num_entries = tf.reduce_sum(mask)
            self.mean_loss = tf.reduce_sum(masked_losses) / self.num_entries
            # accuracy
            correct_pred = tf.cast(tf.equal(tf.argmax(outputs_flat, 1), tf.argmax(logits_flat, 1)), tf.float32)
            mask_correct_pred = mask * correct_pred
            self.accuracy = tf.reduce_sum(mask_correct_pred) / self.num_entries
    def _create_optimizer(self):
        """Adam optimizer with global-norm gradient clipping."""
        logging.info('...create optimizer')
        with tf.name_scope("train"):
            #self.optimizer = tf.train.AdamOptimizer(self.learning_rate).minimize(self.mean_loss, global_step=self.global_step)
            max_gradient_norm = 1.0
            params = tf.trainable_variables()
            gradients = tf.gradients(self.mean_loss, params)
            clipped_gradients, norm = tf.clip_by_global_norm(gradients, max_gradient_norm)
            #self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate)\
            #    .apply_gradients(zip(clipped_gradients, params), global_step=self.global_step)
            self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate)\
                .apply_gradients(zip(clipped_gradients, params))
    def _create_summary(self):
        """Scalar summaries for TensorBoard (mean loss and accuracy)."""
        logging.info('...create summary')
        tf.summary.scalar("mean_loss", self.mean_loss)
        tf.summary.scalar("accuracy", self.accuracy)
        self.summary_op = tf.summary.merge_all()
    def build_graph(self):
        """Assemble the full graph: placeholders, inference, loss, optimizer, summaries."""
        self._create_placeholders()
        self._inference()
        self._create_loss()
        self._create_optimizer()
        self._create_summary()
if __name__ == '__main__':
    # Smoke test: build the model graph with a small configuration.
    print('create word segmentor model')
    # NOTE(review): CharDictionary is undefined here — the
    # 'from . import char_dictionary' import at the top is commented out, so
    # running this entry point raises NameError as-is. Confirm/restore import.
    char_dict = CharDictionary()
    # MODEL
    model_settings = dict()
    #model_settings["l2_regularisation"] = 0.0  # not using right now
    model_settings['num_unroll'] = 12
    model_settings['num_layers'] = 3
    # NOTE(review): CkModel.__init__ reads 'cell_sizes' (a list), 'look_ahead',
    # 'l2_regularization' and 'cell_type'; the keys set here ('cell_size',
    # 'num_unroll') do not match and would raise KeyError — verify.
    model_settings['cell_size'] = 64
    model_settings['input_classes'] = char_dict.num_char_classes() + 1
    model_settings['label_classes'] = char_dict.num_label_classes() + 1
    model_settings['learning_rate'] = 0.001  # Initial learning rate
    model = CkModel(model_settings)
    model.build_graph()
|
//Problem 5. Third bit
//Write a boolean expression for finding if the bit #3 (counting from 0) of a given integer.
//The bits are counted from right to left, starting from bit #0.
//The result of the expression should be either 1 or 0.
// Returns a report line for the bit at `position` (counting from 0, right to
// left) of `number`. Despite the name, this extracts a single *bit*.
// Fix: the original assigned `thirdBit` and `binaryNumber` without any
// declaration, creating implicit globals (a ReferenceError in strict mode).
function getByte(number, position) {
  // Shift the requested bit down to position 0 and mask off everything else.
  const thirdBit = (number >> position) & 1;
  const binaryNumber = number.toString(2);
  return ('number: ' + number + '\n' + 'binary representation: ' + binaryNumber + '\n' + '#3 bit: ' + thirdBit);
}
// Exercise getByte on the same sample values, in the same order.
[5, 8, 0, 15, 5343, 62241].forEach(function (value) {
  console.log(getByte(value, 3));
});
|
import ChainedSelectListField from './chained-select-list-field';
// Factory for a chained-select-list field seeded with a small
// country -> make -> model -> color hierarchy. Caller-supplied props
// override the defaults.
const createCarsField = (props) => {
  const defaults = {
    name: 'cars',
    label: 'Cars',
    required: true,
    blankString: 'None',
    // block: true,
    // fullWidth: true,
    options: [
      { value: 1, parentValue: null, label: 'Germany' },
      { value: 2, parentValue: null, label: 'USA' },
      { value: 3, parentValue: 1, label: 'BMW' },
      { value: 4, parentValue: 1, label: 'Mercedes' },
      { value: 5, parentValue: 2, label: 'Tesla' },
      { value: 6, parentValue: 3, label: 'i3' },
      { value: 7, parentValue: 3, label: 'i8' },
      { value: 8, parentValue: 4, label: 'S-Class' },
      { value: 9, parentValue: 5, label: 'Model S' },
      { value: 10, parentValue: 9, label: 'Red' },
      { value: 11, parentValue: 9, label: 'Blue' }
    ]
  };
  return new ChainedSelectListField({ ...defaults, ...props });
};
it('should set and get', () => {
  const field = createCarsField();

  // A cleared field reports a null value.
  field.clearValue();
  expect(field.getValue()).toBe(null);

  // USA - Tesla - Model S - Red
  // Germany - BMW - i3
  field.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  expect(field.getValue()).toEqual([[2, 5, 9, 10], [1, 3, 6]]);

  // Germany - BMW - i8
  field.setValue([[1, 3, 7]]);
  expect(field.getValue()).toEqual([[1, 3, 7]]);

  // Clearing resets back to null.
  field.clearValue();
  expect(field.getValue()).toBe(null);
});
it('should remove fields when clearing', () => {
  const field = createCarsField();

  // Clearing an empty field leaves the single blank entry.
  field.clearValue();
  expect(field._fields.length()).toEqual(1);

  // Clearing a populated field collapses back to one blank entry.
  field.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  field.clearValue();
  expect(field._fields.length()).toEqual(1);
});
it('should add field when option selected', () => {
  const cars = createCarsField();

  // Selecting a value in the first entry should spawn a trailing blank
  // entry, bringing the count to 2.
  cars._getField(0).setValue([2]);
  expect(cars._fields.length()).toEqual(2);

  cars.clearValue();

  // Simulate user selecting 1st option -- this exposed a bug at one point
  cars
    ._getField(0)
    ._getField(0)
    .setValue(2);
  expect(cars._fields.length()).toEqual(2);
});
it('should not create more than max size fields', () => {
  const field = createCarsField({ maxSize: 2 });

  field._getField(0).setValue([2, 5, 9, 10]);
  field._getField(1).setValue([1, 3, 6]);

  // Both slots are in use; no extra blank entry beyond maxSize appears.
  expect(field._fields.length()).toEqual(2);
});
it('should not add a field when a field is deleted and not reached max size', () => {
  const cars = createCarsField();
  cars.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  // Two populated entries plus one trailing blank entry.
  expect(cars._fields.length()).toEqual(3);
  cars._getField(0).emit('delete');
  // Below max size, deletion must not spawn a replacement blank entry.
  expect(cars._fields.length()).toEqual(2);
});
it('should add a field when a field is deleted and reached max size', () => {
  const cars = createCarsField({ maxSize: 2 });
  cars.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  expect(cars._fields.length()).toEqual(2);
  cars._getField(0).emit('delete');
  // At max size a deletion frees a slot, so a blank entry is added back
  // and the count stays at 2.
  expect(cars._fields.length()).toEqual(2);
  // TODO: also test when delete last item
});
it('should allow last field to be deleted if reached max size', () => {
  const cars = createCarsField({ maxSize: 2 });
  cars.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  expect(cars._getField(1).isBlank()).toEqual(false);
  cars.clearValue();
  cars
    ._getField(0)
    ._getField(0)
    .setValue(2); // select 1st option
  // NOTE(review): index 3 (rather than 1) appears to skip the chained
  // selects created by the first selection -- confirm this matches
  // ChainedSelectListField's internal field-allocation scheme.
  cars
    ._getField(3)
    ._getField(0)
    .setValue(1); // select 2nd option
  expect(cars._getField(3).isBlank()).toEqual(false);
});
it('should clone', () => {
  // Cloning an empty field must allocate fresh child fields, not share them.
  const original = createCarsField();
  const emptyClone = original.clone();
  expect(emptyClone._fields.first()).not.toEqual(original._fields.first());

  // After a set, the clone must carry the value in its newly created fields
  // while the source keeps its own copy.
  const selection = [[2, 5, 9, 10], [1, 3, 6]];
  original.setValue(selection);
  const populatedClone = original.clone();
  expect(original.getValue()).toEqual(selection);
  expect(populatedClone.getValue()).toEqual(selection);
});
it('should not report error after valid set', () => {
  const cars = createCarsField();
  // USA - Tesla - Model S - Red
  // Germany - BMW - i3
  cars.setValue([[2, 5, 9, 10], [1, 3, 6]]);
  cars.validate();
  // Two fully valid chains must not leave the field in an error state.
  expect(cars.hasErr()).toEqual(false);
});
|
webpackJsonp([0xf22e95fad48],{836:function(e,t){e.exports={data:{post:{id:"/Users/Isaac/website/content/posts/2018-04-02--customize-personal-blog-starter/index.md absPath of file >>> MarkdownRemark",html:'<p>The <a href="/gatsby-starter-personal-blog/">starter</a> uses a theme object so base customization is really easy.</p>\n<p>Find the <code class="language-text">theme.js</code> file.</p>\n<div class="gatsby-highlight" data-language="text">\n <pre class="language-text"><code class="language-text">root\n ├── src\n │ ├── styles\n │ │ ├── colors.js\n │ │ ├── globals.js\n │ │ └── theme.js</code></pre>\n </div>\n<p>…</p>\n<h4>Front-end web development</h4>\n<p>To read the whole instruction article - <a href="https://dev.greglobinski.com/customize-personal-blog-starter/">How to customize the personalBlog starter’s appearance</a> - you have to move to <a href="https://dev.greglobinski.com">Front-end web development with Greg</a> website.</p>\n<p>…</p>\n<p>Vivamus vel justo in leo laoreet ullamcorper non vitae lorem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Proin bibendum ullamcorper rutrum.</p>\n<p>\n <a\n class="gatsby-resp-image-link"\n href="/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-699d0.jpg"\n style="display: block"\n target="_blank"\n rel="noopener"\n >\n \n <span\n class="gatsby-resp-image-wrapper"\n style="position: relative; display: block; ; max-width: 800px; margin-left: auto; margin-right: auto;"\n >\n <span\n class="gatsby-resp-image-background-image"\n style="padding-bottom: 46.08233731739708%; position: relative; bottom: 0; left: 0; background-image: url(\'data:image/jpeg;base64,/9j/2wBDABALDA4MChAODQ4SERATGCgaGBYWGDEjJR0oOjM9PDkzODdASFxOQERXRTc4UG1RV19iZ2hnPk1xeXBkeFxlZ2P/2wBDARESEhgVGC8aGi9jQjhCY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2P/wgARCAAJABQDASIAAhEBAxEB/8QAGAAAAwEBAAAAAAAAAAAAAAAAAAIEAQX/xAAVAQEBAAAAAAAAAAAAAAAAAAAAAf/aAAwDAQACEAMQAAAByOzlwwgf/8QAGxAAAgEFAAAAAAAAAAAAAAAAAQIxAxIiM0L/2gAIAQEAAQUCVLQXyM1tXTT/AP/EABQRAQAAAAAAAAAAAAAAAAAAABD/2gAIAQMBAT8BP//EABURAQEAAAAAAAAAAAAAAAAAAAEQ/9oACAECAQE/ASf/xAAYEAACAwAAAAAAAAAAAAAAAAAAASAhMf/aAAgBAQAGPwK2bH//xAAZEAACAwEAAAAAAAAAAAAAAAAAARAhMXH/2gAIAQEAAT8hS99GVdIuNx4g/9oADAMBAAIAAwAAABBrz//EABURAQEAAAAAAAAAAAAAAAAAAAEQ/9oACAEDAQE/EGf/xAAWEQEBAQAAAAAAAAAAAAAAAAAAETH/2gAIAQIBAT8Qwj//xAAaEAEAAgMBAAAAAAAAAAAAAAABACERMaGx/9oACAEBAAE/ECpIXoIWLNWxKCX252Hs5ybZ/9k=\'); background-size: cover; display: block;"\n >\n <img\n class="gatsby-resp-image-image"\n style="width: 100%; height: 100%; margin: 0; vertical-align: middle; position: absolute; top: 0; left: 0; box-shadow: inset 0px 0px 0px 400px transparent;"\n alt="unsplash.com"\n title=""\n src="/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-78f2b.jpg"\n srcset="/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-dce19.jpg 200w,\n/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-c1413.jpg 400w,\n/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-78f2b.jpg 
800w,\n/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-ab4c4.jpg 1200w,\n/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-699d0.jpg 1506w"\n sizes="(max-width: 800px) 100vw, 800px"\n />\n </span>\n </span>\n \n </a>\n </p>\n<p>Proin suscipit luctus orci placerat fringilla. Donec hendrerit laoreet risus eget adipiscing. Suspendisse in urna ligula, a volutpat mauris. Sed enim mi, bibendum eu pulvinar vel, sodales vitae dui. Pellentesque sed sapien lorem, at lacinia urna. In hac habitasse platea dictumst.</p>\n<p>Fusce a metus eu diam varius congue nec nec sapien. Nunc convallis accumsan justo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec malesuada vehicula lectus, viverra sodales ipsum gravida nec. Integer gravida nisi ut magna mollis molestie. Nullam pharetra accumsan sagittis. Proin tristique rhoncus orci, eget vulputate nisi sollicitudin et. Quisque lacus augue, mollis non mollis et, ullamcorper in purus. Morbi et sem orci.</p>\n<h3>Vestibulum orci tortor, sollicitudin ac euismod non, placerat ac augue.</h3>\n<p>Praesent accumsan odio in ante ullamcorper id pellentesque mauris rhoncus. Duis vitae neque dolor. Duis sed purus at eros bibendum cursus nec a nulla. Donec turpis quam, ultricies id pretium sit amet, gravida eget leo.</p>\n<p>Proin ornare ligula eu tellus tempus elementum. . Vestibulum non nibh risus, a scelerisque purus. Ut vel arcu ac tortor adipiscing hendrerit vel sed massa. Fusce sem libero, lacinia vulputate interdum non, porttitor non quam. Aliquam sed felis ligula. Duis non nulla magna.</p>\n<h3>Aenean bibendum iaculis mi, nec blandit lacus interdum vitae</h3>\n<p>Nullam eros mi, mollis in sollicitudin non, tincidunt sed enim. Sed et felis metus, rhoncus ornare nibh. Ut at magna leo. Suspendisse egestas est ac dolor imperdiet pretium. 
Lorem ipsum dolor sit amet, consectetur adipiscing elit</p>\n<h3>Donec hendrerit laoreet risus eget adipiscing.</h3>\n<p>Proin suscipit luctus orci placerat fringilla. Suspendisse in urna ligula, a volutpat mauris. Sed enim mi, bibendum eu pulvinar vel, sodales vitae dui. Pellentesque sed sapien lorem, at lacinia urna. In hac habitasse platea dictumst. Vivamus vel justo in leo laoreet ullamcorper non vitae lorem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Proin bibendum ullamcorper rutrum.</p>\n<p>Fusce a metus eu diam varius congue nec nec sapien. Nunc convallis accumsan justo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec malesuada vehicula lectus, viverra sodales ipsum gravida nec. Integer gravida nisi ut magna mollis molestie. Nullam pharetra accumsan sagittis. Proin tristique rhoncus orci, eget vulputate nisi sollicitudin et. Quisque lacus augue, mollis non mollis et, ullamcorper in purus. Morbi et sem orci.</p>',htmlAst:{type:"root",children:[{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"The "},{type:"element",tagName:"a",properties:{href:"/gatsby-starter-personal-blog/"},children:[{type:"text",value:"starter"}]},{type:"text",value:" uses a theme object so base customization is really easy."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Find the "},{type:"element",tagName:"code",properties:{className:["language-text"]},children:[{type:"text",value:"theme.js"}]},{type:"text",value:" file."}]},{type:"text",value:"\n"},{type:"element",tagName:"div",properties:{className:["gatsby-highlight"],dataLanguage:"text"},children:[{type:"text",value:"\n "},{type:"element",tagName:"pre",properties:{className:["language-text"]},children:[{type:"element",tagName:"code",properties:{className:["language-text"]},children:[{type:"text",value:"root\n ├── src\n │ ├── styles\n │ │ ├── colors.js\n │ │ ├── globals.js\n │ │ └── 
theme.js"}]}]},{type:"text",value:"\n "}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"…"}]},{type:"text",value:"\n"},{type:"element",tagName:"h4",properties:{},children:[{type:"text",value:"Front-end web development"}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"To read the whole instruction article - "},{type:"element",tagName:"a",properties:{href:"https://dev.greglobinski.com/customize-personal-blog-starter/"},children:[{type:"text",value:"How to customize the personalBlog starter’s appearance"}]},{type:"text",value:" - you have to move to "},{type:"element",tagName:"a",properties:{href:"https://dev.greglobinski.com"},children:[{type:"text",value:"Front-end web development with Greg"}]},{type:"text",value:" website."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"…"}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Vivamus vel justo in leo laoreet ullamcorper non vitae lorem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Proin bibendum ullamcorper rutrum."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"\n "},{type:"element",tagName:"a",properties:{className:["gatsby-resp-image-link"],href:"/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-699d0.jpg",style:"display: block",target:"_blank",rel:["noopener"]},children:[{type:"text",value:"\n \n "},{type:"element",tagName:"span",properties:{className:["gatsby-resp-image-wrapper"],style:"position: relative; display: block; ; max-width: 800px; margin-left: auto; margin-right: auto;"},children:[{type:"text",value:"\n "},{type:"element",tagName:"span",properties:{className:["gatsby-resp-image-background-image"],style:"padding-bottom: 46.08233731739708%; position: relative; bottom: 0; left: 0; background-image: url('data:image/jpeg;base64,/9j/2wBDABALDA4MChAODQ4SERATGCgaGBYWGDEjJR0oOjM9PDkzODdASFxOQERXRTc4UG1RV19iZ2hnPk1xeXBkeFxlZ2P/2wBDARESEhgVGC8aGi9jQjhCY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2P/wgARCAAJABQDASIAAhEBAxEB/8QAGAAAAwEBAAAAAAAAAAAAAAAAAAIEAQX/xAAVAQEBAAAAAAAAAAAAAAAAAAAAAf/aAAwDAQACEAMQAAAByOzlwwgf/8QAGxAAAgEFAAAAAAAAAAAAAAAAAQIxAxIiM0L/2gAIAQEAAQUCVLQXyM1tXTT/AP/EABQRAQAAAAAAAAAAAAAAAAAAABD/2gAIAQMBAT8BP//EABURAQEAAAAAAAAAAAAAAAAAAAEQ/9oACAECAQE/ASf/xAAYEAACAwAAAAAAAAAAAAAAAAAAASAhMf/aAAgBAQAGPwK2bH//xAAZEAACAwEAAAAAAAAAAAAAAAAAARAhMXH/2gAIAQEAAT8hS99GVdIuNx4g/9oADAMBAAIAAwAAABBrz//EABURAQEAAAAAAAAAAAAAAAAAAAEQ/9oACAEDAQE/EGf/xAAWEQEBAQAAAAAAAAAAAAAAAAAAETH/2gAIAQIBAT8Qwj//xAAaEAEAAgMBAAAAAAAAAAAAAAABACERMaGx/9oACAEBAAE/ECpIXoIWLNWxKCX252Hs5ybZ/9k='); background-size: cover; display: block;"},children:[{type:"text",value:"\n "},{type:"element",tagName:"img",properties:{className:["gatsby-resp-image-image"],style:"width: 100%; height: 100%; margin: 0; vertical-align: middle; position: absolute; top: 0; left: 0; box-shadow: inset 0px 0px 0px 400px 
transparent;",alt:"unsplash.com",title:"",src:"/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-78f2b.jpg",srcSet:["/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-dce19.jpg 200w","/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-c1413.jpg 400w","/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-78f2b.jpg 800w","/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-ab4c4.jpg 1200w","/static/photo-1463852247062-1bbca38f7805-a3b23c58902850ed9b4fa458aa658292-699d0.jpg 1506w"],sizes:["(max-width:","800px)","100vw,","800px"]},children:[]},{type:"text",value:"\n "}]},{type:"text",value:"\n "}]},{type:"text",value:"\n \n "}]},{type:"text",value:"\n "}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Proin suscipit luctus orci placerat fringilla. Donec hendrerit laoreet risus eget adipiscing. Suspendisse in urna ligula, a volutpat mauris. Sed enim mi, bibendum eu pulvinar vel, sodales vitae dui. Pellentesque sed sapien lorem, at lacinia urna. In hac habitasse platea dictumst."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Fusce a metus eu diam varius congue nec nec sapien. Nunc convallis accumsan justo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec malesuada vehicula lectus, viverra sodales ipsum gravida nec. Integer gravida nisi ut magna mollis molestie. Nullam pharetra accumsan sagittis. Proin tristique rhoncus orci, eget vulputate nisi sollicitudin et. Quisque lacus augue, mollis non mollis et, ullamcorper in purus. 
Morbi et sem orci."}]},{type:"text",value:"\n"},{type:"element",tagName:"h3",properties:{},children:[{type:"text",value:"Vestibulum orci tortor, sollicitudin ac euismod non, placerat ac augue."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Praesent accumsan odio in ante ullamcorper id pellentesque mauris rhoncus. Duis vitae neque dolor. Duis sed purus at eros bibendum cursus nec a nulla. Donec turpis quam, ultricies id pretium sit amet, gravida eget leo."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Proin ornare ligula eu tellus tempus elementum. . Vestibulum non nibh risus, a scelerisque purus. Ut vel arcu ac tortor adipiscing hendrerit vel sed massa. Fusce sem libero, lacinia vulputate interdum non, porttitor non quam. Aliquam sed felis ligula. Duis non nulla magna."}]},{type:"text",value:"\n"},{type:"element",tagName:"h3",properties:{},children:[{type:"text",value:"Aenean bibendum iaculis mi, nec blandit lacus interdum vitae"}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Nullam eros mi, mollis in sollicitudin non, tincidunt sed enim. Sed et felis metus, rhoncus ornare nibh. Ut at magna leo. Suspendisse egestas est ac dolor imperdiet pretium. Lorem ipsum dolor sit amet, consectetur adipiscing elit"}]},{type:"text",value:"\n"},{type:"element",tagName:"h3",properties:{},children:[{type:"text",value:"Donec hendrerit laoreet risus eget adipiscing."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Proin suscipit luctus orci placerat fringilla. Suspendisse in urna ligula, a volutpat mauris. Sed enim mi, bibendum eu pulvinar vel, sodales vitae dui. Pellentesque sed sapien lorem, at lacinia urna. In hac habitasse platea dictumst. Vivamus vel justo in leo laoreet ullamcorper non vitae lorem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Proin bibendum ullamcorper rutrum."}]},{type:"text",value:"\n"},{type:"element",tagName:"p",properties:{},children:[{type:"text",value:"Fusce a metus eu diam varius congue nec nec sapien. Nunc convallis accumsan justo. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Donec malesuada vehicula lectus, viverra sodales ipsum gravida nec. Integer gravida nisi ut magna mollis molestie. Nullam pharetra accumsan sagittis. Proin tristique rhoncus orci, eget vulputate nisi sollicitudin et. Quisque lacus augue, mollis non mollis et, ullamcorper in purus. Morbi et sem orci."}]}],data:{quirksMode:!1}},fields:{slug:"/customize-personal-blog-starter/",prefix:"2018-04-02"},frontmatter:{title:"How to customize the PersonalBlog starter's appearance",subTitle:"GatsbyJS starter instruction",cover:{childImageSharp:{resize:{src:"/static/photo-1463852247062-1bbca38f7805-cover-e7a129318b43a8a58bbc8fe8219cda1d-ada8c.jpg"}}}}},author:{id:"/Users/Isaac/website/content/parts/author.md absPath of file >>> MarkdownRemark",html:'<p><strong>Mr. Gatsby</strong> Proin ornare ligula eu tellus tempus elementum. Aenean <a href="/">bibendum</a> iaculis mi, nec blandit lacus interdum vitae. Vestibulum non nibh risus, a scelerisque purus. :hearts:</p>'},footnote:{id:"/Users/Isaac/website/content/parts/footnote.md absPath of file >>> MarkdownRemark",html:'<ul>\n<li>this is a demo site of the <a href="https://github.com/greglobinski/gatsby-starter-personal-blog">gatsby-starter-personal-blog</a></li>\n<li>built by <a href="https://www.greglobinski.com">greg lobinski</a></li>\n<li>GatsbyJS, ReactJs, CSS in JS - <a href="https://dev.greglobinski.com">Front-end web development with Greg</a></li>\n<li>delivered by <a href="https://www.netlify.com/">Netlify</a></li>\n<li>photos by <a href="https://unsplash.com">unsplash.com</a></li>\n</ul>'},site:{siteMetadata:{facebook:{appId:""}}}},pathContext:{slug:"/customize-personal-blog-starter/"}}}});
//# sourceMappingURL=path---customize-personal-blog-starter-5b1e4ff0ce014d6c660f.js.map
|
import datetime
import unittest
import pandas as pd
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import f1_score
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.utils.estimator_checks import check_estimator
from ITMO_FS.ensembles import WeightBased
from ITMO_FS.filters import *
from ITMO_FS.hybrid.Melif import Melif
from ITMO_FS.utils import f1_scorer
class MyTestCase(unittest.TestCase):
    """Smoke and timing tests for the MeLiF hybrid feature selector."""

    # Shared synthetic datasets, built once when the class is defined.
    wide_classification = make_classification(n_features=2000, n_informative=100, n_redundant=500)
    tall_classification = make_classification(n_samples=50000, n_features=100, n_informative=23, n_redundant=30)
    wide_regression = make_regression(n_features=2000, n_informative=100)
    tall_regression = make_regression(n_samples=50000, n_features=200, n_informative=50)

    # Univariate filters combined by the weight-based ensemble below.
    filters = [UnivariateFilter(gini_index),
               UnivariateFilter(pearson_corr),
               UnivariateFilter(spearman_corr)]
    estimator = SVC(random_state=42)
    ensemble = WeightBased(filters, cutting_rule=select_k_best(50))
    melif = Melif(estimator, select_k_best(1500), ensemble, scorer=f1_score, verbose=True)

    def test_wide(self):
        # Fit MeLiF on the wide (many-feature) dataset; prints test F1.
        data, target = self.wide_classification[0], self.wide_classification[1]
        train_data, test_data, train_target, test_target = train_test_split(data, target)
        self.melif.fit(train_data, train_target)
        print(f1_score(test_target, self.melif.predict(test_data)))

    def test_wide_pd(self):
        # Same as test_wide, but feeding pandas DataFrames instead of arrays.
        data, target = pd.DataFrame(self.wide_classification[0]), pd.DataFrame(self.wide_classification[1])
        train_data, test_data, train_target, test_target = train_test_split(data, target)
        self.melif.fit(train_data, train_target)
        print(f1_score(test_target, self.melif.predict(test_data)))

    def test_R(self):
        # NOTE(review): hardcoded absolute Windows path -- this test can only
        # run on the original author's machine; consider a bundled fixture.
        data = pd.read_csv('C:\\Users\\SomaC\\PycharmProjects\\machinka\\mlrcheck\\boston_corrected.csv')
        target = 'class'
        features = data.loc[:, data.columns != 'b'].columns
        # data[target]=data[target].apply(lambda x: 0 if x<=0 else 1)
        ks = [int(i * 500) for i in [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]]
        print()
        for j in ks:
            # Time each univariate measure when keeping the j best features;
            # output is formatted as markdown-style table cells.
            print('|' + str(j) + '|')
            start = datetime.datetime.now()
            f = UnivariateFilter(pearson_corr, select_k_best(j))
            f.fit(data[features], data[target])
            print('|', datetime.datetime.now() - start, '|')
            start = datetime.datetime.now()
            f = UnivariateFilter(spearman_corr, select_k_best(j))
            f.fit(data[features], data[target])
            print('|', datetime.datetime.now() - start, '|')
            # start = datetime.datetime.now()
            # f = UnivariateFilter(chi2_measure, select_k_best(j))
            # f.fit(data[features], data[target])
            # print('|', datetime.datetime.now() - start, '|')
            start = datetime.datetime.now()
            f = UnivariateFilter(information_gain, select_k_best(j))
            f.fit(data[features], data[target])
            print('|', datetime.datetime.now() - start, '|')

    def test_est(self):
        # Verify Melif conforms to the scikit-learn estimator contract.
        melif = Melif(self.estimator, select_k_best(2), self.ensemble, scorer=f1_scorer)
        check_estimator(melif)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
// AMD module: configuration ("manifest") for the 'space' episode.
// The exported factory is re-evaluated per call so layout values can react
// to the current device pixel ratio.
define('app/episodes/space/manifest',
    [
        'app/i18/_', 'app/core/_'
    ],
    function(i18, core) {
        return function() {
            // Device pixel ratio; most pixel coordinates below scale by it.
            var r = core.helperApp.pixelRatio();
            return {
                splashscreen: true,
                ball: {
                    tail: true,
                    explosion: true
                },
                paddle: {
                    // Minimum delay between shots, in milliseconds.
                    bullet_rate_timeout: 250,
                    left_barrel_rel_pos: {
                        x: -6,
                        y: -5
                    },
                    right_barrel_rel_pos: {
                        x: 6,
                        y: -5
                    },
                    // Bullet spawn offsets differ for hi-DPI (r >= 2) assets.
                    left_bullet_rel_pos: {
                        x: r >= 2 ? 24 : 9,
                        y: 0
                    },
                    right_bullet_rel_pos: {
                        x: r >= 2 ? 1 : 3,
                        y: 0
                    },
                    sizes: {
                        shrink: 3,
                        normal: 8,
                        grow: 18
                    }
                },
                grid: {
                    width: 21,
                    height: 19
                },
                cell: {
                    width: 38,
                    height: 21
                },
                bonus: {
                    animation: 'random_direction',
                    rotate_speed: 0
                },
                bonus_catch_label: {
                    animation: 'up',
                    font: (20 * r) + 'px \'Orion-Pax\', sans-serif',
                    color: '#ffd700'
                },
                facade: {
                    background_alpha: 0.6,
                    background_color: '#000',
                    background_src: null,
                    text_style: (37 * r) + 'px \'Quantico\', sans-serif',
                    text_color: '#e0e396',
                    shadow_color: '#fff'
                },
                dashboard: {
                    width: 417,
                    height: 214,
                    buttons: [
                        {type: 'Bitmap', x: 12 * r, y: 113 * r, args: 'c-btn-user', event: 'clickUser'},
                        {type: 'Bitmap', x: 21 * r, y: 144 * r, args: 'c-btn-play', event: 'clickPlay'},
                        {type: 'Bitmap', x: 45 * r, y: 169 * r, args: 'c-btn-options', event: 'clickOptions'},
                        {type: 'Bitmap', x: 73 * r, y: 184 * r, args: 'c-btn-help', event: 'clickHelp'},
                        // The nested ternaries nudge text vertically for
                        // Firefox, with a further tweak on Windows.
                        {type: 'Text', x: 223 * r, y: core.helperBrowser.name == 'firefox' ? (core.helperBrowser.platform.name == 'win' ? 42 : 48) : (46 * r),
                            args: [i18._('round') + ':', (15 * r) + 'px \'Orion-Pax\', sans-serif', '#ccc'], textAlign: 'left'}
                    ],
                    auth: {
                        x: 99 * r,
                        y: 338 * r,
                        textAlign: 'center'
                    },
                    lives: {
                        type: 'bitmap',
                        x: 370 * r,
                        y: 21 * r,
                        alpha: 0.8
                    },
                    round: {
                        textPattern: '{current}/{max}',
                        x: 373 * r,
                        y: core.helperBrowser.name == 'firefox' ? (core.helperBrowser.platform.name == 'win' ? 42 : 48) : (46 * r),
                        textAlign: 'right',
                        font: (15 * r) + 'px \'Orion-Pax\', sans-serif',
                        color: '#ccc'
                    },
                    score: {
                        x: 115 * r,
                        y: core.helperBrowser.name == 'firefox' ? (core.helperBrowser.platform.name == 'win' ? 85 : 93) : (91 * r),
                        textAlign: 'center',
                        font: (20 * r) + 'px \'Orion-Pax\', sans-serif',
                        color: '#51f2f1'
                    },
                    speed: {
                        type: 'rotate',
                        x: 108 * r,
                        y: 40 * r
                    },
                    time: {
                        textPattern: '{h} : {m} : {s}',
                        x: 383 * r,
                        // NOTE(review): the win/mac pair (91 : 84) is ordered
                        // opposite to the other Firefox tweaks above --
                        // confirm this is intentional, not a transposition.
                        y: core.helperBrowser.name == 'firefox' ? (core.helperBrowser.platform.name == 'win' ? 91 : 84) : (92 * r),
                        textAlign: 'right',
                        font: (29 * r) + 'px \'segmentled\', sans-serif',
                        color: '#51f2f1'
                    }
                }
            };
        };
    });
|
# Copyright 2015 Ufora Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import ufora.test.PerformanceTestReporter as PerformanceTestReporter
import sys
class StringTestCases(object):
"""Test cases for pyfora strings"""
def test_string_indexing(self):
def f():
a = "abc"
return (a[0], a[1], a[2], a[-1], a[-2])
self.equivalentEvaluationTest(f)
def test_strings_with_weird_characters(self):
x = "\xb0"
def f():
return (x,"\xb0")
self.equivalentEvaluationTest(f)
def test_large_string_indexing_perf(self):
def f(ct, passCt):
x = "asdfasdf" * (ct / 8)
res = 0
for _ in xrange(passCt):
for ix in xrange(len(x)):
res = res + len(x[ix])
return res
self.evaluateWithExecutor(f, 1000000, 1)
self.evaluateWithExecutor(f, 10000, 1)
@PerformanceTestReporter.PerfTest("pyfora.string_indexing.large_string")
def test1():
self.evaluateWithExecutor(f, 1000000, 100)
@PerformanceTestReporter.PerfTest("pyfora.string_indexing.small_string")
def test2():
self.evaluateWithExecutor(f, 10000, 10000)
test1()
test2()
def test_large_string_parsing_perf(self):
def f(ct, passCt):
x = "1,2,3,4," * ct
res = 0
for _ in xrange(passCt):
ix = 0
while ix < len(x):
res = res + int(x[ix:ix+1]) + 12341234
ix = ix + 2
return res
self.evaluateWithExecutor(f, 1000000, 1)
with PerformanceTestReporter.RecordAsPerfTest("pyfora.string_to_int"):
self.evaluateWithExecutor(f, 1000000, 10)
def test_string_slicing(self):
def f(ct, passCt,chars):
x = "asdfasdf" * (ct / 8)
res = 0
for _ in xrange(passCt):
for ix in xrange(len(x)):
res = res + len(x[ix:ix+chars])
return res
self.evaluateWithExecutor(f, 1000000, 1, 2)
self.evaluateWithExecutor(f, 10000, 1, 2)
def runTest(func, name):
PerformanceTestReporter.PerfTest(name)(func)()
runTest(lambda: self.evaluateWithExecutor(f, 1000000, 10, 2), "pyfora.string_slicing_10mm.2_char_large_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 1000000, 10, 200), "pyfora.string_slicing_10mm.200_char_large_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 10000, 1000, 2), "pyfora.string_slicing_10mm.2_char_small_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 10000, 1000, 200), "pyfora.string_slicing_10mm.200_char_small_string.pyfora")
sys.setcheckinterval(100000)
runTest(lambda: f(1000000, 10, 2), "pyfora.string_slicing_10mm.2_char_large_string.native")
runTest(lambda: f(1000000, 10, 200), "pyfora.string_slicing_10mm.200_char_large_string.native")
runTest(lambda: f(10000, 1000, 2), "pyfora.string_slicing_10mm.2_char_small_string.native")
runTest(lambda: f(10000, 1000, 200), "pyfora.string_slicing_10mm.200_char_small_string.native")
sys.setcheckinterval(100)
def test_string_slicing_into_vector(self):
def testFunction(ct, passCt,chars):
x = "asdfasdf" * (ct / 8)
res = 0
for _ in xrange(passCt):
v = [x[ix*chars:ix*chars+chars] for ix in xrange(len(x) / chars)]
for e in v:
res = res + len(e)
return res
f = testFunction
self.evaluateWithExecutor(f, 1000000, 1, 2)
self.evaluateWithExecutor(f, 10000, 1, 2)
def runTest(func, name):
PerformanceTestReporter.PerfTest(name)(func)()
runTest(lambda: self.evaluateWithExecutor(f, 1000000, 10, 2), "pyfora.string_slicing_into_vector_10mm.2_char_large_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 1000000, 1000, 200), "pyfora.string_slicing_into_vector_10mm.200_char_large_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 10000, 1000, 2), "pyfora.string_slicing_into_vector_10mm.2_char_small_string.pyfora")
runTest(lambda: self.evaluateWithExecutor(f, 10000, 100000, 200), "pyfora.string_slicing_into_vector_10mm.200_char_small_string.pyfora")
sys.setcheckinterval(100000)
runTest(lambda: f(1000000, 10, 2), "pyfora.string_slicing_into_vector_10mm.2_char_large_string.native")
runTest(lambda: f(1000000, 1000, 200), "pyfora.string_slicing_into_vector_10mm.200_char_large_string.native")
runTest(lambda: f(10000, 1000, 2), "pyfora.string_slicing_into_vector_10mm.2_char_small_string.native")
runTest(lambda: f(10000, 100000, 200), "pyfora.string_slicing_into_vector_10mm.200_char_small_string.native")
sys.setcheckinterval(100)
def test_string_splitlines(self):
#test a wide variety of strings with combinations of different separators
stringsToTest = []
for char1 in ["","a"]:
stringsToTest.append(char1)
for sep1 in ["\n","\r","\n\r", "\r\n", "\r\r", "\n\n", "\r\n\r"]:
stringsToTest.append(char1 + sep1)
for char2 in ["","b"]:
stringsToTest.append(char1 + sep1 + char2)
for sep2 in ["\n","\r","\n\r", "\r\n", "\r\r", "\n\n", "\r\n\r"]:
stringsToTest.append(char1 + sep1 + char2 + sep2)
def f():
res = []
for shouldSplit in [True, False]:
for candidate in stringsToTest:
res = res + [(candidate, candidate.splitlines(shouldSplit))]
self.equivalentEvaluationTest(f)
def test_string_split(self):
#test a wide variety of strings with combinations of different separators
stringsToTest = ["", "a", "aa", "ab", "aba", "aaa", "bbb", "abab", "abc"]
sepsToTest = ["a","b"]
def f():
res = []
for s in stringsToTest:
for sep in sepsToTest:
res = res + [(s,sep, s.split(sep))]
self.equivalentEvaluationTest(f)
def test_string_indexing_2(self):
def f(idx):
x = "asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdf"
return x[idx]
self.equivalentEvaluationTest(f, -1)
self.equivalentEvaluationTest(f, -2)
self.equivalentEvaluationTest(f, 0)
self.equivalentEvaluationTest(f, 1)
def test_string_comparison(self):
def f():
a = "a"
b = "b"
r1 = a < b
r2 = a > b
return (r1, r2)
self.equivalentEvaluationTest(f)
def test_string_duplication(self):
def f():
a = "asdf"
r1 = a * 20
r2 = 20 * a
return (r1, r2)
self.equivalentEvaluationTest(f)
def test_string_equality_methods(self):
def f():
a = "val1"
b = "val1"
r1 = a == b
r2 = a != b
a = "val2"
r3 = a == b
r4 = a != b
r5 = a.__eq__(b)
r6 = a.__ne__(b)
return (r1, r2, r3, r4, r5, r6)
self.equivalentEvaluationTest(f)
def test_large_strings(self):
def f():
a = "val1"
while len(a) < 1000000:
a = a + a
return a
self.equivalentEvaluationTest(f)
def test_define_constant_string(self):
x = "a string"
with self.create_executor() as executor:
define_x = executor.define(x)
fora_x = define_x.result()
self.assertIsNotNone(fora_x)
def test_compute_string(self):
    """A remotely computed string round-trips equal and as a native str."""
    def f():
        return "a string"
    downloaded = self.evaluateWithExecutor(f)
    self.assertEqual(f(), downloaded)
    self.assertTrue(isinstance(downloaded, str))
def test_strings_1(self):
    """Round-trip a simple string constant through equivalent evaluation."""
    def f():
        value = "asdf"
        return value
    self.equivalentEvaluationTest(f)
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _checkBox = _interopRequireDefault(require("./checkBox.json"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var _default = {
animationData: _checkBox["default"],
animationKey: 'checkBox'
};
exports["default"] = _default;
|
# Generated by Django 2.0.1 on 2020-10-31 01:06
from django.db import migrations


class Migration(migrations.Migration):
    # Must be applied after the feed app's initial schema migration.
    dependencies = [
        ('feed', '0001_initial'),
    ]

    # Drops the obsolete 'details' column from the Feed model.
    operations = [
        migrations.RemoveField(
            model_name='feed',
            name='details',
        ),
    ]
|
#!/usr/bin/env python
import unittest
from bardolph.controller import light_set
from bardolph.fakes import fake_lifx
from bardolph.fakes.fake_lifx import Action
from bardolph.lib.injection import provide
from bardolph.parser.parse import Parser
from tests import test_module
from tests.script_runner import ScriptRunner
class LoopTest(unittest.TestCase):
    """
    Tests for the bardolph script language's "repeat" loop construct,
    run against fake lights so device traffic can be asserted exactly.

    repeat-loop syntax:
    repeat
    [(all | in <light_list>) as <light_name>]
    [with <numeric_var> (from <start> to <end> | cycle [<start>)]
    light list syntax:
    <light_name> | group <group_name> | location <location_name>
    [and <light_list>]
    """
    def setUp(self):
        # Fresh fake environment per test: a small set of fake lights and
        # a runner that parses/executes scripts and records device calls.
        test_module.configure()
        fake_lifx.using_small_set().configure()
        light_set.configure()
        self._runner = ScriptRunner(self)

    def test_all(self):
        # "repeat all" visits every light; with a numeric var the value is
        # spread over a "from/to" range or a "cycle" around the hue wheel.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat all as the_light set the_light
repeat all as the_light with brt from 0 to 100 begin
brightness brt
set the_light
end
repeat all as a_light with the_hue cycle begin
hue the_hue
set a_light
end
"""
        self._runner.run_script(script)
        # Each light receives three SET_COLOR calls, one per repeat loop.
        self._runner.check_call_list('light_0', [
            (Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)),
            (Action.SET_COLOR, ([0, 32768, 65535, 1000], 0))
        ])
        self._runner.check_call_list('light_1', [
            (Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0)),
            (Action.SET_COLOR, ([21845, 32768, 65535, 1000], 0))
        ])
        self._runner.check_call_list('light_2', [
            (Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0)),
            (Action.SET_COLOR, ([43690, 32768, 65535, 1000], 0))
        ])

    def test_bare_list(self):
        # A light list may mix defined/assigned names and string literals.
        script = """
hue 45 saturation 25 brightness 75 kelvin 2000 duration 9
define light_0 "light_0" assign light_1 "light_1"
repeat in light_0 and light_1 and "light_2" as the_light
set the_light
"""
        self._runner.run_script(script)
        self._runner.check_call_list(('light_0', 'light_1', 'light_2'),
            (Action.SET_COLOR, ([8192, 16384, 49151, 2000], 9000)))

    def test_list_range(self):
        # "from"/"to" bounds may be arbitrary expressions over registers.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
define l0 "light_0" assign l1 "light_1"
repeat
in l0 and l1 and "light_2"
as the_light
with brt from {saturation - brightness}
to {hue - brightness - 30}
begin
brightness brt
set the_light
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list('light_0',
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)))
        self._runner.check_call_list('light_1',
            (Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0)))
        self._runner.check_call_list('light_2',
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0)))

    def test_list_cycle(self):
        # "cycle" over a 3-light list spreads hues a third of the wheel apart.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat
in "light_0" and "light_1" and "light_2" as the_light
with the_hue cycle
begin
hue the_hue
set the_light
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list('light_0',
            (Action.SET_COLOR, ([0, 32768, 32768, 1000], 0)))
        self._runner.check_call_list('light_1',
            (Action.SET_COLOR, ([21845, 32768, 32768, 1000], 0)))
        self._runner.check_call_list('light_2',
            (Action.SET_COLOR, ([43690, 32768, 32768, 1000], 0)))

    def test_const_count(self):
        # A literal iteration count with a "from/to" variable interpolates
        # brightness across the four iterations.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat 4 with brt from 0 to 100 begin
brightness brt
set all
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 21845, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 43690, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0))
        ])

    def test_expr_count(self):
        # The iteration count and range bounds may all be expressions;
        # expected output matches test_const_count.
        script = """
assign x 16
define y 5
assign z 10
assign thou 1000
hue {36 * y} saturation {y * z} brightness {50 * 1} kelvin thou
repeat {16 / (y - 1)} with brt from {-z+z} to {thou / 10} begin
brightness brt
set all
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 21845, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 43690, 1000], 0)),
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0))
        ])

    def test_cycle_count(self):
        # "cycle 180" starts the hue cycle at 180 degrees and advances by
        # a fifth of the wheel per iteration, wrapping past 360.
        script = """
repeat 5 with the_hue cycle 180 begin
hue the_hue
set all
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([32768, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([45874, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([58982, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([6554, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([19660, 0, 0, 0], 0))
        ])

    def test_nested_cycle(self):
        # Smoke test: an inner cycle seeded from an outer cycle variable
        # must parse and execute without error (no call-list assertions).
        script = """
saturation 90 brightness 75 kelvin 2700
repeat 5 with base_hue cycle begin
time 0
repeat all as the_light with the_hue cycle base_hue begin
hue the_hue
set the_light
end
time 3
wait
end
"""
        self._runner.run_script(script)

    def test_while(self):
        # "repeat while" with a compound condition runs exactly 4 times.
        script = """
hue 180 saturation 10 brightness 10 kelvin 500
assign y 0
define x 100
repeat while {y < 4 and x == 100} begin
set all
assign y {y + 1}
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list(
            [(Action.SET_COLOR, ([32768, 6554, 6554, 500], 0))] * 4)

    def test_bare_with(self):
        # A "with" range with no light clause; descending and ascending
        # ranges both step by 1 (raw units, so hues are literal ints).
        script = """
units raw
repeat with i from 3 to 1 begin
hue i set all
end
repeat with i from 1 to 3 begin
hue i set all
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([3, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([2, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([1, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([1, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([2, 0, 0, 0], 0)),
            (Action.SET_COLOR, ([3, 0, 0, 0], 0))
        ])

    def test_group(self):
        # Iterating a group ("light_0" and "light_2" in the small fake set)
        # with a descending brightness range.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat in group "group" as the_light with brt from 100 to 0
begin
brightness brt
set the_light
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list('light_0',
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0)))
        self._runner.check_call_list('light_2',
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)))

    def test_group_cycle(self):
        # Hue cycle across a two-member group: half the wheel apart.
        script = """
saturation 75 brightness 25 kelvin 1234
repeat in group "group" as the_light with the_hue cycle
begin
hue the_hue
set the_light
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list('light_0',
            (Action.SET_COLOR, ([0, 49151, 16384, 1234], 0)))
        self._runner.check_call_list('light_2',
            (Action.SET_COLOR, ([32768, 49151, 16384, 1234], 0)))

    def test_group_no_with(self):
        # Group iteration with no numeric variable: same color for members.
        script = """
hue 90 saturation 50 brightness 75 kelvin 2000
repeat in group "group" as the_light set the_light
"""
        self._runner.run_script(script)
        self._runner.check_call_list('light_0',
            (Action.SET_COLOR, ([16384, 32768, 49151, 2000], 0)))
        self._runner.check_call_list('light_2',
            (Action.SET_COLOR, ([16384, 32768, 49151, 2000], 0)))

    def test_all_groups(self):
        # "repeat group" iterates over every group name; requires the
        # larger fake-light set, configured after the script is defined.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat group as grp with brt from 0 to 100
begin
brightness brt
set group grp
end
"""
        fake_lifx.using_large_set().configure()
        light_set.configure()
        self._runner.run_script(script)
        self._runner.check_call_list(('Chair', 'Strip', 'Table'),
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)))
        self._runner.check_call_list(('Bottom', 'Middle', 'Top'),
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0)))

    def test_all_locations(self):
        # "repeat location" iterates over every location name.
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat location as loc with brt from 0 to 100
begin
brightness brt
set location loc
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list(('light_0', 'light_2'),
            (Action.SET_COLOR, ([32768, 32768, 0, 1000], 0)))
        self._runner.check_call_list('light_1',
            (Action.SET_COLOR, ([32768, 32768, 65535, 1000], 0)))

    def test_mixture(self):
        # A list mixing bare names, a group, and a location; lights named
        # more than once receive one call per mention (hence "* 2").
        fake_lifx.using_large_set().configure()
        light_set.configure()
        script = """
hue 180 saturation 50 brightness 50 kelvin 1000
repeat in "Table" and group "Pole" and "Chair"
and location "Home" as the_light
begin
set the_light
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list(
            ('Top', 'Middle', 'Bottom', 'Table', 'Chair'),
            [(Action.SET_COLOR, ([32768, 32768, 32768, 1000], 0))] * 2)

    def test_break(self):
        # "break" exits an unconditional repeat after two iterations.
        light_set.configure()
        script = """
units raw hue 100 saturation 200 brightness 300 kelvin 400
assign i 0
repeat begin
if {i >= 2}
break
hue i
set all
assign i {i + 1}
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([0, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1, 200, 300, 400], 0.0))])

    def test_nested_break(self):
        # An inner break must terminate only the inner loop; the outer loop
        # continues and the inner loop re-runs on the next outer iteration.
        light_set.configure()
        script = """
units raw hue 100 saturation 200 brightness 300 kelvin 400
assign i 0
repeat begin
if {i >= 2}
break
hue i
set all
assign i {i + 1}
assign j 1000
repeat begin
if {j > 1002}
break
hue j
set all
assign j {j + 1}
end
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list([
            (Action.SET_COLOR, ([0, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1000, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1001, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1002, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1000, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1001, 200, 300, 400], 0.0)),
            (Action.SET_COLOR, ([1002, 200, 300, 400], 0.0))])

    def test_while_break(self):
        # break inside a while loop: third iteration sets then exits.
        script = """
assign y 0
units raw hue 180 saturation 190 brightness 200 kelvin 210
repeat while {y < 4} begin
set all
if {y == 2}
break
assign y {y + 1}
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list(
            [(Action.SET_COLOR, ([180, 190, 200, 210], 0.0))] * 3)

    def test_count_break(self):
        # break inside a counted range loop stops it early (3 of 11).
        script = """
units raw hue 180 saturation 190 brightness 200 kelvin 210
repeat with i from 0 to 10 begin
set all
if {i == 2}
break
end
"""
        self._runner.run_script(script)
        self._runner.check_global_call_list(
            [(Action.SET_COLOR, ([180, 190, 200, 210], 0.0))] * 3)

    def test_list_break(self):
        # break inside a light-list loop: the third light is never set.
        script = """
units raw
hue 500 saturation 600 brightness 700 kelvin 800 duration 900
assign i 0
repeat in "light_0" and "light_1" and "light_2" as the_light begin
if {i == 2}
break
set the_light
assign i {i + 1}
end
"""
        self._runner.run_script(script)
        self._runner.check_call_list(('light_0', 'light_1'),
            [(Action.SET_COLOR, ([500, 600, 700, 800], 900))])
        self._runner.check_no_others('light_0', 'light_1')

    def test_bad_break(self):
        # "break" outside any loop is a parse error with a specific message.
        script = "hue 5 saturation 6 break set all"
        parser = Parser()
        self.assertFalse(parser.parse(script))
        self.assertEqual(parser.get_errors(),
            'Line 1: Encountered "break" not inside loop.\n')
# Allow running this test module directly: python <module>.py
if __name__ == '__main__':
    unittest.main()
|
#include <windows.h>
#include <tchar.h>
#include "xvmcore.h"
#include "atoms.h"
#include "foperate.h"
#include "xvmerror.h"
#include <vector>
using namespace xcode;
using namespace std;
// Compiler front end for the XVM virtual machine: scans source text,
// parses declarations / statements / expressions, and feeds results to
// an xvmcore instance.
// NOTE(review): "xcomplier" looks like a typo for "xcompiler", but the
// name is part of the exported (XVM_API) interface and is kept as-is.
class XVM_API xcomplier
{
public:
	xcomplier();
	~xcomplier();
public:
	xvmcore* m_pxvm;  // target VM core; ownership not evident here — TODO confirm
public:
	// Top-level entry: evaluate a source string.
	int eval(tchar * str);
	// Recursive-descent productions; each returns a status code.
	int procdescseq();
	int procdescv();
	int procdesct();
	int procfct();
	int procclass();
	int procinterface();
	int procclass(bool binterface );  // shared class/interface body parser
	int procexpr();
	int procexpr(int sy1);            // expression parse resuming at symbol sy1
	int procexprterm();
	int procstmt(bool bcontinue=true);
	int procstmtcompound();
	int procstmtinclude();
	int procstmtexpr();
	int procstmtlabeled();
	int procstmtselection();
	int procstmtiteration();
	int procstmtjump();
	int procstmtdeclaration();
	int procstmttry();
	int proctuple();
	int procparalist();
public:
	// Scanner state.
	tchar * m_psource;  // source buffer being compiled
	int m_nrow;         // current line number (for diagnostics)
	size_t m_nlen;      // length of m_psource
	int cc;             // current character index — presumably; verify against scanner
	tchar ch;           // current character
public:
	// Load a code buffer into the scanner.
	int enter(tchar * pcode);
public:
	int sy;  // most recently scanned symbol/token code
	// Value of the current token; interpretation depends on sy.
	union
	{
		double rnum;       // numeric literal (real)
		int inum;          // numeric literal (integer)
		tchar snum[255];   // identifier / string text
	};
	tchar sclass[255];
public:
	// Numeric-literal helpers for scale/exponent handling.
	int readscale(void);
	int adjustscale(int e);
	int adjustscale(double &rnum, int e);
	// Advance to the next symbol / next character.
	int insymbol(void);
	tchar nextch();
public:
	// Error reporting.
	int error(tchar * ptext,bool bprocess = false);
	void procerror();
public:
	// Static lookup/debug utilities for token and type tables.
	static int getOrder(tchar * p);
	static int getType(tchar * p);
	int showTuple(dataitem &d,bool data=true);
	int showTuple( xtuple* t,bool data=true);
	static int getOpIndent(tchar * p);
	static tchar * getOpIndentStr(int op);
	static tchar* getTypeStr(int p);
	static tchar* getOrderStr(int p);
	// Parse pre-compiled code, optionally executing it immediately.
	static int parsepcode(xvmcore* m_pxvm,tchar * buf,bool bexec = false,xarrays<code *,code> * pCodes = null,code * pcode = null);
public:
	xvmerror* m_error;             // error sink
	vector<LPTSTR > * m_pCodes;    // collected code lines
};
|
# -*- coding: utf-8 -*-
tests = r"""
>>> from django.forms import *
>>> from django.core.files.uploadedfile import SimpleUploadedFile
>>> import datetime
>>> import time
>>> import re
>>> try:
... from decimal import Decimal
... except ImportError:
... from django.utils._decimal import Decimal
#########
# Forms #
#########
A Form is a collection of Fields. It knows how to validate a set of data and it
knows how to render itself in a couple of default ways (e.g., an HTML table).
You can pass it data in __init__(), as a dictionary.
# Form ########################################################################
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... birthday = DateField()
Pass a dictionary to a Form's __init__().
>>> p = Person({'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9'})
>>> p.is_bound
True
>>> p.errors
{}
>>> p.is_valid()
True
>>> p.errors.as_ul()
u''
>>> p.errors.as_text()
u''
>>> p.cleaned_data["first_name"], p.cleaned_data["last_name"], p.cleaned_data["birthday"]
(u'John', u'Lennon', datetime.date(1940, 10, 9))
>>> print p['first_name']
<input type="text" name="first_name" value="John" id="id_first_name" />
>>> print p['last_name']
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
>>> print p['birthday']
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
>>> print p['nonexistentfield']
Traceback (most recent call last):
...
KeyError: "Key 'nonexistentfield' not found in Form"
>>> for boundfield in p:
... print boundfield
<input type="text" name="first_name" value="John" id="id_first_name" />
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
>>> for boundfield in p:
... print boundfield.label, boundfield.data
First name John
Last name Lennon
Birthday 1940-10-9
>>> print p
<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>
Empty dictionaries are valid, too.
>>> p = Person({})
>>> p.is_bound
True
>>> p.errors['first_name']
[u'This field is required.']
>>> p.errors['last_name']
[u'This field is required.']
>>> p.errors['birthday']
[u'This field is required.']
>>> p.is_valid()
False
>>> p.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'Person' object has no attribute 'cleaned_data'
>>> print p
<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>
>>> print p.as_table()
<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>
>>> print p.as_p()
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>
If you don't pass any values to the Form's __init__(), or if you pass None,
the Form will be considered unbound and won't do any validation. Form.errors
will be an empty dictionary *but* Form.is_valid() will return False.
>>> p = Person()
>>> p.is_bound
False
>>> p.errors
{}
>>> p.is_valid()
False
>>> p.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'Person' object has no attribute 'cleaned_data'
>>> print p
<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>
>>> print p.as_table()
<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>
>>> print p.as_ul()
<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>
>>> print p.as_p()
<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>
Unicode values are handled properly.
>>> p = Person({'first_name': u'John', 'last_name': u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111', 'birthday': '1940-10-9'})
>>> p.as_table()
u'<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>\n<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></td></tr>\n<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>'
>>> p.as_ul()
u'<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></li>\n<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></li>\n<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></li>'
>>> p.as_p()
u'<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></p>\n<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></p>\n<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></p>'
>>> p = Person({'last_name': u'Lennon'})
>>> p.errors['first_name']
[u'This field is required.']
>>> p.errors['birthday']
[u'This field is required.']
>>> p.is_valid()
False
>>> p.errors.as_ul()
u'<ul class="errorlist"><li>first_name<ul class="errorlist"><li>This field is required.</li></ul></li><li>birthday<ul class="errorlist"><li>This field is required.</li></ul></li></ul>'
>>> print p.errors.as_text()
* first_name
* This field is required.
* birthday
* This field is required.
>>> p.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'Person' object has no attribute 'cleaned_data'
>>> p['first_name'].errors
[u'This field is required.']
>>> p['first_name'].errors.as_ul()
u'<ul class="errorlist"><li>This field is required.</li></ul>'
>>> p['first_name'].errors.as_text()
u'* This field is required.'
>>> p = Person()
>>> print p['first_name']
<input type="text" name="first_name" id="id_first_name" />
>>> print p['last_name']
<input type="text" name="last_name" id="id_last_name" />
>>> print p['birthday']
<input type="text" name="birthday" id="id_birthday" />
cleaned_data will always *only* contain a key for fields defined in the
Form, even if you pass extra data when you define the Form. In this
example, we pass a bunch of extra fields to the form constructor,
but cleaned_data contains only the form's fields.
>>> data = {'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9', 'extra1': 'hello', 'extra2': 'hello'}
>>> p = Person(data)
>>> p.is_valid()
True
>>> p.cleaned_data['first_name']
u'John'
>>> p.cleaned_data['last_name']
u'Lennon'
>>> p.cleaned_data['birthday']
datetime.date(1940, 10, 9)
cleaned_data will include a key and value for *all* fields defined in the Form,
even if the Form's data didn't include a value for fields that are not
required. In this example, the data dictionary doesn't include a value for the
"nick_name" field, but cleaned_data includes it. For CharFields, it's set to the
empty string.
>>> class OptionalPersonForm(Form):
... first_name = CharField()
... last_name = CharField()
... nick_name = CharField(required=False)
>>> data = {'first_name': u'John', 'last_name': u'Lennon'}
>>> f = OptionalPersonForm(data)
>>> f.is_valid()
True
>>> f.cleaned_data['nick_name']
u''
>>> f.cleaned_data['first_name']
u'John'
>>> f.cleaned_data['last_name']
u'Lennon'
For DateFields, it's set to None.
>>> class OptionalPersonForm(Form):
... first_name = CharField()
... last_name = CharField()
... birth_date = DateField(required=False)
>>> data = {'first_name': u'John', 'last_name': u'Lennon'}
>>> f = OptionalPersonForm(data)
>>> f.is_valid()
True
>>> print f.cleaned_data['birth_date']
None
>>> f.cleaned_data['first_name']
u'John'
>>> f.cleaned_data['last_name']
u'Lennon'
"auto_id" tells the Form to add an "id" attribute to each form element.
If it's a string that contains '%s', Django will use that as a format string
into which the field's name will be inserted. It will also put a <label> around
the human-readable labels for a field.
>>> p = Person(auto_id='%s_id')
>>> print p.as_table()
<tr><th><label for="first_name_id">First name:</label></th><td><input type="text" name="first_name" id="first_name_id" /></td></tr>
<tr><th><label for="last_name_id">Last name:</label></th><td><input type="text" name="last_name" id="last_name_id" /></td></tr>
<tr><th><label for="birthday_id">Birthday:</label></th><td><input type="text" name="birthday" id="birthday_id" /></td></tr>
>>> print p.as_ul()
<li><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></li>
<li><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></li>
<li><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></li>
>>> print p.as_p()
<p><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></p>
<p><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></p>
<p><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></p>
If auto_id is any True value whose str() does not contain '%s', the "id"
attribute will be the name of the field.
>>> p = Person(auto_id=True)
>>> print p.as_ul()
<li><label for="first_name">First name:</label> <input type="text" name="first_name" id="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>
If auto_id is any False value, an "id" attribute won't be output unless it
was manually entered.
>>> p = Person(auto_id=False)
>>> print p.as_ul()
<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
In this example, auto_id is False, but the "id" attribute for the "first_name"
field is given. Also note that field gets a <label>, while the others don't.
>>> class PersonNew(Form):
... first_name = CharField(widget=TextInput(attrs={'id': 'first_name_id'}))
... last_name = CharField()
... birthday = DateField()
>>> p = PersonNew(auto_id=False)
>>> print p.as_ul()
<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
If the "id" attribute is specified in the Form and auto_id is True, the "id"
attribute in the Form gets precedence.
>>> p = PersonNew(auto_id=True)
>>> print p.as_ul()
<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>
>>> class SignupForm(Form):
... email = EmailField()
... get_spam = BooleanField()
>>> f = SignupForm(auto_id=False)
>>> print f['email']
<input type="text" name="email" />
>>> print f['get_spam']
<input type="checkbox" name="get_spam" />
>>> f = SignupForm({'email': 'test@example.com', 'get_spam': True}, auto_id=False)
>>> print f['email']
<input type="text" name="email" value="test@example.com" />
>>> print f['get_spam']
<input checked="checked" type="checkbox" name="get_spam" />
'True' or 'true' should be rendered without a value attribute
>>> f = SignupForm({'email': 'test@example.com', 'get_spam': 'True'}, auto_id=False)
>>> print f['get_spam']
<input checked="checked" type="checkbox" name="get_spam" />
>>> f = SignupForm({'email': 'test@example.com', 'get_spam': 'true'}, auto_id=False)
>>> print f['get_spam']
<input checked="checked" type="checkbox" name="get_spam" />
A value of 'False' or 'false' should be rendered unchecked
>>> f = SignupForm({'email': 'test@example.com', 'get_spam': 'False'}, auto_id=False)
>>> print f['get_spam']
<input type="checkbox" name="get_spam" />
>>> f = SignupForm({'email': 'test@example.com', 'get_spam': 'false'}, auto_id=False)
>>> print f['get_spam']
<input type="checkbox" name="get_spam" />
Any Field can have a Widget class passed to its constructor:
>>> class ContactForm(Form):
... subject = CharField()
... message = CharField(widget=Textarea)
>>> f = ContactForm(auto_id=False)
>>> print f['subject']
<input type="text" name="subject" />
>>> print f['message']
<textarea rows="10" cols="40" name="message"></textarea>
as_textarea(), as_text() and as_hidden() are shortcuts for changing the output
widget type:
>>> f['subject'].as_textarea()
u'<textarea rows="10" cols="40" name="subject"></textarea>'
>>> f['message'].as_text()
u'<input type="text" name="message" />'
>>> f['message'].as_hidden()
u'<input type="hidden" name="message" />'
The 'widget' parameter to a Field can also be an instance:
>>> class ContactForm(Form):
... subject = CharField()
... message = CharField(widget=Textarea(attrs={'rows': 80, 'cols': 20}))
>>> f = ContactForm(auto_id=False)
>>> print f['message']
<textarea rows="80" cols="20" name="message"></textarea>
Instance-level attrs are *not* carried over to as_textarea(), as_text() and
as_hidden():
>>> f['message'].as_text()
u'<input type="text" name="message" />'
>>> f = ContactForm({'subject': 'Hello', 'message': 'I love you.'}, auto_id=False)
>>> f['subject'].as_textarea()
u'<textarea rows="10" cols="40" name="subject">Hello</textarea>'
>>> f['message'].as_text()
u'<input type="text" name="message" value="I love you." />'
>>> f['message'].as_hidden()
u'<input type="hidden" name="message" value="I love you." />'
For a form with a <select>, use ChoiceField:
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>
>>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
>>> print f['language']
<select name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>
A subtlety: If one of the choices' value is the empty string and the form is
unbound, then the <option> for the empty-string choice will get selected="selected".
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField(choices=[('', '------'), ('P', 'Python'), ('J', 'Java')])
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<select name="language">
<option value="" selected="selected">------</option>
<option value="P">Python</option>
<option value="J">Java</option>
</select>
You can specify widget attributes in the Widget constructor.
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(attrs={'class': 'foo'}))
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>
>>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
>>> print f['language']
<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>
When passing a custom widget instance to ChoiceField, note that setting
'choices' on the widget is meaningless. The widget will use the choices
defined on the Field, not the ones defined on the Widget.
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(choices=[('R', 'Ruby'), ('P', 'Perl')], attrs={'class': 'foo'}))
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>
>>> f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
>>> print f['language']
<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>
You can set a ChoiceField's choices after the fact.
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField()
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<select name="language">
</select>
>>> f.fields['language'].choices = [('P', 'Python'), ('J', 'Java')]
>>> print f['language']
<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>
Add widget=RadioSelect to use that widget with a ChoiceField.
>>> class FrameworkForm(Form):
... name = CharField()
... language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=RadioSelect)
>>> f = FrameworkForm(auto_id=False)
>>> print f['language']
<ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul>
>>> print f
<tr><th>Name:</th><td><input type="text" name="name" /></td></tr>
<tr><th>Language:</th><td><ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></td></tr>
>>> print f.as_ul()
<li>Name: <input type="text" name="name" /></li>
<li>Language: <ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></li>
Regarding auto_id and <label>, RadioSelect is a special case. Each radio button
gets a distinct ID, formed by appending an underscore plus the button's
zero-based index.
>>> f = FrameworkForm(auto_id='id_%s')
>>> print f['language']
<ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul>
When RadioSelect is used with auto_id, and the whole form is printed using
either as_table() or as_ul(), the label for the RadioSelect will point to the
ID of the *first* radio button.
>>> print f
<tr><th><label for="id_name">Name:</label></th><td><input type="text" name="name" id="id_name" /></td></tr>
<tr><th><label for="id_language_0">Language:</label></th><td><ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></td></tr>
>>> print f.as_ul()
<li><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></li>
>>> print f.as_p()
<p><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></p>
MultipleChoiceField is a special case, as its data is required to be a list:
>>> class SongForm(Form):
... name = CharField()
... composers = MultipleChoiceField()
>>> f = SongForm(auto_id=False)
>>> print f['composers']
<select multiple="multiple" name="composers">
</select>
>>> class SongForm(Form):
... name = CharField()
... composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
>>> f = SongForm(auto_id=False)
>>> print f['composers']
<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P">Paul McCartney</option>
</select>
>>> f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
>>> print f['name']
<input type="text" name="name" value="Yesterday" />
>>> print f['composers']
<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P" selected="selected">Paul McCartney</option>
</select>
MultipleChoiceField rendered as_hidden() is a special case. Because it can
have multiple values, its as_hidden() renders multiple <input type="hidden">
tags.
>>> f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
>>> print f['composers'].as_hidden()
<input type="hidden" name="composers" value="P" />
>>> f = SongForm({'name': 'From Me To You', 'composers': ['P', 'J']}, auto_id=False)
>>> print f['composers'].as_hidden()
<input type="hidden" name="composers" value="P" />
<input type="hidden" name="composers" value="J" />
MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
>>> class SongForm(Form):
... name = CharField()
... composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
>>> f = SongForm(auto_id=False)
>>> print f['composers']
<ul>
<li><label><input type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>
>>> f = SongForm({'composers': ['J']}, auto_id=False)
>>> print f['composers']
<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>
>>> f = SongForm({'composers': ['J', 'P']}, auto_id=False)
>>> print f['composers']
<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input checked="checked" type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>
Regarding auto_id, CheckboxSelectMultiple is a special case. Each checkbox
gets a distinct ID, formed by appending an underscore plus the checkbox's
zero-based index.
>>> f = SongForm(auto_id='%s_id')
>>> print f['composers']
<ul>
<li><label for="composers_id_0"><input type="checkbox" name="composers" value="J" id="composers_id_0" /> John Lennon</label></li>
<li><label for="composers_id_1"><input type="checkbox" name="composers" value="P" id="composers_id_1" /> Paul McCartney</label></li>
</ul>
Data for a MultipleChoiceField should be a list. QueryDict, MultiValueDict and
MergeDict (when created as a merge of MultiValueDicts) conveniently work with
this.
>>> data = {'name': 'Yesterday', 'composers': ['J', 'P']}
>>> f = SongForm(data)
>>> f.errors
{}
>>> from django.http import QueryDict
>>> data = QueryDict('name=Yesterday&composers=J&composers=P')
>>> f = SongForm(data)
>>> f.errors
{}
>>> from django.utils.datastructures import MultiValueDict
>>> data = MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P']))
>>> f = SongForm(data)
>>> f.errors
{}
>>> from django.utils.datastructures import MergeDict
>>> data = MergeDict(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])))
>>> f = SongForm(data)
>>> f.errors
{}
The MultipleHiddenInput widget renders multiple values as hidden fields.
>>> class SongFormHidden(Form):
... name = CharField()
... composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=MultipleHiddenInput)
>>> f = SongFormHidden(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])), auto_id=False)
>>> print f.as_ul()
<li>Name: <input type="text" name="name" value="Yesterday" /><input type="hidden" name="composers" value="J" />
<input type="hidden" name="composers" value="P" /></li>
When using CheckboxSelectMultiple, the framework expects its data as a list of
values and returns a list of cleaned values.
>>> f = SongForm({'name': 'Yesterday'}, auto_id=False)
>>> f.errors['composers']
[u'This field is required.']
>>> f = SongForm({'name': 'Yesterday', 'composers': ['J']}, auto_id=False)
>>> f.errors
{}
>>> f.cleaned_data['composers']
[u'J']
>>> f.cleaned_data['name']
u'Yesterday'
>>> f = SongForm({'name': 'Yesterday', 'composers': ['J', 'P']}, auto_id=False)
>>> f.errors
{}
>>> f.cleaned_data['composers']
[u'J', u'P']
>>> f.cleaned_data['name']
u'Yesterday'
Validation errors are HTML-escaped when output as HTML.
>>> from django.utils.safestring import mark_safe
>>> class EscapingForm(Form):
... special_name = CharField(label="<em>Special</em> Field")
... special_safe_name = CharField(label=mark_safe("<em>Special</em> Field"))
... def clean_special_name(self):
... raise ValidationError("Something's wrong with '%s'" % self.cleaned_data['special_name'])
... def clean_special_safe_name(self):
... raise ValidationError(mark_safe("'<b>%s</b>' is a safe string" % self.cleaned_data['special_safe_name']))
>>> f = EscapingForm({'special_name': "Nothing to escape", 'special_safe_name': "Nothing to escape"}, auto_id=False)
>>> print f
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Nothing to escape'</li></ul><input type="text" name="special_name" value="Nothing to escape" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b>Nothing to escape</b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="Nothing to escape" /></td></tr>
>>> f = EscapingForm(
... {'special_name': "Should escape < & > and <script>alert('xss')</script>",
... 'special_safe_name': "<i>Do not escape</i>"}, auto_id=False)
>>> print f
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Should escape < & > and <script>alert('xss')</script>'</li></ul><input type="text" name="special_name" value="Should escape < & > and <script>alert('xss')</script>" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b><i>Do not escape</i></b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="<i>Do not escape</i>" /></td></tr>
""" + \
r""" # [This concatenation is to keep the string below Jython's 32K limit].
# Validating multiple fields in relation to another ###########################
There are a couple of ways to do multiple-field validation. If you want the
validation message to be associated with a particular field, implement the
clean_XXX() method on the Form, where XXX is the field name. As in
Field.clean(), the clean_XXX() method should return the cleaned value. In the
clean_XXX() method, you have access to self.cleaned_data, which is a dictionary
of all the data that has been cleaned *so far*, in order by the fields,
including the current field (e.g., the field XXX if you're in clean_XXX()).
>>> class UserRegistration(Form):
... username = CharField(max_length=10)
... password1 = CharField(widget=PasswordInput)
... password2 = CharField(widget=PasswordInput)
... def clean_password2(self):
... if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
... raise ValidationError(u'Please make sure your passwords match.')
... return self.cleaned_data['password2']
>>> f = UserRegistration(auto_id=False)
>>> f.errors
{}
>>> f = UserRegistration({}, auto_id=False)
>>> f.errors['username']
[u'This field is required.']
>>> f.errors['password1']
[u'This field is required.']
>>> f.errors['password2']
[u'This field is required.']
>>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
>>> f.errors['password2']
[u'Please make sure your passwords match.']
>>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
>>> f.errors
{}
>>> f.cleaned_data['username']
u'adrian'
>>> f.cleaned_data['password1']
u'foo'
>>> f.cleaned_data['password2']
u'foo'
Another way of doing multiple-field validation is by implementing the
Form's clean() method. If you do this, any ValidationError raised by that
method will not be associated with a particular field; it will have a
special-case association with the field named '__all__'.
Note that in Form.clean(), you have access to self.cleaned_data, a dictionary of
all the fields/values that have *not* raised a ValidationError. Also note
Form.clean() is required to return a dictionary of all clean data.
>>> class UserRegistration(Form):
... username = CharField(max_length=10)
... password1 = CharField(widget=PasswordInput)
... password2 = CharField(widget=PasswordInput)
... def clean(self):
... if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
... raise ValidationError(u'Please make sure your passwords match.')
... return self.cleaned_data
>>> f = UserRegistration(auto_id=False)
>>> f.errors
{}
>>> f = UserRegistration({}, auto_id=False)
>>> print f.as_table()
<tr><th>Username:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password2" /></td></tr>
>>> f.errors['username']
[u'This field is required.']
>>> f.errors['password1']
[u'This field is required.']
>>> f.errors['password2']
[u'This field is required.']
>>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
>>> f.errors['__all__']
[u'Please make sure your passwords match.']
>>> print f.as_table()
<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><input type="text" name="username" value="adrian" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" value="foo" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" value="bar" /></td></tr>
>>> print f.as_ul()
<li><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></li>
<li>Username: <input type="text" name="username" value="adrian" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" value="foo" /></li>
<li>Password2: <input type="password" name="password2" value="bar" /></li>
>>> f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
>>> f.errors
{}
>>> f.cleaned_data['username']
u'adrian'
>>> f.cleaned_data['password1']
u'foo'
>>> f.cleaned_data['password2']
u'foo'
# Dynamic construction ########################################################
It's possible to construct a Form dynamically by adding to the self.fields
dictionary in __init__(). Don't forget to call Form.__init__() within the
subclass' __init__().
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... def __init__(self, *args, **kwargs):
... super(Person, self).__init__(*args, **kwargs)
... self.fields['birthday'] = DateField()
>>> p = Person(auto_id=False)
>>> print p
<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /></td></tr>
Instances of a dynamic Form do not persist fields from one Form instance to
the next.
>>> class MyForm(Form):
... def __init__(self, data=None, auto_id=False, field_list=[]):
... Form.__init__(self, data, auto_id=auto_id)
... for field in field_list:
... self.fields[field[0]] = field[1]
>>> field_list = [('field1', CharField()), ('field2', CharField())]
>>> my_form = MyForm(field_list=field_list)
>>> print my_form
<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
>>> field_list = [('field3', CharField()), ('field4', CharField())]
>>> my_form = MyForm(field_list=field_list)
>>> print my_form
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
>>> class MyForm(Form):
... default_field_1 = CharField()
... default_field_2 = CharField()
... def __init__(self, data=None, auto_id=False, field_list=[]):
... Form.__init__(self, data, auto_id=auto_id)
... for field in field_list:
... self.fields[field[0]] = field[1]
>>> field_list = [('field1', CharField()), ('field2', CharField())]
>>> my_form = MyForm(field_list=field_list)
>>> print my_form
<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
>>> field_list = [('field3', CharField()), ('field4', CharField())]
>>> my_form = MyForm(field_list=field_list)
>>> print my_form
<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
Similarly, changes to field attributes do not persist from one Form instance
to the next.
>>> class Person(Form):
... first_name = CharField(required=False)
... last_name = CharField(required=False)
... def __init__(self, names_required=False, *args, **kwargs):
... super(Person, self).__init__(*args, **kwargs)
... if names_required:
... self.fields['first_name'].required = True
... self.fields['first_name'].widget.attrs['class'] = 'required'
... self.fields['last_name'].required = True
... self.fields['last_name'].widget.attrs['class'] = 'required'
>>> f = Person(names_required=False)
>>> f['first_name'].field.required, f['last_name'].field.required
(False, False)
>>> f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs
({}, {})
>>> f = Person(names_required=True)
>>> f['first_name'].field.required, f['last_name'].field.required
(True, True)
>>> f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs
({'class': 'required'}, {'class': 'required'})
>>> f = Person(names_required=False)
>>> f['first_name'].field.required, f['last_name'].field.required
(False, False)
>>> f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs
({}, {})
>>> class Person(Form):
... first_name = CharField(max_length=30)
... last_name = CharField(max_length=30)
... def __init__(self, name_max_length=None, *args, **kwargs):
... super(Person, self).__init__(*args, **kwargs)
... if name_max_length:
... self.fields['first_name'].max_length = name_max_length
... self.fields['last_name'].max_length = name_max_length
>>> f = Person(name_max_length=None)
>>> f['first_name'].field.max_length, f['last_name'].field.max_length
(30, 30)
>>> f = Person(name_max_length=20)
>>> f['first_name'].field.max_length, f['last_name'].field.max_length
(20, 20)
>>> f = Person(name_max_length=None)
>>> f['first_name'].field.max_length, f['last_name'].field.max_length
(30, 30)
HiddenInput widgets are displayed differently in the as_table(), as_ul()
and as_p() output of a Form -- their verbose names are not displayed, and a
separate row is not displayed. They're displayed in the last row of the
form, directly after that row's form element.
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... hidden_text = CharField(widget=HiddenInput)
... birthday = DateField()
>>> p = Person(auto_id=False)
>>> print p
<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></td></tr>
>>> print p.as_ul()
<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></li>
>>> print p.as_p()
<p>First name: <input type="text" name="first_name" /></p>
<p>Last name: <input type="text" name="last_name" /></p>
<p>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></p>
With auto_id set, a HiddenInput still gets an ID, but it doesn't get a label.
>>> p = Person(auto_id='id_%s')
>>> print p
<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></td></tr>
>>> print p.as_ul()
<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></li>
>>> print p.as_p()
<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></p>
If a field with a HiddenInput has errors, the as_table() and as_ul() output
will include the error message(s) with the text "(Hidden field [fieldname]) "
prepended. This message is displayed at the top of the output, regardless of
its field's order in the form.
>>> p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'}, auto_id=False)
>>> print p
<tr><td colspan="2"><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></td></tr>
<tr><th>First name:</th><td><input type="text" name="first_name" value="John" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" value="Lennon" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></td></tr>
>>> print p.as_ul()
<li><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></li>
<li>First name: <input type="text" name="first_name" value="John" /></li>
<li>Last name: <input type="text" name="last_name" value="Lennon" /></li>
<li>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></li>
>>> print p.as_p()
<ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul>
<p>First name: <input type="text" name="first_name" value="John" /></p>
<p>Last name: <input type="text" name="last_name" value="Lennon" /></p>
<p>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></p>
A corner case: It's possible for a form to have only HiddenInputs.
>>> class TestForm(Form):
... foo = CharField(widget=HiddenInput)
... bar = CharField(widget=HiddenInput)
>>> p = TestForm(auto_id=False)
>>> print p.as_table()
<input type="hidden" name="foo" /><input type="hidden" name="bar" />
>>> print p.as_ul()
<input type="hidden" name="foo" /><input type="hidden" name="bar" />
>>> print p.as_p()
<input type="hidden" name="foo" /><input type="hidden" name="bar" />
A Form's fields are displayed in the same order in which they were defined.
>>> class TestForm(Form):
... field1 = CharField()
... field2 = CharField()
... field3 = CharField()
... field4 = CharField()
... field5 = CharField()
... field6 = CharField()
... field7 = CharField()
... field8 = CharField()
... field9 = CharField()
... field10 = CharField()
... field11 = CharField()
... field12 = CharField()
... field13 = CharField()
... field14 = CharField()
>>> p = TestForm(auto_id=False)
>>> print p
<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
<tr><th>Field5:</th><td><input type="text" name="field5" /></td></tr>
<tr><th>Field6:</th><td><input type="text" name="field6" /></td></tr>
<tr><th>Field7:</th><td><input type="text" name="field7" /></td></tr>
<tr><th>Field8:</th><td><input type="text" name="field8" /></td></tr>
<tr><th>Field9:</th><td><input type="text" name="field9" /></td></tr>
<tr><th>Field10:</th><td><input type="text" name="field10" /></td></tr>
<tr><th>Field11:</th><td><input type="text" name="field11" /></td></tr>
<tr><th>Field12:</th><td><input type="text" name="field12" /></td></tr>
<tr><th>Field13:</th><td><input type="text" name="field13" /></td></tr>
<tr><th>Field14:</th><td><input type="text" name="field14" /></td></tr>
Some Field classes have an effect on the HTML attributes of their associated
Widget. If you set max_length in a CharField and its associated widget is
either a TextInput or PasswordInput, then the widget's rendered HTML will
include the "maxlength" attribute.
>>> class UserRegistration(Form):
... username = CharField(max_length=10) # uses TextInput by default
... password = CharField(max_length=10, widget=PasswordInput)
... realname = CharField(max_length=10, widget=TextInput) # redundantly define widget, just to test
... address = CharField() # no max_length defined here
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>
<li>Realname: <input type="text" name="realname" maxlength="10" /></li>
<li>Address: <input type="text" name="address" /></li>
If you specify a custom "attrs" that includes the "maxlength" attribute,
the Field's max_length attribute will override whatever "maxlength" you specify
in "attrs".
>>> class UserRegistration(Form):
... username = CharField(max_length=10, widget=TextInput(attrs={'maxlength': 20}))
... password = CharField(max_length=10, widget=PasswordInput)
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>
# Specifying labels ###########################################################
You can specify the label for a field by using the 'label' argument to a Field
class. If you don't specify 'label', Django will use the field name with
underscores converted to spaces, and the initial letter capitalized.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, label='Your username')
... password1 = CharField(widget=PasswordInput)
... password2 = CharField(widget=PasswordInput, label='Password (again)')
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Your username: <input type="text" name="username" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" /></li>
<li>Password (again): <input type="password" name="password2" /></li>
Labels for as_* methods will only end in a colon if they don't end in other
punctuation already.
>>> class Questions(Form):
... q1 = CharField(label='The first question')
... q2 = CharField(label='What is your name?')
... q3 = CharField(label='The answer to life is:')
... q4 = CharField(label='Answer this question!')
... q5 = CharField(label='The last question. Period.')
>>> print Questions(auto_id=False).as_p()
<p>The first question: <input type="text" name="q1" /></p>
<p>What is your name? <input type="text" name="q2" /></p>
<p>The answer to life is: <input type="text" name="q3" /></p>
<p>Answer this question! <input type="text" name="q4" /></p>
<p>The last question. Period. <input type="text" name="q5" /></p>
>>> print Questions().as_p()
<p><label for="id_q1">The first question:</label> <input type="text" name="q1" id="id_q1" /></p>
<p><label for="id_q2">What is your name?</label> <input type="text" name="q2" id="id_q2" /></p>
<p><label for="id_q3">The answer to life is:</label> <input type="text" name="q3" id="id_q3" /></p>
<p><label for="id_q4">Answer this question!</label> <input type="text" name="q4" id="id_q4" /></p>
<p><label for="id_q5">The last question. Period.</label> <input type="text" name="q5" id="id_q5" /></p>
A label can be a Unicode object or a bytestring with special characters.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, label='ŠĐĆŽćžšđ')
... password = CharField(widget=PasswordInput, label=u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
>>> p = UserRegistration(auto_id=False)
>>> p.as_ul()
u'<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="text" name="username" maxlength="10" /></li>\n<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="password" name="password" /></li>'
If a label is set to the empty string for a field, that field won't get a label.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, label='')
... password = CharField(widget=PasswordInput)
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li> <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration(auto_id='id_%s')
>>> print p.as_ul()
<li> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>
If label is None, Django will auto-create the label from the field name. This
is default behavior.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, label=None)
... password = CharField(widget=PasswordInput)
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration(auto_id='id_%s')
>>> print p.as_ul()
<li><label for="id_username">Username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>
# Label Suffix ################################################################
You can specify the 'label_suffix' argument to a Form class to modify the
punctuation symbol used at the end of a label. By default, the colon (:) is
used, and is only appended to the label if the label doesn't already end with a
punctuation symbol: ., !, ? or :. If you specify a different suffix, it will
be appended regardless of the last character of the label.
>>> class FavoriteForm(Form):
... color = CharField(label='Favorite color?')
... animal = CharField(label='Favorite animal')
...
>>> f = FavoriteForm(auto_id=False)
>>> print f.as_ul()
<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal: <input type="text" name="animal" /></li>
>>> f = FavoriteForm(auto_id=False, label_suffix='?')
>>> print f.as_ul()
<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal? <input type="text" name="animal" /></li>
>>> f = FavoriteForm(auto_id=False, label_suffix='')
>>> print f.as_ul()
<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal <input type="text" name="animal" /></li>
>>> f = FavoriteForm(auto_id=False, label_suffix=u'\u2192')
>>> f.as_ul()
u'<li>Favorite color? <input type="text" name="color" /></li>\n<li>Favorite animal\u2192 <input type="text" name="animal" /></li>'
""" + \
r""" # [This concatenation is to keep the string below Jython's 32K limit].
# Initial data ################################################################
You can specify initial data for a field by using the 'initial' argument to a
Field class. This initial data is displayed when a Form is rendered with *no*
data. It is not displayed when a Form is rendered with any data (including an
empty dictionary). Also, the initial value is *not* used if data for a
particular required field isn't provided.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, initial='django')
... password = CharField(widget=PasswordInput)
Here, we're not submitting any data, so the initial value will be displayed.
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
Here, we're submitting data, so the initial value will *not* be displayed.
>>> p = UserRegistration({}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration({'username': u''}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration({'username': u'foo'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
An 'initial' value is *not* used as a fallback if data is not provided. In this
example, we don't provide a value for 'username', and the form raises a
validation error rather than using the initial value for 'username'.
>>> p = UserRegistration({'password': 'secret'})
>>> p.errors['username']
[u'This field is required.']
>>> p.is_valid()
False
# Dynamic initial data ########################################################
The previous technique dealt with "hard-coded" initial data, but it's also
possible to specify initial data after you've already created the Form class
(i.e., at runtime). Use the 'initial' parameter to the Form constructor. This
should be a dictionary containing initial values for one or more fields in the
form, keyed by field name.
>>> class UserRegistration(Form):
... username = CharField(max_length=10)
... password = CharField(widget=PasswordInput)
Here, we're not submitting any data, so the initial value will be displayed.
>>> p = UserRegistration(initial={'username': 'django'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration(initial={'username': 'stephane'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
The 'initial' parameter is meaningless if you pass data.
>>> p = UserRegistration({}, initial={'username': 'django'}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration({'username': u''}, initial={'username': 'django'}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
>>> p = UserRegistration({'username': u'foo'}, initial={'username': 'django'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
A dynamic 'initial' value is *not* used as a fallback if data is not provided.
In this example, we don't provide a value for 'username', and the form raises a
validation error rather than using the initial value for 'username'.
>>> p = UserRegistration({'password': 'secret'}, initial={'username': 'django'})
>>> p.errors['username']
[u'This field is required.']
>>> p.is_valid()
False
If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
then the latter will get precedence.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, initial='django')
... password = CharField(widget=PasswordInput)
>>> p = UserRegistration(initial={'username': 'babik'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="babik" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
# Callable initial data ########################################################
The previous technique dealt with raw values as initial data, but it's also
possible to specify callable data.
>>> class UserRegistration(Form):
... username = CharField(max_length=10)
... password = CharField(widget=PasswordInput)
... options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')])
We need to define functions that get called later.
>>> def initial_django():
... return 'django'
>>> def initial_stephane():
... return 'stephane'
>>> def initial_options():
... return ['f','b']
>>> def initial_other_options():
... return ['b','w']
Here, we're not submitting any data, so the initial value will be displayed.
>>> p = UserRegistration(initial={'username': initial_django, 'options': initial_options}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>
The 'initial' parameter is meaningless if you pass data.
>>> p = UserRegistration({}, initial={'username': initial_django, 'options': initial_options}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>
>>> p = UserRegistration({'username': u''}, initial={'username': initial_django}, auto_id=False)
>>> print p.as_ul()
<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>
>>> p = UserRegistration({'username': u'foo', 'options':['f','b']}, initial={'username': initial_django}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>
A callable 'initial' value is *not* used as a fallback if data is not provided.
In this example, we don't provide a value for 'username', and the form raises a
validation error rather than using the initial value for 'username'.
>>> p = UserRegistration({'password': 'secret'}, initial={'username': initial_django, 'options': initial_options})
>>> p.errors['username']
[u'This field is required.']
>>> p.is_valid()
False
If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
then the latter will get precedence.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, initial=initial_django)
... password = CharField(widget=PasswordInput)
... options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')], initial=initial_other_options)
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w" selected="selected">whiz</option>
</select></li>
>>> p = UserRegistration(initial={'username': initial_stephane, 'options': initial_options}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>
# Help text ###################################################################
You can specify descriptive text for a field by using the 'help_text' argument
to a Field class. This help text is displayed when a Form is rendered.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, help_text='e.g., user@example.com')
... password = CharField(widget=PasswordInput, help_text='Choose wisely.')
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</li>
<li>Password: <input type="password" name="password" /> Choose wisely.</li>
>>> print p.as_p()
<p>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</p>
<p>Password: <input type="password" name="password" /> Choose wisely.</p>
>>> print p.as_table()
<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /><br />e.g., user@example.com</td></tr>
<tr><th>Password:</th><td><input type="password" name="password" /><br />Choose wisely.</td></tr>
The help text is displayed whether or not data is provided for the form.
>>> p = UserRegistration({'username': u'foo'}, auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" value="foo" maxlength="10" /> e.g., user@example.com</li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /> Choose wisely.</li>
help_text is not displayed for hidden fields. It can be used for documentation
purposes, though.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, help_text='e.g., user@example.com')
... password = CharField(widget=PasswordInput)
... next = CharField(widget=HiddenInput, initial='/', help_text='Redirect destination')
>>> p = UserRegistration(auto_id=False)
>>> print p.as_ul()
<li>Username: <input type="text" name="username" maxlength="10" /> e.g., user@example.com</li>
<li>Password: <input type="password" name="password" /><input type="hidden" name="next" value="/" /></li>
Help text can include arbitrary Unicode characters.
>>> class UserRegistration(Form):
... username = CharField(max_length=10, help_text='ŠĐĆŽćžšđ')
>>> p = UserRegistration(auto_id=False)
>>> p.as_ul()
u'<li>Username: <input type="text" name="username" maxlength="10" /> \u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</li>'
# Subclassing forms ###########################################################
You can subclass a Form to add fields. The resulting form subclass will have
all of the fields of the parent Form, plus whichever fields you define in the
subclass.
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... birthday = DateField()
>>> class Musician(Person):
... instrument = CharField()
>>> p = Person(auto_id=False)
>>> print p.as_ul()
<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
>>> m = Musician(auto_id=False)
>>> print m.as_ul()
<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>
Yes, you can subclass multiple forms. The fields are added in the order in
which the parent classes are listed.
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... birthday = DateField()
>>> class Instrument(Form):
... instrument = CharField()
>>> class Beatle(Person, Instrument):
... haircut_type = CharField()
>>> b = Beatle(auto_id=False)
>>> print b.as_ul()
<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>
<li>Haircut type: <input type="text" name="haircut_type" /></li>
# Forms with prefixes #########################################################
Sometimes it's necessary to have multiple forms display on the same HTML page,
or multiple copies of the same form. We can accomplish this with form prefixes.
Pass the keyword argument 'prefix' to the Form constructor to use this feature.
This value will be prepended to each HTML form field name. One way to think
about this is "namespaces for HTML forms". Notice that in the data argument,
each field's key has the prefix, in this case 'person1', prepended to the
actual field name.
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... birthday = DateField()
>>> data = {
... 'person1-first_name': u'John',
... 'person1-last_name': u'Lennon',
... 'person1-birthday': u'1940-10-9'
... }
>>> p = Person(data, prefix='person1')
>>> print p.as_ul()
<li><label for="id_person1-first_name">First name:</label> <input type="text" name="person1-first_name" value="John" id="id_person1-first_name" /></li>
<li><label for="id_person1-last_name">Last name:</label> <input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" /></li>
<li><label for="id_person1-birthday">Birthday:</label> <input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" /></li>
>>> print p['first_name']
<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" />
>>> print p['last_name']
<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" />
>>> print p['birthday']
<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" />
>>> p.errors
{}
>>> p.is_valid()
True
>>> p.cleaned_data['first_name']
u'John'
>>> p.cleaned_data['last_name']
u'Lennon'
>>> p.cleaned_data['birthday']
datetime.date(1940, 10, 9)
Let's try submitting some bad data to make sure form.errors and field.errors
work as expected.
>>> data = {
... 'person1-first_name': u'',
... 'person1-last_name': u'',
... 'person1-birthday': u''
... }
>>> p = Person(data, prefix='person1')
>>> p.errors['first_name']
[u'This field is required.']
>>> p.errors['last_name']
[u'This field is required.']
>>> p.errors['birthday']
[u'This field is required.']
>>> p['first_name'].errors
[u'This field is required.']
>>> p['person1-first_name'].errors
Traceback (most recent call last):
...
KeyError: "Key 'person1-first_name' not found in Form"
In this example, the data doesn't have a prefix, but the form requires it, so
the form doesn't "see" the fields.
>>> data = {
... 'first_name': u'John',
... 'last_name': u'Lennon',
... 'birthday': u'1940-10-9'
... }
>>> p = Person(data, prefix='person1')
>>> p.errors['first_name']
[u'This field is required.']
>>> p.errors['last_name']
[u'This field is required.']
>>> p.errors['birthday']
[u'This field is required.']
With prefixes, a single data dictionary can hold data for multiple instances
of the same form.
>>> data = {
... 'person1-first_name': u'John',
... 'person1-last_name': u'Lennon',
... 'person1-birthday': u'1940-10-9',
... 'person2-first_name': u'Jim',
... 'person2-last_name': u'Morrison',
... 'person2-birthday': u'1943-12-8'
... }
>>> p1 = Person(data, prefix='person1')
>>> p1.is_valid()
True
>>> p1.cleaned_data['first_name']
u'John'
>>> p1.cleaned_data['last_name']
u'Lennon'
>>> p1.cleaned_data['birthday']
datetime.date(1940, 10, 9)
>>> p2 = Person(data, prefix='person2')
>>> p2.is_valid()
True
>>> p2.cleaned_data['first_name']
u'Jim'
>>> p2.cleaned_data['last_name']
u'Morrison'
>>> p2.cleaned_data['birthday']
datetime.date(1943, 12, 8)
By default, forms append a hyphen between the prefix and the field name, but a
form can alter that behavior by implementing the add_prefix() method. This
method takes a field name and returns the prefixed field, according to
self.prefix.
>>> class Person(Form):
... first_name = CharField()
... last_name = CharField()
... birthday = DateField()
... def add_prefix(self, field_name):
... return self.prefix and '%s-prefix-%s' % (self.prefix, field_name) or field_name
>>> p = Person(prefix='foo')
>>> print p.as_ul()
<li><label for="id_foo-prefix-first_name">First name:</label> <input type="text" name="foo-prefix-first_name" id="id_foo-prefix-first_name" /></li>
<li><label for="id_foo-prefix-last_name">Last name:</label> <input type="text" name="foo-prefix-last_name" id="id_foo-prefix-last_name" /></li>
<li><label for="id_foo-prefix-birthday">Birthday:</label> <input type="text" name="foo-prefix-birthday" id="id_foo-prefix-birthday" /></li>
>>> data = {
... 'foo-prefix-first_name': u'John',
... 'foo-prefix-last_name': u'Lennon',
... 'foo-prefix-birthday': u'1940-10-9'
... }
>>> p = Person(data, prefix='foo')
>>> p.is_valid()
True
>>> p.cleaned_data['first_name']
u'John'
>>> p.cleaned_data['last_name']
u'Lennon'
>>> p.cleaned_data['birthday']
datetime.date(1940, 10, 9)
# Forms with NullBooleanFields ################################################
NullBooleanField is a bit of a special case because its presentation (widget)
is different than its data. This is handled transparently, though.
>>> class Person(Form):
... name = CharField()
... is_cool = NullBooleanField()
>>> p = Person({'name': u'Joe'}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>
>>> p = Person({'name': u'Joe', 'is_cool': u'1'}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>
>>> p = Person({'name': u'Joe', 'is_cool': u'2'}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>
>>> p = Person({'name': u'Joe', 'is_cool': u'3'}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>
>>> p = Person({'name': u'Joe', 'is_cool': True}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>
>>> p = Person({'name': u'Joe', 'is_cool': False}, auto_id=False)
>>> print p['is_cool']
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>
# Forms with FileFields ################################################
FileFields are a special case because they take their data from the request.FILES,
not request.POST.
>>> class FileForm(Form):
... file1 = FileField()
>>> f = FileForm(auto_id=False)
>>> print f
<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>
>>> f = FileForm(data={}, files={}, auto_id=False)
>>> print f
<tr><th>File1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="file" name="file1" /></td></tr>
>>> f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', '')}, auto_id=False)
>>> print f
<tr><th>File1:</th><td><ul class="errorlist"><li>The submitted file is empty.</li></ul><input type="file" name="file1" /></td></tr>
>>> f = FileForm(data={}, files={'file1': 'something that is not a file'}, auto_id=False)
>>> print f
<tr><th>File1:</th><td><ul class="errorlist"><li>No file was submitted. Check the encoding type on the form.</li></ul><input type="file" name="file1" /></td></tr>
>>> f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', 'some content')}, auto_id=False)
>>> print f
<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>
>>> f.is_valid()
True
>>> f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह')}, auto_id=False)
>>> print f
<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>
# Basic form processing in a view #############################################
>>> from django.template import Template, Context
>>> class UserRegistration(Form):
... username = CharField(max_length=10)
... password1 = CharField(widget=PasswordInput)
... password2 = CharField(widget=PasswordInput)
... def clean(self):
... if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
... raise ValidationError(u'Please make sure your passwords match.')
... return self.cleaned_data
>>> def my_function(method, post_data):
... if method == 'POST':
... form = UserRegistration(post_data, auto_id=False)
... else:
... form = UserRegistration(auto_id=False)
... if form.is_valid():
... return 'VALID: %r' % form.cleaned_data
... t = Template('<form action="" method="post">\n<table>\n{{ form }}\n</table>\n<input type="submit" />\n</form>')
... return t.render(Context({'form': form}))
Case 1: GET (an empty form, with no errors).
>>> print my_function('GET', {})
<form action="" method="post">
<table>
<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
</table>
<input type="submit" />
</form>
Case 2: POST with erroneous data (a redisplayed form, with errors).
>>> print my_function('POST', {'username': 'this-is-a-long-username', 'password1': 'foo', 'password2': 'bar'})
<form action="" method="post">
<table>
<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><ul class="errorlist"><li>Ensure this value has at most 10 characters (it has 23).</li></ul><input type="text" name="username" value="this-is-a-long-username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" value="foo" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" value="bar" /></td></tr>
</table>
<input type="submit" />
</form>
Case 3: POST with valid data (the success message).
>>> print my_function('POST', {'username': 'adrian', 'password1': 'secret', 'password2': 'secret'})
VALID: {'username': u'adrian', 'password1': u'secret', 'password2': u'secret'}
# Some ideas for using templates with forms ###################################
>>> class UserRegistration(Form):
... username = CharField(max_length=10, help_text="Good luck picking a username that doesn't already exist.")
... password1 = CharField(widget=PasswordInput)
... password2 = CharField(widget=PasswordInput)
... def clean(self):
... if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
... raise ValidationError(u'Please make sure your passwords match.')
... return self.cleaned_data
You have full flexibility in displaying form fields in a template. Just pass a
Form instance to the template, and use "dot" access to refer to individual
fields. Note, however, that this flexibility comes with the responsibility of
displaying all the errors, including any that might not be associated with a
particular field.
>>> t = Template('''<form action="">
... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
<form action="">
<p><label>Your username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>
>>> print t.render(Context({'form': UserRegistration({'username': 'django'}, auto_id=False)}))
<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password: <input type="password" name="password1" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>
Use form.[field].label to output a field's label. You can specify the label for
a field by using the 'label' argument to a Field class. If you don't specify
'label', Django will use the field name with underscores converted to spaces,
and the initial letter capitalized.
>>> t = Template('''<form action="">
... <p><label>{{ form.username.label }}: {{ form.username }}</label></p>
... <p><label>{{ form.password1.label }}: {{ form.password1 }}</label></p>
... <p><label>{{ form.password2.label }}: {{ form.password2 }}</label></p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
<form action="">
<p><label>Username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password1: <input type="password" name="password1" /></label></p>
<p><label>Password2: <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>
Use form.[field].label_tag to output a field's label with a <label> tag
wrapped around it, but *only* if the given field has an "id" attribute.
Recall from above that passing the "auto_id" argument to a Form gives each
field an "id" attribute.
>>> t = Template('''<form action="">
... <p>{{ form.username.label_tag }}: {{ form.username }}</p>
... <p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
... <p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /></p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>
>>> print t.render(Context({'form': UserRegistration(auto_id='id_%s')}))
<form action="">
<p><label for="id_username">Username</label>: <input id="id_username" type="text" name="username" maxlength="10" /></p>
<p><label for="id_password1">Password1</label>: <input type="password" name="password1" id="id_password1" /></p>
<p><label for="id_password2">Password2</label>: <input type="password" name="password2" id="id_password2" /></p>
<input type="submit" />
</form>
Use form.[field].help_text to output a field's help text. If the given field
does not have help text, nothing will be output.
>>> t = Template('''<form action="">
... <p>{{ form.username.label_tag }}: {{ form.username }}<br />{{ form.username.help_text }}</p>
... <p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
... <p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration(auto_id=False)}))
<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /><br />Good luck picking a username that doesn't already exist.</p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>
>>> Template('{{ form.password1.help_text }}').render(Context({'form': UserRegistration(auto_id=False)}))
u''
The label_tag() method takes an optional attrs argument: a dictionary of HTML
attributes to add to the <label> tag.
>>> f = UserRegistration(auto_id='id_%s')
>>> for bf in f:
... print bf.label_tag(attrs={'class': 'pretty'})
<label for="id_username" class="pretty">Username</label>
<label for="id_password1" class="pretty">Password1</label>
<label for="id_password2" class="pretty">Password2</label>
To display the errors that aren't associated with a particular field -- e.g.,
the errors caused by Form.clean() -- use {{ form.non_field_errors }} in the
template. If used on its own, it is displayed as a <ul> (or an empty string, if
the list of errors is empty). You can also use it in {% if %} statements.
>>> t = Template('''<form action="">
... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)}))
<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" value="foo" /></label></p>
<p><label>Password (again): <input type="password" name="password2" value="bar" /></label></p>
<input type="submit" />
</form>
>>> t = Template('''<form action="">
... {{ form.non_field_errors }}
... {{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
... {{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
... {{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
... <input type="submit" />
... </form>''')
>>> print t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)}))
<form action="">
<ul class="errorlist"><li>Please make sure your passwords match.</li></ul>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" value="foo" /></label></p>
<p><label>Password (again): <input type="password" name="password2" value="bar" /></label></p>
<input type="submit" />
</form>
# The empty_permitted attribute ##############################################
Sometimes (pretty much in formsets) we want to allow a form to pass validation
if it is completely empty. We can accomplish this by using the empty_permitted
argument to a form constructor.
>>> class SongForm(Form):
... artist = CharField()
... name = CharField()
First let's show what happens if empty_permitted=False (the default):
>>> data = {'artist': '', 'song': ''}
>>> form = SongForm(data, empty_permitted=False)
>>> form.is_valid()
False
>>> form.errors
{'name': [u'This field is required.'], 'artist': [u'This field is required.']}
>>> form.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'SongForm' object has no attribute 'cleaned_data'
Now let's show what happens when empty_permitted=True and the form is empty.
>>> form = SongForm(data, empty_permitted=True)
>>> form.is_valid()
True
>>> form.errors
{}
>>> form.cleaned_data
{}
But if we fill in data for one of the fields, the form is no longer empty and
the whole thing must pass validation.
>>> data = {'artist': 'The Doors', 'song': ''}
>>> form = SongForm(data, empty_permitted=False)
>>> form.is_valid()
False
>>> form.errors
{'name': [u'This field is required.']}
>>> form.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'SongForm' object has no attribute 'cleaned_data'
If a field is not given in the data then None is returned for its data. Let's
make sure that when checking for empty_permitted that None is treated
accordingly.
>>> data = {'artist': None, 'song': ''}
>>> form = SongForm(data, empty_permitted=True)
>>> form.is_valid()
True
However, we *really* need to be sure we are checking for None as any data in
initial that returns False on a boolean call needs to be treated literally.
>>> class PriceForm(Form):
... amount = FloatField()
... qty = IntegerField()
>>> data = {'amount': '0.0', 'qty': ''}
>>> form = PriceForm(data, initial={'amount': 0.0}, empty_permitted=True)
>>> form.is_valid()
True
# Extracting hidden and visible fields ######################################
>>> class SongForm(Form):
... token = CharField(widget=HiddenInput)
... artist = CharField()
... name = CharField()
>>> form = SongForm()
>>> [f.name for f in form.hidden_fields()]
['token']
>>> [f.name for f in form.visible_fields()]
['artist', 'name']
# Hidden initial input gets its own unique id ################################
>>> class MyForm(Form):
... field1 = CharField(max_length=50, show_hidden_initial=True)
>>> print MyForm()
<tr><th><label for="id_field1">Field1:</label></th><td><input id="id_field1" type="text" name="field1" maxlength="50" /><input type="hidden" name="initial-field1" id="initial-id_field1" /></td></tr>
# The error_html_class and required_html_class attributes ####################
>>> class Person(Form):
... name = CharField()
... is_cool = NullBooleanField()
... email = EmailField(required=False)
... age = IntegerField()
>>> p = Person({})
>>> p.error_css_class = 'error'
>>> p.required_css_class = 'required'
>>> print p.as_ul()
<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></li>
<li><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></li>
<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></li>
>>> print p.as_p()
<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></p>
<p><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></p>
>>> print p.as_table()
<tr class="required error"><th><label for="id_name">Name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="name" id="id_name" /></td></tr>
<tr class="required"><th><label for="id_is_cool">Is cool:</label></th><td><select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></td></tr>
<tr><th><label for="id_email">Email:</label></th><td><input type="text" name="email" id="id_email" /></td></tr>
<tr class="required error"><th><label for="id_age">Age:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="age" id="id_age" /></td></tr>
# Checking that the label for SplitDateTimeField is not being displayed #####
>>> class EventForm(Form):
... happened_at = SplitDateTimeField(widget=widgets.SplitHiddenDateTimeWidget)
...
>>> form = EventForm()
>>> form.as_ul()
u'<input type="hidden" name="happened_at_0" id="id_happened_at_0" /><input type="hidden" name="happened_at_1" id="id_happened_at_1" />'
"""
|
// Front-end bootstrap: expose bundled libraries on `window` so templates and
// inline scripts can reach them, then load Alpine.js (which self-initializes
// on import and scans the DOM for x-data directives).
// NOTE: statement order matters — the globals are assigned before Alpine runs,
// in case any Alpine components reference `_` or `feather` during init.
window._ = require("lodash");
// The pre-minified feather-icons build is required directly; call
// feather.replace() after DOM load to swap <i data-feather> placeholders.
window.feather = require("feather-icons/dist/feather.min.js");
require("alpinejs");
|
// Doxygen-generated navigation data for the TerrainObjectManager class page.
// Each entry is [displayName, targetUrlWithAnchor, childEntries] — children are
// null for leaf members. Consumed by Doxygen's navtree scripts via the global
// `var` binding; do not edit by hand (regenerated on each documentation build).
var class_terrain_object_manager =
[
  // type aliases / nested typedefs
    [ "MapKey", "class_terrain_object_manager.html#a3cf848b4c4b11eb06135d83bc91b7af7", null ],
    [ "StorageMap", "class_terrain_object_manager.html#ab349676b02a8fe4176c46ef06e87ce6c", null ],
  // constructors (default, copy, move — presumably; verify against the source header) and destructor
    [ "TerrainObjectManager", "class_terrain_object_manager.html#aac7be644f7265c410905769500c680f9", null ],
    [ "TerrainObjectManager", "class_terrain_object_manager.html#aaefd77ab4d51fbd8038abe13a8ea023a", null ],
    [ "TerrainObjectManager", "class_terrain_object_manager.html#a86d5d6ff2db3b028b87154f4fd64e87b", null ],
    [ "~TerrainObjectManager", "class_terrain_object_manager.html#a45861ee3f3cfc36fcbe70452bed6eeb1", null ],
  // public static API
    [ "Delete", "class_terrain_object_manager.html#a1123203fe6b4a86085b6bb9daa36fbc4", null ],
    [ "Get", "class_terrain_object_manager.html#ab712917ef91c1fe238454b82adfe8ab3", null ],
    [ "Instance", "class_terrain_object_manager.html#a3e1de183b4aaca5f75c4f756099fc393", null ],
    [ "Load", "class_terrain_object_manager.html#ac557665da6faf6a79bbf8332e6c539dd", null ],
  // assignment operators (two overloads)
    [ "operator=", "class_terrain_object_manager.html#a5dc7301b3f777ec5ff41f4a4907d93e8", null ],
    [ "operator=", "class_terrain_object_manager.html#a57370c35ca5af4c32e1a32bbcaba5210", null ],
  // private implementation counterparts of the static API
    [ "privDelete", "class_terrain_object_manager.html#a79a157a5b7b7b3849e47da00205f8aee", null ],
    [ "privGet", "class_terrain_object_manager.html#a4d0359e3da3722914be5cff2dd25782b", null ],
    [ "privLoad", "class_terrain_object_manager.html#ade6bd0330ca95f8eab122452e448f025", null ],
  // friend/attorney accessor class
    [ "TerrainObjectManagerAttorney", "class_terrain_object_manager.html#abf36ff3c5f71effa85ee4c5c5c17f988", null ],
  // data members (singleton pointer + storage map)
    [ "defaultPath", "class_terrain_object_manager.html#a1094d7140147a3bc910f400002c025eb", null ],
    [ "ptrInstance", "class_terrain_object_manager.html#a3f8f9cf5a3abc56a29d0af72a4cb492b", null ],
    [ "terrainObjectMap", "class_terrain_object_manager.html#aef5031738864f689c21d43b5d3c58b83", null ]
];
|