id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
/Booktype-1.5.tar.gz/Booktype-1.5/lib/booki/site_static/js/tiny_mce/classes/dom/DOMUtils.js | (function(tinymce) {
// Shorten names: alias frequently used tinymce helpers into local vars
var each = tinymce.each,
is = tinymce.is,
isWebKit = tinymce.isWebKit,
isIE = tinymce.isIE,
Entities = tinymce.html.Entities,
// Matches comma separated lists of bare element names, e.g. "div,span" (used by is() fast path)
simpleSelectorRe = /^([a-z0-9],?)+$/i,
blockElementsMap = tinymce.html.Schema.blockElementsMap,
// Matches strings consisting only of whitespace
whiteSpaceRegExp = /^[ \t\r\n]*$/;
/**
 * Utility class for various DOM manipulation and retrieval functions.
 *
 * @class tinymce.dom.DOMUtils
 * @example
 * // Add a class to an element by id in the page
 * tinymce.DOM.addClass('someid', 'someclass');
 *
 * // Add a class to an element by id inside the editor
 * tinyMCE.activeEditor.dom.addClass('someid', 'someclass');
 */
tinymce.create('tinymce.dom.DOMUtils', {
doc : null, // Document this instance is bound to (assigned in the constructor)
root : null,
files : null, // Map of CSS URLs already loaded by loadCSS, to avoid duplicate <link> elements
pixelStyles : /^(top|left|bottom|right|width|height|borderWidth)$/, // Style names that get a default 'px' unit in setStyle
// Attribute name -> DOM property name map, used by getAttrib on IE where
// getAttribute is unreliable for these
props : {
"for" : "htmlFor",
"class" : "className",
className : "className",
checked : "checked",
disabled : "disabled",
maxlength : "maxLength",
readonly : "readOnly",
selected : "selected",
value : "value",
id : "id",
name : "name",
type : "type"
},
/**
* Constructs a new DOMUtils instance. Consult the Wiki for more details on settings etc for this class.
*
* @constructor
* @method DOMUtils
* @param {Document} d Document reference to bind the utility class to.
* @param {settings} s Optional settings collection.
*/
DOMUtils : function(d, s) {
var t = this, globalStyle, name;
t.doc = d;
t.win = window;
t.files = {};
t.cssFlicker = false;
t.counter = 0;
t.stdMode = !tinymce.isIE || d.documentMode >= 8;
t.boxModel = !tinymce.isIE || d.compatMode == "CSS1Compat" || t.stdMode;
t.hasOuterHTML = "outerHTML" in d.createElement("a");
t.settings = s = tinymce.extend({
keep_values : false,
hex_colors : 1
}, s);
t.schema = s.schema;
t.styles = new tinymce.html.Styles({
url_converter : s.url_converter,
url_converter_scope : s.url_converter_scope
}, s.schema);
// Fix IE6SP2 flicker and check it failed for pre SP2
if (tinymce.isIE6) {
try {
d.execCommand('BackgroundImageCache', false, true);
} catch (e) {
t.cssFlicker = true;
}
}
if (isIE && s.schema) {
// Add missing HTML 4/5 elements to IE
('abbr article aside audio canvas ' +
'details figcaption figure footer ' +
'header hgroup mark menu meter nav ' +
'output progress section summary ' +
'time video').replace(/\w+/g, function(name) {
d.createElement(name);
});
// Create all custom elements
for (name in s.schema.getCustomElements()) {
d.createElement(name);
}
}
tinymce.addUnload(t.destroy, t);
},
/**
* Returns the root node of the document this is normally the body but might be a DIV. Parents like getParent will not
* go above the point of this root node.
*
* @method getRoot
* @return {Element} Root element for the utility class.
*/
getRoot : function() {
var t = this, s = t.settings;
return (s && t.get(s.root_element)) || t.doc.body;
},
/**
* Returns the viewport of the window.
*
* @method getViewPort
* @param {Window} w Optional window to get viewport of.
* @return {Object} Viewport object with fields x, y, w and h.
*/
getViewPort : function(w) {
var d, b;
w = !w ? this.win : w;
d = w.document;
b = this.boxModel ? d.documentElement : d.body;
// Returns viewport size excluding scrollbars
return {
x : w.pageXOffset || b.scrollLeft,
y : w.pageYOffset || b.scrollTop,
w : w.innerWidth || b.clientWidth,
h : w.innerHeight || b.clientHeight
};
},
/**
* Returns the rectangle for a specific element.
*
* @method getRect
* @param {Element/String} e Element object or element ID to get rectange from.
* @return {object} Rectange for specified element object with x, y, w, h fields.
*/
getRect : function(e) {
var p, t = this, sr;
e = t.get(e);
p = t.getPos(e);
sr = t.getSize(e);
return {
x : p.x,
y : p.y,
w : sr.w,
h : sr.h
};
},
/**
* Returns the size dimensions of the specified element.
*
* @method getSize
* @param {Element/String} e Element object or element ID to get rectange from.
* @return {object} Rectange for specified element object with w, h fields.
*/
getSize : function(e) {
var t = this, w, h;
e = t.get(e);
w = t.getStyle(e, 'width');
h = t.getStyle(e, 'height');
// Non pixel value, then force offset/clientWidth
if (w.indexOf('px') === -1)
w = 0;
// Non pixel value, then force offset/clientWidth
if (h.indexOf('px') === -1)
h = 0;
return {
w : parseInt(w) || e.offsetWidth || e.clientWidth,
h : parseInt(h) || e.offsetHeight || e.clientHeight
};
},
/**
* Returns a node by the specified selector function. This function will
* loop through all parent nodes and call the specified function for each node.
* If the function then returns true indicating that it has found what it was looking for, the loop execution will then end
* and the node it found will be returned.
*
* @method getParent
* @param {Node/String} n DOM node to search parents on or ID string.
* @param {function} f Selection function to execute on each node or CSS pattern.
* @param {Node} r Optional root element, never go below this point.
* @return {Node} DOM Node or null if it wasn't found.
*/
getParent : function(n, f, r) {
return this.getParents(n, f, r, false);
},
/**
 * Returns a node list of all parents matching the specified selector function or pattern.
 * Each node for which the function returns true (or which matches the CSS pattern) is collected.
 *
 * @method getParents
 * @param {Node/String} n DOM node to search parents on or ID string.
 * @param {function} f Selection function to execute on each node or CSS pattern.
 * @param {Node} r Optional root element, never go below this point.
 * @param {Boolean} c Internal flag: collect all matches (undefined) or return only the first one (false, used by getParent).
 * @return {Array} Array of nodes or null if it wasn't found.
 */
getParents : function(n, f, r, c) {
var t = this, na, se = t.settings, o = [];
n = t.get(n);
// When c is omitted we collect every match; getParent passes false for first-match mode
c = c === undefined;
if (se.strict_root)
r = r || t.getRoot();
// Wrap node name as func
if (is(f, 'string')) {
na = f;
if (f === '*') {
// '*' matches any element node
f = function(n) {return n.nodeType == 1;};
} else {
// Otherwise match the stored selector pattern via is()
f = function(n) {
return t.is(n, na);
};
}
}
// Walk up the parent chain, stopping at the root element or the document node (type 9)
while (n) {
if (n == r || !n.nodeType || n.nodeType === 9)
break;
if (!f || f(n)) {
if (c)
o.push(n);
else
return n;
}
n = n.parentNode;
}
return c ? o : null;
},
/**
 * Returns the specified element by ID or the input element if it isn't a string.
 *
 * @method get
 * @param {String/Element} e Element id to look for or element to just pass though.
 * @return {Element} Element matching the specified id or null if it wasn't found.
 */
get : function(e) {
var n;
if (e && this.doc && typeof(e) == 'string') {
n = e;
e = this.doc.getElementById(e);
// IE and Opera returns meta elements when they match the specified input ID, but getElementsByName seems to do the trick
// NOTE(review): index [1] skips the first named match — presumably the bogus meta element
// returned first on those browsers; confirm the intended element is always at index 1
if (e && e.id !== n)
return this.doc.getElementsByName(n)[1];
}
return e;
},
/**
* Returns the next node that matches selector or function
*
* @method getNext
* @param {Node} node Node to find siblings from.
* @param {String/function} selector Selector CSS expression or function.
* @return {Node} Next node item matching the selector or null if it wasn't found.
*/
getNext : function(node, selector) {
return this._findSib(node, selector, 'nextSibling');
},
/**
* Returns the previous node that matches selector or function
*
* @method getPrev
* @param {Node} node Node to find siblings from.
* @param {String/function} selector Selector CSS expression or function.
* @return {Node} Previous node item matching the selector or null if it wasn't found.
*/
getPrev : function(node, selector) {
return this._findSib(node, selector, 'previousSibling');
},
// #ifndef jquery
/**
* Selects specific elements by a CSS level 3 pattern. For example "div#a1 p.test".
* This function is optimized for the most common patterns needed in TinyMCE but it also performes good enough
* on more complex patterns.
*
* @method select
* @param {String} p CSS level 1 pattern to select/find elements by.
* @param {Object} s Optional root element/scope element to search in.
* @return {Array} Array with all matched elements.
* @example
* // Adds a class to all paragraphs in the currently active editor
* tinyMCE.activeEditor.dom.addClass(tinyMCE.activeEditor.dom.select('p'), 'someclass');
*
* // Adds a class to all spans that has the test class in the currently active editor
* tinyMCE.activeEditor.dom.addClass(tinyMCE.activeEditor.dom.select('span.test'), 'someclass')
*/
select : function(pa, s) {
var t = this;
return tinymce.dom.Sizzle(pa, t.get(s) || t.get(t.settings.root_element) || t.doc, []);
},
/**
* Returns true/false if the specified element matches the specified css pattern.
*
* @method is
* @param {Node/NodeList} n DOM node to match or an array of nodes to match.
* @param {String} selector CSS pattern to match the element agains.
*/
is : function(n, selector) {
var i;
// If it isn't an array then try to do some simple selectors instead of Sizzle for to boost performance
if (n.length === undefined) {
// Simple all selector
if (selector === '*')
return n.nodeType == 1;
// Simple selector just elements
if (simpleSelectorRe.test(selector)) {
selector = selector.toLowerCase().split(/,/);
n = n.nodeName.toLowerCase();
for (i = selector.length - 1; i >= 0; i--) {
if (selector[i] == n)
return true;
}
return false;
}
}
return tinymce.dom.Sizzle.matches(selector, n.nodeType ? [n] : n).length > 0;
},
// #endif
/**
* Adds the specified element to another element or elements.
*
* @method add
* @param {String/Element/Array} Element id string, DOM node element or array of id's or elements to add to.
* @param {String/Element} n Name of new element to add or existing element to add.
* @param {Object} a Optional object collection with arguments to add to the new element(s).
* @param {String} h Optional inner HTML contents to add for each element.
* @param {Boolean} c Optional internal state to indicate if it should create or add.
* @return {Element/Array} Element that got created or array with elements if multiple elements where passed.
* @example
* // Adds a new paragraph to the end of the active editor
* tinyMCE.activeEditor.dom.add(tinyMCE.activeEditor.getBody(), 'p', {title : 'my title'}, 'Some content');
*/
add : function(p, n, a, h, c) {
var t = this;
return this.run(p, function(p) {
var e, k;
e = is(n, 'string') ? t.doc.createElement(n) : n;
t.setAttribs(e, a);
if (h) {
if (h.nodeType)
e.appendChild(h);
else
t.setHTML(e, h);
}
return !c ? p.appendChild(e) : e;
});
},
/**
* Creates a new element.
*
* @method create
* @param {String} n Name of new element.
* @param {Object} a Optional object name/value collection with element attributes.
* @param {String} h Optional HTML string to set as inner HTML of the element.
* @return {Element} HTML DOM node element that got created.
* @example
* // Adds an element where the caret/selection is in the active editor
* var el = tinyMCE.activeEditor.dom.create('div', {id : 'test', 'class' : 'myclass'}, 'some content');
* tinyMCE.activeEditor.selection.setNode(el);
*/
create : function(n, a, h) {
return this.add(this.doc.createElement(n), n, a, h, 1);
},
/**
* Create HTML string for element. The element will be closed unless an empty inner HTML string is passed.
*
* @method createHTML
* @param {String} n Name of new element.
* @param {Object} a Optional object name/value collection with element attributes.
* @param {String} h Optional HTML string to set as inner HTML of the element.
* @return {String} String with new HTML element like for example: <a href="#">test</a>.
* @example
* // Creates a html chunk and inserts it at the current selection/caret location
* tinyMCE.activeEditor.selection.setContent(tinyMCE.activeEditor.dom.createHTML('a', {href : 'test.html'}, 'some line'));
*/
createHTML : function(n, a, h) {
var o = '', t = this, k;
o += '<' + n;
for (k in a) {
if (a.hasOwnProperty(k))
o += ' ' + k + '="' + t.encode(a[k]) + '"';
}
// A call to tinymce.is doesn't work for some odd reason on IE9 possible bug inside their JS runtime
if (typeof(h) != "undefined")
return o + '>' + h + '</' + n + '>';
return o + ' />';
},
/**
 * Removes/deletes the specified element(s) from the DOM.
 *
 * @method remove
 * @param {String/Element/Array} node ID of element or DOM element object or array containing multiple elements/ids.
 * @param {Boolean} keep_children Optional state to keep children or not. If set to true all children will be placed at the location of the removed element.
 * @return {Element/Array} HTML DOM element that got removed or array of elements depending on input.
 * @example
 * // Removes all paragraphs in the active editor
 * tinyMCE.activeEditor.dom.remove(tinyMCE.activeEditor.dom.select('p'));
 *
 * // Removes a element by id in the document
 * tinyMCE.DOM.remove('mydiv');
 */
remove : function(node, keep_children) {
return this.run(node, function(node) {
var child, parent = node.parentNode;
// Detached node: nothing to remove
if (!parent)
return null;
if (keep_children) {
// Move every child up in front of the node being removed, preserving order
while (child = node.firstChild) {
// IE 8 will crash if you don't remove completely empty text nodes
if (!tinymce.isIE || child.nodeType !== 3 || child.nodeValue)
parent.insertBefore(child, node);
else
node.removeChild(child);
}
}
return parent.removeChild(node);
});
},
/**
* Sets the CSS style value on a HTML element. The name can be a camelcase string
* or the CSS style name like background-color.
*
* @method setStyle
* @param {String/Element/Array} n HTML element/Element ID or Array of elements/ids to set CSS style value on.
* @param {String} na Name of the style value to set.
* @param {String} v Value to set on the style.
* @example
* // Sets a style value on all paragraphs in the currently active editor
* tinyMCE.activeEditor.dom.setStyle(tinyMCE.activeEditor.dom.select('p'), 'background-color', 'red');
*
* // Sets a style value to an element by id in the current document
* tinyMCE.DOM.setStyle('mydiv', 'background-color', 'red');
*/
setStyle : function(n, na, v) {
var t = this;
return t.run(n, function(e) {
var s, i;
s = e.style;
// Camelcase it, if needed
na = na.replace(/-(\D)/g, function(a, b){
return b.toUpperCase();
});
// Default px suffix on these
if (t.pixelStyles.test(na) && (tinymce.is(v, 'number') || /^[\-0-9\.]+$/.test(v)))
v += 'px';
switch (na) {
case 'opacity':
// IE specific opacity
if (isIE) {
s.filter = v === '' ? '' : "alpha(opacity=" + (v * 100) + ")";
if (!n.currentStyle || !n.currentStyle.hasLayout)
s.display = 'inline-block';
}
// Fix for older browsers
s[na] = s['-moz-opacity'] = s['-khtml-opacity'] = v || '';
break;
case 'float':
isIE ? s.styleFloat = v : s.cssFloat = v;
break;
default:
s[na] = v || '';
}
// Force update of the style data
if (t.settings.update_styles)
t.setAttrib(e, 'data-mce-style');
});
},
/**
 * Returns the current style or runtime/computed value of an element.
 *
 * @method getStyle
 * @param {String/Element} n HTML element or element id string to get style from.
 * @param {String} na Style name to return.
 * @param {Boolean} c When true, return the computed style instead of the inline style.
 * @return {String} Current style or computed style value of a element.
 */
getStyle : function(n, na, c) {
n = this.get(n);
if (!n)
return;
// Gecko: use getComputedStyle when a computed value was requested
if (this.doc.defaultView && c) {
// Remove camelcase
// NOTE(review): this inserts '-' but keeps the letter uppercase (backgroundColor -> background-Color);
// getPropertyValue conventionally expects lowercase — verify browsers tolerate this form
na = na.replace(/[A-Z]/g, function(a){
return '-' + a;
});
try {
return this.doc.defaultView.getComputedStyle(n, null).getPropertyValue(na);
} catch (ex) {
// Old safari might fail
return null;
}
}
// Camelcase it, if needed
na = na.replace(/-(\D)/g, function(a, b){
return b.toUpperCase();
});
// The float property has browser specific DOM names
if (na == 'float')
na = isIE ? 'styleFloat' : 'cssFloat';
// IE & Opera: currentStyle holds the computed values
if (n.currentStyle && c)
return n.currentStyle[na];
return n.style ? n.style[na] : undefined;
},
/**
 * Sets multiple styles on the specified element(s).
 *
 * @method setStyles
 * @param {Element/String/Array} e DOM element, element id string or array of elements/ids to set styles on.
 * @param {Object} o Name/Value collection of style items to add to the element(s).
 * @example
 * // Sets styles on all paragraphs in the currently active editor
 * tinyMCE.activeEditor.dom.setStyles(tinyMCE.activeEditor.dom.select('p'), {'background-color' : 'red', 'color' : 'green'});
 *
 * // Sets styles to an element by id in the current document
 * tinyMCE.DOM.setStyles('mydiv', {'background-color' : 'red', 'color' : 'green'});
 */
setStyles : function(e, o) {
var t = this, s = t.settings, ol;
// Temporarily disable update_styles so setStyle doesn't flush once per property
ol = s.update_styles;
s.update_styles = 0;
each(o, function(v, n) {
t.setStyle(e, n, v);
});
// Update style info
s.update_styles = ol;
// NOTE(review): s.cssText is never defined on the settings object, so this call passes an
// undefined attribute name and setAttrib bails out immediately — the flush is a no-op.
// Possibly intended to refresh 'data-mce-style' like setStyle does; verify before changing.
if (s.update_styles)
t.setAttrib(e, s.cssText);
},
/**
* Removes all attributes from an element or elements.
*
* @param {Element/String/Array} e DOM element, element id string or array of elements/ids to remove attributes from.
*/
removeAllAttribs: function(e) {
return this.run(e, function(e) {
var i, attrs = e.attributes;
for (i = attrs.length - 1; i >= 0; i--) {
e.removeAttributeNode(attrs.item(i));
}
});
},
/**
 * Sets the specified attribute value on an element or elements.
 *
 * @method setAttrib
 * @param {Element/String/Array} e DOM element, element id string or array of elements/ids to set attribute on.
 * @param {String} n Name of attribute to set.
 * @param {String} v Value to set on the attribute; a falsy value such as null, 0 or '' removes the attribute instead.
 * @example
 * // Sets an attribute to all paragraphs in the active editor
 * tinyMCE.activeEditor.dom.setAttrib(tinyMCE.activeEditor.dom.select('p'), 'class', 'myclass');
 *
 * // Sets an attribute to a specific element in the current page
 * tinyMCE.dom.setAttrib('mydiv', 'class', 'myclass');
 */
setAttrib : function(e, n, v) {
var t = this;
// Whats the point
if (!e || !n)
return;
// Strict XML mode: attribute names are lowercased
if (t.settings.strict)
n = n.toLowerCase();
return this.run(e, function(e) {
var s = t.settings;
if (v !== null) {
switch (n) {
case "style":
// A non-string value is treated as a name/value map of individual styles
if (!is(v, 'string')) {
each(v, function(v, n) {
t.setStyle(e, n, v);
});
return;
}
// No mce_style for elements with these since they might get resized by the user
if (s.keep_values) {
if (v && !t._isRes(v))
e.setAttribute('data-mce-style', v, 2);
else
e.removeAttribute('data-mce-style', 2);
}
e.style.cssText = v;
break;
case "class":
e.className = v || ''; // Fix IE null bug
break;
case "src":
case "href":
// Keep the original URL in a data attribute before any URL conversion
if (s.keep_values) {
if (s.url_converter)
v = s.url_converter.call(s.url_converter_scope || t, v, n, e);
t.setAttrib(e, 'data-mce-' + n, v, 2);
}
break;
case "shape":
e.setAttribute('data-mce-style', v);
break;
}
}
// Finally set or remove the real attribute; empty/undefined values remove it
if (is(v) && v !== null && v.length !== 0)
e.setAttribute(n, '' + v, 2);
else
e.removeAttribute(n, 2);
});
},
/**
* Sets the specified attributes of a element or elements.
*
* @method setAttribs
* @param {Element/String/Array} e DOM element, element id string or array of elements/ids to set attributes on.
* @param {Object} o Name/Value collection of attribute items to add to the element(s).
* @example
* // Sets some attributes to all paragraphs in the active editor
* tinyMCE.activeEditor.dom.setAttribs(tinyMCE.activeEditor.dom.select('p'), {'class' : 'myclass', title : 'some title'});
*
* // Sets some attributes to a specific element in the current page
* tinyMCE.DOM.setAttribs('mydiv', {'class' : 'myclass', title : 'some title'});
*/
setAttribs : function(e, o) {
var t = this;
return this.run(e, function(e) {
each(o, function(v, n) {
t.setAttrib(e, n, v);
});
});
},
/**
 * Returns the specified attribute by name.
 *
 * @method getAttrib
 * @param {String/Element} e Element string id or DOM element to get attribute from.
 * @param {String} n Name of attribute to get.
 * @param {String} dv Optional default value to return if the attribute didn't exist.
 * @return {String} Attribute value string, default value or null if the attribute wasn't found.
 */
getAttrib : function(e, n, dv) {
var v, t = this, undef;
e = t.get(e);
// Only element nodes have attributes; false when no default was supplied
if (!e || e.nodeType !== 1)
return dv === undef ? false : dv;
if (!is(dv))
dv = '';
// Try the mce variant for these: the original value is kept in a data attribute
if (/^(src|href|style|coords|shape)$/.test(n)) {
v = e.getAttribute("data-mce-" + n);
if (v)
return v;
}
// IE: read through the DOM property name (props map) since getAttribute is unreliable there
if (isIE && t.props[n]) {
v = e[t.props[n]];
v = v && v.nodeValue ? v.nodeValue : v;
}
if (!v)
v = e.getAttribute(n, 2);
// Check boolean attribs: normalize to the attribute name or ''
if (/^(checked|compact|declare|defer|disabled|ismap|multiple|nohref|noshade|nowrap|readonly|selected)$/.test(n)) {
if (e[t.props[n]] === true && v === '')
return n;
return v ? n : '';
}
// Inner input elements will override attributes on form elements
if (e.nodeName === "FORM" && e.getAttributeNode(n))
return e.getAttributeNode(n).nodeValue;
if (n === 'style') {
v = v || e.style.cssText;
if (v) {
// Re-serialize so the style string is normalized, and refresh the cached copy
v = t.serializeStyle(t.parseStyle(v), e.nodeName);
if (t.settings.keep_values && !t._isRes(v))
e.setAttribute('data-mce-style', v);
}
}
// Remove Apple and WebKit stuff
if (isWebKit && n === "class" && v)
v = v.replace(/(apple|webkit)\-[a-z\-]+/gi, '');
// Handle IE issues: strip IE-specific default values so they serialize as absent
if (isIE) {
switch (n) {
case 'rowspan':
case 'colspan':
// IE returns 1 as default value
if (v === 1)
v = '';
break;
case 'size':
// IE returns +0 as default value for size
if (v === '+0' || v === 20 || v === 0)
v = '';
break;
case 'width':
case 'height':
case 'vspace':
case 'checked':
case 'disabled':
case 'readonly':
if (v === 0)
v = '';
break;
case 'hspace':
// IE returns -1 as default value
if (v === -1)
v = '';
break;
case 'maxlength':
case 'tabindex':
// IE returns default value
if (v === 32768 || v === 2147483647 || v === '32768')
v = '';
break;
case 'multiple':
case 'compact':
case 'noshade':
case 'nowrap':
if (v === 65535)
return n;
return dv;
case 'shape':
v = v.toLowerCase();
break;
default:
// IE has odd anonymous function for event attributes; extract the function body
if (n.indexOf('on') === 0 && v)
v = tinymce._replace(/^function\s+\w+\(\)\s+\{\s+(.*)\s+\}$/, '$1', '' + v);
}
}
// Normalize: any non-empty value is returned as a string, otherwise the default
return (v !== undef && v !== null && v !== '') ? '' + v : dv;
},
/**
* Returns the absolute x, y position of a node. The position will be returned in a object with x, y fields.
*
* @method getPos
* @param {Element/String} n HTML element or element id to get x, y position from.
* @param {Element} ro Optional root element to stop calculations at.
* @return {object} Absolute position of the specified element object with x, y fields.
*/
getPos : function(n, ro) {
var t = this, x = 0, y = 0, e, d = t.doc, r;
n = t.get(n);
ro = ro || d.body;
if (n) {
// Use getBoundingClientRect if it exists since it's faster than looping offset nodes
if (n.getBoundingClientRect) {
n = n.getBoundingClientRect();
e = t.boxModel ? d.documentElement : d.body;
// Add scroll offsets from documentElement or body since IE with the wrong box model will use d.body and so do WebKit
// Also remove the body/documentelement clientTop/clientLeft on IE 6, 7 since they offset the position
x = n.left + (d.documentElement.scrollLeft || d.body.scrollLeft) - e.clientTop;
y = n.top + (d.documentElement.scrollTop || d.body.scrollTop) - e.clientLeft;
return {x : x, y : y};
}
r = n;
while (r && r != ro && r.nodeType) {
x += r.offsetLeft || 0;
y += r.offsetTop || 0;
r = r.offsetParent;
}
r = n.parentNode;
while (r && r != ro && r.nodeType) {
x -= r.scrollLeft || 0;
y -= r.scrollTop || 0;
r = r.parentNode;
}
}
return {x : x, y : y};
},
/**
* Parses the specified style value into an object collection. This parser will also
* merge and remove any redundant items that browsers might have added. It will also convert non hex
* colors to hex values. Urls inside the styles will also be converted to absolute/relative based on settings.
*
* @method parseStyle
* @param {String} st Style value to parse for example: border:1px solid red;.
* @return {Object} Object representation of that style like {border : '1px solid red'}
*/
parseStyle : function(st) {
return this.styles.parse(st);
},
/**
* Serializes the specified style object into a string.
*
* @method serializeStyle
* @param {Object} o Object to serialize as string for example: {border : '1px solid red'}
* @param {String} name Optional element name.
* @return {String} String representation of the style object for example: border: 1px solid red.
*/
serializeStyle : function(o, name) {
return this.styles.serialize(o, name);
},
/**
 * Imports/loads the specified CSS file(s) into the document bound to the class.
 *
 * @method loadCSS
 * @param {String} u URL to CSS file to load, or several URLs separated by commas.
 * @example
 * // Loads a CSS file dynamically into the current document
 * tinymce.DOM.loadCSS('somepath/some.css');
 *
 * // Loads a CSS file into the currently active editor instance
 * tinyMCE.activeEditor.dom.loadCSS('somepath/some.css');
 *
 * // Loads a CSS file into an editor instance by id
 * tinyMCE.get('someid').dom.loadCSS('somepath/some.css');
 *
 * // Loads multiple CSS files into the current document
 * tinymce.DOM.loadCSS('somepath/some.css,somepath/someother.css');
 */
loadCSS : function(u) {
var t = this, d = t.doc, head;
if (!u)
u = '';
head = t.select('head')[0];
each(u.split(','), function(u) {
var link;
// Skip files that were already loaded by this instance
if (t.files[u])
return;
t.files[u] = true;
link = t.create('link', {rel : 'stylesheet', href : tinymce._addVer(u)});
// IE 8 has a bug where dynamically loading stylesheets would produce a 1 item remaining bug
// This fix seems to resolve that issue by realcing the document ones a stylesheet finishes loading
// It's ugly but it seems to work fine.
if (isIE && d.documentMode && d.recalc) {
link.onload = function() {
if (d.recalc)
d.recalc();
// Detach the handler so it only runs once per link element
link.onload = null;
};
}
head.appendChild(link);
});
},
/**
* Adds a class to the specified element or elements.
*
* @method addClass
* @param {String/Element/Array} Element ID string or DOM element or array with elements or IDs.
* @param {String} c Class name to add to each element.
* @return {String/Array} String with new class value or array with new class values for all elements.
* @example
* // Adds a class to all paragraphs in the active editor
* tinyMCE.activeEditor.dom.addClass(tinyMCE.activeEditor.dom.select('p'), 'myclass');
*
* // Adds a class to a specific element in the current page
* tinyMCE.DOM.addClass('mydiv', 'myclass');
*/
addClass : function(e, c) {
return this.run(e, function(e) {
var o;
if (!c)
return 0;
if (this.hasClass(e, c))
return e.className;
o = this.removeClass(e, c);
return e.className = (o != '' ? (o + ' ') : '') + c;
});
},
/**
 * Removes a class from the specified element or elements.
 *
 * @method removeClass
 * @param {String/Element/Array} e Element ID string or DOM element or array with elements or IDs.
 * @param {String} c Class name to remove from each element.
 * @return {String/Array} String with new class value or array with new class values for all elements.
 * @example
 * // Removes a class from all paragraphs in the active editor
 * tinyMCE.activeEditor.dom.removeClass(tinyMCE.activeEditor.dom.select('p'), 'myclass');
 *
 * // Removes a class from a specific element in the current page
 * tinyMCE.DOM.removeClass('mydiv', 'myclass');
 */
removeClass : function(e, c) {
var t = this, re;
return t.run(e, function(e) {
var v;
if (t.hasClass(e, c)) {
// Build the regexp lazily and reuse it across all elements in the run
// NOTE(review): c is inserted unescaped — class names containing regexp
// metacharacters would misbehave; callers appear to pass plain names
if (!re)
re = new RegExp("(^|\\s+)" + c + "(\\s+|$)", "g");
v = e.className.replace(re, ' ');
v = tinymce.trim(v != ' ' ? v : '');
e.className = v;
// Empty class attr: remove the attribute entirely (both spellings for old IE)
if (!v) {
e.removeAttribute('class');
e.removeAttribute('className');
}
return v;
}
return e.className;
});
},
/**
* Returns true if the specified element has the specified class.
*
* @method hasClass
* @param {String/Element} n HTML element or element id string to check CSS class on.
* @param {String} c CSS class to check for.
* @return {Boolean} true/false if the specified element has the specified class.
*/
hasClass : function(n, c) {
n = this.get(n);
if (!n || !c)
return false;
return (' ' + n.className + ' ').indexOf(' ' + c + ' ') !== -1;
},
/**
* Shows the specified element(s) by ID by setting the "display" style.
*
* @method show
* @param {String/Element/Array} e ID of DOM element or DOM element or array with elements or IDs to show.
*/
show : function(e) {
return this.setStyle(e, 'display', 'block');
},
/**
* Hides the specified element(s) by ID by setting the "display" style.
*
* @method hide
* @param {String/Element/Array} e ID of DOM element or DOM element or array with elements or IDs to hide.
* @example
* // Hides a element by id in the document
* tinymce.DOM.hide('myid');
*/
hide : function(e) {
return this.setStyle(e, 'display', 'none');
},
/**
* Returns true/false if the element is hidden or not by checking the "display" style.
*
* @method isHidden
* @param {String/Element} e Id or element to check display state on.
* @return {Boolean} true/false if the element is hidden or not.
*/
isHidden : function(e) {
e = this.get(e);
return !e || e.style.display == 'none' || this.getStyle(e, 'display') == 'none';
},
/**
* Returns a unique id. This can be useful when generating elements on the fly.
* This method will not check if the element allready exists.
*
* @method uniqueId
* @param {String} p Optional prefix to add infront of all ids defaults to "mce_".
* @return {String} Unique id.
*/
uniqueId : function(p) {
return (!p ? 'mce_' : p) + (this.counter++);
},
/**
* Sets the specified HTML content inside the element or elements. The HTML will first be processed this means
* URLs will get converted, hex color values fixed etc. Check processHTML for details.
*
* @method setHTML
* @param {Element/String/Array} e DOM element, element id string or array of elements/ids to set HTML inside.
* @param {String} h HTML content to set as inner HTML of the element.
* @example
* // Sets the inner HTML of all paragraphs in the active editor
* tinyMCE.activeEditor.dom.setHTML(tinyMCE.activeEditor.dom.select('p'), 'some inner html');
*
* // Sets the inner HTML of a element by id in the document
* tinyMCE.DOM.setHTML('mydiv', 'some inner html');
*/
setHTML : function(element, html) {
var self = this;
return self.run(element, function(element) {
if (isIE) {
// Remove all child nodes, IE keeps empty text nodes in DOM
while (element.firstChild)
element.removeChild(element.firstChild);
try {
// IE will remove comments from the beginning
// unless you padd the contents with something
element.innerHTML = '<br />' + html;
element.removeChild(element.firstChild);
} catch (ex) {
// IE sometimes produces an unknown runtime error on innerHTML if it's an block element within a block element for example a div inside a p
// This seems to fix this problem
// Create new div with HTML contents and a BR infront to keep comments
element = self.create('div');
element.innerHTML = '<br />' + html;
// Add all children from div to target
each (element.childNodes, function(node, i) {
// Skip br element
if (i)
element.appendChild(node);
});
}
} else
element.innerHTML = html;
return html;
});
},
/**
* Returns the outer HTML of an element.
*
* @method getOuterHTML
* @param {String/Element} elm Element ID or element object to get outer HTML from.
* @return {String} Outer HTML string.
* @example
* tinymce.DOM.getOuterHTML(editorElement);
* tinyMCE.activeEditor.getOuterHTML(tinyMCE.activeEditor.getBody());
*/
getOuterHTML : function(elm) {
var doc, self = this;
elm = self.get(elm);
if (!elm)
return null;
if (elm.nodeType === 1 && self.hasOuterHTML)
return elm.outerHTML;
doc = (elm.ownerDocument || self.doc).createElement("body");
doc.appendChild(elm.cloneNode(true));
return doc.innerHTML;
},
/**
 * Sets the specified outer HTML on an element or elements.
 *
 * @method setOuterHTML
 * @param {Element/String/Array} e DOM element, element id string or array of elements/ids to set outer HTML on.
 * @param {Object} h HTML code to set as outer value for the element.
 * @param {Document} d Optional document scope to use in this process defaults to the document of the DOM class.
 * @example
 * // Sets the outer HTML of all paragraphs in the active editor
 * tinyMCE.activeEditor.dom.setOuterHTML(tinyMCE.activeEditor.dom.select('p'), '<div>some html</div>');
 *
 * // Sets the outer HTML of a element by id in the document
 * tinyMCE.DOM.setOuterHTML('mydiv', '<div>some html</div>');
 */
setOuterHTML : function(e, h, d) {
var t = this;
// Fallback path: parse h inside a detached body, insert clones after e walking from the
// last child to the first (so document order is preserved), then remove e itself
function setHTML(e, h, d) {
var n, tp;
tp = d.createElement("body");
tp.innerHTML = h;
n = tp.lastChild;
while (n) {
t.insertAfter(n.cloneNode(true), e);
n = n.previousSibling;
}
t.remove(e);
};
return this.run(e, function(e) {
e = t.get(e);
// Only set HTML on elements
if (e.nodeType == 1) {
d = d || e.ownerDocument || t.doc;
if (isIE) {
try {
// Try outerHTML for IE it sometimes produces an unknown runtime error
// (the isIE and nodeType checks below are redundant: both are already
// guaranteed by the enclosing conditions)
if (isIE && e.nodeType == 1)
e.outerHTML = h;
else
setHTML(e, h, d);
} catch (ex) {
// Fix for unknown runtime error
setHTML(e, h, d);
}
} else
setHTML(e, h, d);
}
});
},
/**
 * Entity decode a string, resolves any HTML entities like å.
 *
 * @method decode
 * @param {String} s String to decode entities on.
 * @return {String} Entity decoded string.
 */
decode : Entities.decode, // Delegates directly to tinymce.html.Entities.decode
/**
 * Entity encodes a string, encodes the most common entities <>"& into entities.
 *
 * @method encode
 * @param {String} text String to encode with entities.
 * @return {String} Entity encoded string.
 */
encode : Entities.encodeAllRaw, // Delegates directly to tinymce.html.Entities.encodeAllRaw
/**
* Inserts a element after the reference element.
*
* @method insertAfter
* @param {Element} node Element to insert after the reference.
* @param {Element/String/Array} reference_node Reference element, element id or array of elements to insert after.
* @return {Element/Array} Element that got added or an array with elements.
*/
insertAfter : function(node, reference_node) {
reference_node = this.get(reference_node);
return this.run(node, function(node) {
var parent, nextSibling;
parent = reference_node.parentNode;
nextSibling = reference_node.nextSibling;
if (nextSibling)
parent.insertBefore(node, nextSibling);
else
parent.appendChild(node);
return node;
});
},
/**
* Returns true/false if the specified element is a block element or not.
*
* @method isBlock
* @param {Node/String} node Element/Node to check.
* @return {Boolean} True/False state if the node is a block element or not.
*/
isBlock : function(node) {
var type = node.nodeType;
// If it's a node then check the type and use the nodeName
if (type)
return !!(type === 1 && blockElementsMap[node.nodeName]);
return !!blockElementsMap[node];
},
/**
* Replaces the specified element or elements with the specified element, the new element will
* be cloned if multiple inputs elements are passed.
*
* @method replace
* @param {Element} n New element to replace old ones with.
* @param {Element/String/Array} o Element DOM node, element id or array of elements or ids to replace.
* @param {Boolean} k Optional keep children state, if set to true child nodes from the old object will be added to new ones.
*/
replace : function(n, o, k) {
var t = this;
if (is(o, 'array'))
n = n.cloneNode(true);
return t.run(o, function(o) {
if (k) {
each(tinymce.grep(o.childNodes), function(c) {
n.appendChild(c);
});
}
return o.parentNode.replaceChild(n, o);
});
},
/**
* Renames the specified element to a new name and keep it's attributes and children.
*
* @method rename
* @param {Element} elm Element to rename.
* @param {String} name Name of the new element.
* @return New element or the old element if it needed renaming.
*/
rename : function(elm, name) {
var t = this, newElm;
if (elm.nodeName != name.toUpperCase()) {
// Rename block element
newElm = t.create(name);
// Copy attribs to new block
each(t.getAttribs(elm), function(attr_node) {
t.setAttrib(newElm, attr_node.nodeName, t.getAttrib(elm, attr_node.nodeName));
});
// Replace block
t.replace(newElm, elm, 1);
}
return newElm || elm;
},
/**
* Find the common ancestor of two elements. This is a shorter method than using the DOM Range logic.
*
* @method findCommonAncestor
* @param {Element} a Element to find common ancestor of.
* @param {Element} b Element to find common ancestor of.
* @return {Element} Common ancestor element of the two input elements.
*/
findCommonAncestor : function(a, b) {
var ps = a, pe;
while (ps) {
pe = b;
while (pe && ps != pe)
pe = pe.parentNode;
if (ps == pe)
break;
ps = ps.parentNode;
}
if (!ps && a.ownerDocument)
return a.ownerDocument.documentElement;
return ps;
},
/**
* Parses the specified RGB color value and returns a hex version of that color.
*
* @method toHex
* @param {String} s RGB string value like rgb(1,2,3)
* @return {String} Hex version of that RGB value like #FF00FF.
*/
toHex : function(s) {
var c = /^\s*rgb\s*?\(\s*?([0-9]+)\s*?,\s*?([0-9]+)\s*?,\s*?([0-9]+)\s*?\)\s*$/i.exec(s);
function hex(s) {
s = parseInt(s).toString(16);
return s.length > 1 ? s : '0' + s; // 0 -> 00
};
if (c) {
s = '#' + hex(c[1]) + hex(c[2]) + hex(c[3]);
return s;
}
return s;
},
/**
 * Returns a array of all single CSS classes in the document. A single CSS class is a simple
 * rule like ".class" complex ones like "div td.class" will not be added to output.
 *
 * @method getClasses
 * @return {Array} Array with class objects each object has a class field might be other fields in the future.
 */
getClasses : function() {
	var t = this, cl = [], i, lo = {}, f = t.settings.class_filter, ov;

	// Return the cached list if we already collected the classes once
	if (t.classes)
		return t.classes;

	// Recursively collects class names from a stylesheet and its imports
	function addClasses(s) {
		// IE style imports
		each(s.imports, function(r) {
			addClasses(r);
		});

		each(s.cssRules || s.rules, function(r) {
			// Real type or fake it on IE
			switch (r.type || 1) {
				// Rule
				case 1:
					if (r.selectorText) {
						// A rule may have several comma separated selectors
						each(r.selectorText.split(','), function(v) {
							v = v.replace(/^\s*|\s*$|^\s\./g, "");

							// Is internal or it doesn't contain a class
							if (/\.mce/.test(v) || !/\.[\w\-]+$/.test(v))
								return;

							// Remove everything but class name
							ov = v;
							v = tinymce._replace(/.*\.([a-z0-9_\-]+).*/i, '$1', v);

							// Filter classes
							if (f && !(v = f(v, ov)))
								return;

							// lo acts as a seen-set so duplicates are only added once
							if (!lo[v]) {
								cl.push({'class' : v});
								lo[v] = 1;
							}
						});
					}
					break;

				// Import
				case 3:
					addClasses(r.styleSheet);
					break;
			}
		});
	};

	try {
		each(t.doc.styleSheets, addClasses);
	} catch (ex) {
		// Ignore - accessing cssRules of cross-domain stylesheets throws a security error
	}

	// Only cache a non-empty result so a later call can retry
	if (cl.length > 0)
		t.classes = cl;

	return cl;
},
/**
* Executes the specified function on the element by id or dom element node or array of elements/id.
*
* @method run
* @param {String/Element/Array} Element ID or DOM element object or array with ids or elements.
* @param {function} f Function to execute for each item.
* @param {Object} s Optional scope to execute the function in.
* @return {Object/Array} Single object or array with objects depending on multiple input or not.
*/
run : function(e, f, s) {
var t = this, o;
if (t.doc && typeof(e) === 'string')
e = t.get(e);
if (!e)
return false;
s = s || this;
if (!e.nodeType && (e.length || e.length === 0)) {
o = [];
each(e, function(e, i) {
if (e) {
if (typeof(e) == 'string')
e = t.doc.getElementById(e);
o.push(f.call(s, e, i));
}
});
return o;
}
return f.call(s, e);
},
/**
 * Returns an NodeList with attributes for the element.
 *
 * @method getAttribs
 * @param {HTMLElement/string} n Element node or string id to get attributes from.
 * @return {NodeList} NodeList with attributes. On IE this is an array of plain
 * {specified, nodeName} objects rather than real attribute nodes.
 */
getAttribs : function(n) {
	var o;

	n = this.get(n);

	if (!n)
		return [];

	if (isIE) {
		o = [];

		// Object will throw exception in IE
		if (n.nodeName == 'OBJECT')
			return n.attributes;

		// IE doesn't keep the selected attribute if you clone option elements
		if (n.nodeName === 'OPTION' && this.getAttrib(n, 'selected'))
			o.push({specified : 1, nodeName : 'selected'});

		// It's crazy that this is faster in IE but it's because it returns all attributes all the time
		// First replace strips the tag delimiters and attribute values from the serialized
		// shallow clone; the second collects the remaining tokens, which are the attribute names.
		n.cloneNode(false).outerHTML.replace(/<\/?[\w:\-]+ ?|=[\"][^\"]+\"|=\'[^\']+\'|=[\w\-]+|>/gi, '').replace(/[\w:\-]+/gi, function(a) {
			o.push({specified : 1, nodeName : a});
		});

		return o;
	}

	return n.attributes;
},
/**
* Returns true/false if the specified node is to be considered empty or not.
*
* @example
* tinymce.DOM.isEmpty(node, {img : true});
* @method isEmpty
* @param {Object} elements Optional name/value object with elements that are automatically treated as non empty elements.
* @return {Boolean} true/false if the node is empty or not.
*/
isEmpty : function(node, elements) {
var self = this, i, attributes, type, walker, name, parentNode;
node = node.firstChild;
if (node) {
walker = new tinymce.dom.TreeWalker(node);
elements = elements || self.schema ? self.schema.getNonEmptyElements() : null;
do {
type = node.nodeType;
if (type === 1) {
// Ignore bogus elements
if (node.getAttribute('data-mce-bogus'))
continue;
// Keep empty elements like <img />
name = node.nodeName.toLowerCase();
if (elements && elements[name]) {
// Ignore single BR elements in blocks like <p><br /></p>
parentNode = node.parentNode;
if (name === 'br' && self.isBlock(parentNode) && parentNode.firstChild === node && parentNode.lastChild === node) {
continue;
}
return false;
}
// Keep elements with data-bookmark attributes or name attribute like <a name="1"></a>
attributes = self.getAttribs(node);
i = node.attributes.length;
while (i--) {
name = node.attributes[i].nodeName;
if (name === "name" || name === 'data-mce-bookmark')
return false;
}
}
// Keep non whitespace text nodes
if ((type === 3 && !whiteSpaceRegExp.test(node.nodeValue)))
return false;
} while (node = walker.next());
}
return true;
},
/**
* Destroys all internal references to the DOM to solve IE leak issues.
*
* @method destroy
*/
destroy : function(s) {
var t = this;
if (t.events)
t.events.destroy();
t.win = t.doc = t.root = t.events = null;
// Manual destroy then remove unload handler
if (!s)
tinymce.removeUnload(t.destroy);
},
/**
* Created a new DOM Range object. This will use the native DOM Range API if it's
* available if it's not it will fallback to the custom TinyMCE implementation.
*
* @method createRng
* @return {DOMRange} DOM Range object.
* @example
* var rng = tinymce.DOM.createRng();
* alert(rng.startContainer + "," + rng.startOffset);
*/
createRng : function() {
var d = this.doc;
return d.createRange ? d.createRange() : new tinymce.dom.Range(this);
},
/**
* Returns the index of the specified node within it's parent.
*
* @param {Node} node Node to look for.
* @param {boolean} normalized Optional true/false state if the index is what it would be after a normalization.
* @return {Number} Index of the specified node.
*/
nodeIndex : function(node, normalized) {
var idx = 0, lastNodeType, lastNode, nodeType;
if (node) {
for (lastNodeType = node.nodeType, node = node.previousSibling, lastNode = node; node; node = node.previousSibling) {
nodeType = node.nodeType;
// Normalize text nodes
if (normalized && nodeType == 3) {
if (nodeType == lastNodeType || !node.nodeValue.length)
continue;
}
idx++;
lastNodeType = nodeType;
}
}
return idx;
},
/**
 * Splits an element into two new elements and places the specified split
 * element or element between the new ones. For example splitting the paragraph at the bold element in
 * this example <p>abc<b>abc</b>123</p> would produce <p>abc</p><b>abc</b><p>123</p>.
 *
 * @method split
 * @param {Element} pe Parent element to split.
 * @param {Element} e Element to split at.
 * @param {Element} re Optional replacement element to replace the split element by.
 * @return {Element} Returns the split element or the replacement element if that is specified.
 */
split : function(pe, e, re) {
	var t = this, r = t.createRng(), bef, aft, pa;

	// W3C valid browsers tend to leave empty nodes to the left/right side of the contents, this makes sense
	// but we don't want that in our code since it serves no purpose for the end user
	// For example if this is chopped:
	//   <p>text 1<span><b>CHOP</b></span>text 2</p>
	// would produce:
	//   <p>text 1<span></span></p><b>CHOP</b><p><span></span>text 2</p>
	// this function will then trim of empty edges and produce:
	//   <p>text 1</p><b>CHOP</b><p>text 2</p>
	function trim(node) {
		var i, children = node.childNodes, type = node.nodeType;

		// True when the node sits directly between two SPAN siblings
		function surroundedBySpans(node) {
			var previousIsSpan = node.previousSibling && node.previousSibling.nodeName == 'SPAN';
			var nextIsSpan = node.nextSibling && node.nextSibling.nodeName == 'SPAN';
			return previousIsSpan && nextIsSpan;
		}

		// Never trim bookmark placeholder elements
		if (type == 1 && node.getAttribute('data-mce-type') == 'bookmark')
			return;

		// Depth first: trim the children before deciding about this node
		for (i = children.length - 1; i >= 0; i--)
			trim(children[i]);

		// Document nodes (type 9) are never removed
		if (type != 9) {
			// Keep non whitespace text nodes
			if (type == 3 && node.nodeValue.length > 0) {
				// If parent element isn't a block or there isn't any useful contents for example "<p> </p>"
				// Also keep text nodes with only spaces if surrounded by spans.
				// eg. "<p><span>a</span> <span>b</span></p>" should keep space between a and b
				var trimmedLength = tinymce.trim(node.nodeValue).length;
				if (!t.isBlock(node.parentNode) || trimmedLength > 0 || trimmedLength == 0 && surroundedBySpans(node))
					return;
			} else if (type == 1) {
				// If the only child is a bookmark then move it up
				children = node.childNodes;
				if (children.length == 1 && children[0] && children[0].nodeType == 1 && children[0].getAttribute('data-mce-type') == 'bookmark')
					node.parentNode.insertBefore(children[0], node);

				// Keep non empty elements or img, hr etc
				if (children.length || /^(br|hr|input|img)$/i.test(node.nodeName))
					return;
			}

			t.remove(node);
		}

		return node;
	};

	if (pe && e) {
		// Get before chunk: everything between the start of pe and e
		r.setStart(pe.parentNode, t.nodeIndex(pe));
		r.setEnd(e.parentNode, t.nodeIndex(e));
		bef = r.extractContents();

		// Get after chunk: everything between e and the end of pe
		r = t.createRng();
		r.setStart(e.parentNode, t.nodeIndex(e) + 1);
		r.setEnd(pe.parentNode, t.nodeIndex(pe) + 1);
		aft = r.extractContents();

		// Insert before chunk
		pa = pe.parentNode;
		pa.insertBefore(trim(bef), pe);

		// Insert middle chunk
		if (re)
			pa.replaceChild(re, e);
		else
			pa.insertBefore(e, pe);

		// Insert after chunk
		pa.insertBefore(trim(aft), pe);
		t.remove(pe);

		return re || e;
	}
},
/**
* Adds an event handler to the specified object.
*
* @method bind
* @param {Element/Document/Window/Array/String} o Object or element id string to add event handler to or an array of elements/ids/documents.
* @param {String} n Name of event handler to add for example: click.
* @param {function} f Function to execute when the event occurs.
* @param {Object} s Optional scope to execute the function in.
* @return {function} Function callback handler the same as the one passed in.
*/
bind : function(target, name, func, scope) {
var t = this;
if (!t.events)
t.events = new tinymce.dom.EventUtils();
return t.events.add(target, name, func, scope || this);
},
/**
* Removes the specified event handler by name and function from a element or collection of elements.
*
* @method unbind
* @param {String/Element/Array} o Element ID string or HTML element or an array of elements or ids to remove handler from.
* @param {String} n Event handler name like for example: "click"
* @param {function} f Function to remove.
* @return {bool/Array} Bool state if true if the handler was removed or an array with states if multiple elements where passed in.
*/
unbind : function(target, name, func) {
var t = this;
if (!t.events)
t.events = new tinymce.dom.EventUtils();
return t.events.remove(target, name, func);
},
// #ifdef debug
dumpRng : function(r) {
return 'startContainer: ' + r.startContainer.nodeName + ', startOffset: ' + r.startOffset + ', endContainer: ' + r.endContainer.nodeName + ', endOffset: ' + r.endOffset;
},
// #endif
_findSib : function(node, selector, name) {
var t = this, f = selector;
if (node) {
// If expression make a function of it using is
if (is(f, 'string')) {
f = function(node) {
return t.is(node, selector);
};
}
// Loop all siblings
for (node = node[name]; node; node = node[name]) {
if (f(node))
return node;
}
}
return null;
},
_isRes : function(c) {
// Is live resizble element
return /^(top|left|bottom|right|width|height)/i.test(c) || /;\s*(top|left|bottom|right|width|height)/i.test(c);
}
/*
walk : function(n, f, s) {
var d = this.doc, w;
if (d.createTreeWalker) {
w = d.createTreeWalker(n, NodeFilter.SHOW_TEXT, null, false);
while ((n = w.nextNode()) != null)
f.call(s || this, n);
} else
tinymce.walk(n, f, 'childNodes', s);
}
*/
/*
toRGB : function(s) {
var c = /^\s*?#([0-9A-F]{2})([0-9A-F]{1,2})([0-9A-F]{2})?\s*?$/.exec(s);
if (c) {
// #FFF -> #FFFFFF
if (!is(c[3]))
c[3] = c[2] = c[1];
return "rgb(" + parseInt(c[1], 16) + "," + parseInt(c[2], 16) + "," + parseInt(c[3], 16) + ")";
}
return s;
}
*/
});
/**
* Instance of DOMUtils for the current document.
*
* @property DOM
* @member tinymce
* @type tinymce.dom.DOMUtils
* @example
* // Example of how to add a class to some element by id
* tinymce.DOM.addClass('someid', 'someclass');
*/
tinymce.DOM = new tinymce.dom.DOMUtils(document, {process_html : 0});
})(tinymce); | PypiClean |
/Editra-0.7.20.tar.gz/Editra-0.7.20/src/ebmlib/calllock.py | __author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: calllock.py 65794 2010-10-13 14:10:09Z CJP $"
__revision__ = "$Revision: 65794 $"
__all__ = [ 'CallLock', 'StaticCallLock', 'LockCall']
#-----------------------------------------------------------------------------#
class CallLock(object):
"""Class to lock a context around a function call"""
def __init__(self, callable=None, args=[], kwargs={}):
super(CallLock, self).__init__()
# Attributes
self._locked = False
self.funct = callable
self.args = args
self.kwargs = kwargs
def Discard(self):
"""Clear callable"""
assert not self.IsLocked(), "Failed to obtain lock!"
self.funct = None
self.args = []
self.kwargs = {}
def IsLocked(self):
return self._locked
def Lock(self):
assert not self.IsLocked(), "Failed to obtain lock!"
assert callable(self.funct), "No Callable to Lock!"
self._locked = True
rval = self.funct(*self.args, **self.kwargs)
self._locked = False
return rval
def SetManagedCall(self, callable, args=[], kwargs={}):
"""Set the call that will be managed by this lock"""
assert not self.IsLocked(), "Failed to obtain lock!"
self.funct = callable
self.args = args
self.kwargs = kwargs
#-----------------------------------------------------------------------------#
class StaticCallLock(CallLock):
"""Provides a static lock around a function call"""
_staticlock = False
def IsLocked(self):
return StaticCallLock._staticlock
def Lock(self):
"""Lock the static class member"""
StaticCallLock._staticlock = True
super(StaticCallLock, self).Lock()
StaticCallLock._staticlock = False
#-----------------------------------------------------------------------------#
def LockCall(lock, callable, args=[], kwargs={}):
"""Convenience function for locking an function call with
the provided CallLock object.
"""
if not isinstance(lock, CallLock):
raise TypeError("lock is not of type CallLock")
lock.SetManagedCall(callable, args, kwargs)
rval = lock.Lock()
lock.Discard()
return rval | PypiClean |
/Fo4doG_mess_client-0.0.2.tar.gz/Fo4doG_mess_client-0.0.2/client/common/decos.py | import socket
import logging
import sys
sys.path.append('../')
# метод определения модуля, источника запуска.
if sys.argv[0].find('client') == -1:
# если не клиент то сервер!
logger = logging.getLogger('server')
else:
# иначе сервер
logger = logging.getLogger('client')
def log(func_to_log):
'''
Декоратор, выполняющий логирование вызовов функций.
Сохраняет события типа debug, содержащие
информацию о имени вызываемой функиции, параметры с которыми
вызывается функция, и модуль, вызывающий функцию.
'''
def log_saver(*args, **kwargs):
logger.debug(
f'Была вызвана функция {func_to_log.__name__} c параметрами {args} , {kwargs}. Вызов из модуля {func_to_log.__module__}')
ret = func_to_log(*args, **kwargs)
return ret
return log_saver
def login_required(func):
'''
Декоратор, проверяющий, что клиент авторизован на сервере.
Проверяет, что передаваемый объект сокета находится в
списке авторизованных клиентов.
За исключением передачи словаря-запроса
на авторизацию. Если клиент не авторизован,
генерирует исключение TypeError
'''
def checker(*args, **kwargs):
# проверяем, что первый аргумент - экземпляр MessageProcessor
# Импортить необходимо тут, иначе ошибка рекурсивного импорта.
from server_packets.server import MessageProcessor
from common.variables import ACTION, PRESENCE
if isinstance(args[0], MessageProcessor):
found = False
for arg in args:
if isinstance(arg, socket.socket):
# Проверяем, что данный сокет есть в списке names класса
# MessageProcessor
for client in args[0].names:
if args[0].names[client] == arg:
found = True
# Теперь надо проверить, что передаваемые аргументы не presence
# сообщение. Если presense, то разрешаем
for arg in args:
if isinstance(arg, dict):
if ACTION in arg and arg[ACTION] == PRESENCE:
found = True
# Если не не авторизован и не сообщение начала авторизации, то
# вызываем исключение.
if not found:
raise TypeError
return func(*args, **kwargs)
return checker | PypiClean |
/IPFX-1.0.8.tar.gz/IPFX-1.0.8/ipfx/epochs.py | import numpy as np
import ipfx.time_series_utils as tsu
# global constants
#TODO: read them from the config file
NOISE_EPOCH = 0.0015
PRESTIM_STABILITY_EPOCH = 0.5
POSTSTIM_STABILITY_EPOCH = 0.5
TEST_PULSE_MAX_TIME = 0.4
def get_first_stability_epoch(stim_start_idx, hz):
num_steps = int(PRESTIM_STABILITY_EPOCH * hz)
if num_steps > stim_start_idx-1:
num_steps = stim_start_idx-1
elif num_steps <= 0:
return 0, 0
return stim_start_idx-1-num_steps, stim_start_idx-1
def get_last_stability_epoch(idx1, hz):
"""
Get epoch lasting LAST_STABILITY_EPOCH before idx1
Parameters
----------
idx1 : int last index of the epoch
hz : float sampling rate
Returns
-------
(idx0,idx1) : int tuple of epoch indices
"""
idx0 = idx1-int(POSTSTIM_STABILITY_EPOCH * hz)
return idx0, idx1
def get_first_noise_epoch(idx, hz):
return idx, idx + int(NOISE_EPOCH * hz)
def get_last_noise_epoch(idx1, hz):
return idx1-int(NOISE_EPOCH * hz), idx1
def get_recording_epoch(response):
"""
Detect response epoch defined as interval from start to the last non-nan value of the response
Parameters
----------
response: float np.array
Returns
-------
start,end: int
indices of the epoch
"""
if len(tsu.flatnotnan(response)) == 0:
end_idx = 0
else:
end_idx = tsu.flatnotnan(response)[-1]
return 0, end_idx
def get_sweep_epoch(response):
"""
Defined as interval including entire sweep
Parameters
----------
response: float np.array
Returns
-------
(start_index,end_index): int tuple
with start,end indices of the epoch
"""
return 0, len(response)-1
def get_stim_epoch(i, test_pulse=True):
"""
Determine the start index, and end index of a general stimulus.
Parameters
----------
i : numpy array
current
test_pulse: bool
True if test pulse is assumed
Returns
-------
start,end: int tuple
"""
di = np.diff(i)
di_idx = np.flatnonzero(di) # != 0
if test_pulse:
di_idx = di_idx[2:] # drop the first up/down (test pulse) if present
if len(di_idx) < 2: # if no stimulus is found
return None
start_idx = di_idx[0] + 1 # shift by one to compensate for diff()
end_idx = di_idx[-1]
return start_idx, end_idx
def get_test_epoch(i,hz):
"""
Find index range of the test epoch
Parameters
----------
i : float np.array
current trace
Returns
-------
start_idx,end_idx: int tuple
start,end indices of the epoch
hz: float
sampling rate
"""
di = np.diff(i)
di_idx = np.flatnonzero(di)
if len(di_idx) == 0:
return None
if di_idx[0] >= TEST_PULSE_MAX_TIME*hz:
return None
if len(di_idx) == 1:
raise Exception("Cannot detect and end to the test pulse")
start_pulse_idx = di_idx[0] + 1 # shift by one to compensate for diff()
end_pulse_idx = di_idx[1]
padding = start_pulse_idx
start_idx = start_pulse_idx - padding
end_idx = end_pulse_idx + padding
return start_idx, end_idx
def get_experiment_epoch(i, hz, test_pulse=True):
"""
Find index range for the experiment epoch.
The start index of the experiment epoch is defined as stim_start_idx - PRESTIM_DURATION*sampling_rate
The end index of the experiment epoch is defined as stim_end_idx + POSTSTIM_DURATION*sampling_rate
Parameters
----------
i : float np.array of current
hz : float sampling rate
test_pulse: bool True if present, False otherwise
Returns
-------
(expt_start_idx,expt_end_idx): int tuple with start, end indices of the epoch
"""
stim_epoch = get_stim_epoch(i,test_pulse)
if stim_epoch:
stim_start_idx,stim_end_idx = stim_epoch
expt_start_idx = stim_start_idx - int(PRESTIM_STABILITY_EPOCH * hz)
expt_end_idx = stim_end_idx + int(POSTSTIM_STABILITY_EPOCH * hz)
return expt_start_idx,expt_end_idx
else:
return None | PypiClean |
/Cahoots-0.5.2.zip/Cahoots-0.5.2/cahoots/parsers/programming/bayesian.py | from SereneRegistry import registry
import simplebayes
import zipfile
import os
class ProgrammingBayesianClassifier(object):
"""
Responsible for classifying an example of source
code into a specific programming language
"""
@staticmethod
# pylint: disable=unused-argument
def bootstrap(config):
"""
Trains the bayes classifier with examples
from various programming languages
:param config: cahoots config
:type config: cahoots.config.BaseConfig
"""
classifier = simplebayes.SimpleBayes(
ProgrammingBayesianClassifier.bayes_tokenizer
)
directory = os.path.dirname(os.path.abspath(__file__))
trainers = {}
trainer_zip = zipfile.ZipFile(directory + '/trainers.zip', 'r')
for filename in trainer_zip.namelist():
language = filename.split('.')[0]
trainers[language] = trainer_zip.read(filename)
for language in trainers:
classifier.train(language, trainers[language])
registry.set('PP_bayes', classifier)
@staticmethod
def bayes_tokenizer(text):
"""
Breaks a string down into tokens for our classifier
:param text: text we want to tokenize
:type text: str
:return: tokenized text
:rtype: list
"""
text = text.replace('->', ' -> ')
text = text.replace('.', ' . ')
text = text.replace(')', ' ) ')
text = text.replace('(', ' ( ')
text = text.replace('{', ' { ')
text = text.replace('}', ' } ')
text = text.replace('[', ' [ ')
text = text.replace(']', ' ] ')
text = text.replace('$', ' $ ')
text = text.replace(':', ' : ')
text = text.replace('\\', ' \\ ')
return text.split()
@classmethod
def classify(cls, data_string):
"""
Takes an string and creates a dict of
programming language match probabilities
:param data_string: the string we want to classify
:type data_string: str
:return: bayesian probabilities
:rtype: dict
"""
classifier = registry.get('PP_bayes')
scores = classifier.score(data_string)
return scores | PypiClean |
/Evgeny_client-0.0.1.tar.gz/Evgeny_client-0.0.1/client/main_window.py | from PyQt5.QtWidgets import QMainWindow, qApp, QMessageBox, QApplication, QListView
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QBrush, QColor
from PyQt5.QtCore import pyqtSlot, QEvent, Qt
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
import json
import logging
import base64
from client.main_window_conv import Ui_MainClientWindow
from client.add_contact import AddContactDialog
from client.del_contact import DelContactDialog
from common.errors import ServerError
from common.variables import *
logger = logging.getLogger('client')
class ClientMainWindow(QMainWindow):
'''
Класс - основное окно пользователя.
Содержит всю основную логику работы клиентского модуля.
Конфигурация окна создана в QTDesigner и загружается из
конвертированого файла main_window_conv.py
'''
def __init__(self, database, transport, keys):
super().__init__()
# основные переменные
self.database = database
self.transport = transport
# объект - дешифорвщик сообщений с предзагруженным ключём
self.decrypter = PKCS1_OAEP.new(keys)
# Загружаем конфигурацию окна из дизайнера
self.ui = Ui_MainClientWindow()
self.ui.setupUi(self)
# Кнопка "Выход"
self.ui.menu_exit.triggered.connect(qApp.exit)
# Кнопка отправить сообщение
self.ui.btn_send.clicked.connect(self.send_message)
# "добавить контакт"
self.ui.btn_add_contact.clicked.connect(self.add_contact_window)
self.ui.menu_add_contact.triggered.connect(self.add_contact_window)
# Удалить контакт
self.ui.btn_remove_contact.clicked.connect(self.delete_contact_window)
self.ui.menu_del_contact.triggered.connect(self.delete_contact_window)
# Дополнительные требующиеся атрибуты
self.contacts_model = None
self.history_model = None
self.messages = QMessageBox()
self.current_chat = None
self.current_chat_key = None
self.encryptor = None
self.ui.list_messages.setHorizontalScrollBarPolicy(
Qt.ScrollBarAlwaysOff)
self.ui.list_messages.setWordWrap(True)
# Даблклик по листу контактов отправляется в обработчик
self.ui.list_contacts.doubleClicked.connect(self.select_active_user)
self.clients_list_update()
self.set_disabled_input()
self.show()
def set_disabled_input(self):
''' Метод делающий поля ввода неактивными'''
# Надпись - получатель.
self.ui.label_new_message.setText(
'Для выбора получателя дважды кликните на нем в окне контактов.')
self.ui.text_message.clear()
if self.history_model:
self.history_model.clear()
# Поле ввода и кнопка отправки неактивны до выбора получателя.
self.ui.btn_clear.setDisabled(True)
self.ui.btn_send.setDisabled(True)
self.ui.text_message.setDisabled(True)
self.encryptor = None
self.current_chat = None
self.current_chat_key = None
def history_list_update(self):
'''
Метод заполняющий соответствующий QListView
историей переписки с текущим собеседником.
'''
# Получаем историю сортированную по дате
list = sorted(
self.database.get_history(
self.current_chat),
key=lambda item: item[3])
# Если модель не создана, создадим.
if not self.history_model:
self.history_model = QStandardItemModel()
self.ui.list_messages.setModel(self.history_model)
# Очистим от старых записей
self.history_model.clear()
# Берём не более 20 последних записей.
length = len(list)
start_index = 0
if length > 20:
start_index = length - 20
# Заполнение модели записями, так-же стоит разделить входящие
# и исходящие выравниванием и разным фоном.
# отображает только последие 20 сообщений
for i in range(start_index, length):
item = list[i]
if item[1] == 'in':
mess = QStandardItem(
f'Входящее от {item[3].replace(microsecond=0)}:\n {item[2]}')
mess.setEditable(False)
mess.setBackground(QBrush(QColor(255, 213, 213)))
mess.setTextAlignment(Qt.AlignLeft)
self.history_model.appendRow(mess)
else:
mess = QStandardItem(
f'Исходящее от {item[3].replace(microsecond=0)}:\n {item[2]}')
mess.setEditable(False)
mess.setTextAlignment(Qt.AlignRight)
mess.setBackground(QBrush(QColor(204, 255, 204)))
self.history_model.appendRow(mess)
self.ui.list_messages.scrollToBottom()
def select_active_user(self):
'''Метод обработчик события двойного клика по списку контактов.'''
# Выбранный пользователем (даблклик) находится в выделеном элементе в
# QListView
self.current_chat = self.ui.list_contacts.currentIndex().data()
# вызываем основную функцию
self.set_active_user()
def set_active_user(self):
'''Метод активации чата с собеседником.'''
# Запрашиваем публичный ключ пользователя и создаём объект шифрования
try:
self.current_chat_key = self.transport.key_request(
self.current_chat)
logger.debug(f'Загружен открытый ключ для {self.current_chat}')
if self.current_chat_key:
self.encryptor = PKCS1_OAEP.new(
RSA.import_key(self.current_chat_key))
except (OSError, json.JSONDecodeError):
self.current_chat_key = None
self.encryptor = None
logger.debug(f'Не удалось получить ключ для {self.current_chat}')
# Если ключа нет то ошибка, что не удалось начать чат с пользователем
if not self.current_chat_key:
self.messages.warning(
self, 'Ошибка', 'Для выбранного пользователя нет ключа шифрования.')
return
# Ставим надпись и активируем кнопки
self.ui.label_new_message.setText(
f'Введите сообщенние для {self.current_chat}:')
self.ui.btn_clear.setDisabled(False)
self.ui.btn_send.setDisabled(False)
self.ui.text_message.setDisabled(False)
# Заполняем окно историю сообщений по требуемому пользователю.
self.history_list_update()
def clients_list_update(self):
'''Метод обновляющий список контактов.'''
contacts_list = self.database.get_contacts()
self.contacts_model = QStandardItemModel()
for i in sorted(contacts_list):
item = QStandardItem(i)
item.setEditable(False)
self.contacts_model.appendRow(item)
self.ui.list_contacts.setModel(self.contacts_model)
def add_contact_window(self):
'''Метод создающий окно - диалог добавления контакта'''
global select_dialog
select_dialog = AddContactDialog(self.transport, self.database)
select_dialog.btn_ok.clicked.connect(
lambda: self.add_contact_action(select_dialog))
select_dialog.show()
def add_contact_action(self, item):
'''Метод обработчк нажатия кнопки "Добавить"'''
new_contact = item.selector.currentText()
self.add_contact(new_contact)
item.close()
def add_contact(self, new_contact):
'''
Метод добавляющий контакт в серверную и клиентсткую BD.
После обновления баз данных обновляет и содержимое окна.
'''
try:
self.transport.add_contact(new_contact)
except ServerError as err:
self.messages.critical(self, 'Ошибка сервера', err.text)
except OSError as err:
if err.errno:
self.messages.critical(
self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
else:
self.database.add_contact(new_contact)
new_contact = QStandardItem(new_contact)
new_contact.setEditable(False)
self.contacts_model.appendRow(new_contact)
logger.info(f'Успешно добавлен контакт {new_contact}')
self.messages.information(
self, 'Успех', 'Контакт успешно добавлен.')
    def delete_contact_window(self):
        '''Create and show the "remove contact" dialog.'''
        # Stored in a module-level global — presumably to keep the dialog
        # referenced while it is shown; TODO confirm.
        global remove_dialog
        remove_dialog = DelContactDialog(self.database)
        remove_dialog.btn_ok.clicked.connect(
            lambda: self.delete_contact(remove_dialog))
        remove_dialog.show()
def delete_contact(self, item):
'''
Метод удаляющий контакт из серверной и клиентсткой BD.
После обновления баз данных обновляет и содержимое окна.
'''
selected = item.selector.currentText()
try:
self.transport.remove_contact(selected)
except ServerError as err:
self.messages.critical(self, 'Ошибка сервера', err.text)
except OSError as err:
if err.errno:
self.messages.critical(
self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
else:
self.database.del_contact(selected)
self.clients_list_update()
logger.info(f'Успешно удалён контакт {selected}')
self.messages.information(self, 'Успех', 'Контакт успешно удалён.')
item.close()
# Если удалён активный пользователь, то деактивируем поля ввода.
if selected == self.current_chat:
self.current_chat = None
self.set_disabled_input()
def send_message(self):
'''
Функция отправки сообщения текущему собеседнику.
Реализует шифрование сообщения и его отправку.
'''
# Текст в поле, проверяем что поле не пустое затем забирается сообщение
# и поле очищается
message_text = self.ui.text_message.toPlainText()
self.ui.text_message.clear()
if not message_text:
return
# Шифруем сообщение ключом получателя и упаковываем в base64.
message_text_encrypted = self.encryptor.encrypt(
message_text.encode('utf8'))
message_text_encrypted_base64 = base64.b64encode(
message_text_encrypted)
try:
self.transport.send_message(
self.current_chat,
message_text_encrypted_base64.decode('ascii'))
pass
except ServerError as err:
self.messages.critical(self, 'Ошибка', err.text)
except OSError as err:
if err.errno:
self.messages.critical(
self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
except (ConnectionResetError, ConnectionAbortedError):
self.messages.critical(
self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
else:
self.database.save_message(self.current_chat, 'out', message_text)
logger.debug(
f'Отправлено сообщение для {self.current_chat}: {message_text}')
self.history_list_update()
@pyqtSlot(dict)
def message(self, message):
'''
Слот обработчик поступаемых сообщений, выполняет дешифровку
поступаемых сообщений и их сохранение в истории сообщений.
Запрашивает пользователя если пришло сообщение не от текущего
собеседника. При необходимости меняет собеседника.
'''
# Получаем строку байтов
encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
# Декодируем строку, при ошибке выдаём сообщение и завершаем функцию
try:
decrypted_message = self.decrypter.decrypt(encrypted_message)
except (ValueError, TypeError):
self.messages.warning(
self, 'Ошибка', 'Не удалось декодировать сообщение.')
return
# Сохраняем сообщение в базу и обновляем историю сообщений или
# открываем новый чат.
self.database.save_message(
self.current_chat,
'in',
decrypted_message.decode('utf8'))
sender = message[SENDER]
if sender == self.current_chat:
self.history_list_update()
else:
# Проверим есть ли такой пользователь у нас в контактах:
if self.database.check_contact(sender):
# Если есть, спрашиваем и желании открыть с ним чат и открываем
# при желании
if self.messages.question(
self,
'Новое сообщение',
f'Получено новое сообщение от {sender}, открыть чат с ним?',
QMessageBox.Yes,
QMessageBox.No) == QMessageBox.Yes:
self.current_chat = sender
self.set_active_user()
else:
print('NO')
# Раз нету,спрашиваем хотим ли добавить юзера в контакты.
if self.messages.question(
self,
'Новое сообщение',
f'Получено новое сообщение от {sender}.\n Данного пользователя нет в вашем контакт-листе.\n Добавить в контакты и открыть чат с ним?',
QMessageBox.Yes,
QMessageBox.No) == QMessageBox.Yes:
self.add_contact(sender)
self.current_chat = sender
# Нужно заново сохранить сообщение, иначе оно будет потеряно,
# т.к. на момент предыдущего вызова контакта не было.
self.database.save_message(
self.current_chat, 'in', decrypted_message.decode('utf8'))
self.set_active_user()
    @pyqtSlot()
    def connection_lost(self):
        '''
        Slot invoked when the transport loses the server connection.
        Shows a warning dialog and shuts the application window down.
        '''
        self.messages.warning(
            self,
            'Сбой соединения',
            'Потеряно соединение с сервером. ')
        self.close()
    @pyqtSlot()
    def sig_205(self):
        '''
        Slot that refreshes the local databases when the server sends a
        205 (content reset) command.
        '''
        # If the active peer no longer exists on the server, drop the chat
        # and disable the input widgets.
        if self.current_chat and not self.database.check_user(
                self.current_chat):
            self.messages.warning(
                self,
                'Сочувствую',
                'К сожалению собеседник был удалён с сервера.')
            self.set_disabled_input()
            self.current_chat = None
        self.clients_list_update()
def make_connection(self, trans_obj):
'''Метод обеспечивающий соединение сигналов и слотов.'''
trans_obj.new_message.connect(self.message)
trans_obj.connection_lost.connect(self.connection_lost)
trans_obj.message_205.connect(self.sig_205) | PypiClean |
/Heterogeneous_Highway_Env-0.0.3-py3-none-any.whl/Heterogeneous_Highway_Env/envs/summon_env.py | from gym.envs.registration import register
import numpy as np
from highway_env import utils
from highway_env.envs import ParkingEnv
from highway_env.road.lane import StraightLane, LineType
from highway_env.road.road import Road, RoadNetwork
from highway_env.vehicle.kinematics import Vehicle
from highway_env.vehicle.objects import Landmark
class SummonEnv(ParkingEnv):
    """
    A continuous control environment.

    It implements a reach-type task, where the agent observes their position and speed and must
    control their acceleration and steering so as to reach a given goal.

    Credits to Vinny Ruia for the idea and initial implementation.
    """

    @classmethod
    def default_config(cls) -> dict:
        """Extend the parking configuration with traffic settings."""
        config = super().default_config()
        config.update({
            "vehicles_count": 10,
            "other_vehicles_type": "highway_env.vehicle.behavior.IDMVehicle",
        })
        return config

    def _create_road(self, spots: int = 15) -> None:
        """
        Create a road composed of straight adjacent lanes.

        :param spots: number of parking spots
        """
        net = RoadNetwork()
        width = 4.0
        lt = (LineType.CONTINUOUS, LineType.CONTINUOUS)
        x_offset = 0
        y_offset = 12
        length = 8
        # Parking spots on both sides of the lot.
        for k in range(spots):
            x = (k - spots // 2) * (width + x_offset) - width / 2
            net.add_lane("a", "b", StraightLane([x, y_offset], [x, y_offset + length],
                                                width=width, line_types=lt, speed_limit=5))
            net.add_lane("b", "c", StraightLane([x, -y_offset], [x, -y_offset - length],
                                                width=width, line_types=lt, speed_limit=5))
        self.spots = spots
        # NOTE(review): uses the loop variable after the loop, i.e. the ego
        # vehicle starts over the *last* parking spot — confirm intended.
        self.vehicle_starting = [x, y_offset + (length / 2)]
        self.num_middle_lanes = 0
        self.x_range = (int(spots / 2) + 1) * width
        # Generate the middle lanes for the busy parking lot.
        for y in np.arange(-y_offset + width, y_offset, width):
            net.add_lane("d", "e", StraightLane([-self.x_range, y], [self.x_range, y],
                                                width=width,
                                                line_types=(LineType.STRIPED, LineType.STRIPED),
                                                speed_limit=5))
            self.num_middle_lanes += 1
        self.road = Road(network=net,
                         np_random=self.np_random,
                         record_history=self.config["show_trajectories"])

    def _create_vehicles(self, parked_probability: float = 0.75) -> None:
        """
        Create some new random vehicles of a given type, and add them on the road.

        :param parked_probability: probability that a spot is occupied
        """
        self.vehicle = self.action_type.vehicle_class(self.road,
                                                      self.vehicle_starting,
                                                      2 * np.pi * self.np_random.rand(), 0)
        self.road.vehicles.append(self.vehicle)
        goal_position = [self.np_random.choice([-2 * self.spots - 10, 2 * self.spots + 10]), 0]
        self.goal = Landmark(self.road, goal_position, heading=0)
        self.road.objects.append(self.goal)
        vehicles_type = utils.class_from_path(self.config["other_vehicles_type"])
        for i in range(self.config["vehicles_count"]):
            is_parked = self.np_random.rand() <= parked_probability
            if not is_parked:
                # Just an effort to spread the vehicles out
                idx = self.np_random.randint(0, self.num_middle_lanes)
                # NOTE(review): randint(-1, 1) yields only -1 or 0 — confirm
                # whether a +1 offset was intended as well.
                longitudinal = (i * 5) - (self.x_range / 8) * self.np_random.randint(-1, 1)
                self.road.vehicles.append(
                    vehicles_type.make_on_lane(self.road, ("d", "e", idx), longitudinal, speed=2))
            else:
                lane = ("a", "b", i) if self.np_random.rand() >= 0.5 else ("b", "c", i)
                self.road.vehicles.append(Vehicle.make_on_lane(self.road, lane, 4, speed=0))
        # Prevent early collisions.  Iterate over a *copy*: removing from
        # the list while iterating it would silently skip elements.
        for v in list(self.road.vehicles):
            if v is not self.vehicle and np.linalg.norm(v.position - self.vehicle.position) < 20:
                self.road.vehicles.remove(v)

    def compute_reward(self, achieved_goal: np.ndarray, desired_goal: np.ndarray, info: dict, p: float = 0.5) -> float:
        """
        Proximity to the goal is rewarded

        We use a weighted p-norm

        :param achieved_goal: the goal that was achieved
        :param desired_goal: the goal that was desired
        :param info: any supplementary information
        :param p: the Lp^p norm used in the reward. Use p<1 to have high kurtosis for rewards in [0, 1]
        :return: the corresponding reward
        """
        return super().compute_reward(achieved_goal, desired_goal, info, p) + \
            self.config["collision_reward"] * self.vehicle.crashed
class SummonEnvActionRepeat(SummonEnv):
    # Variant of SummonEnv with a lower policy frequency, so each chosen
    # action is effectively held for more simulation steps.
    def __init__(self):
        super().__init__()
        self.configure({"policy_frequency": 1})
# Expose the environments through gym's registry.
# NOTE(review): entry_point points at 'highway_env.envs', not this module's
# own package — confirm these classes are re-exported there.
register(
    id='summon-v0',
    entry_point='highway_env.envs:SummonEnv',
    max_episode_steps=100
)
register(
    id='summon-ActionRepeat-v0',
    entry_point='highway_env.envs:SummonEnvActionRepeat',
    max_episode_steps=20
)
/MatchZoo-2.2.0.tar.gz/MatchZoo-2.2.0/matchzoo/contrib/models/match_srnn.py |
import keras
from matchzoo.contrib.layers import MatchingTensorLayer
from matchzoo.contrib.layers import SpatialGRU
from matchzoo.engine import hyper_spaces
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
class MatchSRNN(BaseModel):
    """
    Match-SRNN Model.

    Examples:
        >>> model = MatchSRNN()
        >>> model.params['channels'] = 4
        >>> model.params['units'] = 10
        >>> model.params['dropout_rate'] = 0.0
        >>> model.params['direction'] = 'lt'
        >>> model.guess_and_fill_missing_params(verbose=0)
        >>> model.build()
    """

    @classmethod
    def get_default_params(cls) -> ParamTable:
        """:return: model default parameters."""
        params = super().get_default_params(with_embedding=True)
        params.add(Param(name='channels', value=4,
                         desc="Number of word interaction tensor channels"))
        params.add(Param(name='units', value=10,
                         desc="Number of SpatialGRU units"))
        params.add(Param(name='direction', value='lt',
                         desc="Direction of SpatialGRU scanning"))
        params.add(Param(
            name='dropout_rate', value=0.0,
            hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
                                              q=0.01),
            desc="The dropout rate."
        ))
        return params

    def build(self):
        """
        Build model structure.

        Match-SRNN: Modeling the Recursive Matching Structure
        with Spatial RNN
        """
        # Scalar dimensions referenced here:
        #   B = batch size (number of sequences)
        #   D = embedding size
        #   L = `input_left` sequence length
        #   R = `input_right` sequence length
        #   C = number of channels

        # Left input and right input.
        # query = [B, L]
        # doc = [B, R]
        query, doc = self._make_inputs()

        # Process left and right input.
        # embed_query = [B, L, D]
        # embed_doc = [B, R, D]
        embedding = self._make_embedding_layer()
        embed_query = embedding(query)
        embed_doc = embedding(doc)

        # Get matching tensor
        # matching_tensor = [B, C, L, R]
        matching_tensor_layer = MatchingTensorLayer(
            channels=self._params['channels'])
        matching_tensor = matching_tensor_layer([embed_query, embed_doc])

        # Apply spatial GRU to the word level interaction tensor
        # h_ij = [B, U]
        spatial_gru = SpatialGRU(
            units=self._params['units'],
            direction=self._params['direction'])
        h_ij = spatial_gru(matching_tensor)

        # Apply Dropout
        x = keras.layers.Dropout(
            rate=self._params['dropout_rate'])(h_ij)

        # Make output layer
        x_out = self._make_output_layer()(x)
        self._backend = keras.Model(inputs=[query, doc], outputs=x_out)
/Adjector-1.0b1.tar.gz/Adjector-1.0b1/adjector/forms/validators.py |
import logging, re, time
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from paste.deploy.converters import asbool
from tw.forms.validators import FancyValidator, FormValidator, Invalid, UnicodeString, Wrapper
from adjector.core.conf import conf
# Validator that coerces incoming values using Paste's ``asbool``.
AsBool = Wrapper(to_python=asbool)
log = logging.getLogger(__name__)
class DateTime(FancyValidator):
    # Parses a user-entered date string into a timezone-aware datetime
    # (localized with conf.timezone).  Accepts progressively coarser
    # formats; with end_interval=True a coarse value is moved to the *end*
    # of its period (e.g. '2009' -> 2009-12-31 23:59:59) instead of the
    # start.  Python 2 syntax ("except ValueError, e") — keep as-is.
    strip = True
    end_interval = False
    messages = {
        'invalidDate': 'Enter a valid date of the form YYYY-MM-DD HH:MM:SS. You may leave off anything but the year.'
    }
    def _to_python(self, value, state):
        # Most specific format first; each coarser format is paired with
        # the offset that moves the result to the end of that period.
        formats = ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M', '%Y-%m-%d', '%Y-%m', '%Y']
        add_if_end = [None, relativedelta(seconds=59), relativedelta(days=1, seconds=-1),
                      relativedelta(months=1, seconds=-1), relativedelta(years=1, seconds=-1)]
        for format, aie in zip(formats, add_if_end):
            try:
                dt = datetime(*(time.strptime(value, format)[0:6]))
                if self.end_interval and aie:
                    dt += aie
                return conf.timezone.localize(dt)
            except ValueError, e:
                log.debug('Validation error %s' % e)
        # No format matched: reject the value.
        raise Invalid(self.message('invalidDate', state), value, state)
    def _from_python(self, value, state):
        return value.strftime('%Y-%m-%d %H:%M:%S')
class SimpleString(UnicodeString):
    # Unicode string restricted to identifier-like characters.
    messages = {
        'invalidString': 'May only contain alphanumerics, underscores, periods, and dashes.'
    }
    def validate_python(self, value, state):
        # Run the base unicode checks first, then reject any character
        # outside the allowed set [A-Za-z0-9_.-].
        UnicodeString.validate_python(self, value, state)
        forbidden = re.search(r'[^\w\-.]', value)
        if forbidden is not None:
            raise Invalid(self.message('invalidString', state), value, state)
# From Siafoo
# From Siafoo
class UniqueValue(FormValidator):
    # Form-level validator that checks a changed value for uniqueness via
    # the supplied ``unique_test`` callable.  Python 2 idioms
    # (dict.has_key, list .sort()) — keep as-is.
    validate_partial_form = True
    value_field = ''
    previous_value_field = ''
    unique_test = None # A function that gets passed the new value to test for uniqueness. should return trueish or falsish
    not_empty = True
    __unpackargs__ = ('unique_test', 'value_field', 'previous_value_field')
    messages = {
        'notUnique': 'You must enter a unique value'
    }
    def validate_partial(self, field_dict, state):
        # Only run once both configured fields are present in the submission.
        for name in [self.value_field, self.previous_value_field]:
            if name and not field_dict.has_key(name):
                return
        self.validate_python(field_dict, state)
    def validate_python(self, field_dict, state):
        FormValidator.validate_python(self, field_dict, state)
        value = field_dict.get(self.value_field)
        previous_value = field_dict.get(self.previous_value_field)
        # NOTE(review): the first clause reads "not self.not_empty or
        # value == ''", which *skips* the uniqueness test whenever
        # not_empty is True and the value is non-empty — that looks
        # inverted; confirm against callers before changing.
        if (not self.not_empty or value == '') and value != previous_value and not self.unique_test(value):
            errors = {self.value_field: self.message('notUnique', state)}
            error_list = errors.items()
            error_list.sort()
            error_message = '<br>\n'.join(
                ['%s: %s' % (name, value) for name, value in error_list])
            raise Invalid(error_message, field_dict, state, error_dict=errors)
/Mock.GPIO-0.1.8-py3-none-any.whl/Mock/GPIO.py | import time
import logging
import os
logger = logging.getLogger(__name__)
# Map the LOG_LEVEL environment variable onto the logger's level.
log_level = os.getenv('LOG_LEVEL')
if log_level is not None:
    # An elif chain is required here: with independent ifs, the trailing
    # else belonged only to the final "Critical" comparison, so every
    # explicitly chosen level except Critical was immediately overridden
    # back to ERROR.
    if log_level == "Info":
        logger.setLevel(logging.INFO)
    elif log_level == "Debug":
        logger.setLevel(logging.DEBUG)
    elif log_level == "Warning":
        logger.setLevel(logging.WARNING)
    elif log_level == "Error":
        logger.setLevel(logging.ERROR)
    elif log_level == "Critical":
        logger.setLevel(logging.CRITICAL)
    else:
        # Unknown value: fall back to ERROR.
        logger.setLevel(logging.ERROR)
stream_formatter = logging.Formatter('%(asctime)s:%(levelname)s: %(message)s')
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(stream_formatter)
logger.addHandler(stream_handler)
# Pin numbering modes (see setmode()).
BCM = 11
BOARD = 10
# Edge-detection selectors.
BOTH = 33
FALLING = 32
HARD_PWM = 43
HIGH = 1
I2C = 42
IN = 1
LOW = 0
OUT = 0
# Pull up/down resistor states.
PUD_DOWN = 21
PUD_OFF = 20
PUD_UP = 22
RISING = 31
RPI_INFO = {'MANUFACTURER': 'Sony', 'P1_REVISION': 3, 'PROCESSOR': 'BCM2837', 'RAM': '1G', 'REVISION': 'a020d3', 'TYPE': 'Pi 3 Model B+'}
RPI_REVISION = 3
SERIAL = 40
SPI = 41
UNKNOWN = -1
VERSION = '0.7.0'
# Currently selected numbering mode (updated by setmode()).
_mode = 0
# Registry of configured channels, keyed by channel number (see setup()).
channel_config = {}
# Flags.
setModeDone = False
class Channel:
    """Record of how a single GPIO channel was configured via setup()."""
    def __init__(self, channel, direction, initial=0, pull_up_down=PUD_OFF):
        # ``chanel`` (sic) is kept because existing code reads the
        # misspelled attribute; ``channel`` is a correctly spelled alias.
        self.chanel = channel
        self.channel = channel
        self.direction = direction
        self.initial = initial
        self.pull_up_down = pull_up_down
#GPIO LIBRARY Functions
def setmode(mode):
    """
    Set up numbering mode to use for channels.
    BOARD - Use Raspberry Pi board numbers
    BCM   - Use Broadcom GPIO 00..nn numbers
    """
    # ``global`` is required: previously these assignments created locals,
    # so the module-level state never changed and getmode() always
    # returned the initial value.
    global _mode, setModeDone
    # Artificial delay kept from the original implementation.
    time.sleep(1)
    if mode == BCM or mode == BOARD:
        _mode = mode
        setModeDone = True
    else:
        setModeDone = False
def getmode():
    """
    Get numbering mode used for channel numbers.
    Returns BOARD, BCM or None
    """
    # NOTE(review): actually returns the module-level ``_mode``, whose
    # initial value is 0 (not None as documented) — confirm callers
    # handle the 0 default.
    return _mode
def setwarnings(flag):
    """
    Enable or disable warning messages
    """
    # Mock implementation: only logs the request.
    # ("Warnings" was previously misspelled "Warings" in the log message.)
    logger.info("Set Warnings as {}".format(flag))
def setup(channel, direction, initial=0,pull_up_down=PUD_OFF):
    """
    Set up a GPIO channel or list of channels with a direction and (optional) pull/up down control
    channel        - either board pin number or BCM number depending on which mode is set.
    direction      - IN or OUT
    [pull_up_down] - PUD_OFF (default), PUD_UP or PUD_DOWN
    [initial]      - Initial value for an output channel
    """
    logger.info("setup channel : {} as {} with intial :{} and pull_up_dowm {}".format(channel,direction,initial,pull_up_down))
    global channel_config
    # Record the configuration so later calls (e.g. gpio_function) can
    # inspect how this channel was set up.
    channel_config[channel] = Channel(channel, direction, initial, pull_up_down)
def output(channel, value):
    """
    Output to a GPIO channel or list of channels (mock: only logs the call)
    channel - either board pin number or BCM number depending on which mode is set.
    value   - 0/1 or False/True or LOW/HIGH
    """
    logger.info("output channel : {} with value : {}".format(channel, value))
def input(channel):
    """
    Input from a GPIO channel.  Returns HIGH=1=True or LOW=0=False
    channel - either board pin number or BCM number depending on which mode is set.
    (Mock implementation: only logs the call and returns None.)
    """
    logger.info("reading from chanel {}".format(channel))
def wait_for_edge(channel,edge,bouncetime,timeout):
    """
    Wait for an edge. Returns the channel number or None on timeout.
    channel      - either board pin number or BCM number depending on which mode is set.
    edge         - RISING, FALLING or BOTH
    [bouncetime] - time allowed between calls to allow for switchbounce
    [timeout]    - timeout in ms
    (Mock implementation: only logs the call, does not block.)
    """
    logger.info("waiting for edge : {} on channel : {} with bounce time : {} and Timeout :{}".format(edge,channel,bouncetime,timeout))
def add_event_detect(channel,edge,callback,bouncetime):
    """
    Enable edge detection events for a particular GPIO channel.
    channel      - either board pin number or BCM number depending on which mode is set.
    edge         - RISING, FALLING or BOTH
    [callback]   - A callback function for the event (optional)
    [bouncetime] - Switch bounce timeout in ms for callback
    (Mock implementation: only logs the call; no callback is ever fired.)
    """
    logger.info("Event detect added for edge : {} on channel : {} with bouce time : {} and callback {}".format(edge,channel,bouncetime,callback))
def event_detected(channel):
    """
    Returns True if an edge has occurred on a given GPIO. You need to enable edge detection using add_event_detect() first.
    channel - either board pin number or BCM number depending on which mode is set.
    (Mock implementation: only logs the call and returns None.)
    """
    logger.info("Waiting for even detection on channel :{}".format(channel))
def add_event_callback(channel,callback):
    """
    Add a callback for an event already defined using add_event_detect()
    channel  - either board pin number or BCM number depending on which mode is set.
    callback - a callback function
    (Mock implementation: only logs the call.)
    """
    logger.info("Event Calback : {} added for channel : {}".format(callback,channel))
def remove_event_detect(channel):
    """
    Remove edge detection for a particular GPIO channel
    channel - either board pin number or BCM number depending on which mode is set.
    (Mock implementation: only logs the call.)
    """
    logger.info("Event Detect Removed for channel : {}".format(channel))
def gpio_function(channel):
    """
    Return the current GPIO function (IN, OUT, PWM, SERIAL, I2C, SPI)
    channel - either board pin number or BCM number depending on which mode is set.
    (Mock implementation: only logs the direction and returns None.
    NOTE(review): raises KeyError when the channel was never setup() —
    confirm whether callers guard against that.)
    """
    logger.info("GPIO function of Channel : {} is {}".format(channel,channel_config[channel].direction))
class PWM:
    """Mock software-PWM handle: every operation is only logged."""

    def __init__(self, channel, frequency):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        # ``chanel`` (sic) misspelling kept: external code may read it.
        self.chanel = channel
        self.frequency = frequency
        self.dutycycle = 0
        global channel_config
        channel_config[channel] = Channel(channel, PWM,)
        logger.info("Initialized PWM for Channel : {} at frequency : {}".format(channel, frequency))

    def start(self, dutycycle):
        """
        Start software PWM
        dutycycle - the duty cycle (0.0 to 100.0)
        """
        self.dutycycle = dutycycle
        logger.info("start pwm on channel : {} with Duty cycle : {}".format(self.chanel, dutycycle))

    def ChangeFrequency(self, frequency):
        """
        Change the frequency
        frequency - frequency in Hz (freq > 1.0)
        """
        # ("Frequency" was previously misspelled "Freqency" in the log.)
        logger.info("Frequency Changed for channel : {} from : {} -> to : {}".format(self.chanel, self.frequency, frequency))
        self.frequency = frequency

    def ChangeDutyCycle(self, dutycycle):
        """
        Change the duty cycle
        dutycycle - between 0.0 and 100.0
        """
        # Log first, then assign: previously the attribute was overwritten
        # before logging, so "from X -> to Y" always showed X == Y.
        logger.info("Dutycycle Changed for channel : {} from : {} -> to : {}".format(self.chanel, self.dutycycle, dutycycle))
        self.dutycycle = dutycycle

    def stop(self):
        """Stop PWM generation (mock: log only)."""
        logger.info("Stop pwm on channel : {} with Duty cycle : {}".format(self.chanel, self.dutycycle))
def cleanup(channel=None):
    """
    Clean up by resetting all GPIO channels that have been used by this program to INPUT with no pullup/pulldown and no event detection
    [channel] - individual channel or list/tuple of channels to clean up. Default - clean every channel that has been used.
    """
    # Mock implementation: nothing is reset, the request is only logged.
    if channel is None:
        logger.info("Cleaning Up all channels")
    else:
        logger.info("Cleaning Up Channel : {}".format(channel))
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/specific_report_response_py3.py |
from msrest.serialization import Model
class SpecificReportResponse(Model):
    """SpecificReportResponse.

    :param specific_report_id: Unique identifier for the report
    :type specific_report_id: int
    :param base_report:
    :type base_report: ~energycap.sdk.models.ReportChild
    :param specific_report_code: Report code
    :type specific_report_code: str
    :param specific_report_info: Report name
    :type specific_report_info: str
    :param last_viewed: Last time this report was run
    :type last_viewed: datetime
    :param recommended: Indicates if this is a recommended report
    :type recommended: bool
    :param report_category:
    :type report_category: ~energycap.sdk.models.ReportCategoryChild
    :param description: The report description
    :type description: str
    :param visible: Indicates if this report is visible to the current user
    :type visible: bool
    :param shared: Indicates if this report is shared
    :type shared: bool
    :param user:
    :type user: ~energycap.sdk.models.UserChild
    :param subscription:
    :type subscription: ~energycap.sdk.models.ReportSubscriptionChild
    :param related_reports: List of reports related to this report
    :type related_reports: list[~energycap.sdk.models.RelatedReportResponse]
    """

    # Maps Python attribute names onto wire-format keys and msrest types.
    _attribute_map = {
        'specific_report_id': {'key': 'specificReportId', 'type': 'int'},
        'base_report': {'key': 'baseReport', 'type': 'ReportChild'},
        'specific_report_code': {'key': 'specificReportCode', 'type': 'str'},
        'specific_report_info': {'key': 'specificReportInfo', 'type': 'str'},
        'last_viewed': {'key': 'lastViewed', 'type': 'iso-8601'},
        'recommended': {'key': 'recommended', 'type': 'bool'},
        'report_category': {'key': 'reportCategory', 'type': 'ReportCategoryChild'},
        'description': {'key': 'description', 'type': 'str'},
        'visible': {'key': 'visible', 'type': 'bool'},
        'shared': {'key': 'shared', 'type': 'bool'},
        'user': {'key': 'user', 'type': 'UserChild'},
        'subscription': {'key': 'subscription', 'type': 'ReportSubscriptionChild'},
        'related_reports': {'key': 'relatedReports', 'type': '[RelatedReportResponse]'},
    }

    def __init__(self, *, specific_report_id: int=None, base_report=None, specific_report_code: str=None, specific_report_info: str=None, last_viewed=None, recommended: bool=None, report_category=None, description: str=None, visible: bool=None, shared: bool=None, user=None, subscription=None, related_reports=None, **kwargs) -> None:
        super(SpecificReportResponse, self).__init__(**kwargs)
        self.specific_report_id = specific_report_id
        self.base_report = base_report
        self.specific_report_code = specific_report_code
        self.specific_report_info = specific_report_info
        self.last_viewed = last_viewed
        self.recommended = recommended
        self.report_category = report_category
        self.description = description
        self.visible = visible
        self.shared = shared
        self.user = user
        self.subscription = subscription
        self.related_reports = related_reports
/Marcellus-1.1.3.tar.gz/Marcellus-1.1.3/marcellus/postgres/search.py |
from marcellus.dataset import DataSet
from sqlalchemy import Table, MetaData, select
from sqlalchemy.sql.expression import and_, Select, or_, alias
from sqlalchemy.types import DATE, TIME, DATETIME, TIMESTAMP, INTEGER, NUMERIC, BOOLEAN, BIGINT
import re
import datetime as dt
# Symbolic date constants usable inside {...} search expressions
# (see Busqueda.process_date_constants).
_CONSTANT_DATE_TODAY = 'today'
_CONSTANT_DATE_START_WEEK = 'start_week'
_CONSTANT_DATE_END_WEEK = 'end_week'
_CONSTANT_DATE_START_MONTH = 'start_month'
_CONSTANT_DATE_END_MONTH = 'end_month'
_CONSTANT_DATE_START_YEAR = 'start_year'
_CONSTANT_DATE_END_YEAR = 'end_year'
# Offset units accepted after a +/- quantity (day/week/month/year).
_CONSTANT_DAY = 'd'
_CONSTANT_WEEK = 'w'
_CONSTANT_MONTH = 'm'
_CONSTANT_YEAR = 'y'
class Busqueda(object):
def __init__(self, tabla, columnas_trans=None, strtodatef=None):
self.tabla = tabla
self.cols = []
self.types = []
for col in self.tabla.columns:
self.cols.append(col.name)
self.types.append(col.type)
self.cache_campo = {}
# string to date conversion function
self.strtodatef = strtodatef
if not columnas_trans:
self.cols_trans = [re.sub(r'[^a-z0-9]*', '', self.remove_accents(col))
for col in self.cols]
else:
self.cols_trans = columnas_trans
def no_accents(self, texto):
resultado = texto
resultado = re.sub(r'(a|á|à|ä)', '(a|á|à|ä)', resultado)
resultado = re.sub(r'(e|é|è|ë)', '(e|é|è|ë)', resultado)
resultado = re.sub(r'(i|í|ì|ï)', '(i|í|ì|ï)', resultado)
resultado = re.sub(r'(o|ó|ò|ö)', '(o|ó|ò|ö)', resultado)
resultado = re.sub(r'(u|ú|ù|ü)', '(u|ú|ù|ü)', resultado)
return resultado
    def remove_accents(self, texto):
        # Lower-case ``texto`` and strip Spanish accents.
        # NOTE(review): encodes to a UTF-8 byte string first and replaces
        # the multi-byte accent sequences — this is Python 2 specific;
        # under Python 3, bytes.replace() with str arguments raises
        # TypeError.  Confirm the runtime before porting.
        texto = texto.encode('utf-8')
        return texto.lower(). \
            replace('á', 'a'). \
            replace('é', 'e'). \
            replace('í', 'i'). \
            replace('ó', 'o'). \
            replace('ú', 'u'). \
            replace('ü', 'u')
def get_col(self, campo):
if campo.lower() in self.cache_campo:
return self.cache_campo[campo.lower()]
else:
col = None
for ct, c in zip(self.cols_trans, self.cols):
if ct.startswith(campo.lower()):
if not col or col['length'] > len(ct):
col = dict(name=c, length=len(ct))
if col:
self.cache_campo[campo.lower()] = col['name']
return col['name']
else:
return None
    def process_date_constants(self, termino):
        # Expand "{constant [+/-N unit]}" expressions (e.g. "{today - 2 w}")
        # inside ``termino`` into concrete YYYY-MM-DD dates.  Unrecognized
        # constants are left untouched.
        def _sub(m):
            # m.group(1)=constant, (3)=sign, (4)=quantity, (5)=unit letter.
            cons = m.group(1)
            s = m.group(3)
            q = int(m.group(4) or 0)
            t = m.group(5) or _CONSTANT_DAY
            r = None
            if cons == _CONSTANT_DATE_TODAY:
                r = dt.date.today()
            elif cons == _CONSTANT_DATE_START_WEEK:
                wd = dt.date.today().weekday()
                r = dt.date.today() - dt.timedelta(days=wd)
            elif cons == _CONSTANT_DATE_END_WEEK:
                wd = 6 - dt.date.today().weekday()
                r = dt.date.today() + dt.timedelta(days=wd)
            elif cons == _CONSTANT_DATE_START_MONTH:
                today = dt.date.today()
                r = dt.date(today.year, today.month, 1)
            elif cons == _CONSTANT_DATE_END_MONTH:
                # NOTE(review): the +30-day heuristic can skip a month for
                # dates near the end of long months (e.g. Jan 30/31) —
                # confirm whether that edge case matters here.
                today = dt.date.today()
                next_month = today + dt.timedelta(days=30)
                r = dt.date(next_month.year, next_month.month, 1) - dt.timedelta(days=1)
            elif cons == _CONSTANT_DATE_START_YEAR:
                today = dt.date.today()
                r = dt.date(today.year, 1, 1)
            elif cons == _CONSTANT_DATE_END_YEAR:
                today = dt.date.today()
                r = dt.date(today.year, 12, 31)
            if r:
                # Apply the optional +/-N day/week/month/year offset.
                if s and q:
                    if t == _CONSTANT_DAY:
                        if s == '-':
                            n = -q
                        else:
                            n = q
                        r = r + dt.timedelta(days=n)
                    elif t == _CONSTANT_WEEK:
                        if s == '-':
                            n = -q * 7
                        else:
                            n = q * 7
                        r = r + dt.timedelta(days=n)
                    elif t == _CONSTANT_MONTH:
                        r_ = r
                        if s == '-':
                            n = -1
                        else:
                            n = +1
                        def _next_month(d, inc):
                            # First day of the month adjacent to d's month.
                            year = d.year
                            month = 1
                            if inc > 0:
                                if d.month < 12:
                                    month = d.month + inc
                                elif d.month == 12:
                                    month = 1
                                    year += 1
                            else:
                                if d.month > 1:
                                    month = d.month + inc
                                else:
                                    month = 12
                                    year -= 1
                            return dt.date(year, month, 1)
                        # Step one month at a time, clamping end-of-month
                        # constants to the last day of each stepped month.
                        i = 0
                        while abs(i) != q:
                            next_month = _next_month(r_, n)
                            if cons == _CONSTANT_DATE_END_MONTH:
                                day = (_next_month(next_month, 1) - dt.timedelta(days=1)).day
                            else:
                                day = r.day
                            r_ = dt.date(next_month.year, next_month.month, day)
                            i += n
                        r = r_
                    elif t == _CONSTANT_YEAR:
                        if s == '-':
                            n = -q
                        else:
                            n = +q
                        r = dt.date(r.year + n, r.month, r.day)
                return r.strftime('%Y-%m-%d')
            else:
                # Unknown constant: keep the original text unchanged.
                return m.group(0)
        return re.sub(r'\{\s*(\w+)(\s+([\+\-])\s*(\d+)\s*(\w?))?\s*}', _sub, termino)
def _get_date(self, s):
if self.strtodatef:
try:
return self.strtodatef(s).strftime('%Y-%m-%d')
except:
s_ = self.process_date_constants(s)
if s != s_:
return s_
return None
else:
try:
return dt.datetime.strptime(s, '%d/%m/%Y').strftime('%Y-%m-%d')
except ValueError:
s_ = self.process_date_constants(s)
if s != s_:
return s_
return None
def _get_time(self, s):
m_time = re.search(r'^\d{1,2}:\d{1,2}(:\d{1,2})?$', s)
if m_time:
return s
def _get_datetime(self, s):
m_datetime = re.search(r'^\d{1,2}/\d{1,2}/\d{4}\s+\d{1,2}:\d{1,2}(:\d{1,2})?$', s)
if m_datetime:
fmt = '%d/%m/%Y %H:%M'
if m_datetime.group(1):
fmt += ':%S'
try:
return dt.datetime.strptime(s, fmt).strftime('%Y-%m-%d %H:%M:%S')
except ValueError:
return None
def _get_number(self, s):
m_number = re.search(r'^\d+(\.\d+)?$', s)
if m_number:
return s
    def process(self, s):
        """Translate the search string ``s`` into raw SQL fragments.

        Term syntax (comma separated):
          - bare words        -> free-text search across all searchable columns
          - ``#field``        -> field is set / TRUE
          - ``!field``        -> field is empty / FALSE
          - ``+field``        -> ORDER BY field ASC
          - ``-field``        -> ORDER BY field DESC
          - ``field <op> v``  -> comparison, op in =, <>, ==, !=, <, <=, >, >=

        Returns a ``(filters_sql, order_by_sql)`` pair of unicode SQL strings
        (each ``None`` when no corresponding term was found).

        NOTE(review): operands are interpolated directly into SQL text rather
        than bound as parameters — this relies on the tokenizing regex below
        to reject dangerous input; confirm before exposing to untrusted users.
        This is Python 2 code (``str.encode``/``str.decode`` round-trips).
        """
        filters = []
        order_by = []
        # Tokenize into (prefix, field, operator, operand) tuples.
        terminos = re.findall(r'(!|#|\+|\-)?(\w[\w\s]+\w?|\w)\s*(<>|<=|>=|==|!=|=|<|>)?\s*("[^"]+"|\d+\.\d+|\d+|\w[\w\s]+\w?)?,?', s)
        for termino in terminos:
            operator0 = termino[0]
            field = termino[1].strip()
            operator = termino[2].strip()
            # remove " at the beginning and the end
            operand = termino[3]
            m_operand = re.search(r'^"([^"]+)"$', operand)
            if m_operand:
                operand = m_operand.group(1)
            c = None
            if operator == '' and operator0 == '':
                for operand in field.split():
                    # search every column for the corresponding term (free text)
                    filters_ = []
                    for col in self.tabla.columns:
                        col_name = col.name.encode('utf-8')
                        # surrogate-key columns are never searched
                        if col_name == 'id' or col_name.startswith('id_'):
                            continue
                        filter_ = []
                        if isinstance(col.type, DATE) or isinstance(col.type, TIME) or \
                            isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            filter_.append('"{0}" IS NOT NULL'.format(col_name))
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or \
                            isinstance(col.type, BIGINT):
                            filter_.append('"{0}" IS NOT NULL'.format(col_name))
                        else:
                            filter_.append('COALESCE(CAST("{0}" as Text), \'\') != \'\''.format(col_name))
                        # accent-insensitive, case-insensitive substring match
                        args_ = (col_name, self.no_accents(operand))
                        filter_.append("UPPER(CAST(\"{0}\" as Text)) SIMILAR TO UPPER('%{1}%')".format(*args_))
                        filters_.append('({0})'.format(' AND '.join(filter_)))
                    filters.append('({0})'.format('\nOR '.join(filters_)))
            else:
                c = self.get_col(field)
                if not c:
                    continue
                col = self.tabla.columns[c]
                if operator0 == '#':
                    # "is set" check, per column type
                    # DATE, TIME, DATETIME/TIMESTAMP
                    if isinstance(col.type, DATE) or isinstance(col.type, TIME) or \
                        isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                        filters.append('"{0}" IS NOT NULL'.format(c.encode('utf-8')))
                    # INTEGER, NUMERIC, BIGINT
                    elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                        filters.append('"{0}" IS NOT NULL'.format(c.encode('utf-8')))
                    # BOOLEAN
                    elif isinstance(col.type, BOOLEAN):
                        filters.append('"{0}" = TRUE'.format(c.encode('utf-8')))
                    else:
                        filters.append('TRIM(COALESCE(CAST("{0}" as Text), \'\')) != \'\''.format(c.encode('utf-8')))
                elif operator0 == '!':
                    # "is empty" check, per column type
                    # DATE, TIME, DATETIME/TIMESTAMP
                    if isinstance(col.type, DATE) or isinstance(col.type, TIME) or \
                        isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                        filters.append('"{0}" IS NULL'.format(c.encode('utf-8')))
                    # INTEGER, NUMERIC, BIGINT
                    elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                        filters.append('"{0}" IS NULL'.format(c.encode('utf-8')))
                    # BOOLEAN
                    elif isinstance(col.type, BOOLEAN):
                        filters.append('"{0}" = FALSE'.format(c.encode('utf-8')))
                    else:
                        filters.append('TRIM(COALESCE(CAST("{0}" as Text), \'\')) = \'\''.format(c.encode('utf-8')))
                # ORDER BY: ASC
                elif operator0 == '+':
                    order_by.append('"{0}" ASC'.format(c.encode('utf-8')))
                # ORDER BY: DESC
                elif operator0 == '-':
                    order_by.append('"{0}" DESC'.format(c.encode('utf-8')))
                elif operator != '':
                    # operator = <, >, <=, >=, =, <>, ==, !=
                    # CONTAIN (=)
                    if operator == '=':
                        # DATE
                        if isinstance(col.type, DATE):
                            operand_ = self._get_date(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # TIME
                        elif isinstance(col.type, TIME):
                            operand_ = self._get_time(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # DATETIME/TIMESTAMP
                        elif isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            operand_ = self._get_datetime(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # INTEGER, NUMERIC, BIGINT
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                            operand_ = self._get_number(operand)
                            if operand_:
                                # NOTE: _get_number returns its input, so
                                # ``operand`` equals ``operand_`` here.
                                filters.append('"{0}" = {1}'.format(c.encode('utf-8'), operand))
                        else:
                            # text columns: accent/case-insensitive substring
                            args_ = (c.encode('utf-8'), self.no_accents(operand))
                            filters.append("UPPER(CAST(\"{0}\" as Text)) SIMILAR TO UPPER('%{1}%')".format(*args_))
                    # NOT CONTAIN (<>)
                    elif operator == '<>':
                        # DATE
                        if isinstance(col.type, DATE):
                            operand_ = self._get_date(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # TIME
                        elif isinstance(col.type, TIME):
                            operand_ = self._get_time(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # DATETIME/TIMESTAMP
                        elif isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            operand_ = self._get_datetime(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # INTEGER, NUMERIC, BIGINT
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                            operand_ = self._get_number(operand)
                            if operand_:
                                filters.append('"{0}" != {1}'.format(c.encode('utf-8'), operand))
                        else:
                            args_ = (c.encode('utf-8'), self.no_accents(operand))
                            filters.append("UPPER(CAST(\"{0}\" as Text)) NOT SIMILAR TO UPPER('%{1}%')".format(*args_))
                    # EQUAL TO (==)
                    elif operator == '==':
                        # DATE
                        if isinstance(col.type, DATE):
                            operand_ = self._get_date(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # TIME
                        elif isinstance(col.type, TIME):
                            operand_ = self._get_time(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # DATETIME/TIMESTAMP
                        elif isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            operand_ = self._get_datetime(operand)
                            if operand_:
                                filters.append('"{0}" = \'{1}\''.format(c.encode('utf-8'), operand_))
                        # INTEGER, NUMERIC, BIGINT
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                            operand_ = self._get_number(operand)
                            if operand_:
                                filters.append('"{0}" = {1}'.format(c.encode('utf-8'), operand))
                        else:
                            # text columns: exact match (case-insensitive, trimmed)
                            args_ = (c.encode('utf-8'), operand)
                            filters.append('UPPER(TRIM(COALESCE(CAST("{0}" as Text), \'\'))) = UPPER(\'{1}\')'.format(*args_))
                    # NOT EQUAL TO (!=)
                    elif operator == '!=':
                        # DATE
                        if isinstance(col.type, DATE):
                            operand_ = self._get_date(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # TIME
                        elif isinstance(col.type, TIME):
                            operand_ = self._get_time(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # DATETIME/TIMESTAMP
                        elif isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            operand_ = self._get_datetime(operand)
                            if operand_:
                                filters.append('"{0}" != \'{1}\''.format(c.encode('utf-8'), operand_))
                        # INTEGER, NUMERIC, BIGINT
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                            operand_ = self._get_number(operand)
                            if operand_:
                                filters.append('"{0}" != {1}'.format(c.encode('utf-8'), operand))
                        else:
                            args_ = (c.encode('utf-8'), operand)
                            filters.append('UPPER(TRIM(COALESCE(CAST("{0}" as Text), \'\'))) != UPPER(\'{1}\')'.format(*args_))
                    # LESS THAN (<), LESS THAN OR EQUAL TO (<=), GREATER THAN (>), GREATER THAN OR EQUAL TO (>=)
                    elif operator in ['<', '<=', '>', '>=']:
                        # no text-column fallback here: ordering comparisons
                        # apply only to temporal and numeric columns
                        # DATE
                        if isinstance(col.type, DATE):
                            operand_ = self._get_date(operand)
                            if operand_:
                                filters.append('"{0}" {1} \'{2}\''.format(c.encode('utf-8'), operator, operand_))
                        # TIME
                        elif isinstance(col.type, TIME):
                            operand_ = self._get_time(operand)
                            if operand_:
                                filters.append('"{0}" {1} \'{2}\''.format(c.encode('utf-8'), operator, operand_))
                        # DATETIME/TIMESTAMP
                        elif isinstance(col.type, DATETIME) or isinstance(col.type, TIMESTAMP):
                            operand_ = self._get_datetime(operand)
                            if operand_:
                                filters.append('"{0}" {1} \'{2}\''.format(c.encode('utf-8'), operator, operand_))
                        elif isinstance(col.type, INTEGER) or isinstance(col.type, NUMERIC) or isinstance(col.type, BIGINT):
                            operand_ = self._get_number(operand)
                            if operand_:
                                filters.append('"{0}" {1} \'{2}\''.format(c.encode('utf-8'), operator, operand))
        # join the collected fragments into final SQL strings (or None)
        filters_sql = None
        if len(filters) > 0:
            filters_sql = ('\nAND '.join(filters)).decode('utf-8')
        order_by_sql = None
        if len(order_by) > 0:
            order_by_sql = (', '.join(order_by)).decode('utf-8')
        return filters_sql, order_by_sql
class Search(object):
    """Builds and executes a filtered/ordered SELECT over one table.

    Wraps a SQLAlchemy ``Table`` (reflected by name) or a ready-made
    ``Select``, accumulates WHERE conditions (``apply_qry``, ``and_``,
    ``or_``, ``apply_filters``) and an ORDER BY clause, then executes via
    ``DataSet.procesar_resultado`` when called.  Python 2 code
    (uses ``unicode``).
    """
    def __init__(self, session, table_name, strtodatef=None):
        """Reflect ``table_name`` (or adopt a ``Select``) on ``session``'s bind.

        strtodatef -- optional custom date parser forwarded to ``Busqueda``.
        """
        self.session = session
        self.table_name = table_name
        self.meta = MetaData(bind=self.session.bind)
        self.strtodatef = strtodatef
        self.sql = None
        self.order = ''
        if isinstance(self.table_name, Select):
            self.tbl = self.table_name
        else:
            self.tbl = Table(self.table_name, self.meta, autoload=True)
        self.from_ = self.tbl
    def apply_qry(self, q):
        """Parse the mini query language string ``q`` and AND it into the query."""
        if q:
            if isinstance(q, unicode):
                q = q.encode('utf-8')
            # process "q"
            qres = Busqueda(self.tbl, strtodatef=self.strtodatef)
            filters_sql, order_sql = qres.process(q)
            # apply search conditions
            self.sql = and_(filters_sql, self.sql)
            # apply order
            if order_sql:
                self.order = order_sql
    def and_(self, *cond):
        """AND the given condition(s) into the accumulated WHERE clause."""
        self.sql = and_(self.sql, *cond)
    def or_(self, cond):
        """OR the given condition into the accumulated WHERE clause."""
        self.sql = or_(self.sql, cond)
    def join(self, *args, **kwargs):
        """Inner-join another selectable into the FROM clause."""
        self.from_ = self.from_.join(*args, **kwargs)
    def outerjoin(self, *args, **kwargs):
        """Left-outer-join another selectable into the FROM clause."""
        self.from_ = self.from_.outerjoin(*args, **kwargs)
    def apply_filters(self, filters):
        """AND a list of filter tuples into the WHERE clause.

        Each filter is (<field>, <value>[, <operator>],) with operator one of
        '!=', '>', '>=', '<', '<=' (anything else means equality), or a
        1-tuple holding a ready-made condition.
        """
        tbl = self.tbl
        # apply filters
        if filters:
            filters_tuple = (self.sql,)
            for f in filters:
                if len(f) > 2:
                    # (<field name>, <field value>, <operator>,)
                    # different
                    if f[2] == '!=':
                        filters_tuple += (tbl.c[f[0]] != f[1],)
                    # greater
                    elif f[2] == '>':
                        filters_tuple += (tbl.c[f[0]] > f[1],)
                    # greater or equal
                    elif f[2] == '>=':
                        filters_tuple += (tbl.c[f[0]] >= f[1],)
                    # less
                    elif f[2] == '<':
                        filters_tuple += (tbl.c[f[0]] < f[1],)
                    # less or equal
                    elif f[2] == '<=':
                        filters_tuple += (tbl.c[f[0]] <= f[1],)
                    # equal (and anything else...)
                    else:
                        filters_tuple += (tbl.c[f[0]] == f[1],)
                elif len(f) == 2:
                    # (<field name>, <field value>,)
                    # equal (by default)
                    filters_tuple += (tbl.c[f[0]] == f[1],)
                elif len(f) == 1:
                    filters_tuple += (f[0],)
            self.sql = and_(*filters_tuple)
    def __call__(self, rp=100, offset=0, collection=None, no_count=False, show_ids=False):
        """
        Execute the accumulated query and return the result set.
        IN
            rp         <int>   rows per page
            offset     <int>   row offset
            collection <tuple> (<child table name>, <child attr>, <parent id>,)
            no_count   <bool> => False
            show_ids   <bool> => False
        OUT
            <DataSet>
        """
        sql = self.sql
        if collection:
            child_table_name = collection[0]
            child_attr = collection[1]
            parent_id = collection[2]
            child_attr_alias = '{0}$'.format(child_attr)
            if child_attr_alias in self.tbl.c:
                sql = and_(sql, self.tbl.c[child_attr_alias] == parent_id)
            else:
                child_table = alias(Table(child_table_name, self.meta, autoload=True))
                self.from_ = self.from_.\
                    join(child_table,
                         and_(child_table.c.id == self.tbl.c.id,
                              child_table.c[child_attr] == parent_id))
        # where
        if isinstance(self.tbl, Select):
            qry = self.tbl.where(sql)
        else:
            qry = select([self.tbl], from_obj=self.from_, whereclause=sql)
        # order by
        if self.order:
            qry = qry.order_by(self.order)
        return DataSet.procesar_resultado(self.session, qry, rp, offset, no_count=no_count, show_ids=show_ids)
/Nitrous-0.9.3-py3-none-any.whl/turbogears/i18n/data/es_UY.py |
# TurboGears i18n locale data for Spanish (Uruguay), es_UY.
# ISO 639 language codes -> Spanish display names.
languages={'gv': u'ga\xe9lico man\xe9s', 'gu': u'goujarat\xed', 'gd': u'ga\xe9lico escoc\xe9s', 'ga': u'irland\xe9s', 'gl': 'gallego', 'la': u'lat\xedn', 'ln': 'lingala', 'lo': 'laosiano', 'tt': u't\xe1taro', 'tr': 'turco', 'ts': 'tsonga', 'lv': 'letonio', 'lt': 'lituano', 'th': u'tailand\xe9s', 'ti': 'tigrinya', 'te': 'telugu', 'haw': 'hawaiano', 'yo': 'yoruba', 'de': u'alem\xe1n', 'da': u'dan\xe9s', 'qu': 'quechua', 'el': 'griego', 'eo': 'esperanto', 'en': u'ingl\xe9s', 'zh': 'chino', 'za': 'zhuang', 'eu': 'vasco', 'et': 'estonio', 'es': u'espa\xf1ol', 'ru': 'ruso', 'ro': 'rumano', 'be': 'bielorruso', 'bg': u'b\xfalgaro', 'uk': 'ucraniano', 'wo': 'uolof', 'bn': u'bengal\xed', 'bo': 'tibetano', 'bh': 'bihari', 'bi': 'bislama', 'br': u'bret\xf3n', 'ja': u'japon\xe9s', 'om': 'oromo', 'root': u'ra\xedz', 'or': 'oriya', 'xh': 'xhosa', 'co': 'corso', 'ca': u'catal\xe1n', 'cy': u'gal\xe9s', 'cs': 'checo', 'ps': 'pashto', 'pt': u'portugu\xe9s', 'tl': 'tagalo', 'pa': u'punjab\xed', 'vi': 'vietnamita', 'pl': 'polaco', 'hy': 'armenio', 'hr': 'croata', 'iu': 'inuktitut', 'hu': u'h\xfangaro', 'hi': 'hindi', 'ha': 'hausa', 'he': 'hebreo', 'mg': 'malgache', 'uz': 'uzbeko', 'ml': 'malayalam', 'mo': 'moldavo', 'mn': 'mongol', 'mi': u'maor\xed', 'ik': 'inupiak', 'mk': 'macedonio', 'ur': 'urdu', 'mt': u'malt\xe9s', 'ms': 'malayo', 'mr': 'marathi', 'ug': 'uigur', 'ta': 'tamil', 'my': 'birmano', 'aa': 'afar', 'af': 'afrikaans', 'sw': 'swahili', 'is': u'island\xe9s', 'am': u'am\xe1rico', 'it': 'italiano', 'sv': 'sueco', 'as': u'asam\xe9s', 'ar': u'\xe1rabe', 'su': u'sundan\xe9s', 'zu': u'zul\xfa', 'az': 'azerbayano', 'ie': 'interlingue', 'id': 'indonesio', 'nl': u'holand\xe9s', 'nn': 'nynorsk noruego', 'no': 'noruego', 'na': 'nauruano', 'nb': 'bokmal noruego', 'ne': u'nepal\xed', 'vo': 'volapuk', 'so': u'somal\xed', 'fr': u'franc\xe9s', 'sm': 'samoano', 'fa': 'farsi', 'fi': u'finland\xe9s', 'sa': u's\xe1nscrito', 'fo': u'fero\xe9s', 'ka': 'georgiano', 'kk': 'kazajo', 'sr': 'serbio', 
'sq': u'alban\xe9s', 'ko': 'coreano', 'kn': 'canara', 'km': 'kmer', 'kl': u'groenland\xe9s', 'sk': 'eslovaco', 'si': u'cingal\xe9s', 'sh': 'serbocroata', 'kw': u'c\xf3rnico', 'ku': 'kurdo', 'sl': 'esloveno', 'ky': 'kirghiz', 'sg': 'sango'}
# ISO 3166 country codes -> Spanish display names ('Fallback' is a locale
# fallback marker used by TurboGears, not a country).
countries={'BD': 'Bangladesh', 'BE': u'B\xe9lgica', 'BF': 'Burkina Faso', 'BG': 'Bulgaria', 'BA': 'Bosnia y Hercegovina', 'BB': 'Barbados', 'WF': 'Wallis y Futuna', 'BM': 'Bermudas', 'BN': u'Brun\xe9i', 'BO': 'Bolivia', 'BH': u'Bahr\xe1in', 'BI': 'Burundi', 'BJ': u'Ben\xedn', 'BT': u'But\xe1n', 'JM': 'Jamaica', 'BV': 'Isla Bouvet', 'BW': 'Botsuana', 'WS': 'Samoa', 'BR': 'Brasil', 'BS': 'Bahamas', 'BY': 'Bielorrusia', 'BZ': 'Belice', 'RU': 'Rusia', 'RW': 'Ruanda', 'TL': 'Timor Oriental', 'RE': u'R\xe9union', 'TM': u'Turkmenist\xe1n', 'TJ': u'Tayikist\xe1n', 'RO': 'Rumania', 'TK': 'Tokelau', 'GW': 'Guinea-Bissau', 'GU': 'Guam', 'GT': 'Guatemala', 'GS': 'Islas Georgia del Sur y Sandwich del Sur', 'GR': 'Grecia', 'GQ': 'Guinea Ecuatorial', 'GP': 'Guadalupe', 'JP': u'Jap\xf3n', 'GY': 'Guyana', 'GF': 'Guayana Francesa', 'GE': 'Georgia', 'GD': 'Granada', 'GB': 'Reino Unido', 'GA': u'Gab\xf3n', 'SV': 'El Salvador', 'GN': 'Guinea', 'GM': 'Gambia', 'GL': 'Groenlandia', 'GI': 'Gibraltar', 'GH': 'Ghana', 'OM': u'Om\xe1n', 'TN': u'T\xfanez', 'JO': 'Jordania', 'SP': 'Serbia', 'HR': 'Croacia', 'HT': u'Hait\xed', 'HU': u'Hungr\xeda', 'HK': u'Hong Kong, Regi\xf3n administrativa especial de China', 'HN': 'Honduras', 'HM': 'Islas Heard y McDonald', 'VE': 'Venezuela', 'PR': 'Puerto Rico', 'PS': 'Territorios Palestinos', 'PW': 'Palau', 'PT': 'Portugal', 'SJ': 'Svalbard y Jan Mayen', 'PY': 'Paraguay', 'IQ': 'Irak', 'PA': u'Panam\xe1', 'PF': 'Polinesia Francesa', 'PG': u'Pap\xfaa Nueva Guinea', 'PE': u'Per\xfa', 'PK': u'Pakist\xe1n', 'PH': 'Filipinas', 'PN': 'Pitcairn', 'PL': 'Polonia', 'PM': u'San Pedro y Miquel\xf3n', 'ZM': 'Zambia', 'EH': u'S\xe1hara Occidental', 'EE': 'Estonia', 'EG': 'Egipto', 'ZA': u'Sud\xe1frica', 'EC': 'Ecuador', 'IT': 'Italia', 'VN': 'Vietnam', 'SB': u'Islas Salom\xf3n', 'ET': u'Etiop\xeda', 'SO': 'Somalia', 'ZW': 'Zimbabue', 'SA': u'Arabia Saud\xed', 'ES': u'Espa\xf1a', 'ER': 'Eritrea', 'MD': 'Moldova', 'MG': 'Madagascar', 'MA': 'Marruecos', 'MC': u'M\xf3naco', 
'UZ': u'Uzbekist\xe1n', 'MM': 'Myanmar', 'ML': u'Mal\xed', 'MO': u'Macao, Regi\xf3n administrativa especial de China', 'MN': 'Mongolia', 'MH': 'Islas Marshall', 'MK': 'Macedonia', 'MU': 'Mauricio', 'MT': 'Malta', 'MW': 'Malaui', 'MV': 'Maldivas', 'MQ': 'Martinica', 'MP': 'Islas Marianas del Norte', 'MS': 'Montserrat', 'MR': 'Mauritania', 'UG': 'Uganda', 'MY': 'Malasia', 'MX': u'M\xe9xico', 'IL': 'Israel', 'FR': 'Francia', 'IO': u'Territorios Brit\xe1nicos del Oc\xe9ano \xcdndico', 'SH': 'Santa Elena', 'FI': 'Finlandia', 'FJ': 'Fidji', 'FK': 'Islas Falkland (Malvinas)', 'FM': 'Micronesia', 'FO': 'Islas Feroe', 'NI': 'Nicaragua', 'NL': u'Pa\xedses Bajos', 'NO': 'Noruega', 'NA': 'Namibia', 'VU': 'Vanuatu', 'NC': 'Nueva Caledonia', 'NE': u'N\xedger', 'NF': 'Isla Norfolk', 'NG': 'Nigeria', 'NZ': 'Nueva Zelanda', 'NP': 'Nepal', 'NR': 'Nauru', 'NU': 'Niue', 'CK': 'Islas Cook', 'CI': 'Costa de Marfil', 'CH': 'Suiza', 'CO': 'Colombia', 'CN': 'China', 'CM': u'Camer\xfan', 'CL': 'Chile', 'CC': 'Islas Cocos (Keeling)', 'CA': u'Canad\xe1', 'CG': 'Congo', 'CF': u'Rep\xfablica Centroafricana', 'CD': u'Rep\xfablica Democr\xe1tica del Congo', 'CZ': u'Rep\xfablica Checa', 'CY': 'Chipre', 'CX': 'Isla de Christmas', 'CR': 'Costa Rica', 'Fallback': 'en', 'CV': 'Cabo Verde', 'CU': 'Cuba', 'SZ': 'Suazilandia', 'SY': 'Siria', 'KG': u'Kirguizist\xe1n', 'KE': 'Kenia', 'SR': 'Suriname', 'KI': 'Kiribati', 'KH': 'Camboya', 'KN': u'San Crist\xf3bal y Nieves', 'KM': 'Comores', 'ST': u'Santo Tom\xe9 y Pr\xedncipe', 'SK': 'Eslovaquia', 'KR': 'Corea del Sur', 'SI': 'Eslovenia', 'KP': 'Corea del Norte', 'KW': 'Kuwait', 'SN': 'Senegal', 'SM': 'San Marino', 'SL': 'Sierra Leona', 'SC': 'Seychelles', 'KZ': u'Kazajist\xe1n', 'KY': u'Islas Caim\xe1n', 'SG': 'Singapur', 'SE': 'Suecia', 'SD': u'Sud\xe1n', 'DO': u'Rep\xfablica Dominicana', 'DM': 'Dominica', 'DJ': 'Yibuti', 'DK': 'Dinamarca', 'VG': u'Islas V\xedrgenes Brit\xe1nicas', 'DE': 'Alemania', 'YE': 'Yemen', 'DZ': 'Argelia', 'US': 'Estados Unidos', 
'UY': 'Uruguay', 'YU': 'Yugoslavia', 'YT': 'Mayotte', 'UM': 'Islas menores alejadas de Estados Unidos', 'LB': u'L\xedbano', 'LC': 'Saint Lucia', 'LA': 'Laos', 'TV': 'Tuvalu', 'TW': u'Taiw\xe1n, Rep\xfablica de China', 'TT': 'Trinidad y Tabago', 'TR': u'Turqu\xeda', 'LK': 'Sri Lanka', 'LI': 'Liechtenstein', 'LV': 'Letonia', 'TO': 'Tonga', 'LT': 'Lituania', 'LU': 'Luxemburgo', 'LR': 'Liberia', 'LS': 'Lesoto', 'TH': 'Tailandia', 'TF': 'Territorios Australes Franceses', 'TG': 'Togo', 'TD': 'Chad', 'TC': 'Islas Turcas y Caicos', 'LY': 'Libia', 'VA': 'Ciudad del Vaticano', 'VC': 'San Vicente y las Granadinas', 'AE': u'Emiratos \xc1rabes Unidos', 'AD': 'Andorra', 'AG': 'Antigua y Barbuda', 'AF': u'Afganist\xe1n', 'AI': 'Anguila', 'VI': u'Islas V\xedrgenes de los Estados Unidos', 'IS': 'Islandia', 'IR': u'Ir\xe1n', 'AM': 'Armenia', 'AL': 'Albania', 'AO': 'Angola', 'AN': 'Antillas Neerlandesas', 'AQ': u'Ant\xe1rtica', 'AS': 'Samoa Americana', 'AR': 'Argentina', 'AU': 'Australia', 'AT': 'Austria', 'AW': 'Aruba', 'IN': 'India', 'TZ': 'Tanzania', 'AZ': u'Azerbaiy\xe1n', 'IE': 'Irlanda', 'ID': 'Indonesia', 'UA': 'Ucrania', 'QA': 'Qatar', 'MZ': 'Mozambique'}
# Calendar names (months/days, full and abbreviated forms).
months=['enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre']
abbrMonths=['ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic']
days=['lunes', 'martes', u'mi\xe9rcoles', 'jueves', 'viernes', u's\xe1bado', 'domingo']
abbrDays=['lun', 'mar', u'mi\xe9', 'jue', 'vie', u's\xe1b', 'dom']
# strftime-style date formats; %%(...)s placeholders are substituted later.
dateFormats={'medium': '%d/%m/%Y', 'full': '%%(dayname)s %d de %%(monthname)s de %Y', 'long': '%d de %%(monthname)s de %Y', 'short': '%d/%m/%y'}
# Number formatting symbols (note: comma decimal separator, dot grouping).
numericSymbols={'group': '.', 'nativeZeroDigit': '0', 'exponential': 'E', 'perMille': u'\u2030', 'nan': u'\ufffd', 'decimal': ',', 'percentSign': '%', 'list': ';', 'patternDigit': '#', 'plusSign': '+', 'infinity': u'\u221e', 'minusSign': '-'}
/Flask-Reuploaded-1.3.0.tar.gz/Flask-Reuploaded-1.3.0/PACKAGING.rst | PACKAGING
=========
This guide is loosely following one of Hynek's fantastic blog posts:
https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/
Note
----
This document is meant as a help for the maintainer only.
Preparation
-----------
- make sure changelog is up to date
- make sure the `long_description` can be rendered properly, i.e. check with `longtest` (zest.releaser)
- make sure the version number is set correctly in `setup.py`
- make sure the version number is set correctly in `docs/conf.py`
Release process
---------------
# create a development environment
$ tox --devenv dev-env
# install all necessary build tools
$ dev-env/bin/pip install -U pip pep517 twine
# make clean slate
$ rm -rf build dist
# build the packages
$ dev-env/bin/python -m pep517.build .
# make clean slate
$ rm -rf venv-sdist
# create a venv for the sdist installation test
$ virtualenv venv-sdist
# install package in sdist format
$ venv-sdist/bin/pip install dist/Flask-Reuploaded-0.3.tar.gz # swap version number
# check the installed package
$ venv-sdist/bin/python
>>> import flask_uploads
# make clean slate
$ rm -rf venv-wheel
# create a venv for the wheel installation test
$ virtualenv venv-wheel
# install the package in wheel format
$ venv-wheel/bin/pip install dist/Flask_Reuploaded-0.3-py3-none-any.whl # swap version number
# check the installed package
$ venv-wheel/bin/python
>>> import flask_uploads
# upload to test pypi
$ twine upload -r test --sign dist/Flask*
# upload to pypi
$ twine upload -r pypi --sign dist/Flask*
| PypiClean |
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/search_place_child_search_place_child_py3.py |
from msrest.serialization import Model
class SearchPlaceChildSearchPlaceChild(Model):
    """SearchPlaceChildSearchPlaceChild.

    All required parameters must be populated in order to send to Azure.

    :param place_id: The place identifier
    :type place_id: int
    :param place_code: Required. The place code <span
     class='property-internal'>Required</span> <span
     class='property-internal'>Must be between 0 and 32 characters</span>
    :type place_code: str
    :param place_info: Required. The place info <span
     class='property-internal'>Required</span> <span
     class='property-internal'>Must be between 0 and 50 characters</span>
    :type place_info: str
    :param place_type:
    :type place_type: ~energycap.sdk.models.PlaceTypeResponse
    :param address:
    :type address: ~energycap.sdk.models.AddressChild
    :param parent_path: The collection of places representing the path to its
     parent
    :type parent_path: list[~energycap.sdk.models.SearchParentPlaceChild]
    """

    # msrest validation rules applied on serialization.
    _validation = {
        'place_code': {'required': True, 'max_length': 32, 'min_length': 0},
        'place_info': {'required': True, 'max_length': 50, 'min_length': 0},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'place_id': {'key': 'placeId', 'type': 'int'},
        'place_code': {'key': 'placeCode', 'type': 'str'},
        'place_info': {'key': 'placeInfo', 'type': 'str'},
        'place_type': {'key': 'placeType', 'type': 'PlaceTypeResponse'},
        'address': {'key': 'address', 'type': 'AddressChild'},
        'parent_path': {'key': 'parentPath', 'type': '[SearchParentPlaceChild]'},
    }

    def __init__(self, *, place_code: str, place_info: str, place_id: int=None, place_type=None, address=None, parent_path=None, **kwargs) -> None:
        super(SearchPlaceChildSearchPlaceChild, self).__init__(**kwargs)
        self.place_id = place_id
        self.place_code = place_code
        self.place_info = place_info
        self.place_type = place_type
        self.address = address
        self.parent_path = parent_path
/Labeventtable-0.0.1.tar.gz/Labeventtable-0.0.1/src/Labeventtable.py | import pandas as pd
import numpy as np
import datetime
import math
from datetime import datetime, timedelta
def labeventtable(labevents_data, propensity_data, labitems_data):
    """Build a per-admission pivot table of lab-test counts.

    :param labevents_data: DataFrame with at least columns
        subject_id, charttime, storetime, itemid
    :param propensity_data: DataFrame with at least columns
        subject_id, hadm_id, admittime, dischtime
    :param labitems_data: DataFrame with at least columns itemid, label
    :return: DataFrame indexed by (subject_id, admittime, dischtime) with one
        column per selected lab label holding the number of measurements
        charted during that hospital stay (0 when the label never occurs).
    """
    # De-duplicated working copies restricted to the columns we need.
    labevents_data_demo = labevents_data[['subject_id', 'charttime', 'storetime', 'itemid']].drop_duplicates()
    propensity_data_demo = propensity_data[['subject_id', 'hadm_id', 'admittime', 'dischtime']].drop_duplicates()
    labitems_data_demo = labitems_data[['itemid', 'label']].drop_duplicates()
    # Blank labels (a single space) are treated as missing and dropped.
    # Plain assignment instead of ``inplace=True`` on a column avoids the
    # chained-assignment hazard (silently ineffective under copy-on-write).
    labitems_data_demo['label'] = labitems_data_demo['label'].replace(' ', np.nan)
    labitems_data_demo = labitems_data_demo.dropna(subset=['label'])
    # Fixed panel of lab measurements of interest.
    selected_labels = pd.DataFrame({
        'label': ["Glucose, POCT",
                  'Hemoglobin',
                  'Platelet Count',
                  'Hematocrit',
                  'Erythrocytes',
                  'Leukocytes',
                  'Potassium',
                  'Sodium',
                  'Creatinine',
                  'Bicarbonate',
                  'Chloride',
                  'Anion Gap',
                  "Calcium, Total",
                  'Glucose',
                  "Bld Urea Nitrog(BUN)",
                  'Lymphocytes',
                  'Monocytes',
                  'Neutrophils',
                  'Eosinophils',
                  'Basophils',
                  'pH',
                  'pCO2',
                  'FIO2',
                  'pO2',
                  'Lactate',
                  "Troponin T, 6 hr, 5th gen",
                  "Troponin T, 2 hr, 5th gen",
                  "Troponin T, Baseline, 5th gen",
                  'Venous pH',
                  'Venous pCO2',
                  'Venous pO2']
    })
    # Keep only the itemids whose label is in the selected panel.
    labitems_data_demo = pd.merge(selected_labels, labitems_data_demo, on='label', how='left')
    labitems_data_demo = labitems_data_demo.dropna(subset=['itemid'])
    # Attach labels to events, then admissions to events.
    first_merge = pd.merge(labevents_data_demo, labitems_data_demo, on='itemid', how='left')
    first_merge_1 = first_merge.dropna(subset=['label'])
    second_merge = pd.merge(first_merge_1, propensity_data_demo, on='subject_id', how='left')
    second_merge_1 = second_merge.dropna(subset=['admittime'])
    # Normalise all timestamp columns to datetime64.  (The deprecated
    # ``infer_datetime_format`` flag was dropped; it is a no-op in pandas 2+.)
    for ts_col in ['charttime', 'storetime', 'admittime', 'dischtime']:
        second_merge_1[ts_col] = pd.to_datetime(second_merge_1[ts_col])
    base_data = second_merge_1.copy()
    base_data['Timeperiod'] = np.nan
    # An event is 'Valid' when it was charted during the hospital stay.
    base_data.loc[(base_data['charttime'] >= base_data['admittime']) &
                  (base_data['charttime'] <= base_data['dischtime']), ['Timeperiod']] = 'Valid'
    final_base_data = base_data.dropna(subset=['Timeperiod'])
    final_base_data['cnt'] = 1
    # Count valid events per admission and label.
    table = pd.pivot_table(final_base_data, values='cnt',
                           index=['subject_id', 'admittime', 'dischtime'],
                           columns=['label'], aggfunc='sum')
    # Guarantee every selected label appears as a column, 0 when absent.
    for item in set(selected_labels['label']) - set(table.columns):
        table[item] = 0
    return table
/AISTLAB_nitrotyper-0.6.10.tar.gz/AISTLAB_nitrotyper-0.6.10/README.rst | AISTLAB\_nitrotyper
===================
https://www.nitrotype.com/race auto typer using python3 and cv2
Only 1920x1080 screen resolution is currently supported.
winxos, AISTLAB 2017-03-17
INSTALL:
--------
pip3 install nitrotyper
USAGE:
------
1. open https://www.nitrotype.com/race using your web browser
2. open console and run **nitrotyper** command.
3. make sure the web browser is on the top layer of the desktop, enjoy
it.
4. delay parameters can control the type speed.
*Just for educational purpose, take care of yourself.*
| PypiClean |
/NEURAL_py_EEG-0.1.4.tar.gz/NEURAL_py_EEG-0.1.4/src/NEURAL_py_EEG/NEURAL_parameters.py |
def NEURAL_parameters():
    """
    Generate the default NEURAL (neonatal EEG feature set) parameters.

    Builds the nested configuration used throughout the package:
    preprocessing, artefact removal, feature list, and per-feature-group
    settings (spectral, amplitude/rEEG, connectivity, fractal dimension).

    :return: a dictionary containing all the system parameters
    """
    params = dict()
    '''
    ---------------------------------------------------------------------
    Proprocessing (lowpass filter and resample)
    ---------------------------------------------------------------------
    '''
    params['LP_fc'] = 30  # low pass filter cut-off
    params['Fs_new'] = 64  # down_sample to Fs_new
    '''
    ---------------------------------------------------------------------
    Directories - fill in directories
    ---------------------------------------------------------------------
    '''
    params['EEG_DATA_DIR'] = ''
    params['EEG_DATA_DIR_CSV'] = ''
    '''
    ---------------------------------------------------------------------
    Montage - bipolar mantage of NICU babies
    ---------------------------------------------------------------------
    '''
    params['BI_MONT'] = [['F4', 'C4'], ['F3', 'C3'], ['C4', 'T4'], ['C3', 'T3'], ['C4', 'Cz'], ['Cz', 'C3'],
                         ['C4', 'O2'], ['C3', 'O1']]
    '''
    ---------------------------------------------------------------------
    Artefacts
    ---------------------------------------------------------------------
    '''
    params['REMOVE_ART'] = 1  # simple procedure to remove artefacts; 0 to turn off
    # some default values used for preterm infants (<32 weeks of gestation)
    params['ART_HIGH_VOLT'] = 1500  # in mirco Vs
    params['ART_TIME_COLLAR'] = 10  # time collar( in seconds) around high - amplitude artefact
    params['ART_DIFF_VOLT'] = 200  # in mirco Vs
    params['ART_DIFF_TIME_COLLAR'] = 0.5  # time collar( in seconds) around fast jumps
    params['ART_DIFF_MIN_TIME'] = 0.1  # min time( in seconds) for flat(continuous) trace to be artefact
    params['ART_ELEC_CHECK'] = 1  # minimum length required for electrode check( in seconds)
    params['ART_REF_LOW_CORR'] = 0.15  # if mean correlation coefficent across referential channels
    # is < this value then remove
    # what to replace artefacts with before filtering?
    # options: 1) zeros('zeros')
    # 2) linear interpolation('linear_interp')
    # 3) cubic spline interpolation('cubic_interp')
    # 4) NaN('nans'): replace with cubic spline before filtering and then NaN's % after filtering
    params['FILTER_REPLACE_ARTEFACTS'] = 'nans'
    params['amplitude'] = dict()
    params['rEEG'] = dict()
    params['connectivity'] = dict()
    params['FD'] = dict()
    params['amplitude']['FILTER_REPLACE_ARTEFACTS'] = params['FILTER_REPLACE_ARTEFACTS']
    params['rEEG']['FILTER_REPLACE_ARTEFACTS'] = params['FILTER_REPLACE_ARTEFACTS']
    params['connectivity']['FILTER_REPLACE_ARTEFACTS'] = params['FILTER_REPLACE_ARTEFACTS']
    params['FD']['FILTER_REPLACE_ARTEFACTS'] = params['FILTER_REPLACE_ARTEFACTS']
    '''
    ---------------------------------------------------------------------
    Features
    ---------------------------------------------------------------------
    '''
    params['FEATURE_SET_ALL'] = [
        'spectral_power'
        , 'spectral_relative_power'
        , 'spectral_flatness'
        , 'spectral_diff'
        , 'spectral_entropy'
        , 'spectral_edge_frequency'
        , 'FD'
        , 'amplitude_total_power'
        , 'amplitude_SD'
        , 'amplitude_skew'
        , 'amplitude_kurtosis'
        , 'amplitude_env_mean'
        , 'amplitude_env_SD'
        , 'connectivity_BSI'
        , 'connectivity_corr'
        , 'connectivity_coh_mean'
        , 'connectivity_coh_max'
        , 'connectivity_coh_freqmax'
        , 'rEEG_mean'
        , 'rEEG_median'
        , 'rEEG_lower_margin'
        , 'rEEG_upper_margin'
        , 'rEEG_width'
        , 'rEEG_SD'
        , 'rEEG_CV'
        , 'rEEG_asymmetry'
        , 'IBI_length_max'
        , 'IBI_length_median'
        , 'IBI_burst_prc'
        , 'IBI_burst_number'
    ]
    '''
    Frequency bands
    '''
    params['FREQ_BANDS'] = [[0.5, 4], [4, 7], [7, 13], [13, 30]]
    # these bands often used for preterm infants( < 32 weeks GA):
    # params['FREQ_BANDS'] = [[0.5, 3], [3, 8], [8, 15], [15, 30]];
    '''
    ---------------------------------------------------------------------
    A.spectral features
    ---------------------------------------------------------------------
    '''
    # how to estimate the spectrum for 'spectral_flatness', 'spectral_entropy', % spectral_edge_frequency features:
    # 1) PSD: estimate power spectral density(e.g.Welch periodogram)
    # 2) robust - PSD: median(instead of mean) of spectrogram
    # 3) periodogram: magnitude of the discrete Fourier transform
    params['spectral'] = dict()
    params['spectral']['method'] = 'PSD'
    # length of time - domain analysis window and overlap:
    # (applies to 'spectral_power', 'spectral_relative_power',
    # 'spectral_flatness', and 'spectral_diff' features)
    params['spectral']['L_window'] = 2  # in seconds
    params['spectral']['window_type'] = 'hamm'  # type of window
    params['spectral']['overlap'] = 50  # overlap in percentage
    params['spectral']['freq_bands'] = params['FREQ_BANDS']
    params['spectral']['total_freq_bands'] = [params['FREQ_BANDS'][0][0], params['FREQ_BANDS'][-1][-1]]
    params['spectral']['SEF'] = 0.95  # spectral edge frequency
    # fractal dimension(FD):
    params['FD']['method'] = 'higuchi'  # method to estimate FD, either 'higuchi' or 'katz'
    params['FD']['freq_bands'] = [params['FREQ_BANDS'][0][0], params['FREQ_BANDS'][-1][-1]]
    # $$$ params['FD']['freq_bands'] = params['FREQ_BANDS']
    params['FD']['qmax'] = 6  # Higuchi method: max.value of k
    '''
    ---------------------------------------------------------------------
    B. amplitude features
    ---------------------------------------------------------------------
    '''
    # $$$ params['amplitude']['freq_bands'] = [params['FREQ_BANDS'][0][0], params['FREQ_BANDS'][-1][-1]]
    params['amplitude']['freq_bands'] = params['FREQ_BANDS']
    # for rEEG(range - EEG, similar to aEEG) from [1]
    #
    # [1] DO’Reilly, MA Navakatikyan, M Filip, D Greene, & LJ Van Marter(2012).Peak - to - peak amplitude in neonatal
    # brain monitoring of premature infants.Clinical Neurophysiology, 123(11), 2139 –53.
    #
    # settings in [1]: window = 2 seconds; overlap = 0 %; and no log - linear scale
    params['rEEG']['L_window'] = 2  # in seconds
    params['rEEG']['window_type'] = 'rect'  # type of window
    params['rEEG']['overlap'] = 0  # overlap in percentage
    params['rEEG']['APPLY_LOG_LINEAR_SCALE'] = 0  # use this scale(either 0 or 1)
    params['rEEG']['freq_bands'] = params['FREQ_BANDS']
    '''
    ---------------------------------------------------------------------
    C. connectivity features
    ---------------------------------------------------------------------
    '''
    # how to estimate the cross spectrum for the coherence function:
    # 1) PSD: estimate power spectral density (e.g. Welch periodogram)
    # 2) bartlett-PSD: Welch periodogram with 0% overlap and rectangular window
    # (necessary if using the analytic assessment of zero coherence, see below)
    params['connectivity']['method'] = 'bartlett-PSD'
    params['connectivity']['freq_bands'] = params['FREQ_BANDS']
    params['connectivity']['L_window'] = 8  # PSD window in seconds
    params['connectivity']['overlap'] = 75  # PSD window percentage overlap
    params['connectivity']['window_type'] = 'hamm'  # PSD window type
    # find lower coherence limit using either either a surrogate-data
    # approach [1] or an analytic threshold [2]
    # [1] Faes L, Pinna GD, Porta A, Maestri R, Nollo G (2004). Surrogate data analysis for
    # assessing the significance of the coherence function. IEEE Transactions on
    # Biomedical Engineering, 51(7):1156–1166.
    # [2] Halliday, DM, Rosenberg, JR, Amjad, AM, Breeze, P, Conway, BA, &
    # Farmer, SF. (1995). A framework for the analysis of mixed time series/point
    # process data--theory and application to the study of physiological tremor, single
    # motor unit discharges and electromyograms. Progress in Biophysics and Molecular
    # Biology, 64(2–3), 237–278.
    #
    # options for 'feat_params_st.connectivity.coherence_zero_level' are:
    # 1) 'surr' for [1]
    # 2) 'analytic' for [2]
    # 3) '' not to implement (no threshold)
    params['connectivity']['coherence_zero_level'] = 'analytic'
    # alpha value for null-hypothesis disribution cut-off:
    params['connectivity']['coherence_zero_alpha'] = 0.05
    # number of iterations required to generate null-hypothesis distribution if
    # using surrogate data approach ([2]):
    params['connectivity']['coherence_surr_iter'] = 500
    '''
    ---------------------------------------------------------------------
    Short-time analysis on EEG
    ---------------------------------------------------------------------
    '''
    params['EPOCH_LENGTH'] = 64  # seconds
    params['EPOCH_OVERLAP'] = 50  # percent
    params['EPOCH_IGNORE_PRC_NANS'] = 50  # if epoch has ≥ EPOCH_IGNORE_PRC_NANS (percent) then ignore
    return params
/BatchQ-0.1-1-pre-alpha.tar.gz/BatchQ-0.1-1-pre-alpha/batchq/core/communication.py |
import os
import sys
import time
import re
from batchq.core.process import Process
from batchq.core.terminal import XTermInterpreter
from batchq.core.errors import CommunicationIOException, BasePipeException, CommunicationTimeout
from batchq.core.memory import Memory
class BasePipe(object):
def __init__(self, pipe, expect_token, submit_token, mem_scale= 1000., vt100 = None, initiate_pipe = True):
self._last_output = ""
self._last_input = ""
self._pipe = pipe
self._expect_token = expect_token
self._submit_token = submit_token
self._timeout = 4000
self._expect_stack = [expect_token]
self._submit_stack = [expect_token]
self._n_expect_stack = 1
self._n_submit_stack = 1
self._debug_level = 3
if vt100 is None:
self._xterminterpreter = XTermInterpreter()
else:
self._xterminterpreter = vt100
self._reset_timeout_onoutput = True
self._memory = Memory(mem_scale)
if initiate_pipe: self.initiate_pipe()
@property
def last_input(self):
return self._last_input
@property
def last_output(self):
return self._last_output
@property
def terminal_interpreter(self):
"""
This property holds an instance of a ``XTermInterpreter``.
"""
return self._xterminterpreter
@property
def buffer(self):
"""
Contains the VT100 interpreted buffer.
"""
return self._xterminterpreter.buffer
@property
def pipe_buffer(self):
"""
Contains the true pipe buffer.
"""
return self._pipe.buffer
def push_submit(self, submit_token):
self._submit_token = submit_token
self._submit_stack += [submit_token]
self._n_submit_stack+=1
def push_expect(self, expect_token):
self._expect_token = expect_token
self._expect_stack += [expect_token]
self._n_expect_stack+=1
def pop_expect(self):
self._n_expect_stack-=1
if self._n_expect_stack<1:
raise BasePipeException("Expectation stack is empty.")
self._expect_stack.pop()
self._expect_token = self._expect_stack[-1]
def pop_submit(self):
self._n_submit_stack-=1
if self._n_submit_stack<1:
raise BasePipeException("Submit stack is empty.")
self._submit_stack.pop()
self._submit_token = self._submit_stack[-1]
def initiate_pipe(self):
"""
This function is a virtual function which is called
at the end if the objects initiation. Its purpose is to initiate
the pipe with a series of commands.
"""
pass
def consume_output(self, pipe = None, consume_until = None):
"""
This function consumes output of the pipe which is separated
with no more than 10 ms and returns it.
"""
if not pipe:
pipe = self._pipe
output = ""
echo = ""
self._xterminterpreter.set_mark()
if consume_until and hasattr(consume_until, "search"):
end_time = time.time()+self._timeout
m = consume_until.search(output)
while pipe.isalive() and not m:
try:
b = pipe.getchar()
except CommunicationIOException, e:
break
if b!="":
self._xterminterpreter.write(b)
output = self._xterminterpreter.copy()
echo = self._xterminterpreter.copy_echo()
tot_len = len(output)
tot_echo_len = len(echo)
m = self._xterminterpreter.monitor
if self._reset_timeout_onoutput and (self._xterminterpreter.monitor_echo !="" or m !=""):
end_time = time.time()+self._timeout
if self._debug_level == 2:
sys.stdout.write(m)
if end_time<time.time():
if self._debug_level >= 3:
print "-"*20, "BUFFER", "-"*20
print self._xterminterpreter.buffer
print "-"*20, "END OF BUFFER", "-"*20
print "Expecting: ", consume_until
print "Consumed: ", output
print "Escape mode:", self._xterminterpreter.escape_mode
if self._xterminterpreter.escape_mode:
print "Last escape:", self._xterminterpreter.last_escape
raise CommunicationTimeout("Consuming output timed out. You can increase the timeout by using set_timeout(t).")
m = consume_until.search(output)
elif consume_until:
n = len(consume_until)
tot_len = 0
tot_echo_len = 0
end_time = time.time()+self._timeout
while pipe.isalive() and (tot_len < n or not consume_until == output[tot_len -n: tot_len]) and \
(tot_echo_len < n or not consume_until == echo[tot_echo_len -n: tot_echo_len]):
try:
b = pipe.getchar()
except CommunicationIOException, e:
break
if b!="":
self._xterminterpreter.write(b)
output = self._xterminterpreter.copy()
echo = self._xterminterpreter.copy_echo()
tot_len = len(output)
tot_echo_len = len(echo)
m = self._xterminterpreter.monitor
if self._reset_timeout_onoutput and (self._xterminterpreter.monitor_echo !="" or m !=""):
end_time = time.time()+self._timeout
if self._debug_level == 2:
sys.stdout.write(m)
if end_time<time.time():
if self._debug_level >= 3:
print "-"*20, "BUFFER", "-"*20
print self._xterminterpreter.buffer
print "-"*20, "END OF BUFFER", "-"*20
print "Expecting: ", consume_until
print "Consumed: ", output
print "Escape mode:", self._xterminterpreter.escape_mode
if self._xterminterpreter.escape_mode:
print "Last escape:", self._xterminterpreter.last_escape
raise CommunicationTimeout("Consuming output timed out. You can increase the timeout by using set_timeout(t).")
if consume_until == output[tot_len -n: tot_len]:
output = output[0:tot_len -n]
# TODO: figure out what to do when the match is in the echo
else:
b = pipe.read()
while b !="" and pipe.isalive():
self._xterminterpreter.write(b)
output = self._xterminterpreter.copy()
b = pipe.read()
if self._debug_level == 2:
sys.stdout.write(self._xterminterpreter.monitor)
if self._debug_level == 1:
sys.stdout.write(output)
elif self._debug_level == 2:
sys.stdout.write(self._xterminterpreter.monitor)
self._xterminterpreter.move_monitor_cursors()
return output
def send_command(self, cmd):
"""
This function sends a command to the pipe, wait for the prompt
to appear and return the output.
"""
self._last_input = cmd
self._pipe.write(cmd)
# We first consume the echo
self.consume_output()
self._pipe.write(self._submit_token)
# Next we consume the result of sending a submit token
# This is done as bash sometimes send additional escape codes
# to manipulate the echo.
self.consume_output(consume_until = self._submit_token)
# Then we wait for the output
ret = self.consume_output(consume_until = self._expect_token)
self._last_output = ret
# print "$", cmd
# print ret
# print "="*40
# print self._xterminterpreter.readable_echo
# print "="*40
# print "--!"
return ret
def expect(self, val = None):
if val is None:
return self.consume_output(consume_until = self._expect_token)
return self.consume_output(consume_until = val)
def set_timeout(self, t):
self._timeout = int(t)
def remaining(self):
self._xterminterpreter.move_monitor_cursors()
r = self._pipe.read()
# print "WRITING ", len(r), "CHARS:", r,
self._xterminterpreter.write(r)
# print self._xterminterpreter.monitor_echo
return self._xterminterpreter.monitor
def kill(self):
return self._pipe.kill()
def terminate(self):
return self._pipe.terminate()
def cpu_usage(self):
# TODO: Yet to be implemented
pass
@property
def pid(self):
return self._pipe.pid
@property
def pipe(self):
return self._pipe
@property
def memory_usage(self):
return self._memory.process(self._pipe.pid)
@property
def child_memory_usage(self):
return self._memory.child_processes(self._pipe.pid)
@property
def total_memory_usage(self):
return self._memory.total() | PypiClean |
/Misago-0.36.1.tar.gz/Misago-0.36.1/misago/static/misago/admin/momentjs/de-at.js |
// moment.js locale configuration: German (Austria) [de-at].
// UMD wrapper: CommonJS -> require('../moment'), AMD -> define, else global.
;(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined'
        && typeof require === 'function' ? factory(require('../moment')) :
    typeof define === 'function' && define.amd ? define(['../moment'], factory) :
    factory(global.moment)
}(this, (function (moment) { 'use strict';

    // German relative-time strings need two grammatical cases: index 0 is
    // the nominative (used standalone, withoutSuffix), index 1 the dative
    // (used after "in"/"vor").
    function processRelativeTime(number, withoutSuffix, key, isFuture) {
        var format = {
            'm': ['eine Minute', 'einer Minute'],
            'h': ['eine Stunde', 'einer Stunde'],
            'd': ['ein Tag', 'einem Tag'],
            'dd': [number + ' Tage', number + ' Tagen'],
            'M': ['ein Monat', 'einem Monat'],
            'MM': [number + ' Monate', number + ' Monaten'],
            'y': ['ein Jahr', 'einem Jahr'],
            'yy': [number + ' Jahre', number + ' Jahren']
        };
        return withoutSuffix ? format[key][0] : format[key][1];
    }

    // Differs from de: Austrian German uses "Jänner" for January.
    var deAt = moment.defineLocale('de-at', {
        months : 'Jänner_Februar_März_April_Mai_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'),
        monthsShort : 'Jän._Feb._März_Apr._Mai_Juni_Juli_Aug._Sep._Okt._Nov._Dez.'.split('_'),
        monthsParseExact : true,
        weekdays : 'Sonntag_Montag_Dienstag_Mittwoch_Donnerstag_Freitag_Samstag'.split('_'),
        weekdaysShort : 'So._Mo._Di._Mi._Do._Fr._Sa.'.split('_'),
        weekdaysMin : 'So_Mo_Di_Mi_Do_Fr_Sa'.split('_'),
        weekdaysParseExact : true,
        longDateFormat : {
            LT: 'HH:mm',
            LTS: 'HH:mm:ss',
            L : 'DD.MM.YYYY',
            LL : 'D. MMMM YYYY',
            LLL : 'D. MMMM YYYY HH:mm',
            LLLL : 'dddd, D. MMMM YYYY HH:mm'
        },
        calendar : {
            sameDay: '[heute um] LT [Uhr]',
            sameElse: 'L',
            nextDay: '[morgen um] LT [Uhr]',
            nextWeek: 'dddd [um] LT [Uhr]',
            lastDay: '[gestern um] LT [Uhr]',
            lastWeek: '[letzten] dddd [um] LT [Uhr]'
        },
        relativeTime : {
            future : 'in %s',
            past : 'vor %s',
            s : 'ein paar Sekunden',
            ss : '%d Sekunden',
            m : processRelativeTime,
            mm : '%d Minuten',
            h : processRelativeTime,
            hh : '%d Stunden',
            d : processRelativeTime,
            dd : processRelativeTime,
            M : processRelativeTime,
            MM : processRelativeTime,
            y : processRelativeTime,
            yy : processRelativeTime
        },
        dayOfMonthOrdinalParse: /\d{1,2}\./,
        ordinal : '%d.',
        week : {
            dow : 1, // Monday is the first day of the week.
            doy : 4  // The week that contains Jan 4th is the first week of the year.
        }
    });

    return deAt;

})));
// Summernote Croatian (hr-HR) locale -- minified distribution build (UMD wrapper around a jQuery plugin).
// FIX: the chunker had split the string literal "Numerirana lista" across a raw newline (a syntax
// error in JS) and fused a dataset path residue onto the front of the line; both repaired.
!function(e,a){if("object"==typeof exports&&"object"==typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var o=a();for(var r in o)("object"==typeof exports?exports:e)[r]=o[r]}}(self,(function(){return(e=jQuery).extend(e.summernote.lang,{"hr-HR":{font:{bold:"Podebljano",italic:"Kurziv",underline:"Podvučeno",clear:"Ukloni stilove fonta",height:"Visina linije",name:"Font Family",strikethrough:"Precrtano",subscript:"Subscript",superscript:"Superscript",size:"Veličina fonta"},image:{image:"Slika",insert:"Ubaci sliku",resizeFull:"Puna veličina",resizeHalf:"Umanji na 50%",resizeQuarter:"Umanji na 25%",floatLeft:"Poravnaj lijevo",floatRight:"Poravnaj desno",floatNone:"Bez poravnanja",shapeRounded:"Shape: Rounded",shapeCircle:"Shape: Circle",shapeThumbnail:"Shape: Thumbnail",shapeNone:"Shape: None",dragImageHere:"Povuci sliku ovdje",dropImage:"Drop image or Text",selectFromFiles:"Izaberi iz datoteke",maximumFileSize:"Maximum file size",maximumFileSizeError:"Maximum file size exceeded.",url:"Adresa slike",remove:"Ukloni sliku",original:"Original"},video:{video:"Video",videoLink:"Veza na video",insert:"Ubaci video",url:"URL video",providers:"(YouTube, Vimeo, Vine, Instagram, DailyMotion ili Youku)"},link:{link:"Veza",insert:"Ubaci vezu",unlink:"Ukloni vezu",edit:"Uredi",textToDisplay:"Tekst za prikaz",url:"Internet adresa",openInNewWindow:"Otvori u novom prozoru"},table:{table:"Tablica",addRowAbove:"Add row above",addRowBelow:"Add row below",addColLeft:"Add column left",addColRight:"Add column right",delRow:"Delete row",delCol:"Delete column",delTable:"Delete table"},hr:{insert:"Ubaci horizontalnu liniju"},style:{style:"Stil",p:"pni",blockquote:"Citat",pre:"Kôd",h1:"Naslov 1",h2:"Naslov 2",h3:"Naslov 3",h4:"Naslov 4",h5:"Naslov 5",h6:"Naslov 6"},lists:{unordered:"Obična lista",ordered:"Numerirana lista"},options:{help:"Pomoć",fullscreen:"Preko cijelog ekrana",codeview:"Izvorni kôd"},paragraph:{paragraph:"Paragraf",outdent:"Smanji uvlačenje",indent:"Povećaj uvlačenje",left:"Poravnaj lijevo",center:"Centrirano",right:"Poravnaj desno",justify:"Poravnaj obostrano"},color:{recent:"Posljednja boja",more:"Više boja",background:"Boja pozadine",foreground:"Boja teksta",transparent:"Prozirna",setTransparent:"Prozirna",reset:"Poništi",resetToDefault:"Podrazumijevana"},shortcut:{shortcuts:"Prečice s tipkovnice",close:"Zatvori",textFormatting:"Formatiranje teksta",action:"Akcija",paragraphFormatting:"Formatiranje paragrafa",documentStyle:"Stil dokumenta",extraKeys:"Dodatne kombinacije"},help:{insertParagraph:"Insert Paragraph",undo:"Undoes the last command",redo:"Redoes the last command",tab:"Tab",untab:"Untab",bold:"Set a bold style",italic:"Set a italic style",underline:"Set a underline style",strikethrough:"Set a strikethrough style",removeFormat:"Clean a style",justifyLeft:"Set left align",justifyCenter:"Set center align",justifyRight:"Set right align",justifyFull:"Set full align",insertUnorderedList:"Toggle unordered list",insertOrderedList:"Toggle ordered list",outdent:"Outdent on current paragraph",indent:"Indent on current paragraph",formatPara:"Change current block's format as a paragraph(P tag)",formatH1:"Change current block's format as H1",formatH2:"Change current block's format as H2",formatH3:"Change current block's format as H3",formatH4:"Change current block's format as H4",formatH5:"Change current block's format as H5",formatH6:"Change current block's format as H6",insertHorizontalRule:"Insert horizontal rule","linkDialog.show":"Show Link Dialog"},history:{undo:"Poništi",redo:"Ponovi"},specialChar:{specialChar:"SPECIAL CHARACTERS",select:"Select Special characters"}}}),{};var e}));
/MezzanineFor1.7-3.1.10.tar.gz/MezzanineFor1.7-3.1.10/mezzanine/mobile/static/js/jquery.mobile-1.0a2.js | * jQuery UI Widget @VERSION
*
* Copyright 2010, AUTHORS.txt (http://jqueryui.com/about)
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* http://docs.jquery.com/UI/Widget
*/
// jQuery UI widget factory: provides $.widget()/$ .widget.bridge() for
// declaring stateful plugins, the $.Widget base "class", and a "remove"
// event hook so widgets can clean up when their element leaves the DOM.
(function( $, undefined ) {

// jQuery 1.4+
if ( $.cleanData ) {
	// Hook into jQuery's internal cleanup so every element being removed
	// fires a "remove" event first (widgets bind destroy() to it).
	var _cleanData = $.cleanData;
	$.cleanData = function( elems ) {
		for ( var i = 0, elem; (elem = elems[i]) != null; i++ ) {
			$( elem ).triggerHandler( "remove" );
		}
		_cleanData( elems );
	};
} else {
	// Older jQuery: wrap .remove() directly to achieve the same effect.
	var _remove = $.fn.remove;
	$.fn.remove = function( selector, keepData ) {
		return this.each(function() {
			if ( !keepData ) {
				if ( !selector || $.filter( selector, [ this ] ).length ) {
					$( "*", this ).add( [ this ] ).each(function() {
						$( this ).triggerHandler( "remove" );
					});
				}
			}
			return _remove.call( $(this), selector, keepData );
		});
	};
}

// Define a widget: name is "namespace.widgetName"; base defaults to $.Widget.
$.widget = function( name, base, prototype ) {
	var namespace = name.split( "." )[ 0 ],
		fullName;
	name = name.split( "." )[ 1 ];
	fullName = namespace + "-" + name;

	if ( !prototype ) {
		prototype = base;
		base = $.Widget;
	}

	// create selector for plugin
	$.expr[ ":" ][ fullName ] = function( elem ) {
		return !!$.data( elem, name );
	};

	$[ namespace ] = $[ namespace ] || {};
	$[ namespace ][ name ] = function( options, element ) {
		// allow instantiation without initializing for simple inheritance
		if ( arguments.length ) {
			this._createWidget( options, element );
		}
	};

	var basePrototype = new base();
	// we need to make the options hash a property directly on the new instance
	// otherwise we'll modify the options hash on the prototype that we're
	// inheriting from
//	$.each( basePrototype, function( key, val ) {
//		if ( $.isPlainObject(val) ) {
//			basePrototype[ key ] = $.extend( {}, val );
//		}
//	});
	basePrototype.options = $.extend( true, {}, basePrototype.options );
	$[ namespace ][ name ].prototype = $.extend( true, basePrototype, {
		namespace: namespace,
		widgetName: name,
		widgetEventPrefix: $[ namespace ][ name ].prototype.widgetEventPrefix || name,
		widgetBaseClass: fullName
	}, prototype );

	$.widget.bridge( name, $[ namespace ][ name ] );
};

// Expose the widget constructor as a jQuery plugin method:
// $(el).widgetName(options) / $(el).widgetName("method", args...).
$.widget.bridge = function( name, object ) {
	$.fn[ name ] = function( options ) {
		var isMethodCall = typeof options === "string",
			args = Array.prototype.slice.call( arguments, 1 ),
			returnValue = this;

		// allow multiple hashes to be passed on init
		options = !isMethodCall && args.length ?
			$.extend.apply( null, [ true, options ].concat(args) ) :
			options;

		// prevent calls to internal methods
		if ( isMethodCall && options.charAt( 0 ) === "_" ) {
			return returnValue;
		}

		if ( isMethodCall ) {
			this.each(function() {
				var instance = $.data( this, name );
				if ( !instance ) {
					throw "cannot call methods on " + name + " prior to initialization; " +
						"attempted to call method '" + options + "'";
				}
				if ( !$.isFunction( instance[options] ) ) {
					throw "no such method '" + options + "' for " + name + " widget instance";
				}
				// A method returning anything other than the instance or
				// undefined breaks chaining and becomes the call's result.
				var methodValue = instance[ options ].apply( instance, args );
				if ( methodValue !== instance && methodValue !== undefined ) {
					returnValue = methodValue;
					return false;
				}
			});
		} else {
			this.each(function() {
				var instance = $.data( this, name );
				if ( instance ) {
					instance.option( options || {} )._init();
				} else {
					$.data( this, name, new object( options, this ) );
				}
			});
		}

		return returnValue;
	};
};

// Base type every widget inherits from.
$.Widget = function( options, element ) {
	// allow instantiation without initializing for simple inheritance
	if ( arguments.length ) {
		this._createWidget( options, element );
	}
};

$.Widget.prototype = {
	widgetName: "widget",
	widgetEventPrefix: "",
	options: {
		disabled: false
	},
	_createWidget: function( options, element ) {
		// $.widget.bridge stores the plugin instance, but we do it anyway
		// so that it's stored even before the _create function runs
		$.data( element, this.widgetName, this );
		this.element = $( element );
		this.options = $.extend( true, {},
			this.options,
			this._getCreateOptions(),
			options );

		var self = this;
		this.element.bind( "remove." + this.widgetName, function() {
			self.destroy();
		});

		this._create();
		this._trigger( "create" );
		this._init();
	},
	_getCreateOptions: function() {
		var options = {};
		if ( $.metadata ) {
			// FIX: was $.metadata.get( element ) -- `element` is not defined
			// in this scope and threw a ReferenceError whenever the metadata
			// plugin was present; the intended target is the widget's DOM
			// element (same fix jQuery UI itself later shipped).
			options = $.metadata.get( this.element[ 0 ] )[ this.widgetName ];
		}
		return options;
	},
	_create: function() {},
	_init: function() {},

	destroy: function() {
		this.element
			.unbind( "." + this.widgetName )
			.removeData( this.widgetName );
		this.widget()
			.unbind( "." + this.widgetName )
			.removeAttr( "aria-disabled" )
			.removeClass(
				this.widgetBaseClass + "-disabled " +
				"ui-state-disabled" );
	},

	widget: function() {
		return this.element;
	},

	// option() -> copy of all options; option(key) -> getter;
	// option(key, value) / option(hash) -> setter.
	option: function( key, value ) {
		var options = key;

		if ( arguments.length === 0 ) {
			// don't return a reference to the internal hash
			return $.extend( {}, this.options );
		}

		if (typeof key === "string" ) {
			if ( value === undefined ) {
				return this.options[ key ];
			}
			options = {};
			options[ key ] = value;
		}

		this._setOptions( options );

		return this;
	},
	_setOptions: function( options ) {
		var self = this;
		$.each( options, function( key, value ) {
			self._setOption( key, value );
		});

		return this;
	},
	_setOption: function( key, value ) {
		this.options[ key ] = value;

		if ( key === "disabled" ) {
			this.widget()
				[ value ? "addClass" : "removeClass"](
					this.widgetBaseClass + "-disabled" + " " +
					"ui-state-disabled" )
				.attr( "aria-disabled", value );
		}

		return this;
	},

	enable: function() {
		return this._setOption( "disabled", false );
	},
	disable: function() {
		return this._setOption( "disabled", true );
	},

	// Fire a namespaced widget event plus the matching callback option;
	// returns false when either requested that default be prevented.
	_trigger: function( type, event, data ) {
		var callback = this.options[ type ];

		event = $.Event( event );
		event.type = ( type === this.widgetEventPrefix ?
			type :
			this.widgetEventPrefix + type ).toLowerCase();
		data = data || {};

		// copy original event properties over to the new event
		// this would happen if we could call $.event.fix instead of $.Event
		// but we don't have a way to force an event to be fixed multiple times
		if ( event.originalEvent ) {
			for ( var i = $.event.props.length, prop; i; ) {
				prop = $.event.props[ --i ];
				event[ prop ] = event.originalEvent[ prop ];
			}
		}

		this.element.trigger( event, data );

		return !( $.isFunction(callback) &&
			callback.call( this.element[0], event, data ) === false ||
			event.isDefaultPrevented() );
	}
};

})( jQuery );
/*
* jQuery Mobile Framework : widget factory extentions for mobile
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function( $, undefined ) {

// Base widget for jQuery Mobile: lets every declared option be seeded
// from a matching data-* attribute on the widget's element.
$.widget( "mobile.widget", {
	_getCreateOptions: function() {
		var options = {},
			elem = this.element;

		$.each( this.options, function( optionName ) {
			// camelCase option name -> dashed data-attribute name
			// (e.g. "iconPos" reads data-icon-pos).
			var dataName = optionName.replace( /[A-Z]/g, function( letter ) {
				return "-" + letter.toLowerCase();
			} );
			var value = elem.data( dataName );

			if ( value !== undefined ) {
				options[ optionName ] = value;
			}
		});

		return options;
	}
});

})( jQuery );
/*
* jQuery Mobile Framework : support tests
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// test whether a CSS media type or query applies
$.media = (function() {
// TODO: use window.matchMedia once at least one UA implements it
var cache = {},
$html = $( "html" ),
testDiv = $( "<div id='jquery-mediatest'>" ),
fakeBody = $( "<body>" ).append( testDiv );
return function( query ) {
if ( !( query in cache ) ) {
var styleBlock = $( "<style type='text/css'>" +
"@media " + query + "{#jquery-mediatest{position:absolute;}}" +
"</style>" );
$html.prepend( fakeBody ).prepend( styleBlock );
cache[ query ] = testDiv.css( "position" ) === "absolute";
fakeBody.add( styleBlock ).remove();
}
return cache[ query ];
};
})();
var fakeBody = $( "<body>" ).prependTo( "html" ),
fbCSS = fakeBody[0].style,
vendors = ['webkit','moz','o'],
webos = window.palmGetResource || window.PalmServiceBridge, //only used to rule out scrollTop
bb = window.blackberry; //only used to rule out box shadow, as it's filled opaque on BB
//thx Modernizr
function propExists( prop ){
var uc_prop = prop.charAt(0).toUpperCase() + prop.substr(1),
props = (prop + ' ' + vendors.join(uc_prop + ' ') + uc_prop).split(' ');
for(var v in props){
if( fbCSS[ v ] !== undefined ){
return true;
}
}
};
//test for dynamic-updating base tag support (allows us to avoid href,src attr rewriting)
function baseTagTest(){
var fauxBase = location.protocol + '//' + location.host + location.pathname + "ui-dir/",
base = $("<base>", {"href": fauxBase}).appendTo("head"),
link = $( "<a href='testurl'></a>" ).prependTo( fakeBody ),
rebase = link[0].href;
base.remove();
return rebase.indexOf(fauxBase) === 0;
};
$.extend( $.support, {
orientation: "orientation" in window,
touch: "ontouchend" in document,
cssTransitions: "WebKitTransitionEvent" in window,
pushState: !!history.pushState,
mediaquery: $.media('only all'),
cssPseudoElement: !!propExists('content'),
boxShadow: !!propExists('boxShadow') && !bb,
scrollTop: ("pageXOffset" in window || "scrollTop" in document.documentElement || "scrollTop" in fakeBody[0]) && !webos,
dynamicBaseTag: baseTagTest()
});
fakeBody.remove();
//for ruling out shadows via css
if( !$.support.boxShadow ){ $('html').addClass('ui-mobile-nosupport-boxshadow'); }
})( jQuery );(function($, undefined ) {
// Touch/gesture special events for jQuery Mobile: tap, taphold, swipe,
// swipeleft/right, scrollstart/stop and a simulated orientationchange.
// (Interior of an IIFE whose opener precedes this chunk.)
// add new event shortcuts
$.each( "touchstart touchmove touchend orientationchange tap taphold swipe swipeleft swiperight scrollstart scrollstop".split( " " ), function( i, name ) {
	// $(el).tap(fn) binds, $(el).tap() triggers -- same pattern as built-ins.
	$.fn[ name ] = function( fn ) {
		return fn ? this.bind( name, fn ) : this.trigger( name );
	};
	$.attrFn[ name ] = true;
});

// Fall back to mouse events on non-touch devices.
var supportTouch = $.support.touch,
	scrollEvent = "touchmove scroll",
	touchStartEvent = supportTouch ? "touchstart" : "mousedown",
	touchStopEvent = supportTouch ? "touchend" : "mouseup",
	touchMoveEvent = supportTouch ? "touchmove" : "mousemove";

// also handles scrollstop
$.event.special.scrollstart = {
	enabled: true,
	setup: function() {
		var thisObject = this,
			$this = $( thisObject ),
			scrolling,
			timer;

		// Re-dispatch the event under the scrollstart/scrollstop name.
		function trigger( event, state ) {
			scrolling = state;
			var originalType = event.type;
			event.type = scrolling ? "scrollstart" : "scrollstop";
			$.event.handle.call( thisObject, event );
			event.type = originalType;
		}

		// iPhone triggers scroll after a small delay; use touchmove instead
		$this.bind( scrollEvent, function( event ) {
			if ( !$.event.special.scrollstart.enabled ) {
				return;
			}

			if ( !scrolling ) {
				trigger( event, true );
			}

			clearTimeout( timer );
			// 50ms without further scroll events => scrollstop.
			timer = setTimeout(function() {
				trigger( event, false );
			}, 50 );
		});
	}
};

// also handles taphold
$.event.special.tap = {
	setup: function() {
		var thisObject = this,
			$this = $( thisObject );

		$this
			.bind( touchStartEvent, function( event ) {
				// Left button (or touch) only.
				if ( event.which && event.which !== 1 ) {
					return;
				}

				var moved = false,
					touching = true,
					origPos = [ event.pageX, event.pageY ],
					originalType,
					timer;

				// More than 10px of drift in either axis cancels the tap.
				function moveHandler() {
					if ((Math.abs(origPos[0] - event.pageX) > 10) ||
						(Math.abs(origPos[1] - event.pageY) > 10)) {
						moved = true;
					}
				}

				// Still down and unmoved after 750ms => taphold.
				timer = setTimeout(function() {
					if ( touching && !moved ) {
						originalType = event.type;
						event.type = "taphold";
						$.event.handle.call( thisObject, event );
						event.type = originalType;
					}
				}, 750 );

				$this
					.one( touchMoveEvent, moveHandler)
					.one( touchStopEvent, function( event ) {
						$this.unbind( touchMoveEvent, moveHandler );
						clearTimeout( timer );
						touching = false;

						if ( !moved ) {
							originalType = event.type;
							event.type = "tap";
							$.event.handle.call( thisObject, event );
							event.type = originalType;
						}
					});
			});
	}
};

// also handles swipeleft, swiperight
$.event.special.swipe = {
	setup: function() {
		var thisObject = this,
			$this = $( thisObject );

		$this
			.bind( touchStartEvent, function( event ) {
				var data = event.originalEvent.touches ?
						event.originalEvent.touches[ 0 ] :
						event,
					start = {
						time: (new Date).getTime(),
						coords: [ data.pageX, data.pageY ],
						origin: $( event.target )
					},
					stop;

				function moveHandler( event ) {
					if ( !start ) {
						return;
					}

					var data = event.originalEvent.touches ?
							event.originalEvent.touches[ 0 ] :
							event;
					stop = {
						time: (new Date).getTime(),
						coords: [ data.pageX, data.pageY ]
					};

					// prevent scrolling
					if ( Math.abs( start.coords[0] - stop.coords[0] ) > 10 ) {
						event.preventDefault();
					}
				}

				$this
					.bind( touchMoveEvent, moveHandler )
					.one( touchStopEvent, function( event ) {
						$this.unbind( touchMoveEvent, moveHandler );
						// Swipe = <1s, >30px horizontal, <75px vertical drift.
						if ( start && stop ) {
							if ( stop.time - start.time < 1000 &&
									Math.abs( start.coords[0] - stop.coords[0]) > 30 &&
									Math.abs( start.coords[1] - stop.coords[1]) < 75 ) {
								start.origin
									.trigger( "swipe" )
									.trigger( start.coords[0] > stop.coords[0] ? "swipeleft" : "swiperight" );
							}
						}
						start = stop = undefined;
					});
			});
	}
};

(function($){
	// "Cowboy" Ben Alman
	var win = $(window),
		special_event,
		get_orientation,
		last_orientation;

	$.event.special.orientationchange = special_event = {
		setup: function(){
			// If the event is supported natively, return false so that jQuery
			// will bind to the event using DOM methods.
			if ( $.support.orientation ) { return false; }

			// Get the current orientation to avoid initial double-triggering.
			last_orientation = get_orientation();

			// Because the orientationchange event doesn't exist, simulate the
			// event by testing window dimensions on resize.
			win.bind( "resize", handler );
		},
		teardown: function(){
			// If the event is not supported natively, return false so that
			// jQuery will unbind the event using DOM methods.
			if ( $.support.orientation ) { return false; }

			// Because the orientationchange event doesn't exist, unbind the
			// resize event handler.
			win.unbind( "resize", handler );
		},
		add: function( handleObj ) {
			// Save a reference to the bound event handler.
			var old_handler = handleObj.handler;

			handleObj.handler = function( event ) {
				// Modify event object, adding the .orientation property.
				event.orientation = get_orientation();

				// Call the originally-bound event handler and return its result.
				return old_handler.apply( this, arguments );
			};
		}
	};

	// If the event is not supported natively, this handler will be bound to
	// the window resize event to simulate the orientationchange event.
	function handler() {
		// Get the current orientation.
		var orientation = get_orientation();

		if ( orientation !== last_orientation ) {
			// The orientation has changed, so trigger the orientationchange event.
			last_orientation = orientation;
			win.trigger( "orientationchange" );
		}
	};

	// Get the current page orientation. This method is exposed publicly, should it
	// be needed, as jQuery.event.special.orientationchange.orientation()
	// (aspect ratio below 1.1 counts as portrait).
	special_event.orientation = get_orientation = function() {
		var elem = document.documentElement;
		return elem && elem.clientWidth / elem.clientHeight < 1.1 ? "portrait" : "landscape";
	};
})(jQuery);

// Derived events only need their source gesture bound to become active.
$.each({
	scrollstop: "scrollstart",
	taphold: "tap",
	swipeleft: "swipe",
	swiperight: "swipe"
}, function( event, sourceEvent ) {
	$.event.special[ event ] = {
		setup: function() {
			$( this ).bind( sourceEvent, $.noop );
		}
	};
});

})( jQuery );
/*!
* jQuery hashchange event - v1.3 - 7/21/2010
* http://benalman.com/projects/jquery-hashchange-plugin/
*
* Copyright (c) 2010 "Cowboy" Ben Alman
* Dual licensed under the MIT and GPL licenses.
* http://benalman.com/about/license/
*/
// Script: jQuery hashchange event
//
// *Version: 1.3, Last updated: 7/21/2010*
//
// Project Home - http://benalman.com/projects/jquery-hashchange-plugin/
// GitHub - http://github.com/cowboy/jquery-hashchange/
// Source - http://github.com/cowboy/jquery-hashchange/raw/master/jquery.ba-hashchange.js
// (Minified) - http://github.com/cowboy/jquery-hashchange/raw/master/jquery.ba-hashchange.min.js (0.8kb gzipped)
//
// About: License
//
// Copyright (c) 2010 "Cowboy" Ben Alman,
// Dual licensed under the MIT and GPL licenses.
// http://benalman.com/about/license/
//
// About: Examples
//
// These working examples, complete with fully commented code, illustrate a few
// ways in which this plugin can be used.
//
// hashchange event - http://benalman.com/code/projects/jquery-hashchange/examples/hashchange/
// document.domain - http://benalman.com/code/projects/jquery-hashchange/examples/document_domain/
//
// About: Support and Testing
//
// Information about what version or versions of jQuery this plugin has been
// tested with, what browsers it has been tested in, and where the unit tests
// reside (so you can test it yourself).
//
// jQuery Versions - 1.2.6, 1.3.2, 1.4.1, 1.4.2
// Browsers Tested - Internet Explorer 6-8, Firefox 2-4, Chrome 5-6, Safari 3.2-5,
// Opera 9.6-10.60, iPhone 3.1, Android 1.6-2.2, BlackBerry 4.6-5.
// Unit Tests - http://benalman.com/code/projects/jquery-hashchange/unit/
//
// About: Known issues
//
// While this jQuery hashchange event implementation is quite stable and
// robust, there are a few unfortunate browser bugs surrounding expected
// hashchange event-based behaviors, independent of any JavaScript
// window.onhashchange abstraction. See the following examples for more
// information:
//
// Chrome: Back Button - http://benalman.com/code/projects/jquery-hashchange/examples/bug-chrome-back-button/
// Firefox: Remote XMLHttpRequest - http://benalman.com/code/projects/jquery-hashchange/examples/bug-firefox-remote-xhr/
// WebKit: Back Button in an Iframe - http://benalman.com/code/projects/jquery-hashchange/examples/bug-webkit-hash-iframe/
// Safari: Back Button from a different domain - http://benalman.com/code/projects/jquery-hashchange/examples/bug-safari-back-from-diff-domain/
//
// Also note that should a browser natively support the window.onhashchange
// event, but not report that it does, the fallback polling loop will be used.
//
// About: Release History
//
// 1.3 - (7/21/2010) Reorganized IE6/7 Iframe code to make it more
// "removable" for mobile-only development. Added IE6/7 document.title
// support. Attempted to make Iframe as hidden as possible by using
// techniques from http://www.paciellogroup.com/blog/?p=604. Added
// support for the "shortcut" format $(window).hashchange( fn ) and
// $(window).hashchange() like jQuery provides for built-in events.
// Renamed jQuery.hashchangeDelay to <jQuery.fn.hashchange.delay> and
// lowered its default value to 50. Added <jQuery.fn.hashchange.domain>
// and <jQuery.fn.hashchange.src> properties plus document-domain.html
// file to address access denied issues when setting document.domain in
// IE6/7.
// 1.2 - (2/11/2010) Fixed a bug where coming back to a page using this plugin
// from a page on another domain would cause an error in Safari 4. Also,
// IE6/7 Iframe is now inserted after the body (this actually works),
// which prevents the page from scrolling when the event is first bound.
// Event can also now be bound before DOM ready, but it won't be usable
// before then in IE6/7.
// 1.1 - (1/21/2010) Incorporated document.documentMode test to fix IE8 bug
// where browser version is incorrectly reported as 8.0, despite
// inclusion of the X-UA-Compatible IE=EmulateIE7 meta tag.
// 1.0 - (1/9/2010) Initial Release. Broke out the jQuery BBQ event.special
// window.onhashchange functionality into a separate plugin for users
// who want just the basic event & back button support, without all the
// extra awesomeness that BBQ provides. This plugin will be included as
// part of jQuery BBQ, but also be available separately.
// ---------------------------------------------------------------------------
// jQuery hashchange event plugin (Ben Alman, v1.3): fires a cross-browser
// 'hashchange' event on window. Uses the native event where supported,
// otherwise a polling loop; in IE6/7 (and IE8 in IE7 compatibility mode) a
// hidden Iframe additionally makes back/forward hash history work.
// ---------------------------------------------------------------------------
(function($,window,undefined){
'$:nomunge'; // Used by YUI compressor.
// Reused string.
var str_hashchange = 'hashchange',
// Method / object references.
doc = document,
fake_onhashchange,
special = $.event.special,
// Does the browser support window.onhashchange? Note that IE8 running in
// IE7 compatibility mode reports true for 'onhashchange' in window, even
// though the event isn't supported, so also test document.documentMode.
doc_mode = doc.documentMode,
supports_onhashchange = 'on' + str_hashchange in window && ( doc_mode === undefined || doc_mode > 7 );
// Get location.hash (or what you'd expect location.hash to be) sans any
// leading #. Thanks for making this necessary, Firefox!
function get_fragment( url ) {
url = url || location.href;
return '#' + url.replace( /^[^#]*#?(.*)$/, '$1' );
};
// Method: jQuery.fn.hashchange
//
// Bind a handler to the window.onhashchange event or trigger all bound
// window.onhashchange event handlers. This behavior is consistent with
// jQuery's built-in event handlers.
//
// Usage:
//
// > jQuery(window).hashchange( [ handler ] );
//
// Arguments:
//
// handler - (Function) Optional handler to be bound to the hashchange
// event. This is a "shortcut" for the more verbose form:
// jQuery(window).bind( 'hashchange', handler ). If handler is omitted,
// all bound window.onhashchange event handlers will be triggered. This
// is a shortcut for the more verbose
// jQuery(window).trigger( 'hashchange' ). These forms are described in
// the <hashchange event> section.
//
// Returns:
//
// (jQuery) The initial jQuery collection of elements.
// Allow the "shortcut" format $(elem).hashchange( fn ) for binding and
// $(elem).hashchange() for triggering, like jQuery does for built-in events.
$.fn[ str_hashchange ] = function( fn ) {
return fn ? this.bind( str_hashchange, fn ) : this.trigger( str_hashchange );
};
// Property: jQuery.fn.hashchange.delay
//
// The numeric interval (in milliseconds) at which the <hashchange event>
// polling loop executes. Defaults to 50.
// Property: jQuery.fn.hashchange.domain
//
// If you're setting document.domain in your JavaScript, and you want hash
// history to work in IE6/7, not only must this property be set, but you must
// also set document.domain BEFORE jQuery is loaded into the page. This
// property is only applicable if you are supporting IE6/7 (or IE8 operating
// in "IE7 compatibility" mode).
//
// In addition, the <jQuery.fn.hashchange.src> property must be set to the
// path of the included "document-domain.html" file, which can be renamed or
// modified if necessary (note that the document.domain specified must be the
// same in both your main JavaScript as well as in this file).
//
// Usage:
//
// jQuery.fn.hashchange.domain = document.domain;
// Property: jQuery.fn.hashchange.src
//
// If, for some reason, you need to specify an Iframe src file (for example,
// when setting document.domain as in <jQuery.fn.hashchange.domain>), you can
// do so using this property. Note that when using this property, history
// won't be recorded in IE6/7 until the Iframe src file loads. This property
// is only applicable if you are supporting IE6/7 (or IE8 operating in "IE7
// compatibility" mode).
//
// Usage:
//
// jQuery.fn.hashchange.src = 'path/to/file.html';
$.fn[ str_hashchange ].delay = 50;
/*
$.fn[ str_hashchange ].domain = null;
$.fn[ str_hashchange ].src = null;
*/
// Event: hashchange event
//
// Fired when location.hash changes. In browsers that support it, the native
// HTML5 window.onhashchange event is used, otherwise a polling loop is
// initialized, running every <jQuery.fn.hashchange.delay> milliseconds to
// see if the hash has changed. In IE6/7 (and IE8 operating in "IE7
// compatibility" mode), a hidden Iframe is created to allow the back button
// and hash-based history to work.
//
// Usage as described in <jQuery.fn.hashchange>:
//
// > // Bind an event handler.
// > jQuery(window).hashchange( function(e) {
// > var hash = location.hash;
// > ...
// > });
// >
// > // Manually trigger the event handler.
// > jQuery(window).hashchange();
//
// A more verbose usage that allows for event namespacing:
//
// > // Bind an event handler.
// > jQuery(window).bind( 'hashchange', function(e) {
// > var hash = location.hash;
// > ...
// > });
// >
// > // Manually trigger the event handler.
// > jQuery(window).trigger( 'hashchange' );
//
// Additional Notes:
//
// * The polling loop and Iframe are not created until at least one handler
// is actually bound to the 'hashchange' event.
// * If you need the bound handler(s) to execute immediately, in cases where
// a location.hash exists on page load, via bookmark or page refresh for
// example, use jQuery(window).hashchange() or the more verbose
// jQuery(window).trigger( 'hashchange' ).
// * The event can be bound before DOM ready, but since it won't be usable
// before then in IE6/7 (due to the necessary Iframe), recommended usage is
// to bind it inside a DOM ready handler.
// Override existing $.event.special.hashchange methods (allowing this plugin
// to be defined after jQuery BBQ in BBQ's source code).
special[ str_hashchange ] = $.extend( special[ str_hashchange ], {
// Called only when the first 'hashchange' event is bound to window.
setup: function() {
// If window.onhashchange is supported natively, there's nothing to do..
if ( supports_onhashchange ) { return false; }
// Otherwise, we need to create our own. And we don't want to call this
// until the user binds to the event, just in case they never do, since it
// will create a polling loop and possibly even a hidden Iframe.
// ($(fn) defers fn until DOM ready.)
$( fake_onhashchange.start );
},
// Called only when the last 'hashchange' event is unbound from window.
teardown: function() {
// If window.onhashchange is supported natively, there's nothing to do..
if ( supports_onhashchange ) { return false; }
// Otherwise, we need to stop ours (if possible).
$( fake_onhashchange.stop );
}
});
// fake_onhashchange does all the work of triggering the window.onhashchange
// event for browsers that don't natively support it, including creating a
// polling loop to watch for hash changes and in IE 6/7 creating a hidden
// Iframe to enable back and forward.
fake_onhashchange = (function(){
var self = {},
timeout_id,
// Remember the initial hash so it doesn't get triggered immediately.
last_hash = get_fragment(),
// Identity function; history_set / history_get remain identity no-ops
// everywhere except the IE6/7 Iframe path below, which overrides them.
fn_retval = function(val){ return val; },
history_set = fn_retval,
history_get = fn_retval;
// Start the polling loop.
self.start = function() {
timeout_id || poll();
};
// Stop the polling loop.
self.stop = function() {
timeout_id && clearTimeout( timeout_id );
timeout_id = undefined;
};
// This polling loop checks every $.fn.hashchange.delay milliseconds to see
// if location.hash has changed, and triggers the 'hashchange' event on
// window when necessary.
function poll() {
var hash = get_fragment(),
history_hash = history_get( last_hash );
if ( hash !== last_hash ) {
history_set( last_hash = hash, history_hash );
$(window).trigger( str_hashchange );
} else if ( history_hash !== last_hash ) {
// The Iframe history (back/forward) changed: navigate the real page.
location.href = location.href.replace( /#.*/, '' ) + history_hash;
}
timeout_id = setTimeout( poll, $.fn[ str_hashchange ].delay );
};
// vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
// vvvvvvvvvvvvvvvvvvv REMOVE IF NOT SUPPORTING IE6/7/8 vvvvvvvvvvvvvvvvvvv
// vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
$.browser.msie && !supports_onhashchange && (function(){
// Not only do IE6/7 need the "magical" Iframe treatment, but so does IE8
// when running in "IE7 compatibility" mode.
var iframe,
iframe_src;
// When the event is bound and polling starts in IE 6/7, create a hidden
// Iframe for history handling.
self.start = function(){
if ( !iframe ) {
iframe_src = $.fn[ str_hashchange ].src;
iframe_src = iframe_src && iframe_src + get_fragment();
// Create hidden Iframe. Attempt to make Iframe as hidden as possible
// by using techniques from http://www.paciellogroup.com/blog/?p=604.
iframe = $('<iframe tabindex="-1" title="empty"/>').hide()
// When Iframe has completely loaded, initialize the history and
// start polling.
.one( 'load', function(){
iframe_src || history_set( get_fragment() );
poll();
})
// Load Iframe src if specified, otherwise nothing.
.attr( 'src', iframe_src || 'javascript:0' )
// Append Iframe after the end of the body to prevent unnecessary
// initial page scrolling (yes, this works).
.insertAfter( 'body' )[0].contentWindow;
// Whenever `document.title` changes, update the Iframe's title to
// prettify the back/next history menu entries. Since IE sometimes
// errors with "Unspecified error" the very first time this is set
// (yes, very useful) wrap this with a try/catch block.
doc.onpropertychange = function(){
try {
if ( event.propertyName === 'title' ) {
iframe.document.title = doc.title;
}
} catch(e) {}
};
}
};
// Override the "stop" method since an IE6/7 Iframe was created. Even
// if there are no longer any bound event handlers, the polling loop
// is still necessary for back/next to work at all!
self.stop = fn_retval;
// Get history by looking at the hidden Iframe's location.hash.
history_get = function() {
return get_fragment( iframe.location.href );
};
// Set a new history item by opening and then closing the Iframe
// document, *then* setting its location.hash. If document.domain has
// been set, update that as well.
history_set = function( hash, history_hash ) {
var iframe_doc = iframe.document,
domain = $.fn[ str_hashchange ].domain;
if ( hash !== history_hash ) {
// Update Iframe with any initial `document.title` that might be set.
iframe_doc.title = doc.title;
// Opening the Iframe's document after it has been closed is what
// actually adds a history entry.
iframe_doc.open();
// Set document.domain for the Iframe document as well, if necessary.
domain && iframe_doc.write( '<script>document.domain="' + domain + '"</script>' );
iframe_doc.close();
// Update the Iframe's hash, for great justice.
iframe.location.hash = hash;
}
};
})();
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
// ^^^^^^^^^^^^^^^^^^^ REMOVE IF NOT SUPPORTING IE6/7/8 ^^^^^^^^^^^^^^^^^^^
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
return self;
})();
})(jQuery,this);
/*
* jQuery Mobile Framework : "page" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
// jQuery Mobile "page" widget: enhances a raw page's markup in place —
// applies theme classes to page/header/content/footer, adds ARIA roles,
// auto-inserts a back button, degrades unsupported input types, and
// delegates to the other widgets (buttons, listviews, form controls).
(function($, undefined ) {
$.widget( "mobile.page", $.mobile.widget, {
options: {
backBtnText: "Back",
addBackBtn: true,
// Input types mapped to true are replaced with plain text inputs
// (native implementations were too poor on target devices).
degradeInputs: {
color: false,
date: false,
datetime: false,
"datetime-local": false,
email: false,
month: false,
number: false,
range: true,
search: true,
tel: false,
time: false,
url: false,
week: false
}
},
// One-shot enhancement pass over the page element and its descendants.
// Fires the cancelable "beforeCreate" event first.
_create: function() {
var $elem = this.element,
o = this.options;
if ( this._trigger( "beforeCreate" ) === false ) {
return;
}
//some of the form elements currently rely on the presence of ui-page and ui-content
// classes so we'll handle page and content roles outside of the main role processing
// loop below.
$elem.find( "[data-role='page'], [data-role='content']" ).andSelf().each(function() {
$(this).addClass( "ui-" + $(this).data( "role" ) );
});
$elem.find( "[data-role='nojs']" ).addClass( "ui-nojs" );
// Form controls must be enhanced before the role loop runs.
this._enchanceControls();
// pre-find data els
// NOTE(review): $dataEls is never read afterwards — the .each() side
// effects are what matters here.
var $dataEls = $elem.find( "[data-role]" ).andSelf().each(function() {
var $this = $( this ),
role = $this.data( "role" ),
theme = $this.data( "theme" );
//apply theming and markup modifications to page,header,content,footer
if ( role === "header" || role === "footer" ) {
// Theme falls back to the page's theme, then swatch "a".
$this.addClass( "ui-bar-" + (theme || $this.parent('[data-role=page]').data( "theme" ) || "a") );
// add ARIA role
$this.attr( "role", role === "header" ? "banner" : "contentinfo" );
//right,left buttons
var $headeranchors = $this.children( "a" ),
leftbtn = $headeranchors.hasClass( "ui-btn-left" ),
rightbtn = $headeranchors.hasClass( "ui-btn-right" );
// First anchor becomes the left button, second the right, unless
// the author positioned them explicitly with classes.
if ( !leftbtn ) {
leftbtn = $headeranchors.eq( 0 ).not( ".ui-btn-right" ).addClass( "ui-btn-left" ).length;
}
if ( !rightbtn ) {
rightbtn = $headeranchors.eq( 1 ).addClass( "ui-btn-right" ).length;
}
// auto-add back btn on pages beyond first view
if ( o.addBackBtn && role === "header" &&
($.mobile.urlStack.length > 1 || $(".ui-page").length > 1) &&
!leftbtn && !$this.data( "noBackBtn" ) ) {
$( "<a href='#' class='ui-btn-left' data-icon='arrow-l'>"+ o.backBtnText +"</a>" )
.click(function() {
history.back();
return false;
})
.prependTo( $this );
}
//page title
$this.children( "h1, h2, h3, h4, h5, h6" )
.addClass( "ui-title" )
//regardless of h element number in src, it becomes h1 for the enhanced page
.attr({ "tabindex": "0", "role": "heading", "aria-level": "1" });
} else if ( role === "content" ) {
if ( theme ) {
$this.addClass( "ui-body-" + theme );
}
// add ARIA role
$this.attr( "role", "main" );
} else if ( role === "page" ) {
$this.addClass( "ui-body-" + (theme || "c") );
}
// Structural roles get a matching class; behavioral roles get their
// widget plugin invoked (e.g. $this.listview()).
switch(role) {
case "header":
case "footer":
case "page":
case "content":
$this.addClass( "ui-" + role );
break;
case "collapsible":
case "fieldcontain":
case "navbar":
case "listview":
case "dialog":
$this[ role ]();
break;
}
});
//links in bars, or those with data-role become buttons
$elem.find( "[data-role='button'], .ui-bar a, .ui-header a, .ui-footer a" )
.not( ".ui-btn" )
.buttonMarkup();
$elem
.find("[data-role='controlgroup']")
.controlgroup();
//links within content areas
$elem.find( "a:not(.ui-btn):not(.ui-link-inherit)" )
.addClass( "ui-link" );
//fix toolbars
$elem.fixHeaderFooter();
},
// Degrade unsupported input types, then run the form-control widgets
// (checkboxradio, button, textinput, slider, selectmenu) over the page.
// (Method name "enchance" is a historical typo kept for compatibility.)
_enchanceControls: function() {
var o = this.options;
// degrade inputs to avoid poorly implemented native functionality
this.element.find( "input" ).each(function() {
var type = this.getAttribute( "type" );
if ( o.degradeInputs[ type ] ) {
// Serialize, rewrite type="x" to data-type='x', and re-parse —
// changing the type attribute directly is not allowed in IE.
$( this ).replaceWith(
$( "<div>" ).html( $(this).clone() ).html()
.replace( /type="([a-zA-Z]+)"/, "data-type='$1'" ) );
}
});
// enhance form controls
this.element
.find( "[type='radio'], [type='checkbox']" )
.checkboxradio();
this.element
.find( "button, [type='button'], [type='submit'], [type='reset'], [type='image']" )
.not( ".ui-nojs" )
.button();
this.element
.find( "input, textarea" )
.not( "[type='radio'], [type='checkbox'], button, [type='button'], [type='submit'], [type='reset'], [type='image']" )
.textinput();
this.element
.find( "input, select" )
.filter( "[data-role='slider'], [data-type='range']" )
.slider();
this.element
.find( "select:not([data-role='slider'])" )
.selectmenu();
}
});
})( jQuery );
/*
* jQuery Mobile Framework : "fixHeaderFooter" plugin - on-demand positioning for headers,footers
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
// jQuery Mobile "fixHeaderFooter" plugin: marks data-position="fixed"
// toolbars and provides $.fixedToolbars, a single controller that shows,
// hides, and repositions them in response to touch and scroll activity.
(function($, undefined ) {
// Tag fixed headers/footers on each matched page with the classes the
// controller below looks for. No-op on platforms without scrollTop support.
$.fn.fixHeaderFooter = function(options){
if( !$.support.scrollTop ){ return $(this); }
return $(this).each(function(){
if( $(this).data('fullscreen') ){ $(this).addClass('ui-page-fullscreen'); }
$(this).find('.ui-header[data-position="fixed"]').addClass('ui-header-fixed ui-fixed-inline fade'); //should be slidedown
$(this).find('.ui-footer[data-position="fixed"]').addClass('ui-footer-fixed ui-fixed-inline fade'); //should be slideup
});
};
//single controller for all showing,hiding,toggling
$.fixedToolbars = (function(){
if( !$.support.scrollTop ){ return; }
// Shared state: 'inline' = toolbars in document flow,
// 'overlay' = toolbars positioned over the content.
var currentstate = 'inline',
delayTimer,
ignoreTargets = 'a,input,textarea,select,button,label,.ui-header-fixed,.ui-footer-fixed',
toolbarSelector = '.ui-header-fixed:first, .ui-footer-fixed:not(.ui-footer-duplicate):last',
stickyFooter, //for storing quick references to duplicate footers
supportTouch = $.support.touch,
touchStartEvent = supportTouch ? "touchstart" : "mousedown",
touchStopEvent = supportTouch ? "touchend" : "mouseup",
stateBefore = null,
scrollTriggered = false;
$(function() {
// Hide toolbars while the user is touching/scrolling and restore the
// prior state afterwards; stateBefore remembers what to restore.
$(document)
.bind(touchStartEvent,function(event){
if( $(event.target).closest(ignoreTargets).length ){ return; }
stateBefore = currentstate;
$.fixedToolbars.hide(true);
})
.bind('scrollstart',function(event){
if( $(event.target).closest(ignoreTargets).length ){ return; } //because it could be a touchmove...
scrollTriggered = true;
if(stateBefore == null){ stateBefore = currentstate; }
$.fixedToolbars.hide(true);
})
.bind(touchStopEvent,function(event){
if( $(event.target).closest(ignoreTargets).length ){ return; }
// A plain tap (no scroll in between) toggles the toolbars back.
if( !scrollTriggered ){
$.fixedToolbars.toggle(stateBefore);
stateBefore = null;
}
})
.bind('scrollstop',function(event){
if( $(event.target).closest(ignoreTargets).length ){ return; }
scrollTriggered = false;
$.fixedToolbars.toggle( stateBefore == 'overlay' ? 'inline' : 'overlay' );
stateBefore = null;
});
//function to return another footer already in the dom with the same data-id
function findStickyFooter(el){
var thisFooter = el.find('[data-role="footer"]');
return $( '.ui-footer[data-id="'+ thisFooter.data('id') +'"]:not(.ui-footer-duplicate)' ).not(thisFooter);
}
//before page is shown, check for duplicate footer
$('.ui-page').live('pagebeforeshow', function(event, ui){
stickyFooter = findStickyFooter( $(event.target) );
if( stickyFooter.length ){
//if the existing footer is the first of its kind, create a placeholder before stealing it
if( stickyFooter.parents('.ui-page:eq(0)').find('.ui-footer[data-id="'+ stickyFooter.data('id') +'"]').length == 1 ){
stickyFooter.before( stickyFooter.clone().addClass('ui-footer-duplicate') );
}
$(event.target).find('[data-role="footer"]').addClass('ui-footer-duplicate');
stickyFooter.appendTo($.pageContainer).css('top',0);
setTop(stickyFooter);
}
});
//after page is shown, append footer to new page
$('.ui-page').live('pageshow', function(event, ui){
if( stickyFooter && stickyFooter.length ){
stickyFooter.appendTo(event.target).css('top',0);
}
$.fixedToolbars.show(true, this);
});
});
// element.getBoundingClientRect() is broken in iOS 3.2.1 on the iPad. The
// coordinates inside of the rect it returns don't have the page scroll position
// factored out of it like the other platforms do. To get around this,
// we'll just calculate the top offset the old fashioned way until core has
// a chance to figure out how to handle this situation.
//
// TODO: We'll need to get rid of getOffsetTop() once a fix gets folded into core.
function getOffsetTop(ele)
{
var top = 0;
if (ele)
{
var op = ele.offsetParent, body = document.body;
top = ele.offsetTop;
// Walk up the tree, accumulating scroll offsets and offsetParent tops.
while (ele && ele != body)
{
top += ele.scrollTop || 0;
if (ele == op)
{
top += op.offsetTop;
op = ele.offsetParent;
}
ele = ele.parentNode;
}
}
return top;
}
// Position a fixed header at the top of the viewport (or a fixed footer at
// the bottom), using a relative offset unless the page is fullscreen.
function setTop(el){
var fromTop = $(window).scrollTop(),
thisTop = getOffsetTop(el[0]), // el.offset().top returns the wrong value on iPad iOS 3.2.1, call our workaround instead.
thisCSStop = el.css('top') == 'auto' ? 0 : parseFloat(el.css('top')),
screenHeight = window.innerHeight,
thisHeight = el.outerHeight(),
useRelative = el.parents('.ui-page:not(.ui-page-fullscreen)').length,
relval;
if( el.is('.ui-header-fixed') ){
relval = fromTop - thisTop + thisCSStop;
if( relval < thisTop){ relval = 0; }
return el.css('top', ( useRelative ) ? relval : fromTop);
}
else{
//relval = -1 * (thisTop - (fromTop + screenHeight) + thisCSStop + thisHeight);
//if( relval > thisTop ){ relval = 0; }
relval = fromTop + screenHeight - thisHeight - (thisTop - thisCSStop);
return el.css('top', ( useRelative ) ? relval : fromTop + screenHeight - thisHeight );
}
}
//exposed methods
return {
// Overlay the toolbars on the current (or given) page, animating unless
// `immediately` is set or the toolbar is already visible.
show: function(immediately, page){
currentstate = 'overlay';
var $ap = page ? $(page) : ($.mobile.activePage ? $.mobile.activePage : $(".ui-page-active"));
return $ap.children( toolbarSelector ).each(function(){
var el = $(this),
fromTop = $(window).scrollTop(),
thisTop = getOffsetTop(el[0]), // el.offset().top returns the wrong value on iPad iOS 3.2.1, call our workaround instead.
screenHeight = window.innerHeight,
thisHeight = el.outerHeight(),
alreadyVisible = (el.is('.ui-header-fixed') && fromTop <= thisTop + thisHeight) || (el.is('.ui-footer-fixed') && thisTop <= fromTop + screenHeight);
//add state class
el.addClass('ui-fixed-overlay').removeClass('ui-fixed-inline');
if( !alreadyVisible && !immediately ){
el.addClass('in').animationComplete(function(){
el.removeClass('in');
});
}
setTop(el);
});
},
// Return the toolbars to inline flow, animating out unless `immediately`.
hide: function(immediately){
currentstate = 'inline';
var $ap = $.mobile.activePage ? $.mobile.activePage : $(".ui-page-active");
return $ap.children( toolbarSelector ).each(function(){
var el = $(this);
var thisCSStop = el.css('top'); thisCSStop = thisCSStop == 'auto' ? 0 : parseFloat(thisCSStop);
//add state class
el.addClass('ui-fixed-inline').removeClass('ui-fixed-overlay');
if (thisCSStop < 0 || (el.is('.ui-header-fixed') && thisCSStop != 0))
{
if(immediately){
el.css('top',0);
}
else{
if( el.css('top') !== 'auto' && parseFloat(el.css('top')) !== 0 ){
var classes = 'out reverse';
el.addClass(classes).animationComplete(function(){
el.removeClass(classes);
el.css('top',0);
});
}
}
}
});
},
// Schedule a hide() 3 seconds from now. NOTE(review): delayTimer is never
// cleared elsewhere in this file — presumably cancellation was planned.
hideAfterDelay: function(){
delayTimer = setTimeout(function(){
$.fixedToolbars.hide();
}, 3000);
},
// Flip between overlay and inline; `from` optionally forces the state
// considered current before toggling.
toggle: function(from){
if(from){ currentstate = from; }
return (currentstate == 'overlay') ? $.fixedToolbars.hide() : $.fixedToolbars.show();
}
};
})();
})(jQuery);/*
* jQuery Mobile Framework : "checkboxradio" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
// jQuery Mobile "checkboxradio" widget: enhances native checkbox/radio
// inputs with a themed, button-styled label and on/off state icons.
(function($, undefined ) {
$.widget( "mobile.checkboxradio", $.mobile.widget, {
options: {
theme: null
},
// Style the input's label as a button, wrap the pair in a container div,
// and wire up mouse/click/focus behavior. Inputs whose type is neither
// "checkbox" nor "radio" are left untouched.
_create: function(){
var self = this,
input = this.element,
inputtype = input.attr( "type" );
// Bail out for unsupported input types.
if ( inputtype !== "checkbox" && inputtype !== "radio" ) { return; }
var label = $( "label[for='" + input.attr( "id" ) + "']" ),
offIcon = "ui-icon-" + inputtype + "-off",
inHorizontalGroup = input.parents( "[data-type='horizontal']" ).length;
// Horizontal control groups render without the on/off icon.
label.buttonMarkup({
theme: self.options.theme,
icon: inHorizontalGroup ? undefined : offIcon,
shadow: false
});
// Wrap the input + label pair in a container div.
input.add( label ).wrapAll( "<div class='ui-" + inputtype + "'></div>" );
// True while the enhanced control is marked disabled.
function isDisabled() {
return label.parent().is( '.ui-disabled' );
}
label.bind({
mouseover: function() {
if ( isDisabled() ) { return false; }
},
mousedown: function() {
if ( isDisabled() ) { return false; }
// Remember the checked state so the click handler can tell
// whether the label click actually toggled the input.
label.data( "state", input.attr( "checked" ) );
},
click: function() {
// If the state is unchanged shortly after the click, the label
// click didn't reach the input — forward it manually.
setTimeout(function() {
if ( input.attr( "checked" ) === label.data( "state" ) ) {
input.trigger( "click" );
}
}, 1);
}
});
input.bind({
click: function() {
// Refresh every input sharing this name/type (radio groups).
$( "input[name='" + input.attr( "name" ) + "'][type='" + inputtype + "']" ).checkboxradio( "refresh" );
},
focus: function() {
label.addClass( "ui-focus" );
},
blur: function() {
label.removeClass( "ui-focus" );
}
});
self.refresh();
},
// Sync the enhanced markup (active class + icon) with the input's current
// checked state, and propagate the disabled state.
refresh: function( ){
var input = this.element,
inputtype = input.attr( "type" ),
label = $( "label[for='" + input.attr( "id" ) + "']" ),
icon = label.find( ".ui-icon" ),
onIcon = "ui-icon-" + inputtype + "-on",
offIcon = "ui-icon-" + inputtype + "-off";
if ( input[0].checked ) {
label.addClass( "ui-btn-active" );
icon.addClass( onIcon ).removeClass( offIcon );
} else {
label.removeClass( "ui-btn-active" );
icon.removeClass( onIcon ).addClass( offIcon );
}
if ( input.is( ":disabled" ) ) {
this.disable();
} else {
this.enable();
}
},
// Disable the native input and mark the wrapper disabled.
disable: function(){
this.element.attr( "disabled", true ).parent().addClass( "ui-disabled" );
},
// Enable the native input and clear the wrapper's disabled marker.
enable: function(){
this.element.attr( "disabled", false ).parent().removeClass( "ui-disabled" );
}
});
})( jQuery );
/*
* jQuery Mobile Framework : "textinput" plugin for text inputs, textareas
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
// jQuery Mobile "textinput" widget: themes text inputs and textareas, adds
// a clear button to search fields, tracks focus styling, and auto-grows
// textareas as their content exceeds the visible area.
(function($, undefined ) {
$.widget( "mobile.textinput", $.mobile.widget, {
options: {
theme: null
},
_create: function(){
var input = this.element,
o = this.options,
theme = o.theme,
themeclass;
if ( !theme ) {
// No explicit theme: inherit the swatch letter from the closest
// themed container, falling back to the default swatch "c".
var themedParent = this.element.closest("[class*='ui-bar-'],[class*='ui-body-']");
theme = themedParent.length ?
/ui-(bar|body)-([a-z])/.exec( themedParent.attr("class") )[2] :
"c";
}
themeclass = " ui-body-" + theme;
// Quote the id in the selector (consistent with the other form widgets,
// and safe for ids that need escaping).
$( "label[for='" + input.attr( "id" ) + "']" ).addClass( 'ui-input-text' );
// BUGFIX: previously used o.theme here, which produced the bogus class
// "ui-body-null" whenever no theme option was set; use the resolved
// theme instead.
input.addClass( 'ui-input-text ui-body-' + theme );
// The element that receives the ui-focus class: the input itself, or the
// wrapper div for search fields.
var focusedEl = input;
//"search" input widget
if( input.is('[type="search"],[data-type="search"]') ){
focusedEl = input.wrap('<div class="ui-input-search ui-shadow-inset ui-btn-corner-all ui-btn-shadow ui-icon-search'+ themeclass +'"></div>').parent();
var clearbtn = $('<a href="#" class="ui-input-clear" title="clear text">clear text</a>')
.click(function(){
input.val('').focus();
input.trigger('change');
clearbtn.addClass('ui-input-clear-hidden');
return false;
})
.appendTo(focusedEl)
.buttonMarkup({icon: 'delete', iconpos: 'notext', corners:true, shadow:true});
// Hide the clear button whenever the field is empty.
var toggleClear = function(){
if( input.val() === '' ){
clearbtn.addClass('ui-input-clear-hidden');
}
else{
clearbtn.removeClass('ui-input-clear-hidden');
}
};
toggleClear();
input.keyup(toggleClear);
}
else{
input.addClass('ui-corner-all ui-shadow-inset' + themeclass);
}
input
.focus(function(){
focusedEl.addClass('ui-focus');
})
.blur(function(){
focusedEl.removeClass('ui-focus');
});
//autogrow textareas as content grows past the visible area
if ( input.is('textarea') ) {
var extraLineHeight = 15,
keyupTimeoutBuffer = 100, // debounce: resize after typing pauses
keyupTimeout,
keyup = function() {
var scrollHeight = input[0].scrollHeight,
clientHeight = input[0].clientHeight;
if ( clientHeight < scrollHeight ) {
input.css({ height: (scrollHeight + extraLineHeight) });
}
};
input.keyup(function() {
clearTimeout( keyupTimeout );
keyupTimeout = setTimeout( keyup, keyupTimeoutBuffer );
});
}
},
// Disable the input; for search fields the wrapper carries the class.
disable: function(){
( this.element.attr("disabled",true).is('[type="search"],[data-type="search"]') ? this.element.parent() : this.element ).addClass("ui-disabled");
},
// Re-enable the input; for search fields the wrapper carries the class.
enable: function(){
( this.element.attr("disabled", false).is('[type="search"],[data-type="search"]') ? this.element.parent() : this.element ).removeClass("ui-disabled");
}
});
})( jQuery );
/*
* jQuery Mobile Framework : "selectmenu" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
// jQuery Mobile "selectmenu" widget: replaces a native <select> with a
// themed button that opens either an in-page overlay listbox (short lists)
// or a separate dialog page (long lists / no scrollTop support). The native
// select is kept in the DOM and stays the source of truth for the value.
(function($, undefined ) {
$.widget( "mobile.selectmenu", $.mobile.widget, {
options: {
theme: null,
disabled: false,
icon: 'arrow-d',
iconpos: 'right',
inline: null,
corners: true,
shadow: true,
iconshadow: true,
menuPageTheme: 'b',
overlayTheme: 'a'
},
// Build the button, dialog page, overlay screen, and list elements, then
// wire up all open/close/refresh behavior.
_create: function(){
var self = this,
o = this.options,
// Take the native select out of the tab order; the button proxies it.
select = this.element
.attr( "tabindex", "-1" )
.wrap( "<div class='ui-select'>" ),
selectID = select.attr( "id" ),
label = $( "label[for="+ selectID +"]" ).addClass( "ui-select" ),
buttonId = selectID + "-button",
menuId = selectID + "-menu",
thisPage = select.closest( ".ui-page" ),
// The visible proxy button; its text mirrors the selected option.
button = $( "<a>", {
"href": "#",
"role": "button",
"id": buttonId,
"aria-haspopup": "true",
"aria-owns": menuId
})
.text( $( select[0].options.item(select[0].selectedIndex) ).text() )
.insertBefore( select )
.buttonMarkup({
theme: o.theme,
icon: o.icon,
iconpos: o.iconpos,
inline: o.inline,
corners: o.corners,
shadow: o.shadow,
iconshadow: o.iconshadow
}),
// Recover the resolved swatch letter from the button's class.
theme = /ui-btn-up-([a-z])/.exec( button.attr("class") )[1],
// Full-page dialog used when the menu is too tall for an overlay.
menuPage = $( "<div data-role='dialog' data-theme='"+ o.menuPageTheme +"'>" +
"<div data-role='header'>" +
"<div class='ui-title'>" + label.text() + "</div>"+
"</div>"+
"<div data-role='content'></div>"+
"</div>" )
.appendTo( $.pageContainer )
.page(),
menuPageContent = menuPage.find( ".ui-content" ),
// Click-catching overlay behind the in-page listbox.
screen = $( "<div>", {"class": "ui-listbox-screen ui-overlay ui-screen-hidden fade"})
.appendTo( thisPage ),
listbox = $( "<div>", { "class": "ui-listbox ui-listbox-hidden ui-overlay-shadow ui-corner-all pop ui-body-" + o.overlayTheme } )
.insertAfter(screen),
// The shared list element; it is moved between the listbox and the
// dialog page depending on which menu type open() chooses.
list = $( "<ul>", {
"class": "ui-listbox-list",
"id": menuId,
"role": "listbox",
"aria-labelledby": buttonId,
"data-theme": theme
})
.appendTo( listbox ),
menuType;
//expose to other methods
$.extend(self, {
select: select,
selectID: selectID,
label: label,
buttonId:buttonId,
menuId:menuId,
thisPage:thisPage,
button:button,
menuPage:menuPage,
menuPageContent:menuPageContent,
screen:screen,
listbox:listbox,
list:list,
menuType:menuType
});
//create list from select, update state
self.refresh();
//disable if specified
if( this.options.disabled ){ this.disable(); }
//events on native select
select
.change(function(){
self.refresh();
})
.focus(function(){
// Redirect focus from the hidden native select to the proxy button.
$(this).blur();
button.focus();
});
//button events
button.click(function(event){
self.open();
return false;
});
//events for list items
list.delegate("li",'click', function(){
//update select
var newIndex = list.find( "li" ).index( this ),
prevIndex = select[0].selectedIndex;
select[0].selectedIndex = newIndex;
//trigger change event only when the selection actually changed
if(newIndex !== prevIndex){
select.trigger( "change" );
}
self.refresh();
//hide custom select
self.close();
return false;
});
//events on "screen" overlay
screen.click(function(){
self.close();
return false;
});
},
// Rebuild the menu list from the native select's current options.
_buildList: function(){
var self = this;
self.list.empty().filter('.ui-listview').listview('destroy');
//populate menu with options from select element
self.select.find( "option" ).each(function( i ){
var anchor = $("<a>", {
"role": "option",
"href": "#"
})
.text( $(this).text() );
$( "<li>", {"data-icon": "checkbox-on"})
.append( anchor )
.appendTo( self.list );
});
//now populated, create listview
self.list.listview();
},
// Sync button text and list highlighting with the native select; rebuilds
// the list when forced or when new options appeared.
refresh: function( forceRebuild ){
var self = this,
select = this.element,
selected = select[0].selectedIndex;
if( forceRebuild || select[0].options.length > self.list.find('li').length ){
self._buildList();
}
self.button.find( ".ui-btn-text" ).text( $(select[0].options.item(selected)).text() );
self.list
.find('li').removeClass( $.mobile.activeBtnClass ).attr('aria-selected', false)
.eq(selected).addClass( $.mobile.activeBtnClass ).find('a').attr('aria-selected', true);
},
// Open the menu: as a dialog page when it would not fit on screen (or the
// platform lacks scrollTop support), otherwise as an in-page overlay.
open: function(){
if( this.options.disabled ){ return; }
var self = this,
menuHeight = self.list.outerHeight(),
scrollTop = $(window).scrollTop(),
btnOffset = self.button.offset().top,
screenHeight = window.innerHeight;
// Move keyboard focus onto the currently-selected item.
function focusMenuItem(){
self.list.find( ".ui-btn-active" ).focus();
}
if( menuHeight > screenHeight - 80 || !$.support.scrollTop ){
//for webos (set lastscroll using button offset)
if( scrollTop == 0 && btnOffset > screenHeight ){
self.thisPage.one('pagehide',function(){
$(this).data('lastScroll', btnOffset);
});
}
self.menuPage.one('pageshow',focusMenuItem);
self.menuType = "page";
self.menuPageContent.append( self.list );
$.mobile.changePage(self.menuPage, 'pop', false, false);
}
else {
self.menuType = "overlay";
self.screen
.height( $(document).height() )
.removeClass('ui-screen-hidden');
// Center the listbox in the current viewport.
self.listbox
.append( self.list )
.removeClass( "ui-listbox-hidden" )
.css({
top: scrollTop + (screenHeight/2),
"margin-top": -menuHeight/2,
left: window.innerWidth/2,
"margin-left": -1* self.listbox.outerWidth() / 2
})
.addClass("in");
focusMenuItem();
}
},
// Close whichever menu type open() created and return focus to the button.
close: function(){
if( this.options.disabled ){ return; }
var self = this;
function focusButton(){
setTimeout(function(){
self.button.focus();
}, 40);
// Put the shared list back into the overlay listbox container.
self.listbox.removeAttr('style').append( self.list );
}
if(self.menuType == "page"){
$.mobile.changePage([self.menuPage,self.thisPage], 'pop', true, false);
self.menuPage.one("pagehide",function(){
focusButton();
//return false;
});
}
else{
self.screen.addClass( "ui-screen-hidden" );
self.listbox.addClass( "ui-listbox-hidden" ).removeAttr( "style" ).removeClass("in");
focusButton();
}
},
// Disable the native select and the proxy button.
disable: function(){
this.element.attr("disabled",true);
this.button.addClass('ui-disabled').attr("aria-disabled", true);
return this._setOption( "disabled", true );
},
// Enable the native select and the proxy button.
enable: function(){
this.element.attr("disabled",false);
this.button.removeClass('ui-disabled').attr("aria-disabled", false);
return this._setOption( "disabled", false );
}
});
})( jQuery );
/*
* jQuery Mobile Framework : plugin for making button-like links
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
/**
 * Styles matched elements as jQuery Mobile buttons.
 * Options (merged from defaults, the element's data- attributes, then the
 * argument): theme, icon, iconpos, inline, corners, shadow, iconshadow,
 * wrapperEls.
 */
$.fn.buttonMarkup = function( options ){
	return this.each( function() {
		var el = $( this ),
			o = $.extend( {}, $.fn.buttonMarkup.defaults, el.data(), options),

			// Classes Defined
			buttonClass,
			innerClass = "ui-btn-inner",
			iconClass;

		// bind the shared hover/down state handlers once, on first use
		if ( attachEvents ) {
			attachEvents();
		}

		// if not, try to find closest theme container
		if ( !o.theme ) {
			var themedParent = el.closest("[class*='ui-bar-'],[class*='ui-body-']");
			o.theme = themedParent.length ?
				/ui-(bar|body)-([a-z])/.exec( themedParent.attr("class") )[2] :
				"c";
		}

		buttonClass = "ui-btn ui-btn-up-" + o.theme;

		if ( o.inline ) {
			buttonClass += " ui-btn-inline";
		}

		if ( o.icon ) {
			o.icon = "ui-icon-" + o.icon;
			o.iconpos = o.iconpos || "left";

			iconClass = "ui-icon " + o.icon;

			// FIX: the icon shadow is governed by the dedicated iconshadow
			// option; it was previously keyed off o.shadow, which left the
			// declared iconshadow default unused
			if ( o.iconshadow ) {
				iconClass += " ui-icon-shadow";
			}
		}

		if ( o.iconpos ) {
			buttonClass += " ui-btn-icon-" + o.iconpos;

			// icon-only buttons still need a readable title for accessibility
			if ( o.iconpos == "notext" && !el.attr("title") ) {
				el.attr( "title", el.text() );
			}
		}

		if ( o.corners ) {
			buttonClass += " ui-btn-corner-all";
			innerClass += " ui-btn-corner-all";
		}

		if ( o.shadow ) {
			buttonClass += " ui-shadow";
		}

		el
			.attr( "data-theme", o.theme )
			.addClass( buttonClass );

		// build inner/text wrappers; "D" is a placeholder for wrapperEls
		var wrap = ("<D class='" + innerClass + "'><D class='ui-btn-text'></D>" +
			( o.icon ? "<span class='" + iconClass + "'></span>" : "" ) +
			"</D>").replace(/D/g, o.wrapperEls);

		el.wrapInner( wrap );
	});
};

$.fn.buttonMarkup.defaults = {
	corners: true,
	shadow: true,
	iconshadow: true,
	wrapperEls: "span"
};

// One-time delegated handlers driving the up/down/hover visual states for all
// buttons; sets itself to null after the first run so later calls are no-ops.
var attachEvents = function() {
	$(".ui-btn").live({
		mousedown: function() {
			var theme = $(this).attr( "data-theme" );
			$(this).removeClass( "ui-btn-up-" + theme ).addClass( "ui-btn-down-" + theme );
		},
		mouseup: function() {
			var theme = $(this).attr( "data-theme" );
			$(this).removeClass( "ui-btn-down-" + theme ).addClass( "ui-btn-up-" + theme );
		},
		"mouseover focus": function() {
			var theme = $(this).attr( "data-theme" );
			$(this).removeClass( "ui-btn-up-" + theme ).addClass( "ui-btn-hover-" + theme );
		},
		"mouseout blur": function() {
			var theme = $(this).attr( "data-theme" );
			$(this).removeClass( "ui-btn-hover-" + theme ).addClass( "ui-btn-up-" + theme );
		}
	});
	attachEvents = null;
};
})(jQuery);
/*
* jQuery Mobile Framework : "button" plugin - links that proxy to native input/buttons
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Widget that hides a native input/button and proxies its behavior through a
// styled anchor. Submit buttons submit their owning form; other types have the
// click re-dispatched to the hidden native element.
$.widget( "mobile.button", $.mobile.widget, {
	options: {
		theme: null,
		icon: null,
		iconpos: null,
		inline: null,
		corners: true,
		shadow: true,
		iconshadow: true
	},
	_create: function(){
		var $el = this.element,
			o = this.options,
			type = $el.attr('type');

		// hide the native control but keep it in the document for submission
		$el
			.addClass('ui-btn-hidden')
			.attr('tabindex','-1');

		// build the proxy anchor (with ARIA role) that stands in for the button
		var proxy = $( "<a>", {
			"href": "#",
			"role": "button",
			"aria-label": $el.attr( "type" )
		} );

		proxy.text( $el.text() || $el.val() );
		proxy.insertBefore( $el );

		proxy.click(function(){
			if( type == "submit" ){
				$(this).closest('form').submit();
			}
			else{
				$el.click();
			}
			return false;
		});

		proxy.buttonMarkup({
			theme: o.theme,
			icon: o.icon,
			iconpos: o.iconpos,
			inline: o.inline,
			corners: o.corners,
			shadow: o.shadow,
			iconshadow: o.iconshadow
		});
	}
});
})( jQuery );/*
* jQuery Mobile Framework : "slider" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Slider widget: turns a range-style <input> into a draggable slider, or a
// two-option <select> into a flip switch.
// Fixes vs. previous revision: parentTheme was declared twice inside one var
// statement (split into themedParent + parentTheme); removed an unused
// `options` variable and a dead, empty conditional in slideUpdate.
$.widget( "mobile.slider", $.mobile.widget, {
	options: {
		theme: null,
		trackTheme: null
	},
	_create: function(){

		var control = this.element,
			themedParent = control.parents('[class*=ui-bar-],[class*=ui-body-]').eq(0),
			parentTheme = themedParent.length ? themedParent.attr('class').match(/ui-(bar|body)-([a-z])/)[2] : 'c',
			theme = this.options.theme ? this.options.theme : parentTheme,
			trackTheme = this.options.trackTheme ? this.options.trackTheme : parentTheme,
			cType = control[0].nodeName.toLowerCase(),
			selectClass = (cType == 'select') ? 'ui-slider-switch' : '',
			controlID = control.attr('id'),
			labelID = controlID + '-label',
			label = $('[for='+ controlID +']').attr('id',labelID),
			val = (cType == 'input') ? parseFloat(control.val()) : control[0].selectedIndex,
			min = (cType == 'input') ? parseFloat(control.attr('min')) : 0,
			max = (cType == 'input') ? parseFloat(control.attr('max')) : control.find('option').length-1,
			percent = ((parseFloat(val) - min) / (max - min)) * 100,
			snappedPercent = percent,
			slider = $('<div class="ui-slider '+ selectClass +' ui-btn-down-'+ trackTheme+' ui-btn-corner-all" role="application"></div>'),
			handle = $('<a href="#" class="ui-slider-handle"></a>')
				.appendTo(slider)
				.buttonMarkup({corners: true, theme: theme, shadow: true})
				.attr({
					'role': 'slider',
					'aria-valuemin': min,
					'aria-valuemax': max,
					'aria-valuenow': val,
					'aria-valuetext': val,
					'title': val,
					'aria-labelledby': labelID
				}),
			dragging = false;

		// selects render as a flip switch with a label/background per option
		if(cType == 'select'){
			slider.wrapInner('<div class="ui-slider-inneroffset"></div>');

			control.find('option').each(function(i){
				var side = (i==0) ?'b':'a',
					corners = (i==0) ? 'right' :'left',
					theme = (i==0) ? ' ui-btn-down-' + trackTheme :' ui-btn-active';
				$('<div class="ui-slider-labelbg ui-slider-labelbg-'+ side + theme +' ui-btn-corner-'+ corners+'"></div>').prependTo(slider);
				$('<span class="ui-slider-label ui-slider-label-'+ side + theme +' ui-btn-corner-'+ corners+'" role="img">'+$(this).text()+'</span>').prependTo(handle);
			});
		}

		// push the computed value back into the native control and notify
		function updateControl(val){
			if(cType == 'input'){
				control.val(val);
			}
			else {
				control[0].selectedIndex = val;
			}
			control.trigger("change");
		}

		// recompute handle position/value from an explicit value (keyup/flip)
		// or from the pointer position of a drag event
		function slideUpdate(event, val){
			if (val){
				percent = (parseFloat(val) - min) / (max - min) * 100;
			} else {
				var data = event.originalEvent.touches ? event.originalEvent.touches[ 0 ] : event,
					// a slight tolerance helped get to the ends of the slider
					tol = 8;
				if( !dragging
					|| data.pageX < slider.offset().left - tol
					|| data.pageX > slider.offset().left + slider.width() + tol ){
					return;
				}
				percent = Math.round(((data.pageX - slider.offset().left) / slider.width() ) * 100);
			}

			if( percent < 0 ) { percent = 0; }
			if( percent > 100 ) { percent = 100; }

			var newval = Math.round( (percent/100) * (max-min) ) + min;
			if( newval < min ) { newval = min; }
			if( newval > max ) { newval = max; }

			// resting position used by the snap animation for flip switches
			snappedPercent = Math.round( newval / (max-min) * 100 );

			handle.css('left', percent + '%');
			handle.attr({
					'aria-valuenow': (cType == 'input') ? newval : control.find('option').eq(newval).attr('value'),
					'aria-valuetext': (cType == 'input') ? newval : control.find('option').eq(newval).text(),
					'title': newval
				});

			updateSwitchClass(newval);
			updateControl(newval);
		}

		// toggle a/b styling on the flip switch (no-op for range inputs)
		function updateSwitchClass(val){
			if(cType == 'input'){return;}
			if(val == 0){ slider.addClass('ui-slider-switch-a').removeClass('ui-slider-switch-b'); }
			else { slider.addClass('ui-slider-switch-b').removeClass('ui-slider-switch-a'); }
		}
		updateSwitchClass(val);

		// animate the flip-switch handle to its resting position after release
		function updateSnap(){
			if(cType == 'select'){
				handle
					.addClass('ui-slider-handle-snapping')
					.css('left', snappedPercent + '%')
					.animationComplete(function(){
						handle.removeClass('ui-slider-handle-snapping');
					});
			}
		}

		label.addClass('ui-slider');

		control
			.addClass((cType == 'input') ? 'ui-slider-input' : 'ui-slider-switch')
			.keyup(function(e){
				slideUpdate(e, $(this).val() );
			});

		$(document).bind($.support.touch ? "touchmove" : "mousemove", function(event){
			if(dragging){
				slideUpdate(event);
				return false;
			}
		});

		slider
			.bind($.support.touch ? "touchstart" : "mousedown", function(event){
				dragging = true;
				if((cType == 'select')){
					val = control[0].selectedIndex;
				}
				slideUpdate(event);
				return false;
			});

		slider
			.add(document)
			.bind($.support.touch ? "touchend" : "mouseup", function(event){
				if(dragging){
					dragging = false;
					if(cType == 'select'){
						if(val == control[0].selectedIndex){
							//tap occurred, but value didn't change. flip it!
							val = val == 0 ? 1 : 0;
							slideUpdate(event,val);
						}
						updateSnap();
					}
					return false;
				}
			});

		slider.insertAfter(control);

		handle
			.css('left', percent + '%')
			.bind('click', function(e){ return false; });
	}
});
})( jQuery );
/*
* jQuery Mobile Framework : "collapsible" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Collapsible content block: a heading that toggles visibility of the content
// below it. Inside a data-role="collapsible-set", expanding one collapses its
// siblings and corner classes are managed across the set.
$.widget( "mobile.collapsible", $.mobile.widget, {
	options: {
		expandCueText: ' click to expand contents',
		collapseCueText: ' click to collapse contents',
		collapsed: false,
		heading: '>:header,>legend',
		theme: null,
		iconTheme: 'd'
	},
	_create: function(){

		var $el = this.element,
			o = this.options,
			collapsibleContain = $el.addClass('ui-collapsible-contain'),
			collapsibleHeading = $el.find(o.heading).eq(0),
			collapsibleContent = collapsibleContain.wrapInner('<div class="ui-collapsible-content"></div>').find('.ui-collapsible-content'),
			collapsibleParent = $el.closest('[data-role="collapsible-set"]').addClass('ui-collapsible-set');

		//replace collapsibleHeading if it's a legend
		if(collapsibleHeading.is('legend')){
			collapsibleHeading = $('<div role="heading">'+ collapsibleHeading.html() +'</div>').insertBefore(collapsibleHeading);
			collapsibleHeading.next().remove();
		}

		//drop heading in before content
		collapsibleHeading.insertBefore(collapsibleContent);

		//modify markup & attributes
		//FIX: buttonMarkup reads the lowercase "iconpos" key; the previous
		//"iconPos" spelling was ignored, so the status icon fell back to the
		//default position instead of rendering as a notext icon button
		collapsibleHeading.addClass('ui-collapsible-heading')
			.append('<span class="ui-collapsible-heading-status"></span>')
			.wrapInner('<a href="#" class="ui-collapsible-heading-toggle"></a>')
			.find('a:eq(0)')
			.buttonMarkup({
				shadow: !collapsibleParent.length,
				corners:false,
				iconpos: 'left',
				icon: 'plus',
				theme: o.theme
			})
			.find('.ui-icon')
			.removeAttr('class')
			.buttonMarkup({
				shadow: true,
				corners:true,
				iconpos: 'notext',
				icon: 'plus',
				theme: o.iconTheme
			});

		// standalone collapsibles get rounded corners all around; within a set
		// only the flagged last item gets bottom corners
		if( !collapsibleParent.length ){
			collapsibleHeading
				.find('a:eq(0)')
				.addClass('ui-corner-all')
					.find('.ui-btn-inner')
					.addClass('ui-corner-all');
		}
		else {
			if( collapsibleContain.data('collapsible-last') ){
				collapsibleHeading
					.find('a:eq(0), .ui-btn-inner')
					.addClass('ui-corner-bottom');
			}
		}

		//events
		collapsibleContain
			.bind('collapse', function(event){
				if( !event.isDefaultPrevented() ){
					event.preventDefault();

					collapsibleHeading
						.addClass('ui-collapsible-heading-collapsed')
						.find('.ui-collapsible-heading-status').text(o.expandCueText);

					collapsibleHeading.find('.ui-icon').removeClass('ui-icon-minus').addClass('ui-icon-plus');
					collapsibleContent.addClass('ui-collapsible-content-collapsed').attr('aria-hidden',true);

					if( collapsibleContain.data('collapsible-last') ){
						collapsibleHeading
							.find('a:eq(0), .ui-btn-inner')
							.addClass('ui-corner-bottom');
					}
				}
			})
			.bind('expand', function(event){
				if( !event.isDefaultPrevented() ){
					event.preventDefault();

					collapsibleHeading
						.removeClass('ui-collapsible-heading-collapsed')
						.find('.ui-collapsible-heading-status').text(o.collapseCueText);

					collapsibleHeading.find('.ui-icon').removeClass('ui-icon-plus').addClass('ui-icon-minus');
					collapsibleContent.removeClass('ui-collapsible-content-collapsed').attr('aria-hidden',false);

					if( collapsibleContain.data('collapsible-last') ){
						collapsibleHeading
							.find('a:eq(0), .ui-btn-inner')
							.removeClass('ui-corner-bottom');
					}
				}
			})
			.trigger(o.collapsed ? 'collapse' : 'expand');

		//close others in a set (bound once per set)
		if( collapsibleParent.length && !collapsibleParent.data("collapsiblebound") ){
			collapsibleParent
				.data("collapsiblebound", true)
				.bind("expand", function( event ){
					$(this).find( ".ui-collapsible-contain" )
						.not( $(event.target).closest( ".ui-collapsible-contain" ) )
						.not( "> .ui-collapsible-contain .ui-collapsible-contain" )
						.trigger( "collapse" );
				});

			//round the first item and flag the last item of the set
			var set = collapsibleParent.find('[data-role=collapsible]');

			set.first()
				.find('a:eq(0)')
				.addClass('ui-corner-top')
					.find('.ui-btn-inner')
					.addClass('ui-corner-top');

			set.last().data('collapsible-last', true);
		}

		collapsibleHeading.click(function(){
			if( collapsibleHeading.is('.ui-collapsible-heading-collapsed') ){
				collapsibleContain.trigger('expand');
			}
			else {
				collapsibleContain.trigger('collapse');
			}
			return false;
		});
	}
});
})( jQuery );/*
* jQuery Mobile Framework: "controlgroup" plugin - corner-rounding for groups of buttons, checks, radios, etc
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
/**
 * Groups buttons/checks/radios visually: strips per-button corners/shadows
 * and rounds only the outer edges of the group (left/right for horizontal,
 * top/bottom for vertical). A <legend> child is replaced by a stylable
 * heading div. (Removed an unused `type` variable that was computed but
 * never read.)
 */
$.fn.controlgroup = function(options){
	return $(this).each(function(){

		var o = $.extend({
					direction: $( this ).data( "type" ) || "vertical",
					shadow: false
				},options);

		var groupheading = $(this).find('>legend'),
			flCorners = o.direction == 'horizontal' ? ['ui-corner-left', 'ui-corner-right'] : ['ui-corner-top', 'ui-corner-bottom'];

		//replace legend with more stylable replacement div
		if( groupheading.length ){
			$(this).wrapInner('<div class="ui-controlgroup-controls"></div>');
			$('<div role="heading" class="ui-controlgroup-label">'+ groupheading.html() +'</div>').insertBefore( $(this).children(0) );
			groupheading.remove();
		}

		$(this).addClass('ui-corner-all ui-controlgroup ui-controlgroup-'+o.direction);

		// strip inner corners/shadows, then round only the first/last element
		function flipClasses(els){
			els
				.removeClass('ui-btn-corner-all ui-shadow')
				.eq(0).addClass(flCorners[0])
				.end()
				.filter(':last').addClass(flCorners[1]).addClass('ui-controlgroup-last');
		}
		flipClasses($(this).find('.ui-btn'));
		flipClasses($(this).find('.ui-btn-inner'));

		if(o.shadow){
			$(this).addClass('ui-shadow');
		}
	});
};
})(jQuery);/*
* jQuery Mobile Framework : "fieldcontain" plugin - simple class additions to make form row separators
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
/**
 * Marks matched elements as form field containers (row separator styling).
 * The options argument is retained for API compatibility, but no option is
 * currently consumed (removed a dead options object that was built and
 * never used).
 */
$.fn.fieldcontain = function(options){
	return $(this).addClass('ui-field-contain ui-body ui-br');
};
})(jQuery);/*
* jQuery Mobile Framework : "listview" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Listview widget: styles a ul/ol as a listbox, adds keyboard navigation,
// split-button items, dividers, inset corners, js-numbering fallback for
// ordered lists, and Ajax sub-pages for nested lists.
// Fixes vs. previous revision: missing statement semicolon after the initial
// addClass/attr chain; removed unreachable `break` statements that followed
// `return false` in the keydown switch; hoisted the switch-case locals so
// `var a` is no longer declared twice; removed a dead trailing conditional
// in _itemApply that only contained an unconditional return.
$.widget( "mobile.listview", $.mobile.widget, {
	options: {
		theme: "c",
		countTheme: "c",
		headerTheme: "b",
		dividerTheme: "b",
		splitIcon: "arrow-r",
		splitTheme: "b",
		inset: false
	},

	_create: function() {
		var $list = this.element,
			o = this.options;

		// create listview markup
		$list
			.addClass( "ui-listview" )
			.attr( "role", "listbox" );

		if ( o.inset ) {
			$list.addClass( "ui-listview-inset ui-corner-all ui-shadow" );
		}

		// focused items become tabbable
		$list.delegate( ".ui-li", "focusin", function() {
			$( this ).attr( "tabindex", "0" );
		});

		this._itemApply( $list, $list );

		this.refresh( true );

		//keyboard events for menu items
		$list.keydown(function( e ) {
			var target = $( e.target ),
				li = target.closest( "li" ),
				prev, next, a;

			// switch logic based on which key was pressed
			switch ( e.keyCode ) {
				// up or left arrow keys
				case 38:
					prev = li.prev();

					// if there's a previous option, focus it
					if ( prev.length ) {
						target
							.blur()
							.attr( "tabindex", "-1" );

						prev.find( "a" ).first().focus();
					}

					return false;
				// down or right arrow keys
				case 40:
					next = li.next();

					// if there's a next option, focus it
					if ( next.length ) {
						target
							.blur()
							.attr( "tabindex", "-1" );

						next.find( "a" ).first().focus();
					}

					return false;
				// right arrow moves into a split-button's secondary action
				case 39:
					a = li.find( "a.ui-li-link-alt" );

					if ( a.length ) {
						target.blur();
						a.first().focus();
					}

					return false;
				// left arrow moves back to the primary action
				case 37:
					a = li.find( "a.ui-link-inherit" );

					if ( a.length ) {
						target.blur();
						a.first().focus();
					}

					return false;
				// if enter or space is pressed, trigger click
				case 13:
				case 32:
					target.trigger( "click" );

					return false;
			}
		});

		// tapping the whole LI triggers click on the first link
		$list.delegate( "li", "click", function(event) {
			if ( !$( event.target ).closest( "a" ).length ) {
				$( this ).find( "a" ).first().trigger( "click" );
				return false;
			}
		});
	},

	// Applies count-bubble, heading, description, and thumbnail classes to a
	// list (or to a single refreshed item).
	_itemApply: function( $list, item ) {
		// TODO class has to be defined in markup
		item.find( ".ui-li-count" )
			.addClass( "ui-btn-up-" + ($list.data( "counttheme" ) || this.options.countTheme) + " ui-btn-corner-all" );

		item.find( "h1, h2, h3, h4, h5, h6" ).addClass( "ui-li-heading" );

		item.find( "p, dl" ).addClass( "ui-li-desc" );

		item.find( "img" ).addClass( "ui-li-thumb" ).each(function() {
			$( this ).closest( "li" )
				.addClass( $(this).is( ".ui-li-icon" ) ? "ui-li-has-icon" : "ui-li-has-thumb" );
		});

		var aside = item.find( ".ui-li-aside" );

		if ( aside.length ) {
			aside.each(function(i, el) {
				$(el).prependTo( $(el).parent() ); //shift aside to front for css float
			});
		}
	},

	// (Re)builds item markup and classes. Pass create=true to process every
	// item, including ones already enhanced.
	refresh: function( create ) {
		this._createSubPages();

		var o = this.options,
			$list = this.element,
			self = this,
			dividertheme = $list.data( "dividertheme" ) || o.dividerTheme,
			li = $list.children( "li" ),
			// counter enables the js-numbering fallback for <ol> lists when
			// CSS pseudo-element counters are unsupported
			counter = $.support.cssPseudoElement || !$.nodeName( $list[0], "ol" ) ? 0 : 1;

		if ( counter ) {
			$list.find( ".ui-li-dec" ).remove();
		}

		li.attr({ "role": "option", "tabindex": "-1" });
		li.first().attr( "tabindex", "0" );

		li.each(function( pos ) {
			var item = $( this ),
				itemClass = "ui-li";

			// If we're creating the element, we update it regardless
			if ( !create && item.hasClass( "ui-li" ) ) {
				return;
			}

			var a = item.find( "a" );

			if ( a.length ) {
				item
					.buttonMarkup({
						wrapperEls: "div",
						shadow: false,
						corners: false,
						iconpos: "right",
						icon: a.length > 1 ? false : item.data("icon") || "arrow-r",
						theme: o.theme
					});

				a.first().addClass( "ui-link-inherit" );

				// two anchors -> split-button item with a secondary action
				if ( a.length > 1 ) {
					itemClass += " ui-li-has-alt";

					var last = a.last(),
						splittheme = $list.data( "splittheme" ) || last.data( "theme" ) || o.splitTheme;

					last
						.attr( "title", last.text() )
						.addClass( "ui-li-link-alt" )
						.empty()
						.buttonMarkup({
							shadow: false,
							corners: false,
							theme: o.theme,
							icon: false,
							iconpos: false
						})
						.find( ".ui-btn-inner" )
						.append( $( "<span>" ).buttonMarkup({
							shadow: true,
							corners: true,
							theme: splittheme,
							iconpos: "notext",
							icon: $list.data( "spliticon" ) || last.data( "icon" ) || o.splitIcon
						} ) );
				}

			} else if ( item.data( "role" ) === "list-divider" ) {
				itemClass += " ui-li-divider ui-btn ui-bar-" + dividertheme;
				item.attr( "role", "heading" );

				//reset counter when a divider heading is encountered
				if ( counter ) {
					counter = 1;
				}

			} else {
				itemClass += " ui-li-static ui-btn-up-" + o.theme;
			}

			// inset lists round the outer corners of the first/last items
			if ( pos === 0 ) {
				if ( o.inset ) {
					itemClass += " ui-corner-top";

					item
						.add( item.find( ".ui-btn-inner" ) )
						.find( ".ui-li-link-alt" )
							.addClass( "ui-corner-tr" )
						.end()
						.find( ".ui-li-thumb" )
							.addClass( "ui-corner-tl" );
				}

			} else if ( pos === li.length - 1 ) {
				if ( o.inset ) {
					itemClass += " ui-corner-bottom";

					item
						.add( item.find( ".ui-btn-inner" ) )
						.find( ".ui-li-link-alt" )
							.addClass( "ui-corner-br" )
						.end()
						.find( ".ui-li-thumb" )
							.addClass( "ui-corner-bl" );
				}
			}

			if ( counter && itemClass.indexOf( "ui-li-divider" ) < 0 ) {
				item
					.find( ".ui-link-inherit" ).first()
					.addClass( "ui-li-jsnumbering" )
					.prepend( "<span class='ui-li-dec'>" + (counter++) + ". </span>" );
			}

			item.addClass( itemClass );

			if ( !create ) {
				self._itemApply( $list, item );
			}
		});
	},

	// Extracts nested lists into their own Ajax-accessible pages and replaces
	// the nest with a link to the generated sub-page.
	_createSubPages: function() {
		var parentList = this.element,
			parentPage = parentList.closest( ".ui-page" ),
			parentId = parentPage.attr( "id" ),
			o = this.options,
			persistentFooterID = parentPage.find( "[data-role='footer']" ).data( "id" );

		$( parentList.find( "ul, ol" ).toArray().reverse() ).each(function( i ) {
			var list = $( this ),
				parent = list.parent(),
				// first text line preceding the nested list becomes the title
				title = parent.contents()[ 0 ].nodeValue.split("\n")[0],
				id = parentId + "&" + $.mobile.subPageUrlKey + "=" + $.mobile.idStringEscape(title + " " + i),
				theme = list.data( "theme" ) || o.theme,
				countTheme = list.data( "counttheme" ) || parentList.data( "counttheme" ) || o.countTheme,
				newPage = list.wrap( "<div data-role='page'><div data-role='content'></div></div>" )
							.parent()
								.before( "<div data-role='header' data-theme='" + o.headerTheme + "'><div class='ui-title'>" + title + "</div></div>" )
								.after( persistentFooterID ? $( "<div>", { "data-role": "footer", "data-id": persistentFooterID, "class": "ui-footer-duplicate" } ) : "" )
							.parent()
								.attr({
									id: id,
									"data-theme": theme,
									"data-count-theme": countTheme
								})
								.appendTo( $.pageContainer ); //NOTE(review): $.pageContainer — possibly meant $.mobile.pageContainer; verify

			newPage.page();

			parent.html( "<a href='#" + id + "'>" + title + "</a>" );
		}).listview();
	}
});
})( jQuery );
/*
* jQuery Mobile Framework : "listview" filter extension
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Opt-in filter extension for listview: injects a search field above the list
// and hides items whose text does not contain the typed value.
// (Fixed a doubled semicolon and dropped an unused `search` binding plus a
// commented-out call.)
$.mobile.listview.prototype.options.filter = false;

$( "[data-role='listview']" ).live( "listviewcreate", function() {
	var list = $( this ),
		listview = list.data( "listview" );

	if ( !listview.options.filter ) {
		return;
	}

	var wrapper = $( "<form>", { "class": "ui-listview-filter ui-bar-c", "role": "search" } );

	$( "<input>", {
			placeholder: "Filter results...",
			"data-type": "search"
		})
		.bind( "keyup change", function() {
			// case-insensitive substring match against each item's text
			var val = this.value.toLowerCase();
			list.children().show();
			if ( val ) {
				list.children().filter(function() {
					return $( this ).text().toLowerCase().indexOf( val ) === -1;
				}).hide();
			}
		})
		.appendTo( wrapper )
		.textinput();

	wrapper.insertBefore( list );
});
})( jQuery );
/*
* jQuery Mobile Framework : "dialog" plugin.
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Dialog widget: styles a page as a modal-looking dialog with a close button,
// and restores the previous page's hash when dismissed.
$.widget( "mobile.dialog", $.mobile.widget, {
	options: {},
	_create: function(){
		var self = this,
			$el = self.element,
			$prevPage = $.mobile.activePage,
			$closeBtn = $('<a href="#" data-icon="delete" data-iconpos="notext">Close</a>');

		$el.delegate("a, submit", "click submit", function(e){
			//FIX: closest() always returns a (possibly empty) jQuery object,
			//which is always truthy — test .length so only genuine back/close
			//clicks dismiss the dialog instead of every click
			if( e.type == "click" && ( $(e.target).closest('[data-back]').length || $(e.target).closest($closeBtn).length ) ){
				self.close();
				return false;
			}
			//otherwise, assume we're headed somewhere new. set activepage to dialog so the transition will work
			//FIX: inside this handler `this` is the clicked DOM element, so
			//this.element was undefined — use the widget's element instead
			$.mobile.activePage = $el;
		});

		this.element
			// dialogs manage their own show behavior
			.bind("pageshow",function(){
				return false;
			})
			//add ARIA role
			.attr("role","dialog")
			.addClass('ui-page ui-dialog ui-body-a')
			.find('[data-role=header]')
			.addClass('ui-corner-top ui-overlay-shadow')
			.prepend( $closeBtn )
			.end()
			.find('.ui-content:not([class*="ui-body-"])')
			.addClass('ui-body-c')
			.end()
			.find('.ui-content,[data-role=footer]')
			.last()
			.addClass('ui-corner-bottom ui-overlay-shadow');

		// close (and restore the previous page's hash) when the user
		// navigates back while this dialog is the active page
		$(window).bind('hashchange',function(){
			if( $el.is('.ui-page-active') ){
				self.close();
				$el.bind('pagehide',function(){
					$.mobile.updateHash( $prevPage.attr('id'), true);
				});
			}
		});
	},

	// Transitions back from the dialog to the page it covered.
	close: function(){
		$.mobile.changePage([this.element, $.mobile.activePage], undefined, true, true );
	}
});
})( jQuery );/*
* jQuery Mobile Framework : "navbar" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
// Navbar widget: lays its links out on a CSS grid and styles them as
// square-cornered buttons; only one link carries the active class at a time.
$.widget( "mobile.navbar", $.mobile.widget, {
	options: {
		iconpos: 'top',
		grid: null
	},
	_create: function(){
		var $navbar = this.element,
			$navbtns = $navbar.find("a"),
			// icon position only applies when at least one link declares an icon
			hasIcons = $navbtns.filter('[data-icon]').length > 0,
			iconpos = hasIcons ? this.options.iconpos : undefined;

		$navbar
			.addClass('ui-navbar')
			.attr("role","navigation")
			.find("ul")
			.grid({grid: this.options.grid });

		if( !iconpos ){
			$navbar.addClass("ui-navbar-noicons");
		}

		$navbtns.buttonMarkup({
			corners: false,
			shadow: false,
			iconpos: iconpos
		});

		// clicking any link clears the active state from all of them
		$navbar.delegate("a", "click",function(event){
			$navbtns.removeClass("ui-btn-active");
		});
	}
});
})( jQuery );/*
* jQuery Mobile Framework : plugin for creating CSS grids
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($, undefined ) {
/**
 * Applies CSS grid classes (ui-grid-a..d = 2..5 columns) to an element and
 * ui-block-a..e to its children according to their column position.
 * If no grid letter is given, it is inferred from the child count.
 */
$.fn.grid = function(options){
	return $(this).each(function(){
		var o = $.extend({
			grid: null
		},options);

		var $kids = $(this).children(),
			gridCols = {a: 2, b:3, c:4, d:5},
			grid = o.grid,
			iterator;

		// infer grid size from child count, defaulting to 2 columns
		if( !grid ){
			if( $kids.length <= 5 ){
				for(var letter in gridCols){
					if(gridCols[letter] == $kids.length){ grid = letter; }
				}
			}
			else{
				grid = 'a';
			}
		}

		iterator = gridCols[grid];

		$(this).addClass('ui-grid-' + grid);

		$kids.filter(':nth-child(' + iterator + 'n+1)').addClass('ui-block-a');
		$kids.filter(':nth-child(' + iterator + 'n+2)').addClass('ui-block-b');

		//FIX: the 3rd/4th/5th column selectors previously hard-coded the
		//column stride (3n+3, 4n+4, 5n+5), which mis-assigned (and doubled up)
		//block classes for 4- and 5-column grids; stride must be the grid width
		if(iterator > 2){
			$kids.filter(':nth-child(' + iterator + 'n+3)').addClass('ui-block-c');
		}
		if(iterator > 3){
			$kids.filter(':nth-child(' + iterator + 'n+4)').addClass('ui-block-d');
		}
		if(iterator > 4){
			$kids.filter(':nth-child(' + iterator + 'n+5)').addClass('ui-block-e');
		}
	});
};
})(jQuery);
/*!
* jQuery Mobile v@VERSION
* http://jquerymobile.com/
*
* Copyright 2010, jQuery Project
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*/
(function( $, window, undefined ) {
//jQuery.mobile configurable options
// Central configuration namespace; override these via the "mobileinit"
// event triggered below, before they are consumed.
$.extend( $.mobile, {

	//define the url parameter used for referencing widget-generated sub-pages.
	//Translates to example.html&ui-page=subpageIdentifier
	//hash segment before &ui-page= is used to make Ajax request
	subPageUrlKey: 'ui-page',

	//anchor links with a data-rel, or pages with a data-role, that match these selectors will be untrackable in history
	//(no change in URL, not bookmarkable)
	nonHistorySelectors: 'dialog',

	//class assigned to page currently in view, and during transitions
	activePageClass: 'ui-page-active',

	//class used for "active" button state, from CSS framework
	activeBtnClass: 'ui-btn-active',

	//automatically handle link clicks through Ajax, when possible
	ajaxLinksEnabled: true,

	//automatically handle form submissions through Ajax, when possible
	ajaxFormsEnabled: true,

	//available CSS transitions
	transitions: ['slide', 'slideup', 'slidedown', 'pop', 'flip', 'fade'],

	//set default transition - 'none' for no transitions
	defaultTransition: 'slide',

	//show loading message during Ajax requests
	//if false, message will not appear, but loading classes will still be toggled on html el
	loadingMessage: "loading",

	//configure meta viewport tag's content attr:
	metaViewportContent: "width=device-width, minimum-scale=1, maximum-scale=1",

	//support conditions that must be met in order to proceed
	gradeA: function(){
		return $.support.mediaquery;
	}
});

//trigger mobileinit event - useful hook for configuring $.mobile settings before they're used
$( window.document ).trigger('mobileinit');

//if device support condition(s) aren't met, leave things as they are -> a basic, usable experience,
//otherwise, proceed with the enhancements
if ( !$.mobile.gradeA() ) {
	return;
}
//define vars for internal use
var $window = $(window),
	$html = $('html'),
	$head = $('head'),

	//to be populated at DOM ready
	$body,

	//loading div which appears during Ajax requests
	//will not appear if $.mobile.loadingMessage is false
	$loader = $.mobile.loadingMessage ?
		$('<div class="ui-loader ui-body-a ui-corner-all">'+
					'<span class="ui-icon ui-icon-loading spin"></span>'+
					'<h1>'+ $.mobile.loadingMessage +'</h1>'+
				'</div>')
		: undefined,

	//define meta viewport tag, if content is defined
	$metaViewport = $.mobile.metaViewportContent ? $("<meta>", { name: "viewport", content: $.mobile.metaViewportContent}).prependTo( $head ) : undefined,

	//define baseUrl for use in relative url management
	baseUrl = getPathDir( location.protocol + '//' + location.host + location.pathname ),

	//define base element, for use in routing asset urls that are referenced in Ajax-requested markup
	$base = $.support.dynamicBaseTag ? $("<base>", { href: baseUrl }).prependTo( $head ) : undefined,

	//will be defined as first page element in DOM
	$startPage,

	//will be defined as $startPage.parent(), which is usually the body element
	//will receive ui-mobile-viewport class
	$pageContainer,

	//will be defined when a link is clicked and given an active class
	$activeClickedLink = null,

	//array of pages that are visited during a single page load
	//length will grow as pages are visited, and shrink as "back" link/button is clicked
	//each item has a url (string matches ID), and transition (saved for reuse when "back" link/button is clicked)
	urlStack = [ {
		url: location.hash.replace( /^#/, "" ),
		transition: undefined
	} ],

	//define first selector to receive focus when a page is shown
	focusable = "[tabindex],a,button:visible,select:visible,input",

	//contains role for next page, if defined on clicked link via data-rel
	nextPageRole = null,

	//enable/disable hashchange event listener
	//toggled internally when location.hash is updated to match the url of a successful page load
	hashListener = true,

	//media-query-like width breakpoints, which are translated to classes on the html element
	resolutionBreakpoints = [320,480,768,1024];

//add mobile, initial load "rendering" classes to docEl
$html.addClass('ui-mobile ui-mobile-rendering');

// TODO: don't expose (temporary during code reorg)
$.mobile.urlStack = urlStack;
//consistent string escaping for urls and IDs
//consistent string escaping for urls and IDs:
//every character outside [a-zA-Z0-9] becomes a hyphen
function idStringEscape(str){
	return str.split(/[^a-zA-Z0-9]/).join('-');
}
$.mobile.idStringEscape = idStringEscape;
// hide address bar
// Scrolls to ypos (default 0, which hides mobile browser address bars)
// without firing the framework's scrollstart/scrollstop events.
function silentScroll( ypos ) {
	// prevent scrollstart and scrollstop events
	$.event.special.scrollstart.enabled = false;
	// short delay gives the browser chrome a chance to settle first
	setTimeout(function() {
		window.scrollTo( 0, ypos || 0 );
	},20);
	// re-enable scroll events once the programmatic scroll has completed
	setTimeout(function() {
		$.event.special.scrollstart.enabled = true;
	}, 150 );
}
//strip the first '#' and return the directory portion of a path,
//with a trailing slash ('' when there is no directory component)
function getPathDir( path ){
	var segments = path.replace(/#/,'').split('/');
	// drop the final (file) segment; what remains is the directory path
	segments.pop();
	return segments.length ? segments.join('/') + '/' : '';
}
//resolve the base url directory for the given path,
//falling back to the current location hash
function getBaseURL( nonHashPath ){
	var path = nonHashPath || location.hash;
	return getPathDir( path );
}
//point the <base> element at the directory of the page being loaded, so
//relative asset urls in Ajax-fetched markup resolve correctly
//(no-op when dynamic <base> manipulation is unsupported)
//FIX: added the statement-terminating semicolons these function-expression
//assignments previously omitted (relying on ASI)
var setBaseURL = !$.support.dynamicBaseTag ? $.noop : function( nonHashPath ){
	//set base url for new page assets
	$base.attr('href', baseUrl + getBaseURL( nonHashPath ));
};

//restore the <base> element to the document's own directory
var resetBaseURL = !$.support.dynamicBaseTag ? $.noop : function(){
	$base.attr('href', baseUrl);
};

//set base href to pathname
resetBaseURL();
//for form submission
//Ajax-intercept form submissions when enabled; forms posting to an external
//scheme fall through to regular HTTP
$('form').live('submit', function(event){
	if( !$.mobile.ajaxFormsEnabled ){ return; }

	var type = $(this).attr("method"),
		url = $(this).attr( "action" ).replace( location.protocol + "//" + location.host, "");

	//external submits use regular HTTP
	//FIX: non-capturing group was mis-typed as "(:?" — corrected to "(?:"
	//(matches a leading scheme such as "http:" or "mailto:")
	if( /^(?:\w+:)/.test( url ) ){
		return;
	}

	//if it's a relative href, prefix href with base url
	if( url.indexOf('/') && url.indexOf('#') !== 0 ){
		url = getBaseURL() + url;
	}

	changePage({
			url: url,
			type: type,
			data: $(this).serialize()
		},
		undefined,
		undefined,
		true
	);
	event.preventDefault();
});
//click routing - direct to HTTP or Ajax, accordingly
//route link clicks either to normal HTTP navigation (external links) or to
//Ajax page changes. Fixes vs. previous revision: removed a duplicate `target`
//declaration, corrected the mis-typed non-capturing group "(:?" -> "(?:",
//and removed a no-op `href.replace(/^#/,'')` whose result was discarded.
$( "a" ).live( "click", function(event) {

	var $this = $(this),
		//get href, remove same-domain protocol and host
		href = $this.attr( "href" ).replace( location.protocol + "//" + location.host, ""),
		//if target attr is specified, it's external, and we mimic _blank... for now
		target = $this.is( "[target]" ),
		//if it still starts with a protocol, it's external, or could be :mailto, etc
		external = target || /^(?:\w+:)/.test( href ) || $this.is( "[rel=external]" );

	if( href === '#' ){
		//for links created purely for interaction - ignore
		return false;
	}

	$activeClickedLink = $this.closest( ".ui-btn" ).addClass( $.mobile.activeBtnClass );

	if( external || !$.mobile.ajaxLinksEnabled ){
		//remove active link class if external
		removeActiveLinkClass(true);

		//deliberately redirect, in case click was triggered
		if( target ){
			window.open(href);
		}
		else{
			location.href = href;
		}
	}
	else {
		//use ajax
		var transition = $this.data( "transition" ),
			back = $this.data( "back" ),
			changeHashOnSuccess = !$this.is( "[data-rel="+ $.mobile.nonHistorySelectors +"]" );

		nextPageRole = $this.attr( "data-rel" );

		//if it's a relative href, prefix href with base url
		if( href.indexOf('/') && href.indexOf('#') !== 0 ){
			href = getBaseURL() + href;
		}

		//FIXME: the removed statement `href.replace(/^#/,'');` discarded its
		//result; if stripping a leading '#' is intended before changePage,
		//it should be `href = href.replace(/^#/,'');` — verify against changePage
		changePage(href, transition, back, changeHashOnSuccess);
	}
	event.preventDefault();
});
// turn on/off page loading message.
function pageLoading( done ) {
	if ( done ) {
		//loading finished - clear the loading state
		$html.removeClass( "ui-loading" );
		return;
	}
	//loading started - position the message near the current scroll, if enabled
	if( $.mobile.loadingMessage ){
		$loader.appendTo( $pageContainer ).css({ top: $(window).scrollTop() + 75 });
	}
	$html.addClass( "ui-loading" );
}
//for directing focus to the page title, or otherwise first focusable element
function reFocus( page ){
	var pageTitle = page.find( ".ui-title:eq(0)" );
	if( !pageTitle.length ){
		//no title present - fall back to the first focusable element
		page.find( focusable ).eq(0).focus();
		return;
	}
	pageTitle.focus();
}
//function for setting role of next page
function setPageRole( newPage ) {
	if ( !nextPageRole ) {
		return;
	}
	newPage.attr( "data-role", nextPageRole );
	//consume the pending role so it only applies to this one page
	nextPageRole = undefined;
}
//update hash, with or without triggering hashchange event
$.mobile.updateHash = function( url, disableListening ){
	if( disableListening ){
		//suppress the handler for the hashchange this assignment fires
		hashListener = false;
	}
	location.hash = url;
};
//wrap page and transfer data-attrs if it has an ID
function wrapNewPage( newPage ){
	var copyAttrs = ['data-role', 'data-theme', 'data-fullscreen'], //TODO: more page-level attrs?
		wrapper = newPage.wrap( "<div>" ).parent();

	$.each( copyAttrs, function( i, attrName ){
		var attrValue = newPage.attr( attrName );
		if( attrValue ){
			//move the attribute from the page onto its new wrapper
			wrapper.attr( attrName, attrValue );
			newPage.removeAttr( attrName );
		}
	});

	return wrapper;
}
//remove active classes after page transition or error
function removeActiveLinkClass( forceRemoval ){
	if( $activeClickedLink ){
		//clear the class when forced, or once the link is no longer on the active page
		if( forceRemoval || !$activeClickedLink.closest( '.ui-page-active' ).length ){
			$activeClickedLink.removeClass( $.mobile.activeBtnClass );
		}
	}
	$activeClickedLink = null;
}
//for getting or creating a new page
function changePage( to, transition, back, changeHash ){
	//from is always the currently viewed page
	var toIsArray = $.type(to) === "array",
		from = toIsArray ? to[0] : $.mobile.activePage,
		to = toIsArray ? to[1] : to,
		//url of the target page (leading hash stripped); fileUrl is declared
		//here so it no longer leaks as an implicit global (was: url = fileUrl = ...)
		url = $.type(to) === "string" ? to.replace( /^#/, "" ) : null,
		fileUrl = url,
		data = undefined,
		type = 'get',
		isFormRequest = false,
		//already-cached DOM page to be removed after a fresh ajax copy replaces it
		duplicateCachedPage = null,
		//"back" defaults to true when the previous stack entry matches the target url
		back = (back !== undefined) ? back : ( urlStack.length > 1 && urlStack[ urlStack.length - 2 ].url === url ),
		transition = (transition !== undefined) ? transition : $.mobile.defaultTransition;

	//form submissions arrive as an object with url/data/type
	if( $.type(to) === "object" && to.url ){
		url = to.url;
		data = to.data;
		type = to.type;
		isFormRequest = true;

		//make get requests bookmarkable
		if( data && type == 'get' ){
			url += "?" + data;
			data = undefined;
		}
	}

	//reset base to pathname for new request
	resetBaseURL();

	// if the new href is the same as the previous one
	if ( back ) {
		transition = urlStack.pop().transition;
	} else {
		urlStack.push({ url: url, transition: transition });
	}

	//function for transitioning between two existing pages
	function transitionPages() {
		//kill the keyboard
		$( window.document.activeElement ).blur();

		//get current scroll distance
		var currScroll = $window.scrollTop();

		//set as data for returning to that spot
		from.data('lastScroll', currScroll);

		//trigger before show/hide events
		from.data("page")._trigger("beforehide", {nextPage: to});
		to.data("page")._trigger("beforeshow", {prevPage: from});

		//runs after the transition (or immediately when there is none)
		function loadComplete(){
			pageLoading( true );

			//trigger show/hide events, allow preventing focus change through return false
			if( from.data("page")._trigger("hide", null, {nextPage: to}) !== false && to.data("page")._trigger("show", null, {prevPage: from}) !== false ){
				$.mobile.activePage = to;
			}
			reFocus( to );

			if( changeHash && url ){
				$.mobile.updateHash(url, true);
			}
			removeActiveLinkClass();

			//if there's a duplicateCachedPage, remove it from the DOM now that it's hidden
			if( duplicateCachedPage != null ){
				duplicateCachedPage.remove();
			}

			//jump to top or prev scroll, if set
			silentScroll( to.data( 'lastScroll' ) );
		}

		if(transition && (transition !== 'none')){
			$pageContainer.addClass('ui-mobile-viewport-transitioning');
			// animate in / out
			from.addClass( transition + " out " + ( back ? "reverse" : "" ) );
			to.addClass( $.mobile.activePageClass + " " + transition +
				" in " + ( back ? "reverse" : "" ) );

			// callback - remove classes, etc
			to.animationComplete(function() {
				from.add( to ).removeClass(" out in reverse " + $.mobile.transitions.join(' ') );
				from.removeClass( $.mobile.activePageClass );
				loadComplete();
				$pageContainer.removeClass('ui-mobile-viewport-transitioning');
			});
		}
		else{
			from.removeClass( $.mobile.activePageClass );
			to.addClass( $.mobile.activePageClass );
			loadComplete();
		}
	}

	//shared page enhancements
	function enhancePage(){
		setPageRole( to );
		to.page();
	}

	//get the actual file in a jq-mobile nested url
	function getFileURL( url ){
		return url.match( '&' + $.mobile.subPageUrlKey ) ? url.split( '&' + $.mobile.subPageUrlKey )[0] : url;
	}

	//if url is a string, look the target page up by id and derive its file url
	if( url ){
		to = $( "[id='" + url + "']" );
		fileUrl = getFileURL(url);
	}
	else{ //find base url of element, if avail
		var toID = to.attr('id'),
			toIDfileurl = getFileURL(toID);

		if(toID != toIDfileurl){
			fileUrl = toIDfileurl;
		}
	}

	// find the "to" page, either locally existing in the dom or by creating it through ajax
	if ( to.length && !isFormRequest ) {
		if( fileUrl ){
			setBaseURL(fileUrl);
		}
		enhancePage();
		transitionPages();
	} else {
		//if to exists in DOM, save a reference to it in duplicateCachedPage for removal after page change
		if( to.length ){
			duplicateCachedPage = to;
		}

		pageLoading();

		$.ajax({
			url: fileUrl,
			type: type,
			data: data,
			success: function( html ) {
				setBaseURL(fileUrl);
				var all = $("<div></div>");
				//workaround to allow scripts to execute when included in page divs
				all.get(0).innerHTML = html;
				to = all.find('[data-role="page"]');

				//rewrite src and href attrs to use a base url
				if( !$.support.dynamicBaseTag ){
					var baseUrl = getBaseURL(fileUrl);
					to.find('[src],link[href]').each(function(){
						var thisAttr = $(this).is('[href]') ? 'href' : 'src',
							thisUrl = $(this).attr(thisAttr);

						//if full path exists and is same, chop it - helps IE out
						//(fix: replace() returns a new string - the result was previously discarded)
						thisUrl = thisUrl.replace( location.protocol + '//' + location.host + location.pathname, '' );

						if( !/^(\w+:|#|\/)/.test(thisUrl) ){
							$(this).attr(thisAttr, baseUrl + thisUrl);
						}
					});
				}

				//preserve ID on a retrieved page
				if ( to.attr('id') ) {
					to = wrapNewPage( to );
				}

				to
					.attr( "id", fileUrl )
					.appendTo( $pageContainer );

				enhancePage();
				transitionPages();
			},
			error: function() {
				pageLoading( true );
				removeActiveLinkClass(true);
				//transient error toast; removes itself after fading out
				$("<div class='ui-loader ui-overlay-shadow ui-body-e ui-corner-all'><h1>Error Loading Page</h1></div>")
					.css({ "display": "block", "opacity": 0.96, "top": $(window).scrollTop() + 100 })
					.appendTo( $pageContainer )
					.delay( 800 )
					.fadeOut( 400, function(){
						$(this).remove();
					});
			}
		});
	}
}
$(function() {
	$body = $( "body" );
	pageLoading();

	// needs to be bound at domready (for IE6)
	// find or load content, make it active
	$window.bind( "hashchange", function(e, triggered) {
		// a programmatic hash update via $.mobile.updateHash(url, true) sets
		// hashListener = false so that exactly one hashchange is ignored; re-arm here
		if( !hashListener ){
			hashListener = true;
			return;
		}

		// pages flagged with a non-history data-role never react to hash changes
		if( $(".ui-page-active").is("[data-role=" + $.mobile.nonHistorySelectors + "]") ){
			return;
		}

		var to = location.hash,
			// `triggered` is truthy only for the synthetic hashchange fired at
			// startup (see the dom-ready block below); suppress the transition then
			transition = triggered ? false : undefined;

		// either we've backed up to the root page url
		// or it's the first page load with no hash present

		//there's a hash and it wasn't manually triggered
		// > probably a new page, "back" will be figured out by changePage
		if ( to ){
			changePage( to, transition);
		}

		//there's no hash, the active page is not the start page, and it's not manually triggered hashchange
		// > probably backed out to the first page visited
		else if( $.mobile.activePage.length && $startPage[0] !== $.mobile.activePage[0] && !triggered ) {
			changePage( $startPage, transition, true );
		}

		//no hash and no navigation needed - just (re)show the start page
		else{
			// NOTE(review): events are fired directly on the element here, unlike
			// transitions elsewhere that go through the page widget's
			// _trigger("beforeshow"/"show") - confirm this asymmetry is intentional
			$startPage.trigger("pagebeforeshow", {prevPage: $('')});
			$startPage.addClass( $.mobile.activePageClass );
			pageLoading( true );

			if( $startPage.trigger("pageshow", {prevPage: $('')}) !== false ){
				reFocus($startPage);
			}
		}
	});
});
//add orientation class on flip/resize.
$window.bind( "orientationchange.htmlclass", function( event ) {
	//event.orientation presumably carries "portrait" or "landscape" (the
	//removeClass list implies those are the only two values) - confirm upstream
	$html.removeClass( "portrait landscape" ).addClass( event.orientation );
});
//add breakpoint classes for faux media-q support
function detectResolutionBreakpoints(){
	var currWidth = $window.width(),
		minPrefix = "min-width-",
		maxPrefix = "max-width-",
		minBreakpoints = [],
		maxBreakpoints = [],
		unit = "px";

	//clear every previously applied breakpoint class before recomputing
	$html.removeClass( minPrefix + resolutionBreakpoints.join(unit + " " + minPrefix) + unit + " " +
		maxPrefix + resolutionBreakpoints.join( unit + " " + maxPrefix) + unit );

	$.each( resolutionBreakpoints, function( i ){
		if( currWidth >= resolutionBreakpoints[ i ] ){
			minBreakpoints.push( minPrefix + resolutionBreakpoints[ i ] + unit );
		}
		if( currWidth <= resolutionBreakpoints[ i ] ){
			maxBreakpoints.push( maxPrefix + resolutionBreakpoints[ i ] + unit );
		}
	});

	//join both lists in one go (fix: the old string concatenation produced a
	//literal "undefined" class when only max-width breakpoints matched)
	$html.addClass( minBreakpoints.concat( maxBreakpoints ).join( " " ) );
}
//add breakpoints now and on oc/resize events
$window.bind( "orientationchange resize", detectResolutionBreakpoints);
//apply the classes once at startup as well
detectResolutionBreakpoints();
//common breakpoints, overrideable, changeable
$.mobile.addResolutionBreakpoints = function( newbps ){
	//normalize to an array so a single breakpoint and a list are handled alike
	var additions = $.type( newbps ) === "array" ? newbps : [ newbps ];
	resolutionBreakpoints = resolutionBreakpoints.concat( additions );
	//re-apply breakpoint classes so the new thresholds take effect immediately
	detectResolutionBreakpoints();
};
//animation complete callback
//TODO - update support test and create special event for transitions
//check out transitionEnd (opera per Paul's request)
$.fn.animationComplete = function( callback ){
	if( $.support.cssTransitions ){
		return $(this).one( 'webkitAnimationEnd', callback );
	}
	//no CSS transition support - invoke immediately, but still return the
	//jQuery set so the call stays chainable (fix: previously returned undefined)
	callback();
	return $(this);
};
//TODO - add to jQuery.mobile, not $
//(note: the extend below already targets $.mobile; the TODO appears stale)
$.extend($.mobile, {
	pageLoading: pageLoading,
	changePage: changePage,
	silentScroll: silentScroll
});
//dom-ready
$(function(){
	var $pages = $("[data-role='page']");

	//set up active page
	$startPage = $.mobile.activePage = $pages.first();

	//set page container
	$pageContainer = $startPage.parent().addClass('ui-mobile-viewport');

	//expose the container on the mobile namespace
	//(fix: the single-argument $.extend({...}) form extended $ itself)
	$.extend( $.mobile, {
		pageContainer: $pageContainer
	});

	//initialize all pages present
	$pages.page();

	//trigger a new hashchange, hash or not
	$window.trigger( "hashchange", [ true ] );

	//update orientation
	$window.trigger( "orientationchange.htmlclass" );

	//remove rendering class
	$html.removeClass('ui-mobile-rendering');
});
//scroll past the address bar once all assets have loaded (see silentScroll)
$window.load(silentScroll);
})( jQuery, this ); | PypiClean |
/Firefly%20III%20API%20Python%20Client-1.5.6.post2.tar.gz/Firefly III API Python Client-1.5.6.post2/firefly_iii_client/api/piggy_banks_api.py | import re # noqa: F401
import sys # noqa: F401
from firefly_iii_client.api_client import ApiClient, Endpoint as _Endpoint
from firefly_iii_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from firefly_iii_client.model.attachment_array import AttachmentArray
from firefly_iii_client.model.piggy_bank_array import PiggyBankArray
from firefly_iii_client.model.piggy_bank_event_array import PiggyBankEventArray
from firefly_iii_client.model.piggy_bank_single import PiggyBankSingle
from firefly_iii_client.model.piggy_bank_store import PiggyBankStore
from firefly_iii_client.model.piggy_bank_update import PiggyBankUpdate
from firefly_iii_client.model.validation_error import ValidationError
class PiggyBanksApi(object):
    """Auto-generated client for the Firefly III piggy-bank endpoints.

    NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

        # DELETE /api/v1/piggy_banks/{id} -> no response body
        self.delete_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': None,
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks/{id}',
                'operation_id': 'delete_piggy_bank',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': [
                    'id',
                ],
                'required': [
                    'id',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                },
                'attribute_map': {
                    'id': 'id',
                },
                'location_map': {
                    'id': 'path',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [],
                'content_type': [],
            },
            api_client=api_client
        )

        # GET /api/v1/piggy_banks/{id} -> PiggyBankSingle
        self.get_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (PiggyBankSingle,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks/{id}',
                'operation_id': 'get_piggy_bank',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'id',
                ],
                'required': [
                    'id',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                },
                'attribute_map': {
                    'id': 'id',
                },
                'location_map': {
                    'id': 'path',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )

        # GET /api/v1/piggy_banks/{id}/attachments -> AttachmentArray (paginated)
        self.list_attachment_by_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (AttachmentArray,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks/{id}/attachments',
                'operation_id': 'list_attachment_by_piggy_bank',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'id',
                    'page',
                ],
                'required': [
                    'id',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                    'page': (int,),
                },
                'attribute_map': {
                    'id': 'id',
                    'page': 'page',
                },
                'location_map': {
                    'id': 'path',
                    'page': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )

        # GET /api/v1/piggy_banks/{id}/events -> PiggyBankEventArray (paginated)
        self.list_event_by_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (PiggyBankEventArray,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks/{id}/events',
                'operation_id': 'list_event_by_piggy_bank',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'id',
                    'page',
                ],
                'required': [
                    'id',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                    'page': (int,),
                },
                'attribute_map': {
                    'id': 'id',
                    'page': 'page',
                },
                'location_map': {
                    'id': 'path',
                    'page': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )

        # GET /api/v1/piggy_banks -> PiggyBankArray (paginated)
        self.list_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (PiggyBankArray,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks',
                'operation_id': 'list_piggy_bank',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'page',
                ],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'page': (int,),
                },
                'attribute_map': {
                    'page': 'page',
                },
                'location_map': {
                    'page': 'query',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )

        # POST /api/v1/piggy_banks (body: PiggyBankStore) -> PiggyBankSingle
        self.store_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (PiggyBankSingle,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks',
                'operation_id': 'store_piggy_bank',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'piggy_bank_store',
                ],
                'required': [
                    'piggy_bank_store',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'piggy_bank_store': (PiggyBankStore,),
                },
                'attribute_map': {},
                'location_map': {
                    'piggy_bank_store': 'body',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json',
                    'application/json'
                ],
                'content_type': [
                    'application/json',
                    'application/x-www-form-urlencoded'
                ]
            },
            api_client=api_client
        )

        # PUT /api/v1/piggy_banks/{id} (body: PiggyBankUpdate) -> PiggyBankSingle
        self.update_piggy_bank_endpoint = _Endpoint(
            settings={
                'response_type': (PiggyBankSingle,),
                'auth': [
                    'firefly_iii_auth'
                ],
                'endpoint_path': '/api/v1/piggy_banks/{id}',
                'operation_id': 'update_piggy_bank',
                'http_method': 'PUT',
                'servers': None,
            },
            params_map={
                'all': [
                    'id',
                    'piggy_bank_update',
                ],
                'required': [
                    'id',
                    'piggy_bank_update',
                ],
                'nullable': [],
                'enum': [],
                'validation': []
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                    'piggy_bank_update': (PiggyBankUpdate,),
                },
                'attribute_map': {
                    'id': 'id',
                },
                'location_map': {
                    'id': 'path',
                    'piggy_bank_update': 'body',
                },
                'collection_format_map': {}
            },
            headers_map={
                'accept': [
                    'application/vnd.api+json',
                    'application/json'
                ],
                'content_type': [
                    'application/json',
                    'application/x-www-form-urlencoded'
                ]
            },
            api_client=api_client
        )

    def delete_piggy_bank(
        self,
        id,
        **kwargs
    ):
        """Delete a piggy bank.  # noqa: E501

        Delete a piggy bank.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_piggy_bank(id, async_req=True)
        >>> result = thread.get()

        Args:
            id (str): The ID of the piggy bank.

        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            None
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['id'] = \
            id
        return self.delete_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def get_piggy_bank(
        self,
        id,
        **kwargs
    ):
        """Get a single piggy bank.  # noqa: E501

        Get a single piggy bank.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_piggy_bank(id, async_req=True)
        >>> result = thread.get()

        Args:
            id (str): The ID of the piggy bank.

        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            PiggyBankSingle
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['id'] = \
            id
        return self.get_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def list_attachment_by_piggy_bank(
        self,
        id,
        **kwargs
    ):
        """Lists all attachments.  # noqa: E501

        Lists all attachments.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_attachment_by_piggy_bank(id, async_req=True)
        >>> result = thread.get()

        Args:
            id (str): The ID of the piggy bank.

        Keyword Args:
            page (int): Page number. The default pagination is 50.. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            AttachmentArray
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['id'] = \
            id
        return self.list_attachment_by_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def list_event_by_piggy_bank(
        self,
        id,
        **kwargs
    ):
        """List all events linked to a piggy bank.  # noqa: E501

        List all events linked to a piggy bank (adding and removing money).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_event_by_piggy_bank(id, async_req=True)
        >>> result = thread.get()

        Args:
            id (str): The ID of the piggy bank

        Keyword Args:
            page (int): Page number. The default pagination is 50.. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            PiggyBankEventArray
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['id'] = \
            id
        return self.list_event_by_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def list_piggy_bank(
        self,
        **kwargs
    ):
        """List all piggy banks.  # noqa: E501

        List all piggy banks.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_piggy_bank(async_req=True)
        >>> result = thread.get()

        Keyword Args:
            page (int): Page number. The default pagination is 50.. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            PiggyBankArray
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        return self.list_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def store_piggy_bank(
        self,
        piggy_bank_store,
        **kwargs
    ):
        """Store a new piggy bank  # noqa: E501

        Creates a new piggy bank. The data required can be submitted as a JSON body or as a list of parameters.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.store_piggy_bank(piggy_bank_store, async_req=True)
        >>> result = thread.get()

        Args:
            piggy_bank_store (PiggyBankStore): JSON array or key=value pairs with the necessary piggy bank information. See the model for the exact specifications.

        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            PiggyBankSingle
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['piggy_bank_store'] = \
            piggy_bank_store
        return self.store_piggy_bank_endpoint.call_with_http_info(**kwargs)

    def update_piggy_bank(
        self,
        id,
        piggy_bank_update,
        **kwargs
    ):
        """Update existing piggy bank.  # noqa: E501

        Update existing piggy bank.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.update_piggy_bank(id, piggy_bank_update, async_req=True)
        >>> result = thread.get()

        Args:
            id (str): The ID of the piggy bank
            piggy_bank_update (PiggyBankUpdate): JSON array with updated piggy bank information. See the model for the exact specifications.

        Keyword Args:
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            PiggyBankSingle
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in the standard request-control options with their defaults.
        kwargs['async_req'] = kwargs.get(
            'async_req', False
        )
        kwargs['_return_http_data_only'] = kwargs.get(
            '_return_http_data_only', True
        )
        kwargs['_preload_content'] = kwargs.get(
            '_preload_content', True
        )
        kwargs['_request_timeout'] = kwargs.get(
            '_request_timeout', None
        )
        kwargs['_check_input_type'] = kwargs.get(
            '_check_input_type', True
        )
        kwargs['_check_return_type'] = kwargs.get(
            '_check_return_type', True
        )
        kwargs['_host_index'] = kwargs.get('_host_index')
        kwargs['id'] = \
            id
        kwargs['piggy_bank_update'] = \
            piggy_bank_update
        return self.update_piggy_bank_endpoint.call_with_http_info(**kwargs)
/KPay_sdk-1.0.1-py3-none-any.whl/api/payment.py | import json
import math
from datetime import datetime
from typing import TypeVar
import requests
from exception.CustomException import CustomException
from model.CancelPayment import *
from model.CheckPayment import *
from model.CreatePayment import *
from service.security import Security
T = TypeVar('T')
class Payment:
    """Client for the UmeePay payment gateway.

    Each request payload is AES-encrypted with ``encryptKey`` and signed with
    ``secretKey``; responses are signature-verified and decrypted before being
    wrapped in the corresponding response model.
    """

    # Gateway base URL. NOTE(review): this points at a dev environment; it can
    # now be overridden per-instance via the ``baseUrl`` constructor argument.
    __baseUrl = "https://api-umeecore-dev.hcm.unicloud.ai/umee-pay"
    __endpointCreate = "/api/payment/v1/create"
    __endpointCheck = "/api/payment/v1/check"
    __endpointCancel = "/api/payment/v1/cancel"

    # Credentials issued by the gateway.
    __clientId: str
    __secretKey: str
    __encryptKey: str
    # Crypto helper (encrypt/decrypt/sign/verify), shared by all instances.
    __security: Security = Security()

    def __init__(self, clientId: str, encryptKey: str, secretKey: str, baseUrl: str = None):
        """Create a client bound to one merchant's credentials.

        :param clientId: merchant identifier, sent in the ``x-api-client`` header
        :param encryptKey: AES key used to encrypt requests / decrypt responses
        :param secretKey: key used to sign requests and verify response signatures
        :param baseUrl: optional gateway base URL override (defaults to the
            built-in dev-environment URL)
        """
        self.__clientId = clientId
        self.__secretKey = secretKey
        self.__encryptKey = encryptKey
        if baseUrl is not None:
            self.__baseUrl = baseUrl

    def __excute(self, url: str, data: T) -> str:
        """Encrypt and sign ``data``, POST it to ``url``, verify and decrypt the reply.

        :raises CustomException: code 1 on a client-id mismatch, code 2 on a bad
            response signature, or the gateway's own code/message when the
            response carries no data.
        """
        # Millisecond unix timestamp; included in the request signature and headers.
        timestamp = math.floor(datetime.now().timestamp() * 1000)
        # NOTE(review): the payload is str()-ed and then JSON-encoded, so the wire
        # format is a JSON *string* rather than a JSON object. Preserved as-is
        # because the gateway presumably expects it - confirm before changing.
        # (A debug print of the raw payment payload was removed here.)
        dataJson = json.dumps(data.__str__())
        payload = self.__security.aesEcrypt(dataJson, self.__encryptKey).decode('UTF-8')
        apiValidate = self.__security.genarateSign(payload, self.__clientId, timestamp, self.__secretKey)
        requestData = {
            "data": payload
        }
        response = requests.post(url, json.dumps(requestData), headers={
            'x-api-client': self.__clientId,
            'x-api-validate': apiValidate,
            'x-api-time': str(timestamp),
            'Content-Type': 'application/json',
        })

        responseData = json.loads(response.content.decode('UTF-8'))
        clientRes = str(response.headers['x-api-client'])
        dataValidateRes = str(response.headers['x-api-validate'])
        timestampRes = int(response.headers['x-api-time'])
        encryptedData = responseData['data']

        if self.__clientId != clientRes:
            raise CustomException(1, "Invalid Client")
        if encryptedData is None:
            # No payload: surface the gateway's own error code and message.
            raise CustomException(responseData['code'], responseData['message'])
        if not self.__security.verifySign(encryptedData, dataValidateRes, clientRes, timestampRes, self.__secretKey):
            raise CustomException(2, "Invalid Security")
        return self.__security.aesDecrypt(encryptedData, self.__encryptKey)

    def create_payment(self, data: CreatePaymentRequest) -> CreatePaymentResponse:
        """Create a new payment transaction."""
        url = self.__baseUrl + self.__endpointCreate
        response: str = self.__excute(url, data)
        return CreatePaymentResponse(response)

    def check_payment(self, data: CheckPaymentRequest) -> CheckPaymentResponse:
        """Query the status of an existing payment."""
        url = self.__baseUrl + self.__endpointCheck
        response: str = self.__excute(url, data)
        return CheckPaymentResponse(response)

    def cancel_payment(self, data: CancelPaymentRequest) -> CancelPaymentResponse:
        """Cancel a pending payment."""
        url = self.__baseUrl + self.__endpointCancel
        response: str = self.__excute(url, data)
        return CancelPaymentResponse(response)
/NativDebugging-35.tar.gz/NativDebugging-35/src/RecursiveFind.py | from __future__ import print_function
from builtins import range
from abc import ABCMeta
from .Interfaces import MemReaderInterface, ReadError
from .Utilities import makeQwordsList, makeDwordsList, integer_types
class RecursiveFind( MemReaderInterface ):
    """ Search a target process's memory for values, following pointers recursively. """
    __metaclass__ = ABCMeta

    def printRecursiveFindResult( self, result ):
        '''
        Description : prints a result returned from a binary search in a human friendly way
        Args:
            result - list/tuple which holds the result to be displayed (starting address, offsets to data, found data)
        Return Value : None, just prints the string
        '''
        print(('{0:s}\t{1:s}\t"{2:s}"'.format(hex(result[0]), ''.join(['{0:s}, '.format(hex(x)) for x in result[1]]), str(result[2]))))

    def _recursiveFind(self, targetValidator, startAddress, searchLength, pointerSize, targetReader, hops, alignment, limiter, path):
        # Scan memory from startAddress, yielding (address, offset-path, data)
        # for every aligned location whose value satisfies targetValidator.
        # Pointer-aligned values are dereferenced and searched recursively up
        # to `hops` levels deep.
        if startAddress % alignment != 0:
            raise Exception("Not aligned")
        # searchLength may be a per-depth list, a callable taking the current
        # address, or a plain number used at every depth.
        if isinstance(searchLength, list):
            nextSearchLength = searchLength[1:]
            currentSearchLength = searchLength[0]
        elif hasattr(searchLength, '__call__'):
            nextSearchLength = searchLength
            currentSearchLength = searchLength(startAddress)
        else:
            nextSearchLength = searchLength
            currentSearchLength = searchLength
        try:
            for offset in range(currentSearchLength):
                addr = startAddress + offset
                if 0 == (addr % alignment):
                    data = targetReader(addr)
                    # The optional limiter aborts this branch of the scan.
                    if limiter is not None and limiter(data):
                        return
                    if targetValidator(data):
                        yield (addr, path + [offset], data)
                if 0 == (addr % pointerSize):
                    pointer = self.readAddr(addr)
                    if hops > 0 and (0 == (pointer % alignment)) and self.isAddressValid(pointer):
                        for result in self._recursiveFind(targetValidator, pointer, nextSearchLength, pointerSize, targetReader, hops-1, alignment, limiter, path + [offset]):
                            yield result
        except ReadError:
            # Unreadable memory simply terminates this branch of the search.
            pass

    @staticmethod
    def isInListChecker(target):
        # Validator: value must be a member of the given collection.
        def _isInListChecker(x):
            return x in target
        return _isInListChecker

    @staticmethod
    def isInRangeChecker(target):
        # Validator: value must fall in the half-open range [target[0], target[1]).
        def _isInRangeChecker(x):
            return ((x >= target[0]) and (x < target[1]))
        return _isInRangeChecker

    @staticmethod
    def isEqChecker(target):
        # Validator: value must equal the target exactly.
        def _isEqChecker(x):
            return x == target
        return _isEqChecker

    @staticmethod
    def stringCaseInsensetiveCmp(target):
        # Comparator: case-insensitive match after stripping NUL bytes
        # (the second argument is accepted for interface compatibility).
        def _stringCaseInsensetiveCmp(x, r):
            return x.replace(b'\x00', b'').lower() == target.lower()
        return _stringCaseInsensetiveCmp

    def recursiveFind( self, target, startAddress, searchLength, hops=1, targetLength=None, alignment=4, limiter=None, isVerbose=False):
        '''
        Description : Main function for recursive search, calls the appropriate function according to the target data type
        Args:
            target - target data to be found (list / range tuple / integer / string / validator callable)
            startAddress - starting address of the binary data
            searchLength - length in bytes of the binary data
            hops - depth of recursive hops allowed within data (decreased by recursion)
            targetLength - byte width of integer targets (defaults to 4)
            alignment - only addresses that are multiples of this are examined
            limiter - optional predicate that aborts a branch when it returns True
            isVerbose - should data be displayed while searching?
        Return Type : Yields results upon finding addresses which hold the target data
        '''
        if isinstance(target, list):
            targetValidator = RecursiveFind.isInListChecker(target)
        elif isinstance(target, tuple):
            targetValidator = RecursiveFind.isInRangeChecker(target)
        elif isinstance(target, integer_types):
            targetValidator = RecursiveFind.isEqChecker(target)
        elif isinstance(target, str):
            targetValidator = RecursiveFind.isEqChecker(target)
            targetLength = len(target)
        else:
            # Assume the caller supplied a validator callable.
            targetValidator = target
        if isinstance(target, (list, tuple)) or isinstance(target, integer_types):
            if targetLength is None:
                targetLength = 4
            # Pick the fixed-width reader matching the target size.
            if 8 == targetLength:
                targetReader = self.readQword
            elif 4 == targetLength:
                targetReader = self.readDword
            elif 2 == targetLength:
                targetReader = self.readWord
            elif 1 == targetLength:
                targetReader = self.readByte
            else:
                raise Exception("Target length %s not supported with integer target" % repr(targetLength))
        else:
            targetReader = lambda addr: self.readMemory(addr, targetLength)
        path = []
        pointerSize = self.getPointerSize()
        for result in self._recursiveFind(targetValidator, startAddress, searchLength, pointerSize, targetReader, hops, alignment, limiter, path):
            if isVerbose:
                self.printRecursiveFindResult(result)
            yield result
/AutoTransform-1.1.1a8-py3-none-any.whl/autotransform/step/condition/state.py |
# @black_format
"""The implementation for the ChangeStateCondition."""
from __future__ import annotations
from typing import ClassVar, List
from autotransform.change.base import Change, ChangeState, ReviewState, TestState
from autotransform.step.condition.base import ComparisonCondition, ConditionName
from autotransform.step.condition.comparison import ComparisonType
class ChangeStateCondition(ComparisonCondition[ChangeState]):
    """Checks a Change's overall state against the supplied state(s).

    Attributes:
        comparison (ComparisonType): The comparison to perform.
        value (ChangeState | List[ChangeState]): The state(s) compared against.
        name (ClassVar[ConditionName]): The name of the Component.
    """

    comparison: ComparisonType
    value: ChangeState | List[ChangeState]
    name: ClassVar[ConditionName] = ConditionName.CHANGE_STATE

    def get_val_from_change(self, change: Change) -> ChangeState:
        """Extract the value this condition compares: the Change's state.

        Args:
            change (Change): The Change being evaluated.

        Returns:
            ChangeState: The current state of the Change.
        """
        return change.get_state()
class MergeableStateCondition(ComparisonCondition[str]):
    """Checks a Change's mergeable state against the supplied state(s).

    Attributes:
        comparison (ComparisonType): The comparison to perform.
        value (str | List[str]): The state(s) compared against.
        name (ClassVar[ConditionName]): The name of the Component.
    """

    comparison: ComparisonType
    value: str | List[str]
    name: ClassVar[ConditionName] = ConditionName.MERGEABLE_STATE

    def get_val_from_change(self, change: Change) -> str:
        """Extract the value this condition compares: the mergeable state.

        Args:
            change (Change): The Change being evaluated.

        Returns:
            str: The mergeable state of the Change.
        """
        return change.get_mergeable_state()
class ReviewStateCondition(ComparisonCondition[ReviewState]):
    """Checks a Change's review state against the supplied state(s).

    Attributes:
        comparison (ComparisonType): The comparison to perform.
        value (ReviewState | List[ReviewState]): The state(s) compared against.
        name (ClassVar[ConditionName]): The name of the Component.
    """

    comparison: ComparisonType
    value: ReviewState | List[ReviewState]
    name: ClassVar[ConditionName] = ConditionName.REVIEW_STATE

    def get_val_from_change(self, change: Change) -> ReviewState:
        """Extract the value this condition compares: the review state.

        Args:
            change (Change): The Change being evaluated.

        Returns:
            ReviewState: The review state of the Change.
        """
        return change.get_review_state()
class TestStateCondition(ComparisonCondition[TestState]):
    """Checks a Change's test state against the supplied state(s).

    Attributes:
        comparison (ComparisonType): The comparison to perform.
        value (TestState | List[TestState]): The state(s) compared against.
        name (ClassVar[ConditionName]): The name of the Component.
    """

    comparison: ComparisonType
    value: TestState | List[TestState]
    name: ClassVar[ConditionName] = ConditionName.TEST_STATE

    def get_val_from_change(self, change: Change) -> TestState:
        """Extract the value this condition compares: the test state.

        Args:
            change (Change): The Change being evaluated.

        Returns:
            TestState: The test state of the Change.
        """
        return change.get_test_state()
/CubicReport-0.4.18.tar.gz/CubicReport-0.4.18/geraldo/widgets.py | import datetime, types, decimal, re
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback
from base import BAND_WIDTH, BAND_HEIGHT, Element, SubReport
from utils import get_attr_value, SYSTEM_FIELD_CHOICES, FIELD_ACTION_VALUE, FIELD_ACTION_COUNT,\
FIELD_ACTION_AVG, FIELD_ACTION_MIN, FIELD_ACTION_MAX, FIELD_ACTION_SUM,\
FIELD_ACTION_DISTINCT_COUNT, cm, black
from exceptions import AttributeNotFound
class Widget(Element):
    """Base class for report widgets: a value representation on the report."""
    _height = 0 #0.5*cm
    _width = 5*cm
    _line_number = 0
    style = {}
    truncate_overflow = False
    get_value = None # A lambda function to get customized values
    instance = None
    report = None
    generator = None
    band = None
    borders = None
    def __init__(self, **kwargs):
        """Accept any keyword argument and store it as an attribute."""
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
    def clone(self):
        """Return a copy of this widget, carrying over its runtime context."""
        new = super(Widget, self).clone()
        for attr_name in ('style', 'truncate_overflow', 'get_value', 'instance',
                          'report', 'generator', 'band', 'borders'):
            setattr(new, attr_name, getattr(self, attr_name))
        return new
class Label(Widget):
    """A label is just a simple text.
    'get_value' lambda must have 'text' argument."""
    _text = ''
    _repr_for_cache_attrs = ('text','left','top','height','width','style','visible')
    def _get_text(self):
        # Without a customized getter, just return the stored text.
        if not self.get_value:
            return self._text
        # The customized getter may take (widget, text) or just (text,).
        try:
            return self.get_value(self, self._text)
        except TypeError:
            return self.get_value(self._text)
    def _set_text(self, value):
        self._text = value
    text = property(_get_text, _set_text)
    def clone(self):
        new = super(Label, self).clone()
        # Callable texts are evaluated lazily and must not be copied as data.
        if callable(self._text):
            return new
        new._text = self._text
        return new
# Expression helpers used by ObjectValue.prepare_expression and
# ObjectValue.get_object_value below.
# Matches a call whose first argument is not quoted (e.g. value(name));
# prepare_expression uses it to find arguments that still need quoting.
EXP_QUOTED = re.compile('\w\(([^\'"].+?[^\'"])(|,.*?)\)')
# Substitution form of the pattern above (no leading word character).
EXP_QUOTED_SUB = re.compile('\(([^\'"].+?[^\'"])(|,.*?)\)')
# Splits an arithmetic expression into attribute-name and operator tokens
# (+, -, *, /, **); used by ObjectValue.get_object_value.
EXP_TOKENS = re.compile('([\w\._]+|\*\*|\+|\-|\*|\/)')
class ObjectValue(Label):
    """This shows the value from a method, field or property from objects got
    from the queryset.
    You can inform an action to show the object value or an aggregation
    function on it.
    You can also use 'display_format' attribute to set a friendly string
    formating, with a mask or additional text.
    'get_value' and 'get_text' lambda attributes must have 'instance' argument.
    Set 'stores_text_in_cache' to False if you want this widget get its value
    and text on render and generate moments."""
    # Attribute name, dotted path or arithmetic expression read from the
    # current instance.
    attribute_name = None
    # One of the FIELD_ACTION_* constants (value/count/avg/min/max/sum/...).
    action = FIELD_ACTION_VALUE
    # '%'-style or str.format()-style mask applied in _text().
    display_format = '%s'
    objects = None
    get_text = None # A lambda function to get customized display values
    stores_text_in_cache = True
    # Alternative to attribute_name: a free-form expression evaluated by
    # get_value_by_expression(), e.g. 'sum(price)'.
    expression = None
    converts_decimal_to_float = False
    converts_float_to_decimal = True
    _cached_text = None
    on_expression_error = None # Expected arguments:
                               # - widget
                               # - instance
                               # - exception
                               # - expression
    def __init__(self, *args, **kwargs):
        super(ObjectValue, self).__init__(*args, **kwargs)
        if self.expression:
            self.prepare_expression()
    def prepare_expression(self):
        # Normalises self.expression so every bare attribute reference ends
        # up wrapped as value("attr") before evaluation.
        # NOTE(review): this guard falls through ('pass' does nothing), but
        # callers only invoke this when self.expression is truthy.
        if not self.expression:
            pass
        self.expression = self.expression.replace(' ','')
        while True:
            f = EXP_QUOTED.findall(self.expression)
            if not f:
                # Replace simple attribute name or method to value("")
                if '(' not in self.expression:
                    self.expression = 'value("%s")' % self.expression
                break
            self.expression = EXP_QUOTED_SUB.sub('("%s"%s)'%(f[0][0], f[0][1]), self.expression, 1)
    def get_object_value(self, instance=None, attribute_name=None):
        """Return the attribute value for just an object"""
        instance = instance or self.instance
        attribute_name = attribute_name or self.attribute_name
        # Checks lambda and instance
        if self.get_value and instance:
            try:
                return self.get_value(self, instance)
            except TypeError:
                return self.get_value(instance)
        # Checks this is an expression
        # (Python 2: filter() returns a list, so len() below is valid.)
        tokens = EXP_TOKENS.split(attribute_name)
        tokens = filter(bool, tokens) # Cleans empty parts
        if len(tokens) > 1:
            # Arithmetic expression: resolve each attribute token, then eval.
            values = {}
            for token in tokens:
                if not token in ('+','-','*','/','**') and not token.isdigit():
                    values[token] = self.get_object_value(instance, token)
            return eval(attribute_name, values)
        # Gets value with function
        value = get_attr_value(instance, attribute_name)
        # For method attributes --- FIXME: check what does this code here, because
        #                           get_attr_value has a code to do that, using
        #                           callable() checking
        if type(value) == types.MethodType:
            value = value()
        return value
    def get_queryset_values(self, attribute_name=None):
        """Uses the method 'get_object_value' to get the attribute value from
        all objects in the objects list, as a list"""
        objects = self.generator.get_current_queryset()
        return map(lambda obj: self.get_object_value(obj, attribute_name), objects)
    def _clean_empty_values(self, values):
        # Replaces falsy values with 0 and normalises Decimal/float according
        # to the converts_* flags, so aggregations never mix numeric types.
        def clean(val):
            if not val:
                return 0
            elif isinstance(val, decimal.Decimal) and self.converts_decimal_to_float:
                return float(val)
            elif isinstance(val, float) and self.converts_float_to_decimal:
                return decimal.Decimal(str(val))
            return val
        return map(clean, values)
    def action_value(self, attribute_name=None):
        # FIELD_ACTION_VALUE: the plain attribute value of the current instance.
        return self.get_object_value(attribute_name=attribute_name)
    def action_count(self, attribute_name=None):
        # Returns the total count of objects with valid values on informed attribute
        values = self.get_queryset_values(attribute_name)
        return len(filter(lambda v: v is not None, values))
    def action_avg(self, attribute_name=None):
        values = self.get_queryset_values(attribute_name)
        # Clear empty values
        values = self._clean_empty_values(values)
        return sum(values) / len(values)
    def action_min(self, attribute_name=None):
        values = self.get_queryset_values(attribute_name)
        return min(values)
    def action_max(self, attribute_name=None):
        values = self.get_queryset_values(attribute_name)
        return max(values)
    def action_sum(self, attribute_name=None):
        values = self.get_queryset_values(attribute_name)
        # Clear empty values
        values = self._clean_empty_values(values)
        return sum(values)
    def action_distinct_count(self, attribute_name=None):
        values = filter(lambda v: v is not None, self.get_queryset_values(attribute_name))
        return len(set(values))
    def action_coalesce(self, attribute_name=None, default=''):
        # Returns the value, or the given default when the value is falsy.
        value = self.get_object_value(attribute_name=attribute_name)
        return value or unicode(default)
    def _text(self):
        # Computes (and, by default, caches) the rendered text, then applies
        # display_format: str.format() style when it contains '{', otherwise
        # '%'-style, with float/int coercion guessed from the format letters.
        if not self.stores_text_in_cache or self._cached_text is None:
            try: # Before all, tries to get the value using parent object
                value = self.band.get_object_value(obj=self)
            except AttributeNotFound:
                if self.expression:
                    value = self.get_value_by_expression()
                else:
                    value = getattr(self, 'action_'+self.action)()
            if self.get_text:
                try:
                    self._cached_text = unicode(self.get_text(self, self.instance, value))
                except TypeError:
                    self._cached_text = unicode(self.get_text(self.instance, value))
            else:
                self._cached_text = unicode(value)
        res = self._cached_text
        if '{' in self.display_format:
            if 'f' in self.display_format:
                try:
                    res = self.display_format.format(float(self._cached_text or 0.0))
                except ValueError:
                    pass
            elif 'd' in self.display_format or 'x' in self.display_format or 'o' in self.display_format or 'b' in self.display_format:
                try:
                    res = self.display_format.format(int(self._cached_text or 0.0))
                except ValueError:
                    pass
            else:
                try:
                    res = self.display_format.format(self._cached_text)
                except ValueError:
                    pass
        elif self.display_format.find('f') > 0:
            try:
                res = self.display_format % float(self._cached_text or 0.0)
            except ValueError:
                pass
        elif self.display_format.find('d') > 0:
            try:
                res = self.display_format % int(self._cached_text or 0)
            except ValueError:
                pass
        else:
            res = self.display_format % self._cached_text
        return res
    def _set_text(self, value):
        self._cached_text = value
    text = property(lambda self: self._text(), _set_text)
    def clone(self):
        new = super(ObjectValue, self).clone()
        new.attribute_name = self.attribute_name
        new.action = self.action
        new.display_format = self.display_format
        new.objects = self.objects
        new.stores_text_in_cache = self.stores_text_in_cache
        new.expression = self.expression
        new.on_expression_error = self.on_expression_error
        return new
    def get_value_by_expression(self, expression=None):
        """Parses a given expression to get complex calculated values"""
        expression = expression or self.expression
        # Seed the eval namespace with the instance's own attributes...
        if not self.instance:
            global_vars = {}
        elif isinstance(self.instance, dict):
            global_vars = self.instance.copy()
        else:
            global_vars = self.instance.__dict__.copy()
        # ...and expose the aggregation actions as callable functions.
        global_vars.update({
            'value': self.action_value,
            'count': self.action_count,
            'avg': self.action_avg,
            'min': self.action_min,
            'max': self.action_max,
            'sum': self.action_sum,
            'distinct_count': self.action_distinct_count,
            'coalesce': self.action_coalesce,
        })
        if isinstance(self.report, SubReport):
            global_vars.update({
                'parent': self.report.parent_object,
                'p': self.report.parent_object, # Just a short alias
            })
        try:
            return eval(expression, global_vars)
        except Exception, e:
            # Delegate to the user-supplied handler when provided; otherwise
            # re-raise the original exception.
            if not callable(self.on_expression_error):
                raise
            return self.on_expression_error(self, e, expression, self.instance)
class SystemField(Label):
    """This shows system informations, like the report title, current date/time,
    page number, pages count, etc.
    'get_value' lambda must have 'expression' and 'fields' argument."""
    # '%(name)s'-style expression expanded against the fields dict in _text().
    expression = '%(report_title)s'
    # Default placeholders; None means "compute at render time".
    fields = {
        'report_title': None,
        'page_number': None,
        'first_page_number': None,
        'last_page_number': None,
        'page_count': None,
        'current_datetime': None,
        'report_author': None,
    }
    def __init__(self, **kwargs):
        super(SystemField, self).__init__(**kwargs)
        # This is the safe way to use the predefined fields dictionary
        # (a per-instance copy, so instances never share mutable state).
        self.fields = SystemField.fields.copy()
        self.fields['current_datetime'] = datetime.datetime.now()
    def _text(self):
        # Page numbers are offset by the generator's first_page_number so
        # multi-part documents can continue numbering across parts.
        page_number = (self.fields.get('page_number') or self.generator._current_page_number) + self.generator.first_page_number - 1
        page_count = self.fields.get('page_count') or self.generator.get_page_count()
        fields = {
            'report_title': self.fields.get('report_title') or self.report.title,
            'page_number': page_number,
            'first_page_number': self.generator.first_page_number,
            'last_page_number': page_count + self.generator.first_page_number - 1,
            'page_count': page_count,
            'current_datetime': self.fields.get('current_datetime') or datetime.datetime.now(),
            'report_author': self.fields.get('report_author') or self.report.author,
        }
        if self.get_value:
            return self.get_value(self.expression, fields)
        # SystemFieldDict also resolves dynamic 'now:' and 'var:' keys.
        return self.expression%SystemFieldDict(self, fields)
    def text(self): return self._text()
    text = property(text)
    def clone(self):
        new = super(SystemField, self).clone()
        new.expression = self.expression
        new.fields = self.fields
        return new
class SystemFieldDict(dict):
    """Dictionary used to resolve '%(...)s' expansions for SystemField.

    Plain keys come from the precomputed fields mapping; two prefixed key
    forms are resolved dynamically on access:
      * 'now:<format>' -- the current datetime formatted by the report
      * 'var:<name>'   -- a report variable looked up by name
    """
    widget = None
    fields = None
    def __init__(self, widget, fields):
        self.widget = widget
        self.fields = fields or {}
        # Initialise the underlying dict from the normalised mapping so a
        # None `fields` argument no longer raises a TypeError.
        super(SystemFieldDict, self).__init__(**self.fields)
    def __getitem__(self, key):
        if key.startswith('now:'):
            return self.widget.report.format_date(
                self.fields.get('current_datetime', datetime.datetime.now()),
                key[4:]
            )
        elif key.startswith('var:'):
            return self.widget.report.get_variable_value(name=key[4:], system_fields=self)
        return self.fields[key]
/guibits-1.0-py3-none-any.whl/src/guibits1_0/writing.py |
# version 12 Jan 23 18:48
# author RNB
import PyQt6.QtCore
import PyQt6.QtGui
from . import coloring, command_listing, fonting
from . import font_size_checking, font_styling, resolving
from . import type_checking2_0, windowing
"""
Copyright (C) 2013,2014,2015,2016,2017,2018,2020,2021,2022,2023 R.N.Bosworth
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License (gpl.txt) for more details.
"""
# exposed procedures
# ------------------
def clear_text(win):
    """
    pre:
      win = window whose pane is to be cleared of text
    post:
      a paint event has been queued for win's frame
      after this paint event is processed,
        win's pane is clear of text
    test:
      win is null
      win is non-null
        win has no frame
        win has frame
          page has text
    """
    type_checking2_0.check_derivative(win,windowing.Window)
    if win is None:
        raise Exception("Attempt to clear text from a null window")
    # Swap in a fresh command list under the lock so a concurrent paint
    # never observes a partially-cleared list.
    with win._my_text_command_list._my_lock:
        win._my_text_command_list = command_listing.new_command_list()
    p = win._my_pane
    if p is not None:
        p.update()  # queue a paint event for the now-empty pane
def width_in_points_of(win,s,fname,fss,fsize):
    """
    pre:
      win = window on which s is to be written
      win must be present on the screen
      s = string to be measured, as a str
      fname = name of font for s, as a str
      fss = set of FontStyles for s (empty set implies the string should be measured plain)
      fsize = point size of font for s, in range 6.0..72.0, as a float
    post:
      the width of s in points, for the given window, font name, font styles and font size, has been returned as a float
    test:
      None window
      valid window
      None text string
      valid text string
      None font name
      valid font name
      None font style
      valid font style
      None font size
      5.9 font size
      72.1 font size
      72.0 font size
        text "hello"
      6.0 font size
        text "goodbye"
      18.0 font size
        text "Thankyou"
        text "ThankyouThankyou" (should be double)
        fss is plain
        fss is bold
        fss is italic
        fss is (bold,italic)
    """
    type_checking2_0.check_derivative(win,windowing.Window)
    type_checking2_0.check_identical(s,str)
    type_checking2_0.check_identical(fname,str)
    type_checking2_0.check_derivative(fss,font_styling.FontStyles)
    type_checking2_0.check_identical(fsize,float)
    font_size_checking.check_pane_font_size(fsize)
    if win._my_frame is None:
        raise Exception("attempt to find length of string for non-showing window")
    # Measure the string once at the standard font size, then scale the
    # result linearly to the requested size.
    _qf = fonting._qfont_of(fname,_STANDARD_FONT_SIZE,fss)
    _qfmf = PyQt6.QtGui.QFontMetricsF(_qf)
    _widthf = _qfmf.horizontalAdvance(s)
    # _widthf is the width of the string in pixels at the standard font size
    _pxpt = resolving.pixels_per_point(win._my_frame)
    # scale width up from standard font size
    return (_widthf/_pxpt) * fsize/_STANDARD_FONT_SIZE
# scale width up from standard font size
def write_string(win,s,fname,fss,fsize,x,y,c):
    """
    pre:
      win = window in which Unicode string s is to be written
      win must be present on the screen
      s = string to be written on win's pane, as a str
      fname = font name of required font for string s, as a str
      fss = set of FontStyles required for string s
            (empty set implies the string should be rendered plain)
      fsize = required point size of font for string s, as a float
      x = horizontal offset in points of left-hand edge of s from left-hand edge of
          win's pane, as a float
      y = vertical offset in points of top of s from top of win's pane, as a float
      c = color in which s is to be written on win's pane,
          as a coloring.Color value
    post:
      the horizontal offset in points of the right-hand edge of the written string from the left-hand edge of win's pane has been returned as a float
      a paint event has been queued for win's pane
      after the paint event has been processed,
        the string s has been written to win's pane as specified
    test:
      invalid window
      valid window
        invalid text string
        valid text string
          invalid font name
          valid font name
            invalid font style
            valid font style
              invalid font size
              zero font size
              1.0 font size
                invalid x
                negative x
                zero x
                  invalid y
                  negative y
                  zero y
                    invalid color
                    black color
              positive font size
                positive x
                  positive y
                    blue color
    """
    type_checking2_0.check_derivative(win,windowing.Window)
    type_checking2_0.check_derivative(s,str)
    type_checking2_0.check_derivative(fname,str)
    type_checking2_0.check_derivative(fss,font_styling.FontStyles)
    type_checking2_0.check_identical(fsize,float)
    font_size_checking.check_pane_font_size(fsize)
    type_checking2_0.check_identical(x,float)
    if x < 0.0:
        raise Exception("specified x-offset is negative")
    type_checking2_0.check_identical(y,float)
    if y < 0.0:
        raise Exception("specified y-offset is negative")
    type_checking2_0.check_derivative(c,coloring.Color)
    # Build a text command describing the string and queue it for painting.
    _width = width_in_points_of(win,s,fname,fss,fsize)
    _command = command_listing.new_text_command(s,fname,fss,fsize,x,y,_width,c)
    # update the text command list, in a synchronized fashion
    command_listing.insert(win._my_text_command_list,_command)
    win._my_pane.update() # to queue a paint event
    return x + _width
# private constants
# -----------------
# Reference size (in points) at which strings are measured; widths for other
# sizes are scaled linearly from this (see width_in_points_of).
_STANDARD_FONT_SIZE = 24.0
/Mopidy_YouTube-3.7-py3-none-any.whl/mopidy_youtube/frontend.py | import random
import pykka
from mopidy.core import listener
from mopidy_youtube import logger, youtube
from mopidy_youtube.data import extract_video_id, format_video_uri
# Module-level defaults; the effective values are read from the extension
# config in YouTubeAutoplayer.__init__.
autoplay_enabled = False
strict_autoplay = False
max_autoplay_length = None
# History of video ids already queued by the autoplayer (shared, module
# level) -- used to avoid replaying the same track.
autoplayed = []
max_degrees_of_separation = 3
class YouTubeAutoplayer(pykka.ThreadingActor, listener.CoreListener):
    """Mopidy frontend that emulates YouTube's autoplay by appending a
    related video to the tracklist whenever a YouTube track starts."""

    def __init__(self, config, core):
        super().__init__()
        self.config = config
        self.core = core
        # Cache the relevant extension settings for quick access.
        self.autoplay_enabled = config["youtube"]["autoplay_enabled"]
        self.strict_autoplay = config["youtube"]["strict_autoplay"]
        self.max_degrees_of_separation = config["youtube"]["max_degrees_of_separation"]
        self.max_autoplay_length = config["youtube"]["max_autoplay_length"]
        # Track from which "degrees of separation" are counted.
        self.base_track_id = ""
        self.degrees_of_separation = 0

    # Called by mopidy on start of playback of a URI
    # This function emulates the youtube autoplay functionality by retrieving the most
    # most related video to a video just played by a youtube API call, adding this new
    # video URI to the tracklist
    #
    # With the option "strict_autoplay" enabled, the next played URI will be the newly
    # added video. Without the option "strict_autoplay" enabled [default], the autoplay
    # functionality will only be executed if the end of the current tracklist is reached
    #
    # The autoplay functionality will not work correctly in combination with the repeat
    # option and is therefore disabled if repeat is enabled
    def track_playback_started(self, tl_track):
        if not self.autoplay_enabled:
            return None
        [tlTrackId, track] = tl_track
        # Only react to tracks served by this extension.
        if not track.uri.startswith("youtube:") and not track.uri.startswith("yt:"):
            return None
        try:
            tl = self.core.tracklist
            if tl.get_repeat().get() is True:
                logger.warn("Autoplayer: will not add tracks when repeat is enabled.")
                return None
            if tl.get_random().get() is True:
                logger.warn(
                    "Autoplayer: shuffle will not work when autoplay is enabled."
                )
            if self.strict_autoplay is False:
                # Non-strict mode: only act when the started track is the
                # last one in the tracklist.
                tlTracks = tl.get_tl_tracks().get()
                if len(tlTracks) != 0:
                    if tlTrackId is not tlTracks[-1].tlid:
                        logger.debug("Autoplayer: called not at end of track list.")
                        return None
                    elif tl.get_consume().get() is True:
                        logger.warning(
                            "Autoplayer: when having consume track enabled, "
                            'try with "strict_autoplay" option enabled for '
                            "better results"
                        )
                        return None
            logger.debug(f"getting current track id for {track.uri}")
            current_track_id = extract_video_id(track.uri)
            logger.debug(f"track id {current_track_id}")
            # Limit how far the chain of "related video" hops can drift from
            # the base track before snapping back to it.
            if self.max_degrees_of_separation:
                if self.degrees_of_separation < self.max_degrees_of_separation:
                    self.degrees_of_separation += 1
                    logger.debug("incrementing autoplay degrees of separation")
                else:
                    current_track_id = self.base_track_id
                    self.degrees_of_separation = 0
                    logger.debug("resetting to autoplay base track id")
                logger.debug(f"degrees of sep {self.degrees_of_separation}")
            # A track the autoplayer has not queued itself (e.g. user-picked)
            # becomes the new base track.
            if current_track_id not in autoplayed:
                self.base_track_id = current_track_id
                autoplayed.append(current_track_id)  # avoid replaying track
                self.degrees_of_separation = 0
                logger.debug("setting new autoplay base id")
            logger.debug(f"base track id {self.base_track_id}")
            current_track = youtube.Video.get(current_track_id)
            logger.debug(f"triggered related videos for {current_track.id}")
            current_track.related_videos
            logger.debug("getting related videos")
            related_videos = current_track.related_videos.get()
            logger.debug(
                f"autoplayer is adding a track related to {current_track.title.get()}"
            )
            logger.debug(f"related videos {related_videos}")
            # remove already autoplayed
            related_videos[:] = [
                related_video
                for related_video in related_videos
                if related_video.id not in autoplayed
            ]
            logger.debug(f"related videos edit 1 {related_videos}")
            # remove if track_length is 0 (probably a live video) or None
            related_videos[:] = [
                related_video
                for related_video in related_videos
                if related_video.length.get()
            ]
            logger.debug(f"related videos edit 2 {related_videos}")
            # remove if too long
            if self.max_autoplay_length:
                related_videos[:] = [
                    related_video
                    for related_video in related_videos
                    if related_video.length.get() < self.max_autoplay_length
                ]
                logger.debug(f"related videos edit 3{related_videos}")
            if len(related_videos) == 0:
                logger.warn(
                    f"could not get videos related to"
                    f"{current_track.title.get()}: ending autoplay"
                )
                return None
            else:
                # Pick one surviving candidate at random and append it.
                next_video = random.choice(related_videos)
                logger.debug(f"next video {next_video.id}")
                autoplayed.append(next_video.id)
                uri = [format_video_uri(next_video.id)]
                tl.add(uris=uri).get()
                return None
        except Exception as e:
            logger.error('Autoplayer error "%s"', e)
            return None
/Gbtestapi0.1-0.1a10-py3-none-any.whl/gailbot/core/engines/whisperEngine/parsers.py |
import sys
import os
import json
from typing import Dict, List, Any, Tuple
# from pyannote.core import Segment
from gailbot.core.utils.logger import makelogger
_DEFAULT_SPEAKER = 0
logger = makelogger("parsers")
def parse_into_word_dicts(transcription: Dict) -> List[Dict]:
    """
    Parse the results of the transcription into a list of dictionaries
    containing the speaker, start time, end time, and text.
    Format of the transcription is detailed here: https://github.com/linto-ai/whisper-timestamped

    Args:
        transcription: whisper-timestamped result containing a "segments"
            list whose items may carry a word-level "words" list.

    Returns:
        One dict per word with "start", "end", "text" and "speaker" keys;
        parsing errors are logged and whatever was parsed so far is returned.
    """
    logger.info("parse into word dict")
    parsed = list()
    segments = transcription["segments"]
    try:
        for segment in segments:
            # Segments without word-level timestamps are skipped.
            if "words" not in segment:
                continue
            for item in segment["words"]:
                parsed.append(
                    {
                        "start": item["start"],
                        "end": item["end"],
                        "text": item["text"],
                        # NOTE: Whisper does not generate speaker but I can
                        # potentially add that in too.
                        "speaker": str(_DEFAULT_SPEAKER),
                    }
                )
        # Previously an `assert parsed`, which is stripped under -O; keep the
        # failure visible with an explicit log instead.
        if not parsed:
            logger.error("no words were parsed from the transcription")
        else:
            logger.info("get the segment in to parsed dict")
    except Exception as e:
        logger.error("get the error from parsing word into dict", exc_info=e)
    return parsed
def parse_into_timestamped_text(asr_res: Dict) -> List[Tuple]:
    """
    Parse results from whisper timestamped in terms of Segment

    NOTE(review): the append that would build (Segment, text) pairs is
    commented out below (the pyannote.core Segment import is disabled at
    the top of this module), so this currently always returns an empty
    list -- confirm whether that is intentional.
    """
    logger.info("parse to timestamp data ")
    timestamp_texts = []
    for segment in asr_res["segments"]:
        # Skip segments without word-level timing information.
        if not "words" in segment:
            continue
        word_list = segment["words"]
        for item in word_list:
            # Extracted but currently unused while the append is disabled.
            start = item["start"]
            end = item["end"]
            text = item["text"]
            # timestamp_texts.append(
            #     (Segment(start, end), text)
            # )  # TODO: CHECK WITH VIVIAN
    return timestamp_texts
def parse_into_full_text(asr_res: Dict) -> str:
    """Return the complete transcription text from a whisper result mapping."""
    full_text = asr_res["text"]
    return full_text
def add_speaker_info_to_text(asr_res: Dict, dir_res: Dict) -> List[Dict]:
    """
    Add speaker information to transcription results using speaker
    diarization results.

    Returns a list of dicts, one per timestamped segment, each holding
    start/end times, the dominant speaker label, and the segment text.

    NOTE(review): ``dir_res`` is annotated ``Dict`` but is used via
    ``.crop(...).argmax()`` -- presumably a pyannote Annotation; confirm
    with callers.  Also, parse_into_timestamped_text currently yields an
    empty list (its Segment code is commented out), so this returns an
    empty list as well until that is restored.
    """
    logger.info("adding speaker tag into the result")
    spk_text = []
    timestamp_texts = parse_into_timestamped_text(asr_res)
    for seg, text in timestamp_texts:
        # Dominant speaker over the segment's time span.
        spk = dir_res.crop(seg).argmax()
        spk_text.append(
            {"start": seg.start, "end": seg.end, "speaker": spk, "text": text}
        )
    return spk_text
/Keg-Auth-0.7.2.tar.gz/Keg-Auth-0.7.2/changelog.rst | Changelog
=========
0.7.2 released 2023-05-22
-------------------------
- handle multiple potential session cookies resulting from werkzeug 2.3 and flask 2.3 changes (8b4680e_)
.. _8b4680e: https://github.com/level12/keg-auth/commit/8b4680e
0.7.1 released 2023-05-12
-------------------------
- allow request loaders to be specified directly to requires decorators (cd42358_)
.. _cd42358: https://github.com/level12/keg-auth/commit/cd42358
0.7.0 released 2023-03-03
-------------------------
- support SQLAlchemy 2.0 (88a6173_)
- support keg testing app context changes (d0ec64f_)
.. _88a6173: https://github.com/level12/keg-auth/commit/88a6173
.. _d0ec64f: https://github.com/level12/keg-auth/commit/d0ec64f
0.6.2 released 2022-12-20
-------------------------
- trap the unknown hash error to prevent invalid password data from causing app errors refs #160 (5f2b721_)
.. _5f2b721: https://github.com/level12/keg-auth/commit/5f2b721
0.6.1 released 2022-12-15
-------------------------
- support multiple db sessions when running auth tests (a5cab4a_)
- fixed upgrade notes in documentation (b537bba_)
.. _a5cab4a: https://github.com/level12/keg-auth/commit/a5cab4a
.. _b537bba: https://github.com/level12/keg-auth/commit/b537bba
0.6.0 released 2022-12-12
-------------------------
- update documentation of breaking changes (1ebb337_)
- **BC break** support keg-elements 0.8.0 (6d4b251_)
- log attempts when form validation fails, and when csrf doesn't validate (60edacb_)
- resolve field order error when disabled_utc missing from user form fields (0e2ae74_)
- document known data migration issue (23ec6fe_)
- pin python-ldap to version in package index (0b1d2b7_)
- apply workaround to support testing with flask-login 0.6.2 (d1446a9_)
- drop deprecated OIDC code and any remaining python 2 references (10b1144_)
.. _1ebb337: https://github.com/level12/keg-auth/commit/1ebb337
.. _6d4b251: https://github.com/level12/keg-auth/commit/6d4b251
.. _60edacb: https://github.com/level12/keg-auth/commit/60edacb
.. _0e2ae74: https://github.com/level12/keg-auth/commit/0e2ae74
.. _23ec6fe: https://github.com/level12/keg-auth/commit/23ec6fe
.. _0b1d2b7: https://github.com/level12/keg-auth/commit/0b1d2b7
.. _d1446a9: https://github.com/level12/keg-auth/commit/d1446a9
.. _10b1144: https://github.com/level12/keg-auth/commit/10b1144
0.5.7 released 2022-08-12
-------------------------
- prevent attempt tests from failing when certain config values are set in app (b2f7e27_)
.. _b2f7e27: https://github.com/level12/keg-auth/commit/b2f7e27
0.5.6 released 2022-08-12
-------------------------
- skip attempt tests during execution to avoid import order issues (8ea6f57_)
.. _8ea6f57: https://github.com/level12/keg-auth/commit/8ea6f57
0.5.5 released 2022-08-10
-------------------------
- flash on login for users disabled by date, autoclear disabled date when re-enabling (9330f62_)
.. _9330f62: https://github.com/level12/keg-auth/commit/9330f62
0.5.4 released 2022-07-08
-------------------------
- case insensitive match on user id (d01c310_)
- use relative URLs in tests (6d6f959_)
.. _d01c310: https://github.com/level12/keg-auth/commit/d01c310
.. _6d6f959: https://github.com/level12/keg-auth/commit/6d6f959
0.5.3 released 2022-02-24
-------------------------
- fix integrated auth tests (4318826_)
.. _4318826: https://github.com/level12/keg-auth/commit/4318826
0.5.2 released 2022-02-24
-------------------------
- add OAuth authenticator to replace deprecated OIDC implementation (606c952_)
- add basic user/group/bundle CRUD tests to the integrated auth tests (0c84a2d_)
- **BC break** require rate-limiting setup by default, simplify configuration (7d7b532_)
.. _606c952: https://github.com/level12/keg-auth/commit/606c952
.. _0c84a2d: https://github.com/level12/keg-auth/commit/0c84a2d
.. _7d7b532: https://github.com/level12/keg-auth/commit/7d7b532
0.5.1 released 2022-02-22
-------------------------
- warn on usage of OIDC authenticator due to current breakage in flask-oidc (c582781_)
- **potential BC break** use keg-elements field ordering scheme on the User form (ee31b79_)
- add class and code options to NavItems for better control of rendering (2842cc2_)
- clear flask session on logout, behavior can be turned off via config setting (71e6b10_)
- stop overriding a title block in templates, use config value to set the proper variable for the app template (210f227_)
- load orm entity in CRUD method (89bc7d4_)
.. _c582781: https://github.com/level12/keg-auth/commit/c582781
.. _ee31b79: https://github.com/level12/keg-auth/commit/ee31b79
.. _2842cc2: https://github.com/level12/keg-auth/commit/2842cc2
.. _71e6b10: https://github.com/level12/keg-auth/commit/71e6b10
.. _210f227: https://github.com/level12/keg-auth/commit/210f227
.. _89bc7d4: https://github.com/level12/keg-auth/commit/89bc7d4
0.5.0 released 2022-02-21
-------------------------
- use the Bootstrap 4 base form template from keg-elements (16c393a_)
- shift to authlib for verification token generate/verify - support generated itsdangerous tokens for now refs #147 (e96ac2e_)
.. _16c393a: https://github.com/level12/keg-auth/commit/16c393a
.. _e96ac2e: https://github.com/level12/keg-auth/commit/e96ac2e
0.4.2 released 2022-01-20
-------------------------
- replace commonmark with markdown-it-py (8b4822d_)
.. _8b4822d: https://github.com/level12/keg-auth/commit/8b4822d
0.4.1 released 2021-11-29
-------------------------
- fix navigation use of callable permissions on classes/blueprints (f19f513_)
- user form: don't assume csrf_token field exists (07fe642_)
- improve testing developer ux (b687c72_)
.. _f19f513: https://github.com/level12/keg-auth/commit/f19f513
.. _07fe642: https://github.com/level12/keg-auth/commit/07fe642
.. _b687c72: https://github.com/level12/keg-auth/commit/b687c72
0.4.0 released 2021-09-13
-------------------------
- ensure grid header posts are supported (e0638dc_)
- shift to use Bootstrap 4 templates by default (39335bc_)
- centralize validation of permission sets in testing (9f04f1d_)
- ViewTestBase no longer delete users in setup, and provide hooks into user creation (7d72fc3_)
- enhance navigation menu options for login/logout cases (667a1ac_)
- rename package for proper semantics (6a6a202_)
.. _e0638dc: https://github.com/level12/keg-auth/commit/e0638dc
.. _39335bc: https://github.com/level12/keg-auth/commit/39335bc
.. _9f04f1d: https://github.com/level12/keg-auth/commit/9f04f1d
.. _7d72fc3: https://github.com/level12/keg-auth/commit/7d72fc3
.. _667a1ac: https://github.com/level12/keg-auth/commit/667a1ac
.. _6a6a202: https://github.com/level12/keg-auth/commit/6a6a202
0.3.0 released 2021-07-06
-------------------------
- click changed output for hidden inputs, resolve for set-password CLI (6cd5a09_)
- update python requirements and pip usage (760da0b_)
- add options to exclude specific HTTP methods from auth checks (b66d090_)
- update JWT usage to reflect flask-jwt-extended 4.0 breaking changes (1cd0895_)
- switch ldap requirement to python-ldap (63485f3_)
.. _6cd5a09: https://github.com/level12/keg-auth/commit/6cd5a09
.. _760da0b: https://github.com/level12/keg-auth/commit/760da0b
.. _b66d090: https://github.com/level12/keg-auth/commit/b66d090
.. _1cd0895: https://github.com/level12/keg-auth/commit/1cd0895
.. _63485f3: https://github.com/level12/keg-auth/commit/63485f3
0.2.28 released 2021-04-20
--------------------------
- support args in http head requests (97f8961_)
- pin flask-jwt-extended < 4 until we support the update
.. _97f8961: https://github.com/level12/keg-auth/commit/97f8961
0.2.27 released 2021-02-02
--------------------------
- fix documentation of internationalization support (8a41f03_)
- make form/crud templates less opinionated about how base templates render page title (0b71303_)
.. _8a41f03: https://github.com/level12/keg-auth/commit/8a41f03
.. _0b71303: https://github.com/level12/keg-auth/commit/0b71303
0.2.26 released 2021-01-29
--------------------------
- Provide Sphinx documentation (62aca54_)
- Provide a default JS handler for confirm-delete in crud-list (7b6785a_)
- Use marksafe and jinja templates instead of webhelpers2 (8f68e07_)
- Allow user to prevent sending welcome email after user form (3bb8f7a_)
- Validate that create_form returned a value (83ff034_)
- Trap integrity error on permission sync to mitigate race condition (4d7497c_)
- Move disabled_utc to be with the other fields (dd1bf5e_)
.. _62aca54: https://github.com/level12/keg-auth/commit/62aca54
.. _7b6785a: https://github.com/level12/keg-auth/commit/7b6785a
.. _8f68e07: https://github.com/level12/keg-auth/commit/8f68e07
.. _3bb8f7a: https://github.com/level12/keg-auth/commit/3bb8f7a
.. _83ff034: https://github.com/level12/keg-auth/commit/83ff034
.. _4d7497c: https://github.com/level12/keg-auth/commit/4d7497c
.. _dd1bf5e: https://github.com/level12/keg-auth/commit/dd1bf5e
0.2.25 released 2020-12-08
--------------------------
- CRUD view passes through args set with self.assign (efeb7b7_)
- CRUD view edit/delete performs authorization prior to ID lookup (efeb7b7_)
- CRUD view added webgrid render limit handling (efeb7b7_)
.. _efeb7b7: https://github.com/level12/keg-auth/commit/efeb7b7
0.2.24 released 2020-07-09
--------------------------
- Fix inconsistent CLI argument ordering in tests (d9a62c0_)
.. _d9a62c0: https://github.com/level12/keg-auth/commit/d9a62c0
0.2.23 released 2020-06-11
--------------------------
- Allow applications to enforce custom password policies (7111c20_)
- Check translations in CI (825d32e_)
.. _7111c20: https://github.com/level12/keg-auth/commit/7111c20
.. _825d32e: https://github.com/level12/keg-auth/commit/825d32e
0.2.22 released 2020-04-16
--------------------------
- Allow rate-limiting of login and password resets (d243b75_)
- Add more config flexibility for OIDC (39beae0_)
.. _d243b75: https://github.com/level12/keg-auth/commit/d243b75
.. _39beae0: https://github.com/level12/keg-auth/commit/39beae0
0.2.21 released 2020-04-02
--------------------------
- Resolve fuzzy/missing translations (a78de96_)
- Add inactivation date for users (requires migration to add a field) (0020fbd_)
- Support latest Flask-Login (ba59925_)
- Allow unverified users to reset their passwords (8888386_)
- Pin keg-elements requirement to support CRUD checkboxes (e59fcc1_)
- Include an Allow header for 405 responses (a2a3091_)
- Support multiple LDAP targets (b895aad_)
- Handle HEAD requests (b16a7e4_)
- Remove six dependency (477a415_)
.. _a78de96: https://github.com/level12/keg-auth/commit/a78de96
.. _0020fbd: https://github.com/level12/keg-auth/commit/0020fbd
.. _ba59925: https://github.com/level12/keg-auth/commit/ba59925
.. _8888386: https://github.com/level12/keg-auth/commit/8888386
.. _e59fcc1: https://github.com/level12/keg-auth/commit/e59fcc1
.. _a2a3091: https://github.com/level12/keg-auth/commit/a2a3091
.. _b895aad: https://github.com/level12/keg-auth/commit/b895aad
.. _b16a7e4: https://github.com/level12/keg-auth/commit/b16a7e4
.. _477a415: https://github.com/level12/keg-auth/commit/477a415
0.2.20 released 2020-03-24
--------------------------
- OIDC and related updates (fab68f5_)
- Add OIDC authenticator and login/logout view responders
- Fix missing page header for Permissions view
- Allow passing blueprint kwargs to make_blueprint
- Easier disabling of specific auth views
- Allow view responder flash messages to be disabled
- Drop bulk permission controls (better templating now in keg-elements)
.. _fab68f5: https://github.com/level12/keg-auth/commit/fab68f5
0.2.19 released 2020-02-21
--------------------------
- Improve Usability of Permission Dropdown (479e985_)
- Pin Flask Login (00ea957_)
.. _479e985: https://github.com/level12/keg-auth/commit/479e985
.. _00ea957: https://github.com/level12/keg-auth/commit/00ea957
0.2.18 released 2020-01-10
--------------------------
- add CLI command for dev to set password (d488bc9_)
.. _d488bc9: https://github.com/level12/keg-auth/commit/d488bc9
0.2.17 released 2019-12-12
--------------------------
- ensure token is present for resending verification email (01b566f_)
.. _01b566f: https://github.com/level12/keg-auth/commit/01b566f
0.2.16 released 2019-12-02
--------------------------
- fix CRUD edit form default values for relationships (01893f9_)
.. _01893f9: https://github.com/level12/keg-auth/commit/01893f9
0.2.15 released 2019-11-27
--------------------------
- fix bundle grid setup for CRUD view (b772f01_)
.. _b772f01: https://github.com/level12/keg-auth/commit/b772f01
0.2.14 released 2019-11-21
--------------------------
- fix template issue related to select2 updates (373739b_)
- make auth testing helpers more generic (b90ee96_)
.. _373739b: https://github.com/level12/keg-auth/commit/373739b
.. _b90ee96: https://github.com/level12/keg-auth/commit/b90ee96
0.2.13 released 2019-11-08
--------------------------
- use select2 to render selects on the user management views (30ff332_)
- fix breakage with keg 0.8.1 (3f5668d_)
- adjust CI environments to use (b9b4fb4_)
- auth test helpers use endpoints to find correct url (76a1222_)
.. _30ff332: https://github.com/level12/keg-auth/commit/30ff332
.. _3f5668d: https://github.com/level12/keg-auth/commit/3f5668d
.. _b9b4fb4: https://github.com/level12/keg-auth/commit/b9b4fb4
.. _76a1222: https://github.com/level12/keg-auth/commit/76a1222
0.2.12 released 2019-10-03
--------------------------
- support decorating flask class-based views (3d8a6cb_)
- fix LDAP authenticator for missing user case (19d184e_)
.. _3d8a6cb: https://github.com/level12/keg-auth/commit/3d8a6cb
.. _19d184e: https://github.com/level12/keg-auth/commit/19d184e
0.2.11 released 2019-09-27
--------------------------
- fix permission sync method and test hook (a56eda4_)
- fix FontAwesome usage on CRUD list view template (64f759a_)
- support lazy strings and icons in navigation helpers and templates (4473571_)
- remove flask version pin (ab47362_)
.. _a56eda4: https://github.com/level12/keg-auth/commit/a56eda4
.. _64f759a: https://github.com/level12/keg-auth/commit/64f759a
.. _4473571: https://github.com/level12/keg-auth/commit/4473571
.. _ab47362: https://github.com/level12/keg-auth/commit/ab47362
0.2.10 released 2019-09-18
--------------------------
- fix testing utils mock import to prevent needing mock dependency (da197df_)
.. _da197df: https://github.com/level12/keg-auth/commit/da197df
0.2.9 released 2019-07-27
-------------------------
- Provide a hook on the CRUD base class to allow overriding the default add url generation (#74) (7eea8bb_)
.. _7eea8bb: https://github.com/level12/keg-auth/commit/7eea8bb
0.2.8 released 2019-06-17
-------------------------
- resolve bug in testing permission existence check (feccb98_)
.. _feccb98: https://github.com/level12/keg-auth/commit/feccb98
0.2.7 released 2019-06-07
-------------------------
- make custom action access control easier (63921ee_)
- enforce test permissions are specified to the auth manager (794f320_)
- correct the MRO order in CRUD forms and testing models (2f4c451_)
- add get_current_user helper method (cae02a2_)
- make grid action column link CSS classes customizable (aa1bc21_)
- ensure CRUD view passes in desired template args (aae3dad_)
.. _63921ee: https://github.com/level12/keg-auth/commit/63921ee
.. _794f320: https://github.com/level12/keg-auth/commit/794f320
.. _2f4c451: https://github.com/level12/keg-auth/commit/2f4c451
.. _cae02a2: https://github.com/level12/keg-auth/commit/cae02a2
.. _aa1bc21: https://github.com/level12/keg-auth/commit/aa1bc21
.. _aae3dad: https://github.com/level12/keg-auth/commit/aae3dad
0.2.6 released 2019-02-12
-------------------------
- Merge pull request #60 from level12/move-sync-perms-to-entity (3181691_)
- update readme to remove reference to view-scoped authenticators (514c202_)
.. _3181691: https://github.com/level12/keg-auth/commit/3181691
.. _514c202: https://github.com/level12/keg-auth/commit/514c202
0.2.5 released 2018-11-14
-------------------------
- Allow make_blueprint to accept a custom blueprint class (fe635b2_)
- Add a link to resend verification email (f7a6191_)
- Add optional i18n support using morphi (790d3ab_)
- Fix intermittent test failure resulting from login timestamp (cde083b_)
- Refactor CRUD form/grid render to extract template args (34d4a20_)
.. _fe635b2: https://github.com/level12/keg-auth/commit/fe635b2
.. _f7a6191: https://github.com/level12/keg-auth/commit/f7a6191
.. _790d3ab: https://github.com/level12/keg-auth/commit/790d3ab
.. _cde083b: https://github.com/level12/keg-auth/commit/cde083b
.. _34d4a20: https://github.com/level12/keg-auth/commit/34d4a20
0.2.4
------------------
- Show verification URL on CLI even if mail flag is off
0.2.3
------------------
- Fix requires_user decorator for usage with blueprints
0.2.1
------------------
- Fix nav items to cache on per user basis
- Fix token generated in CLI having an unknown timezone applied
0.2.0
------------------
- Support permissions
- Decorate blueprints, classes, methods for user/permission requirements
- Support request loaders for tokens
0.1.0
------------------
- Initial release
| PypiClean |
/GeoNode-3.2.0-py3-none-any.whl/geonode/monitoring/frontend/monitoring/src/components/organisms/alert-list/index.js | import React from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import RaisedButton from 'material-ui/RaisedButton';
import CircularProgress from 'material-ui/CircularProgress';
import SettingsIcon from 'material-ui/svg-icons/action/settings';
import HoverPaper from '../../atoms/hover-paper';
import Alert from '../../cels/alert';
import actions from './actions';
import styles from './styles';
import {withRouter} from 'react-router-dom';
// Map the redux store onto AlertList props: the alert payload and request
// status, plus the polling interval and the timestamp of the last refresh.
const mapStateToProps = (state) => {
  const { alertList, interval } = state;
  return {
    alerts: alertList.response,
    interval: interval.interval,
    status: alertList.status,
    timestamp: interval.timestamp,
  };
};
@connect(mapStateToProps, actions)
class AlertList extends React.Component {
static propTypes = {
alerts: PropTypes.object,
get: PropTypes.func.isRequired,
interval: PropTypes.number,
status: PropTypes.string,
timestamp: PropTypes.instanceOf(Date),
}
constructor(props) {
super(props);
this.handleClick = () => {
this.props.history.push('/alerts-settings');
};
this.get = (interval = this.props.interval) => {
this.props.get(interval);
};
}
componentWillMount() {
this.get();
}
render() {
const rawAlerts = this.props.alerts;
let alerts;
if (this.props.status === 'pending') {
alerts = (
<div style={styles.spinner}>
<CircularProgress size={80} />
</div>
);
} else {
alerts = rawAlerts && rawAlerts.data && rawAlerts.data.problems.length > 0
? rawAlerts.data.problems.map((alert, index) => (
<Alert
key={index}
alert={alert}
/>
))
: [];
}
return (
<HoverPaper style={styles.content}>
<div style={styles.header}>
<h3>Alerts</h3>
<RaisedButton
onClick={this.handleClick}
style={styles.icon}
icon={<SettingsIcon />}
/>
</div>
{alerts}
</HoverPaper>
);
}
}
// withRouter supplies the `history` prop used by handleClick to navigate.
export default withRouter(AlertList);
/KataBankOCR-0.8.0.tar.gz/KataBankOCR-0.8.0/parse/test/fixture_methods.py | "methods that provide values for testing and an interface to constants"
import os
import copy
import random
from toolz import pipe, curry
from toolz.curried import nth
from toolz.curried import map as cmap
from parse import settings
import parse
from common_tools import get_one_or_more, adulterate_iterable, bad_length_duplicator
import fixture_constants
# Curried element extractors: each maps a sequence of tuples to the tuple
# element at the given position, e.g. _third_elements(rows) -> row[2] of
# each row.  NOTE(review): this module relies on Python 2 builtins elsewhere
# (``basestring``, indexable dict views), so ``map`` here returns lists.
_first_elements = cmap(curry(nth, 0))
_second_elements = cmap(curry(nth, 1))
_third_elements = cmap(curry(nth, 2))
_fourth_elements = cmap(curry(nth, 3))
_fifth_elements = cmap(curry(nth, 4))
class Numerals:
    "methods that provide Numerals for testing"
    @classmethod
    def get_random(cls, count=None):
        "return random valid Numeral[s]"
        # get_one_or_more presumably returns a single value when count is
        # None and a collection otherwise -- confirm in common_tools
        getter = lambda: random.choice(cls.valid())
        return get_one_or_more(getter, count)
    @staticmethod
    def valid():
        "return ordered tuple of valid Numerals"
        return pipe(settings.valid_numerals, sorted, tuple)
    @classmethod
    def invalid(cls):
        "return tuple of arbitrary basestrings that includes no valid numerals"
        chars = ArbitraryValues.single_character_basestrings()
        not_valid = lambda value: value not in cls.valid()
        # curried filter: a callable awaiting the iterable, for use in pipe()
        limit_to_invalid = curry(filter, not_valid)
        return pipe(chars, limit_to_invalid, tuple)
class Accounts:
    "methods that provide Accounts for testing"
    @staticmethod
    def get_random(count=None):
        "return random Account[s]"
        # an Account is one Numeral per Figure position of an Entry
        getter = lambda: ''.join(Numerals.get_random(settings.figures_per_entry))
        return get_one_or_more(getter, count)
    @staticmethod
    def of_example_accounts():
        "return Accounts from superpositions of example Accounts"
        # element 1 of each example_accounts row holds the Account
        return _second_elements(fixture_constants.example_accounts)
    @staticmethod
    def of_basic_input_file():
        "return Accounts from basic input file"
        return fixture_constants.BasicInputFile.accounts
    @staticmethod
    def of_flawed_accounts():
        "return [in]valid Accounts from Superpositions with flaws"
        # element 3 of each 5-tuple row (see Superpositions._by_flawed_figure_index)
        return _fourth_elements(fixture_constants.flawed_accounts)
class Figures:
    "methods that provide Figures for testing"
    @staticmethod
    def get_random(count=None):
        "return random valid Figure[s]"
        # NOTE(review): random.choice over dict.keys() requires Python 2,
        # where keys() returns a list
        getter = lambda: random.choice(settings.figures.keys())
        return get_one_or_more(getter, count)
    @staticmethod
    def from_numeral(numeral):
        "return the Figure that represents the given Numeral"
        # reverse lookup in the settings.figures mapping (Figure -> Numeral);
        # indexing keys()/values() like this is also Python 2 only
        return settings.figures.keys()[settings.figures.values().index(numeral)]
    @classmethod
    def from_account(cls, account):
        "return the Figures that represent the given Account"
        return pipe(account, tuple, cmap(cls.from_numeral), tuple)
    @classmethod
    def valid(cls):
        "return ordered Figures representing Numerals 0-9"
        return cmap(cls.from_numeral, Numerals.valid())
    @staticmethod
    def flawed():
        "return flawed Figures from fixture_constants"
        # element 0 of each flawed_figures row holds the Figure
        return pipe(fixture_constants.flawed_figures, _first_elements)
class Strokes:
    "methods that provide Strokes for testing"
    @classmethod
    def get_random(cls, count=None):
        "return random valid Stroke[s]"
        getter = lambda: random.choice(cls.valid())
        return get_one_or_more(getter, count)
    @staticmethod
    def valid():
        "return ordered tuple of valid Strokes"
        return pipe(settings.valid_strokes, sorted, tuple)
    @classmethod
    def invalid(cls):
        "return tuple of arbitrary single-char basestrings with 0 valid strokes"
        # mirrors Numerals.invalid: arbitrary single characters minus the
        # characters that are valid strokes
        chars = ArbitraryValues.single_character_basestrings()
        not_valid = lambda value: value not in cls.valid()
        limit_to_invalid = curry(filter, not_valid)
        return pipe(chars, limit_to_invalid, tuple)
class Lines:
    "methods that provide Lines for testing"
    @staticmethod
    def get_random(count=None):
        "return random Line[s] composed of Strokes"
        # BUG FIX: ``count`` was previously ignored -- callers requesting
        # several Lines silently received a single one.  Route through
        # get_one_or_more exactly like every other fixture class, which
        # keeps the count=None (single Line) behavior unchanged.
        getter = lambda: pipe(settings.strokes_per_line, Strokes.get_random, ''.join)
        return get_one_or_more(getter, count)
    @staticmethod
    def of_invalid_types():
        "return arbitrary values that do not include any basestrings"
        return ArbitraryValues.non_basestrings()
    @classmethod
    def of_invalid_lengths(cls):
        "return Lines of invalid length"
        # duplicates a valid Line at too-short/too-long lengths
        return bad_length_duplicator(cls.get_random())
    @classmethod
    def with_invalid_strokes(cls):
        "return tuple of Lines that each include an invalid stroke"
        return pipe(Strokes.invalid(), cmap(cls._by_invalid_stroke), tuple)
    @classmethod
    def _by_invalid_stroke(cls, invalid_stroke):
        "return a Line that includes an invalid stroke"
        # splice the invalid stroke into an otherwise valid random Line
        return adulterate_iterable(cls.get_random(), invalid_stroke)
class Entries:
    "methods that provide Entries for testing"
    @classmethod
    def get_random(cls, count=None):
        "return one or more random Entries"
        getter = lambda: cls.from_account(Accounts.get_random())
        return get_one_or_more(getter, count)
    @classmethod
    def from_account(cls, account):
        "return the Entry (tuple of Lines) that represents the given Account"
        return pipe(account, Figures.from_account, cls.from_figures)
    @classmethod
    def from_figures(cls, figures):
        "return the Entry (tuple of Lines) that contains the given Figures"
        # build one Line per Entry row index (0..lines_per_entry-1)
        lines = cmap(curry(cls._line_by_index, figures))
        return pipe(settings.lines_per_entry, range, lines, tuple)
    @classmethod
    def _line_by_index(cls, figures, line_index):
        "return a Line composed of Figures Substrings"
        # take each Figure's slice for this row and concatenate them
        substrings = cmap(curry(cls._substring_by_index, line_index))
        return pipe(figures, substrings, ''.join)
    @staticmethod
    def _substring_by_index(line_index, figure):
        "return Substring of Figure by Entry Line index"
        # a Figure is stored row-major: row i occupies strokes
        # [i*strokes_per_substring, (i+1)*strokes_per_substring)
        start_index = line_index * settings.strokes_per_substring
        end_index = start_index + settings.strokes_per_substring
        return figure[start_index:end_index]
    @classmethod
    def of_basic_input_file(cls):
        "return Entries from basic input file"
        return pipe(Accounts.of_basic_input_file(), cmap(cls.from_account), tuple)
class Superpositions:
    "methods that provide Superpositions for testing"
    @classmethod
    def from_numeral(cls, numeral):
        "return Superposition of Figure representing Numeral"
        # valid numerals and valid-figure superpositions share an ordering
        return cls.of_valid_figures()[Numerals.valid().index(numeral)]
    @classmethod
    def from_account(cls, account):
        "return list of Superpositions from Figures in Account's Numerals"
        # NOTE(review): relies on Python 2 ``map`` returning a list (the
        # result is concatenated with ``+`` in _by_flawed_figure_index)
        return map(cls.from_numeral, account)
    @staticmethod
    def of_valid_figures():
        "return tuple of Superpositions for all un-flawed Figures"
        # deep copy so callers may mutate their Superpositions freely
        return copy.deepcopy(fixture_constants.valid_figure_superpositions)
    @classmethod
    def of_example_accounts(cls):
        "return lists of Superpositions made from example accounts"
        return pipe(fixture_constants.example_accounts, _first_elements, cmap(cls.from_account))
    @staticmethod
    def of_flawed_figures():
        "return tuple of Superpositions of flawed figures"
        return pipe(fixture_constants.flawed_figures, _second_elements, tuple)
    @classmethod
    def of_flawed_accounts(cls):
        "return tuple of Superpositions of Accounts including flawed figures"
        superpositions = lambda indexes: (cls._by_flawed_figure_index(i) for i in indexes)
        return pipe(fixture_constants.flawed_accounts, len, range, superpositions, tuple)
    @classmethod
    def _by_flawed_figure_index(cls, flawed_figure_index):
        "return Superpositions of an Account including a flawed figure"
        flawed_account = fixture_constants.flawed_accounts[flawed_figure_index]
        # NOTE: the unpack below intentionally rebinds ``flawed_figure_index``
        # from "row index into flawed_accounts" to "index into the flawed
        # figures table" (tuple element 1) -- confusing but deliberate.
        account_prefix, flawed_figure_index, account_suffix, _, _ = flawed_account
        flawed_figure_superposition = cls.of_flawed_figures()[flawed_figure_index]
        prefix_superpositions = cls.from_account(account_prefix)
        suffix_superpositions = cls.from_account(account_suffix)
        return prefix_superpositions + [flawed_figure_superposition] + suffix_superpositions
class Results:
    "methods that provide Results for testing"
    @staticmethod
    def of_example_accounts():
        "return Results from example accounts"
        # row element 2 of each example account holds its Result
        return tuple(_third_elements(fixture_constants.example_accounts))
    @staticmethod
    def of_basic_input_file():
        "return Results from the basic input file"
        return fixture_constants.BasicInputFile.results
    @staticmethod
    def of_advanced_input_file():
        "return Results from the advanced input file"
        return fixture_constants.AdvancedInputFile.results
    @staticmethod
    def of_flawed_accounts():
        "return Results of Accounts including flawed figures"
        # row element 4 of each flawed account holds its Result
        return tuple(_fifth_elements(fixture_constants.flawed_accounts))
class ArbitraryValues:
    "methods that provide arbitrary values for testing"
    # grab-bag of values covering numbers, containers, types, strings and
    # unicode literals; filtered by the helpers below.
    # NOTE(review): ``basestring`` and list-returning ``filter`` mean this
    # class is Python 2 only.
    _all = (0, 1, -10, -999999999, 123456789, 3.14159, -.00000000001,
            False, True, [], (), {}, '', None, object, int, list, dict, bool,
            [1, 2, 3], {1: 2}, {0}, (1, 2, 3), {1: 'a', 2: 'b'},
            'abc', '|', '-', '\r', 'foo', '1', '0', 'c', '=', '\t', '\r',
            u'abc', u'|', u'-', u'\r', u'foo', u'1', u'0', u'c', u'=', u'\t', u'\r',
            )
    @classmethod
    def non_iterables(cls):
        "return a list of arbitrary values over which one cannot iterate"
        not_iterable = lambda value: not cls._iterable(value)
        return filter(not_iterable, cls._all)
    @staticmethod
    def _iterable(value):
        "return True if value iterable"
        try:
            iterator = iter(value)
            return True
        except TypeError:
            return False
    @classmethod
    def single_character_basestrings(cls):
        "return list of arbitrary single character basestrings"
        litmus = lambda value: len(value) == 1
        return filter(litmus, cls.basestrings())
    @classmethod
    def basestrings(cls):
        "return list of arbitrary basestrings"
        litmus = lambda value: isinstance(value, basestring)
        return filter(litmus, cls._all)
    @classmethod
    def non_basestrings(cls):
        "return set of arbitrary values that includes no basestrings"
        # NOTE(review): docstring says "set" but filter() yields a list
        litmus = lambda value: not isinstance(value, basestring)
        return filter(litmus, cls._all)
    @classmethod
    def of_different_type(cls, value_or_type):
        "Return an arbitrary value not of value_or_type"
        avoided_type = cls._type(value_or_type)
        different_type = lambda value: not isinstance(value, avoided_type)
        return filter(different_type, cls._all)
    @staticmethod
    def _type(value_or_type):
        "return expected type"
        # accept either an instance or a type and normalize to a type
        if isinstance(value_or_type, type):
            return value_or_type
        else:
            return type(value_or_type)
class Paths:
    "methods that provide Paths for testing"
    # fixture input files live next to the fixture_constants module
    _input_files_parent_directory = os.path.dirname(fixture_constants.__file__)
    _path_to_input_files = os.path.join(_input_files_parent_directory, 'input_files')
    @classmethod
    def basic_input_file(cls):
        "return the path to the basic input file"
        return os.path.join(cls._path_to_input_files,
                            fixture_constants.BasicInputFile.file_name)
    @classmethod
    def advanced_input_file(cls):
        "return the path to the advanced input file"
        return os.path.join(cls._path_to_input_files,
                            fixture_constants.AdvancedInputFile.file_name)
    @classmethod
    def parse(cls):
        "return the path to the parse application"
        return os.path.join(os.path.dirname(parse.__file__), 'parse')
/Hatak_Toster-0.2.1.tar.gz/Hatak_Toster-0.2.1/src/haplugin/toster/cases.py | from mock import MagicMock
from hatak.unpackrequest import unpack
from toster import TestCase as BaseTestCase
class TestCase(BaseTestCase):
    # class attribute: shared cache, so state persists across test instances
    cache = {}
    def setUp(self):
        """Build a mocked Hatak request/registry and unpack it onto self."""
        super().setUp()
        self.request = MagicMock()
        self.request.registry = {
            'db': MagicMock(),
            # real unpacker from the application under test
            'unpacker': self.runner.application.unpacker,
            'settings': {},
            'paths': {},
        }
        # copies request attributes (db, settings, ...) onto the test case
        unpack(self, self.request)
class ControllerPluginTests(TestCase):
    def setUp(self):
        """Instantiate the plugin under test with mocked parent/controller.

        ``prefix_from`` presumably resolves the class under test from the
        test-case name -- confirm in the toster base class.
        """
        super().setUp()
        self.controller = MagicMock()
        self.parent = MagicMock()
        self.plugin = self.prefix_from(self.parent, self.controller)
class ModelTestCase(TestCase):
    def setUp(self):
        """Instantiate the model under test (no constructor arguments)."""
        super().setUp()
        self.model = self.prefix_from()
class FormTestCase(TestCase):
    def setUp(self):
        """Instantiate the form under test with CSRF validation mocked out."""
        super().setUp()
        # disable CSRF checking so forms can be submitted in tests
        self.add_mock('CsrfMustMatch', prefix='haplugin.formskit.models.')
        self.form = self.prefix_from(self.request)
    def _create_fake_post(self, data):
        """Populate the mocked POST with ``data`` plus the form-name field.

        ``self.POST`` comes from unpack() in TestCase.setUp.
        """
        defaults = {
            self.form.form_name_value: [self.form.get_name(), ]
        }
        defaults.update(data)
        self.POST.dict_of_lists.return_value = defaults
class ControllerTestCase(TestCase):
    def setUp(self):
        """Instantiate the controller under test with a mocked root tree."""
        super().setUp()
        # expose the application's real controller plugins to the request
        self.request.registry['controller_plugins'] = (
            self.runner.application.controller_plugins)
        self.root_tree = MagicMock()
        self.controller = self.prefix_from(self.root_tree, self.request)
        # aliases so tests can read/write controller state directly
        self.data = self.controller.data = {}
        self.matchdict = self.controller.matchdict = {}
class SqlTestCase(TestCase):
    # marks this case as needing a real database (test-group selection)
    groups = ('sql',)
    def setUp(self):
        """Swap the mocked db for a real session and re-unpack the request."""
        super().setUp()
        self.request.db = self.runner.get_db()
        unpack(self, self.request)
class SqlControllerTestCase(ControllerTestCase):
    # Marks these tests as belonging to the 'sql' group (needs a database).
    groups = ('sql',)
    def setUp(self):
        """Controller tests backed by a real database session."""
        super().setUp()
        self.request.db = self.runner.get_db()
        # Re-unpack both the test case and the controller so their db
        # attributes point at the real session.
        unpack(self, self.request)
        unpack(self.controller, self.request)
        self.matchdict = self.controller.matchdict = {}
class SqlFormTestCase(FormTestCase):
    # Marks these tests as belonging to the 'sql' group (needs a database).
    groups = ('sql',)
    def setUp(self):
        """Form tests backed by a real database session."""
        super().setUp()
        self.request.db = self.runner.get_db()
        # Re-unpack both the test case and the form with the real session.
        unpack(self, self.request)
        unpack(self.form, self.request)
class PluginTestCase(TestCase):
    """Base class for application-plugin tests."""
    def setUp(self):
        """Create the plugin under test and mock its application/config.

        ``self.app`` and ``self.config`` alias the plugin's own mocked
        application so assertions can be made on either handle.
        """
        # Fix: stripped "| PypiClean" extraction junk fused onto the last
        # line, which made this method a syntax error.
        super().setUp()
        self.plugin = self.prefix_from()
        self.app = self.plugin.app = MagicMock()
        self.config = self.app.config
import numpy as np
import torch
import torch.nn as nn
from mmcv.cnn import ConvModule
from mmcv.ops import DeformConv2d
from mmdet.core import (build_assigner, build_sampler, images_to_levels,
multi_apply, unmap)
from mmdet.core.anchor.point_generator import MlvlPointGenerator
from mmdet.core.utils import filter_scores_and_topk
from ..builder import HEADS, build_loss
from .anchor_free_head import AnchorFreeHead
@HEADS.register_module()
class RepPointsHead(AnchorFreeHead):
    """RepPoint head.
    Args:
        num_classes (int): Number of object categories.
        in_channels (int): Number of channels in the input feature map.
        point_feat_channels (int): Number of channels of points features.
        num_points (int): Number of representative points per location;
            must be an odd square (default 9 -> 3x3 DCN kernel).
        gradient_mul (float): The multiplier to gradients from
            points refinement and recognition.
        point_strides (Iterable): points strides.
        point_base_scale (int): bbox scale for assigning labels.
        loss_cls (dict): Config of classification loss.
        loss_bbox_init (dict): Config of initial points loss.
        loss_bbox_refine (dict): Config of points loss in refinement.
        use_grid_points (bool): If we use bounding box representation, the
            reppoints is represented as grid points on the bounding box.
        center_init (bool): Whether to use center point assignment.
        transform_method (str): The methods to transform RepPoints to bbox.
        init_cfg (dict or list[dict], optional): Initialization config dict.
    """ # noqa: W605
    def __init__(self,
                 num_classes,
                 in_channels,
                 point_feat_channels=256,
                 num_points=9,
                 gradient_mul=0.1,
                 point_strides=[8, 16, 32, 64, 128],
                 point_base_scale=4,
                 loss_cls=dict(
                     type='FocalLoss',
                     use_sigmoid=True,
                     gamma=2.0,
                     alpha=0.25,
                     loss_weight=1.0),
                 loss_bbox_init=dict(
                     type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=0.5),
                 loss_bbox_refine=dict(
                     type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0),
                 use_grid_points=False,
                 center_init=True,
                 transform_method='moment',
                 moment_mul=0.01,
                 init_cfg=dict(
                     type='Normal',
                     layer='Conv2d',
                     std=0.01,
                     override=dict(
                         type='Normal',
                         name='reppoints_cls_out',
                         std=0.01,
                         bias_prob=0.01)),
                 **kwargs):
        self.num_points = num_points
        self.point_feat_channels = point_feat_channels
        self.use_grid_points = use_grid_points
        self.center_init = center_init
        # we use deform conv to extract points features
        # The num_points points are laid out as a dcn_kernel x dcn_kernel
        # grid, hence num_points must be an odd perfect square.
        self.dcn_kernel = int(np.sqrt(num_points))
        self.dcn_pad = int((self.dcn_kernel - 1) / 2)
        assert self.dcn_kernel * self.dcn_kernel == num_points, \
            'The points number should be a square number.'
        assert self.dcn_kernel % 2 == 1, \
            'The points number should be an odd square number.'
        # Base (y, x) offsets of the regular DCN grid, flattened to
        # (1, 2*num_points, 1, 1) for broadcasting against offset maps.
        dcn_base = np.arange(-self.dcn_pad,
                             self.dcn_pad + 1).astype(np.float64)
        dcn_base_y = np.repeat(dcn_base, self.dcn_kernel)
        dcn_base_x = np.tile(dcn_base, self.dcn_kernel)
        dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape(
            (-1))
        self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1)
        super().__init__(
            num_classes,
            in_channels,
            loss_cls=loss_cls,
            init_cfg=init_cfg,
            **kwargs)
        self.gradient_mul = gradient_mul
        self.point_base_scale = point_base_scale
        self.point_strides = point_strides
        self.prior_generator = MlvlPointGenerator(
            self.point_strides, offset=0.)
        # FocalLoss handles pos/neg balance itself, so no sampler is needed.
        self.sampling = loss_cls['type'] not in ['FocalLoss']
        if self.train_cfg:
            self.init_assigner = build_assigner(self.train_cfg.init.assigner)
            self.refine_assigner = build_assigner(
                self.train_cfg.refine.assigner)
            # use PseudoSampler when sampling is False
            if self.sampling and hasattr(self.train_cfg, 'sampler'):
                sampler_cfg = self.train_cfg.sampler
            else:
                sampler_cfg = dict(type='PseudoSampler')
            self.sampler = build_sampler(sampler_cfg, context=self)
        self.transform_method = transform_method
        if self.transform_method == 'moment':
            # Learnable log-scale factors applied in points2bbox('moment').
            self.moment_transfer = nn.Parameter(
                data=torch.zeros(2), requires_grad=True)
            self.moment_mul = moment_mul
        self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False)
        if self.use_sigmoid_cls:
            self.cls_out_channels = self.num_classes
        else:
            # Softmax classification keeps an extra background channel.
            self.cls_out_channels = self.num_classes + 1
        self.loss_bbox_init = build_loss(loss_bbox_init)
        self.loss_bbox_refine = build_loss(loss_bbox_refine)
    def _init_layers(self):
        """Initialize layers of the head.

        Builds two shared conv towers (classification / regression), the
        initial-points branch (plain convs) and the refinement and
        classification branches (deformable convs driven by the predicted
        point offsets).
        """
        self.relu = nn.ReLU(inplace=True)
        self.cls_convs = nn.ModuleList()
        self.reg_convs = nn.ModuleList()
        for i in range(self.stacked_convs):
            chn = self.in_channels if i == 0 else self.feat_channels
            self.cls_convs.append(
                ConvModule(
                    chn,
                    self.feat_channels,
                    3,
                    stride=1,
                    padding=1,
                    conv_cfg=self.conv_cfg,
                    norm_cfg=self.norm_cfg))
            self.reg_convs.append(
                ConvModule(
                    chn,
                    self.feat_channels,
                    3,
                    stride=1,
                    padding=1,
                    conv_cfg=self.conv_cfg,
                    norm_cfg=self.norm_cfg))
        # Grid mode regresses a 4-d box delta; point mode regresses (x, y)
        # offsets for every representative point.
        pts_out_dim = 4 if self.use_grid_points else 2 * self.num_points
        self.reppoints_cls_conv = DeformConv2d(self.feat_channels,
                                               self.point_feat_channels,
                                               self.dcn_kernel, 1,
                                               self.dcn_pad)
        self.reppoints_cls_out = nn.Conv2d(self.point_feat_channels,
                                           self.cls_out_channels, 1, 1, 0)
        self.reppoints_pts_init_conv = nn.Conv2d(self.feat_channels,
                                                 self.point_feat_channels, 3,
                                                 1, 1)
        self.reppoints_pts_init_out = nn.Conv2d(self.point_feat_channels,
                                                pts_out_dim, 1, 1, 0)
        self.reppoints_pts_refine_conv = DeformConv2d(self.feat_channels,
                                                      self.point_feat_channels,
                                                      self.dcn_kernel, 1,
                                                      self.dcn_pad)
        self.reppoints_pts_refine_out = nn.Conv2d(self.point_feat_channels,
                                                  pts_out_dim, 1, 1, 0)
    def points2bbox(self, pts, y_first=True):
        """Converting the points set into bounding box.
        :param pts: the input points sets (fields), each points
            set (fields) is represented as 2n scalar.
        :param y_first: if y_first=True, the point set is represented as
            [y1, x1, y2, x2 ... yn, xn], otherwise the point set is
            represented as [x1, y1, x2, y2 ... xn, yn].
        :return: each points set is converting to a bbox [x1, y1, x2, y2].
        """
        # Split interleaved coordinates into separate x / y tensors.
        pts_reshape = pts.view(pts.shape[0], -1, 2, *pts.shape[2:])
        pts_y = pts_reshape[:, :, 0, ...] if y_first else pts_reshape[:, :, 1,
                                                                      ...]
        pts_x = pts_reshape[:, :, 1, ...] if y_first else pts_reshape[:, :, 0,
                                                                      ...]
        if self.transform_method == 'minmax':
            # Tight axis-aligned box over all points.
            bbox_left = pts_x.min(dim=1, keepdim=True)[0]
            bbox_right = pts_x.max(dim=1, keepdim=True)[0]
            bbox_up = pts_y.min(dim=1, keepdim=True)[0]
            bbox_bottom = pts_y.max(dim=1, keepdim=True)[0]
            bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom],
                             dim=1)
        elif self.transform_method == 'partial_minmax':
            # Same as 'minmax' but using only the first 4 points.
            pts_y = pts_y[:, :4, ...]
            pts_x = pts_x[:, :4, ...]
            bbox_left = pts_x.min(dim=1, keepdim=True)[0]
            bbox_right = pts_x.max(dim=1, keepdim=True)[0]
            bbox_up = pts_y.min(dim=1, keepdim=True)[0]
            bbox_bottom = pts_y.max(dim=1, keepdim=True)[0]
            bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom],
                             dim=1)
        elif self.transform_method == 'moment':
            # Box centered at the point mean, sized by the point std scaled
            # with learnable factors; moment_mul throttles the gradient that
            # reaches moment_transfer.
            pts_y_mean = pts_y.mean(dim=1, keepdim=True)
            pts_x_mean = pts_x.mean(dim=1, keepdim=True)
            pts_y_std = torch.std(pts_y - pts_y_mean, dim=1, keepdim=True)
            pts_x_std = torch.std(pts_x - pts_x_mean, dim=1, keepdim=True)
            moment_transfer = (self.moment_transfer * self.moment_mul) + (
                self.moment_transfer.detach() * (1 - self.moment_mul))
            moment_width_transfer = moment_transfer[0]
            moment_height_transfer = moment_transfer[1]
            half_width = pts_x_std * torch.exp(moment_width_transfer)
            half_height = pts_y_std * torch.exp(moment_height_transfer)
            bbox = torch.cat([
                pts_x_mean - half_width, pts_y_mean - half_height,
                pts_x_mean + half_width, pts_y_mean + half_height
            ],
                             dim=1)
        else:
            raise NotImplementedError
        return bbox
    def gen_grid_from_reg(self, reg, previous_boxes):
        """Base on the previous bboxes and regression values, we compute the
        regressed bboxes and generate the grids on the bboxes.
        :param reg: the regression value to previous bboxes.
        :param previous_boxes: previous bboxes.
        :return: generate grids on the regressed bboxes.
        """
        b, _, h, w = reg.shape
        # Center and (clamped) size of the previous boxes.
        bxy = (previous_boxes[:, :2, ...] + previous_boxes[:, 2:, ...]) / 2.
        bwh = (previous_boxes[:, 2:, ...] -
               previous_boxes[:, :2, ...]).clamp(min=1e-6)
        # Decode: shift center by reg[:2]*wh, scale size by exp(reg[2:]).
        grid_topleft = bxy + bwh * reg[:, :2, ...] - 0.5 * bwh * torch.exp(
            reg[:, 2:, ...])
        grid_wh = bwh * torch.exp(reg[:, 2:, ...])
        grid_left = grid_topleft[:, [0], ...]
        grid_top = grid_topleft[:, [1], ...]
        grid_width = grid_wh[:, [0], ...]
        grid_height = grid_wh[:, [1], ...]
        # Evenly spaced fractions in [0, 1] along each box edge.
        # (sic: 'intervel' keeps the original upstream spelling)
        intervel = torch.linspace(0., 1., self.dcn_kernel).view(
            1, self.dcn_kernel, 1, 1).type_as(reg)
        grid_x = grid_left + grid_width * intervel
        grid_x = grid_x.unsqueeze(1).repeat(1, self.dcn_kernel, 1, 1, 1)
        grid_x = grid_x.view(b, -1, h, w)
        grid_y = grid_top + grid_height * intervel
        grid_y = grid_y.unsqueeze(2).repeat(1, 1, self.dcn_kernel, 1, 1)
        grid_y = grid_y.view(b, -1, h, w)
        # Interleave into (y, x) pairs to match the DCN offset layout.
        grid_yx = torch.stack([grid_y, grid_x], dim=2)
        grid_yx = grid_yx.view(b, -1, h, w)
        regressed_bbox = torch.cat([
            grid_left, grid_top, grid_left + grid_width, grid_top + grid_height
        ], 1)
        return grid_yx, regressed_bbox
    def forward(self, feats):
        """Apply ``forward_single`` to every FPN level in ``feats``."""
        return multi_apply(self.forward_single, feats)
    def forward_single(self, x):
        """Forward feature map of a single FPN level.

        Returns ``(cls_out, pts_out_init, pts_out_refine)`` in training
        mode, or ``(cls_out, decoded_bboxes)`` at inference time.
        """
        dcn_base_offset = self.dcn_base_offset.type_as(x)
        # If we use center_init, the initial reppoints is from center points.
        # If we use bounding bbox representation, the initial reppoints is
        # from regular grid placed on a pre-defined bbox.
        if self.use_grid_points or not self.center_init:
            scale = self.point_base_scale / 2
            points_init = dcn_base_offset / dcn_base_offset.max() * scale
            bbox_init = x.new_tensor([-scale, -scale, scale,
                                      scale]).view(1, 4, 1, 1)
        else:
            points_init = 0
        cls_feat = x
        pts_feat = x
        for cls_conv in self.cls_convs:
            cls_feat = cls_conv(cls_feat)
        for reg_conv in self.reg_convs:
            pts_feat = reg_conv(pts_feat)
        # initialize reppoints
        pts_out_init = self.reppoints_pts_init_out(
            self.relu(self.reppoints_pts_init_conv(pts_feat)))
        if self.use_grid_points:
            pts_out_init, bbox_out_init = self.gen_grid_from_reg(
                pts_out_init, bbox_init.detach())
        else:
            pts_out_init = pts_out_init + points_init
        # refine and classify reppoints
        # Partially detach the init points so only gradient_mul of the
        # refine/cls gradient flows back into the init branch.
        pts_out_init_grad_mul = (1 - self.gradient_mul) * pts_out_init.detach(
        ) + self.gradient_mul * pts_out_init
        # DCN offsets are relative to the regular kernel grid.
        dcn_offset = pts_out_init_grad_mul - dcn_base_offset
        cls_out = self.reppoints_cls_out(
            self.relu(self.reppoints_cls_conv(cls_feat, dcn_offset)))
        pts_out_refine = self.reppoints_pts_refine_out(
            self.relu(self.reppoints_pts_refine_conv(pts_feat, dcn_offset)))
        if self.use_grid_points:
            pts_out_refine, bbox_out_refine = self.gen_grid_from_reg(
                pts_out_refine, bbox_out_init.detach())
        else:
            # Refinement predicts a residual on top of the detached init.
            pts_out_refine = pts_out_refine + pts_out_init.detach()
        if self.training:
            return cls_out, pts_out_init, pts_out_refine
        else:
            return cls_out, self.points2bbox(pts_out_refine)
    def get_points(self, featmap_sizes, img_metas, device):
        """Get points according to feature map sizes.
        Args:
            featmap_sizes (list[tuple]): Multi-level feature map sizes.
            img_metas (list[dict]): Image meta info.
        Returns:
            tuple: points of each image, valid flags of each image
        """
        num_imgs = len(img_metas)
        # since feature map sizes of all images are the same, we only compute
        # points center for one time
        multi_level_points = self.prior_generator.grid_priors(
            featmap_sizes, device=device, with_stride=True)
        # Clone per image so later in-place edits don't alias across images.
        points_list = [[point.clone() for point in multi_level_points]
                       for _ in range(num_imgs)]
        # for each image, we compute valid flags of multi level grids
        valid_flag_list = []
        for img_id, img_meta in enumerate(img_metas):
            multi_level_flags = self.prior_generator.valid_flags(
                featmap_sizes, img_meta['pad_shape'])
            valid_flag_list.append(multi_level_flags)
        return points_list, valid_flag_list
    def centers_to_bboxes(self, point_list):
        """Get bboxes according to center points.
        Only used in :class:`MaxIoUAssigner`.

        Each point becomes a square box of side
        ``point_base_scale * stride`` centered on it.
        """
        bbox_list = []
        for i_img, point in enumerate(point_list):
            bbox = []
            for i_lvl in range(len(self.point_strides)):
                scale = self.point_base_scale * self.point_strides[i_lvl] * 0.5
                bbox_shift = torch.Tensor([-scale, -scale, scale,
                                           scale]).view(1, 4).type_as(point[0])
                # Duplicate (x, y) into (x, y, x, y) and offset by +-scale.
                bbox_center = torch.cat(
                    [point[i_lvl][:, :2], point[i_lvl][:, :2]], dim=1)
                bbox.append(bbox_center + bbox_shift)
            bbox_list.append(bbox)
        return bbox_list
    def offset_to_pts(self, center_list, pred_list):
        """Change from point offset to point coordinate.

        Predicted offsets are stored (y, x)-interleaved; the output points
        are (x, y)-interleaved absolute coordinates, scaled by the level
        stride and added to each center.
        """
        pts_list = []
        for i_lvl in range(len(self.point_strides)):
            pts_lvl = []
            for i_img in range(len(center_list)):
                pts_center = center_list[i_img][i_lvl][:, :2].repeat(
                    1, self.num_points)
                pts_shift = pred_list[i_lvl][i_img]
                # (C, H, W) -> (H*W, 2*num_points)
                yx_pts_shift = pts_shift.permute(1, 2, 0).view(
                    -1, 2 * self.num_points)
                y_pts_shift = yx_pts_shift[..., 0::2]
                x_pts_shift = yx_pts_shift[..., 1::2]
                # Swap to (x, y) ordering before scaling.
                xy_pts_shift = torch.stack([x_pts_shift, y_pts_shift], -1)
                xy_pts_shift = xy_pts_shift.view(*yx_pts_shift.shape[:-1], -1)
                pts = xy_pts_shift * self.point_strides[i_lvl] + pts_center
                pts_lvl.append(pts)
            pts_lvl = torch.stack(pts_lvl, 0)
            pts_list.append(pts_lvl)
        return pts_list
    def _point_target_single(self,
                             flat_proposals,
                             valid_flags,
                             gt_bboxes,
                             gt_bboxes_ignore,
                             gt_labels,
                             stage='init',
                             unmap_outputs=True):
        """Compute classification/regression targets for one image.

        Assigns GT boxes to the valid proposals using the stage-specific
        assigner, samples pos/neg indices, and optionally maps the results
        back onto the full (unfiltered) proposal set.
        Returns (labels, label_weights, bbox_gt, pos_proposals,
        proposals_weights, pos_inds, neg_inds), or seven ``None`` values
        when no proposal is valid.
        """
        inside_flags = valid_flags
        if not inside_flags.any():
            return (None, ) * 7
        # assign gt and sample proposals
        proposals = flat_proposals[inside_flags, :]
        if stage == 'init':
            assigner = self.init_assigner
            pos_weight = self.train_cfg.init.pos_weight
        else:
            assigner = self.refine_assigner
            pos_weight = self.train_cfg.refine.pos_weight
        assign_result = assigner.assign(proposals, gt_bboxes, gt_bboxes_ignore,
                                        None if self.sampling else gt_labels)
        sampling_result = self.sampler.sample(assign_result, proposals,
                                              gt_bboxes)
        num_valid_proposals = proposals.shape[0]
        bbox_gt = proposals.new_zeros([num_valid_proposals, 4])
        pos_proposals = torch.zeros_like(proposals)
        proposals_weights = proposals.new_zeros([num_valid_proposals, 4])
        # Background label is num_classes (last index).
        labels = proposals.new_full((num_valid_proposals, ),
                                    self.num_classes,
                                    dtype=torch.long)
        label_weights = proposals.new_zeros(
            num_valid_proposals, dtype=torch.float)
        pos_inds = sampling_result.pos_inds
        neg_inds = sampling_result.neg_inds
        if len(pos_inds) > 0:
            pos_gt_bboxes = sampling_result.pos_gt_bboxes
            bbox_gt[pos_inds, :] = pos_gt_bboxes
            pos_proposals[pos_inds, :] = proposals[pos_inds, :]
            proposals_weights[pos_inds, :] = 1.0
            if gt_labels is None:
                # Only rpn gives gt_labels as None
                # Foreground is the first class
                labels[pos_inds] = 0
            else:
                labels[pos_inds] = gt_labels[
                    sampling_result.pos_assigned_gt_inds]
            if pos_weight <= 0:
                label_weights[pos_inds] = 1.0
            else:
                label_weights[pos_inds] = pos_weight
        if len(neg_inds) > 0:
            label_weights[neg_inds] = 1.0
        # map up to original set of proposals
        if unmap_outputs:
            num_total_proposals = flat_proposals.size(0)
            labels = unmap(labels, num_total_proposals, inside_flags)
            label_weights = unmap(label_weights, num_total_proposals,
                                  inside_flags)
            bbox_gt = unmap(bbox_gt, num_total_proposals, inside_flags)
            pos_proposals = unmap(pos_proposals, num_total_proposals,
                                  inside_flags)
            proposals_weights = unmap(proposals_weights, num_total_proposals,
                                      inside_flags)
        return (labels, label_weights, bbox_gt, pos_proposals,
                proposals_weights, pos_inds, neg_inds)
    def get_targets(self,
                    proposals_list,
                    valid_flag_list,
                    gt_bboxes_list,
                    img_metas,
                    gt_bboxes_ignore_list=None,
                    gt_labels_list=None,
                    stage='init',
                    label_channels=1,
                    unmap_outputs=True):
        """Compute corresponding GT box and classification targets for
        proposals.
        Args:
            proposals_list (list[list]): Multi level points/bboxes of each
                image.
            valid_flag_list (list[list]): Multi level valid flags of each
                image.
            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image.
            img_metas (list[dict]): Meta info of each image.
            gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be
                ignored.
            gt_labels_list (list[Tensor]): Ground truth labels of each box.
            stage (str): `init` or `refine`. Generate target for init stage or
                refine stage
            label_channels (int): Channel of label.
            unmap_outputs (bool): Whether to map outputs back to the original
                set of anchors.
        Returns:
            tuple:
                - labels_list (list[Tensor]): Labels of each level.
                - label_weights_list (list[Tensor]): Label weights of each level.  # noqa: E501
                - bbox_gt_list (list[Tensor]): Ground truth bbox of each level.
                - proposal_list (list[Tensor]): Proposals(points/bboxes) of each level.  # noqa: E501
                - proposal_weights_list (list[Tensor]): Proposal weights of each level.  # noqa: E501
                - num_total_pos (int): Number of positive samples in all images.  # noqa: E501
                - num_total_neg (int): Number of negative samples in all images.  # noqa: E501
        """
        assert stage in ['init', 'refine']
        num_imgs = len(img_metas)
        assert len(proposals_list) == len(valid_flag_list) == num_imgs
        # points number of multi levels
        num_level_proposals = [points.size(0) for points in proposals_list[0]]
        # concat all level points and flags to a single tensor
        # NOTE: mutates the caller's per-image lists in place.
        for i in range(num_imgs):
            assert len(proposals_list[i]) == len(valid_flag_list[i])
            proposals_list[i] = torch.cat(proposals_list[i])
            valid_flag_list[i] = torch.cat(valid_flag_list[i])
        # compute targets for each image
        if gt_bboxes_ignore_list is None:
            gt_bboxes_ignore_list = [None for _ in range(num_imgs)]
        if gt_labels_list is None:
            gt_labels_list = [None for _ in range(num_imgs)]
        (all_labels, all_label_weights, all_bbox_gt, all_proposals,
         all_proposal_weights, pos_inds_list, neg_inds_list) = multi_apply(
             self._point_target_single,
             proposals_list,
             valid_flag_list,
             gt_bboxes_list,
             gt_bboxes_ignore_list,
             gt_labels_list,
             stage=stage,
             unmap_outputs=unmap_outputs)
        # no valid points
        if any([labels is None for labels in all_labels]):
            return None
        # sampled points of all images
        # max(..., 1) guards against division by zero in the loss averaging.
        num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list])
        num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list])
        labels_list = images_to_levels(all_labels, num_level_proposals)
        label_weights_list = images_to_levels(all_label_weights,
                                              num_level_proposals)
        bbox_gt_list = images_to_levels(all_bbox_gt, num_level_proposals)
        proposals_list = images_to_levels(all_proposals, num_level_proposals)
        proposal_weights_list = images_to_levels(all_proposal_weights,
                                                 num_level_proposals)
        return (labels_list, label_weights_list, bbox_gt_list, proposals_list,
                proposal_weights_list, num_total_pos, num_total_neg)
    def loss_single(self, cls_score, pts_pred_init, pts_pred_refine, labels,
                    label_weights, bbox_gt_init, bbox_weights_init,
                    bbox_gt_refine, bbox_weights_refine, stride,
                    num_total_samples_init, num_total_samples_refine):
        """Compute cls / init-points / refine-points losses for one level.

        Returns the tuple ``(loss_cls, loss_pts_init, loss_pts_refine)``.
        """
        # classification loss
        labels = labels.reshape(-1)
        label_weights = label_weights.reshape(-1)
        cls_score = cls_score.permute(0, 2, 3,
                                      1).reshape(-1, self.cls_out_channels)
        cls_score = cls_score.contiguous()
        loss_cls = self.loss_cls(
            cls_score,
            labels,
            label_weights,
            avg_factor=num_total_samples_refine)
        # points loss
        bbox_gt_init = bbox_gt_init.reshape(-1, 4)
        bbox_weights_init = bbox_weights_init.reshape(-1, 4)
        # Point predictions are converted to boxes before the bbox loss.
        bbox_pred_init = self.points2bbox(
            pts_pred_init.reshape(-1, 2 * self.num_points), y_first=False)
        bbox_gt_refine = bbox_gt_refine.reshape(-1, 4)
        bbox_weights_refine = bbox_weights_refine.reshape(-1, 4)
        bbox_pred_refine = self.points2bbox(
            pts_pred_refine.reshape(-1, 2 * self.num_points), y_first=False)
        # Normalize coordinates by the level's nominal box size so the loss
        # magnitude is comparable across pyramid levels.
        normalize_term = self.point_base_scale * stride
        loss_pts_init = self.loss_bbox_init(
            bbox_pred_init / normalize_term,
            bbox_gt_init / normalize_term,
            bbox_weights_init,
            avg_factor=num_total_samples_init)
        loss_pts_refine = self.loss_bbox_refine(
            bbox_pred_refine / normalize_term,
            bbox_gt_refine / normalize_term,
            bbox_weights_refine,
            avg_factor=num_total_samples_refine)
        return loss_cls, loss_pts_init, loss_pts_refine
    def loss(self,
             cls_scores,
             pts_preds_init,
             pts_preds_refine,
             gt_bboxes,
             gt_labels,
             img_metas,
             gt_bboxes_ignore=None):
        """Compute the multi-level training losses.

        Builds targets twice: once for the init stage (assigned on point
        centers or their pseudo-boxes) and once for the refine stage
        (assigned on boxes decoded from the detached init predictions),
        then averages ``loss_single`` across levels.
        Returns a dict with 'loss_cls', 'loss_pts_init', 'loss_pts_refine'
        (each a per-level list).
        """
        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
        device = cls_scores[0].device
        label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1
        # target for initial stage
        center_list, valid_flag_list = self.get_points(featmap_sizes,
                                                       img_metas, device)
        pts_coordinate_preds_init = self.offset_to_pts(center_list,
                                                       pts_preds_init)
        if self.train_cfg.init.assigner['type'] == 'PointAssigner':
            # Assign target for center list
            candidate_list = center_list
        else:
            # transform center list to bbox list and
            # assign target for bbox list
            bbox_list = self.centers_to_bboxes(center_list)
            candidate_list = bbox_list
        cls_reg_targets_init = self.get_targets(
            candidate_list,
            valid_flag_list,
            gt_bboxes,
            img_metas,
            gt_bboxes_ignore_list=gt_bboxes_ignore,
            gt_labels_list=gt_labels,
            stage='init',
            label_channels=label_channels)
        (*_, bbox_gt_list_init, candidate_list_init, bbox_weights_list_init,
         num_total_pos_init, num_total_neg_init) = cls_reg_targets_init
        num_total_samples_init = (
            num_total_pos_init +
            num_total_neg_init if self.sampling else num_total_pos_init)
        # target for refinement stage
        # get_points is called again because get_targets mutated the earlier
        # per-image lists in place.
        center_list, valid_flag_list = self.get_points(featmap_sizes,
                                                       img_metas, device)
        pts_coordinate_preds_refine = self.offset_to_pts(
            center_list, pts_preds_refine)
        bbox_list = []
        for i_img, center in enumerate(center_list):
            bbox = []
            for i_lvl in range(len(pts_preds_refine)):
                # Decode the detached init points into per-level boxes used
                # as refine-stage assignment candidates.
                bbox_preds_init = self.points2bbox(
                    pts_preds_init[i_lvl].detach())
                bbox_shift = bbox_preds_init * self.point_strides[i_lvl]
                bbox_center = torch.cat(
                    [center[i_lvl][:, :2], center[i_lvl][:, :2]], dim=1)
                bbox.append(bbox_center +
                            bbox_shift[i_img].permute(1, 2, 0).reshape(-1, 4))
            bbox_list.append(bbox)
        cls_reg_targets_refine = self.get_targets(
            bbox_list,
            valid_flag_list,
            gt_bboxes,
            img_metas,
            gt_bboxes_ignore_list=gt_bboxes_ignore,
            gt_labels_list=gt_labels,
            stage='refine',
            label_channels=label_channels)
        (labels_list, label_weights_list, bbox_gt_list_refine,
         candidate_list_refine, bbox_weights_list_refine, num_total_pos_refine,
         num_total_neg_refine) = cls_reg_targets_refine
        num_total_samples_refine = (
            num_total_pos_refine +
            num_total_neg_refine if self.sampling else num_total_pos_refine)
        # compute loss
        losses_cls, losses_pts_init, losses_pts_refine = multi_apply(
            self.loss_single,
            cls_scores,
            pts_coordinate_preds_init,
            pts_coordinate_preds_refine,
            labels_list,
            label_weights_list,
            bbox_gt_list_init,
            bbox_weights_list_init,
            bbox_gt_list_refine,
            bbox_weights_list_refine,
            self.point_strides,
            num_total_samples_init=num_total_samples_init,
            num_total_samples_refine=num_total_samples_refine)
        loss_dict_all = {
            'loss_cls': losses_cls,
            'loss_pts_init': losses_pts_init,
            'loss_pts_refine': losses_pts_refine
        }
        return loss_dict_all
    # Same as base_dense_head/_get_bboxes_single except self._bbox_decode
    def _get_bboxes_single(self,
                           cls_score_list,
                           bbox_pred_list,
                           score_factor_list,
                           mlvl_priors,
                           img_meta,
                           cfg,
                           rescale=False,
                           with_nms=True,
                           **kwargs):
        """Transform outputs of a single image into bbox predictions.
        Args:
            cls_score_list (list[Tensor]): Box scores from all scale
                levels of a single image, each item has shape
                (num_priors * num_classes, H, W).
            bbox_pred_list (list[Tensor]): Box energies / deltas from
                all scale levels of a single image, each item has shape
                (num_priors * 4, H, W).
            score_factor_list (list[Tensor]): Score factor from all scale
                levels of a single image. RepPoints head does not need
                this value.
            mlvl_priors (list[Tensor]): Each element in the list is
                the priors of a single level in feature pyramid, has shape
                (num_priors, 2).
            img_meta (dict): Image meta info.
            cfg (mmcv.Config): Test / postprocessing configuration,
                if None, test_cfg would be used.
            rescale (bool): If True, return boxes in original image space.
                Default: False.
            with_nms (bool): If True, do nms before return boxes.
                Default: True.
        Returns:
            tuple[Tensor]: Results of detected bboxes and labels. If with_nms
                is False and mlvl_score_factor is None, return mlvl_bboxes and
                mlvl_scores, else return mlvl_bboxes, mlvl_scores and
                mlvl_score_factor. Usually with_nms is False is used for aug
                test. If with_nms is True, then return the following format
                - det_bboxes (Tensor): Predicted bboxes with shape \
                    [num_bboxes, 5], where the first 4 columns are bounding \
                    box positions (tl_x, tl_y, br_x, br_y) and the 5-th \
                    column are scores between 0 and 1.
                - det_labels (Tensor): Predicted labels of the corresponding \
                    box with shape [num_bboxes].
        """
        cfg = self.test_cfg if cfg is None else cfg
        assert len(cls_score_list) == len(bbox_pred_list)
        img_shape = img_meta['img_shape']
        nms_pre = cfg.get('nms_pre', -1)
        mlvl_bboxes = []
        mlvl_scores = []
        mlvl_labels = []
        for level_idx, (cls_score, bbox_pred, priors) in enumerate(
                zip(cls_score_list, bbox_pred_list, mlvl_priors)):
            assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
            # (C, H, W) -> (H*W*num_priors, 4) / (H*W*num_priors, classes)
            bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4)
            cls_score = cls_score.permute(1, 2,
                                          0).reshape(-1, self.cls_out_channels)
            if self.use_sigmoid_cls:
                scores = cls_score.sigmoid()
            else:
                # Drop the trailing background channel for softmax heads.
                scores = cls_score.softmax(-1)[:, :-1]
            # After https://github.com/open-mmlab/mmdetection/pull/6268/,
            # this operation keeps fewer bboxes under the same `nms_pre`.
            # There is no difference in performance for most models. If you
            # find a slight drop in performance, you can set a larger
            # `nms_pre` than before.
            results = filter_scores_and_topk(
                scores, cfg.score_thr, nms_pre,
                dict(bbox_pred=bbox_pred, priors=priors))
            scores, labels, _, filtered_results = results
            bbox_pred = filtered_results['bbox_pred']
            priors = filtered_results['priors']
            bboxes = self._bbox_decode(priors, bbox_pred,
                                       self.point_strides[level_idx],
                                       img_shape)
            mlvl_bboxes.append(bboxes)
            mlvl_scores.append(scores)
            mlvl_labels.append(labels)
        return self._bbox_post_process(
            mlvl_scores,
            mlvl_labels,
            mlvl_bboxes,
            img_meta['scale_factor'],
            cfg,
            rescale=rescale,
            with_nms=with_nms)
def _bbox_decode(self, points, bbox_pred, stride, max_shape):
bbox_pos_center = torch.cat([points[:, :2], points[:, :2]], dim=1)
bboxes = bbox_pred * stride + bbox_pos_center
x1 = bboxes[:, 0].clamp(min=0, max=max_shape[1])
y1 = bboxes[:, 1].clamp(min=0, max=max_shape[0])
x2 = bboxes[:, 2].clamp(min=0, max=max_shape[1])
y2 = bboxes[:, 3].clamp(min=0, max=max_shape[0])
decoded_bboxes = torch.stack([x1, y1, x2, y2], dim=-1)
return decoded_bboxes | PypiClean |
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib
from distutils import log
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
# 33.1.1 is the last version that supports setuptools self upgrade/installation.
DEFAULT_VERSION = "33.1.1"
DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/"
DEFAULT_SAVE_DIR = os.curdir
DEFAULT_DEPRECATION_MESSAGE = "ez_setup.py is deprecated and when using it setuptools will be pinned to {0} since it's the last version that supports setuptools self upgrade/installation, check https://github.com/pypa/setuptools/issues/581 for more info; use pip to install setuptools"
MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.'
# Warn on import: this bootstrap script is deprecated in favor of pip.
log.warn(DEFAULT_DEPRECATION_MESSAGE.format(DEFAULT_VERSION))
def _python_cmd(*args):
"""
Execute a command.
Return True if the command succeeded.
"""
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _install(archive_filename, install_args=()):
    """Install Setuptools.

    Unpacks the archive into a temp dir and runs ``setup.py install``
    there.  Returns 2 on failure; returns None on success.
    """
    with archive_context(archive_filename):
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2
def _build_egg(egg, archive_filename, to_dir):
    """Build Setuptools egg.

    Unpacks the archive and runs ``setup.py bdist_egg`` with ``to_dir`` as
    the dist directory; raises IOError if the expected egg file does not
    appear afterwards.
    """
    with archive_context(archive_filename):
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
class ContextualZipFile(zipfile.ZipFile):
    """Supplement ZipFile class to support context manager for Python 2.6."""
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        self.close()
    def __new__(cls, *args, **kwargs):
        """Construct a ZipFile or ContextualZipFile as appropriate."""
        # On Pythons whose ZipFile already supports the context-manager
        # protocol, return a plain ZipFile instead of this subclass.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)
@contextlib.contextmanager
def archive_context(filename):
    """
    Unzip filename to a temporary directory, set to the cwd.
    The unzipped target is cleaned up after.
    """
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        try:
            with ContextualZipFile(filename) as archive:
                archive.extractall()
        except zipfile.BadZipfile as err:
            # Append a human-readable hint to the exception before
            # re-raising (the zip is likely a corrupted download).
            if not err.args:
                err.args = ('', )
            err.args = err.args + (
                MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename),
            )
            raise
        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        yield
    finally:
        # Always restore the original cwd and remove the temp tree.
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
def _do_download(version, download_base, to_dir, download_delay):
    """Download Setuptools.

    Builds (if necessary) the versioned egg for the current Python,
    prepends it to ``sys.path`` and bootstraps setuptools from it.
    """
    py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys)
    tp = 'setuptools-{version}-{py_desig}.egg'
    egg = os.path.join(to_dir, tp.format(**locals()))
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)
    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        _unload_pkg_resources()
    import setuptools
    setuptools.bootstrap_install_from = egg
def use_setuptools(
        version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=DEFAULT_SAVE_DIR, download_delay=15):
    """
    Ensure that a setuptools version is installed.
    Return None. Raise SystemExit if the requested version
    or later cannot be installed.
    """
    to_dir = os.path.abspath(to_dir)
    # prior to importing, capture the module state for
    # representative modules.
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
        pkg_resources.require("setuptools>=" + version)
        # a suitable version is already installed
        return
    except ImportError:
        # pkg_resources not available; setuptools is not installed; download
        pass
    except pkg_resources.DistributionNotFound:
        # no version of setuptools was found; allow download
        pass
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            _conflict_bail(VC_err, version)
        # otherwise, unload pkg_resources to allow the downloaded version to
        # take precedence.
        del pkg_resources
        _unload_pkg_resources()
    return _do_download(version, download_base, to_dir, download_delay)
def _conflict_bail(VC_err, version):
    """
    Setuptools was imported prior to invocation, so it is
    unsafe to unload it. Bail out.

    Writes an explanatory message to stderr and exits with status 2.
    """
    conflict_tmpl = textwrap.dedent("""
        The required version of setuptools (>={version}) is not available,
        and can't be installed while this script is running. Please
        install a more recent version first, using
        'easy_install -U setuptools'.
        (Currently using {VC_err.args[0]!r})
        """)
    msg = conflict_tmpl.format(**locals())
    sys.stderr.write(msg)
    sys.exit(2)
def _unload_pkg_resources():
sys.meta_path = [
importer
for importer in sys.meta_path
if importer.__class__.__module__ != 'pkg_resources.extern'
]
del_modules = [
name for name in sys.modules
if name.startswith('pkg_resources')
]
for mod_name in del_modules:
del sys.modules[mod_name]
def _clean_check(cmd, target):
"""
Run the command to download target.
If the command fails, clean up before re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell.
    Powershell will validate trust (TLS certificates).
    Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    # Route through the default proxy with default credentials, then fetch.
    ps_script = (
        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
        "[System.Net.CredentialCache]::DefaultCredentials; "
        '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")'
        % dict(url=url, target=target)
    )
    _clean_check(['powershell', '-Command', ps_script], target)


def has_powershell():
    """Determine if Powershell is available."""
    if platform.system() != 'Windows':
        return False
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(
                ['powershell', '-Command', 'echo test'],
                stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True


download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
    """Download url to target by shelling out to curl (follows redirects)."""
    _clean_check(
        ['curl', url, '--location', '--silent', '--output', target],
        target)


def has_curl():
    """Return True when a working curl binary is on PATH."""
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(
                ['curl', '--version'], stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True


download_file_curl.viable = has_curl
def download_file_wget(url, target):
    """Download url to target by shelling out to wget."""
    _clean_check(
        ['wget', url, '--quiet', '--output-document', target],
        target)


def has_wget():
    """Return True when a working wget binary is on PATH."""
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(
                ['wget', '--version'], stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True


download_file_wget.viable = has_wget
def download_file_insecure(url, target):
    """Use Python to download the file, without connection authentication."""
    src = urlopen(url)
    try:
        # Read all the data in one block.
        payload = src.read()
    finally:
        src.close()
    # Write everything at once so a failure never leaves a partial file.
    with open(target, "wb") as dst:
        dst.write(payload)


# Always usable as a last resort (no external binaries required).
download_file_insecure.viable = lambda: True
def get_best_downloader():
    """Return the first viable downloader, preferring trust-validating
    transports over the insecure pure-Python fallback; None if none work."""
    for downloader in (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    ):
        if downloader.viable():
            return downloader
    return None
def download_setuptools(
        version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=DEFAULT_SAVE_DIR, delay=15,
        downloader_factory=get_best_downloader):
    """
    Download setuptools from a specified location and return its filename.

    `version` should be a valid setuptools version number that is available
    as an sdist for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.
    """
    # Work with an absolute path so the returned filename is unambiguous.
    to_dir = os.path.abspath(to_dir)
    zip_name = "setuptools-%s.zip" % version
    saveto = os.path.join(to_dir, zip_name)
    if not os.path.exists(saveto):  # avoid repeated downloads
        url = download_base + zip_name
        log.warn("Downloading %s", url)
        downloader_factory()(url, saveto)
    return os.path.realpath(saveto)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package.
Returns list of command line arguments.
"""
return ['--user'] if options.user_install else []
def _parse_args():
    """Parse the command line for options."""
    parser = optparse.OptionParser()
    # (flags, keyword-arguments) pairs, registered in help-text order.
    option_specs = [
        (('--user',), dict(
            dest='user_install', action='store_true', default=False,
            help='install in user site package')),
        (('--download-base',), dict(
            dest='download_base', metavar="URL", default=DEFAULT_URL,
            help='alternative URL from where to download the setuptools package')),
        (('--insecure',), dict(
            dest='downloader_factory', action='store_const',
            const=lambda: download_file_insecure, default=get_best_downloader,
            help='Use internal, non-validating downloader')),
        (('--version',), dict(
            help="Specify which version to download",
            default=DEFAULT_VERSION)),
        (('--to-dir',), dict(
            help="Directory to save (and re-use) package",
            default=DEFAULT_SAVE_DIR)),
    ]
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
    options, _positional = parser.parse_args()
    # positional arguments are ignored
    return options
def _download_args(options):
"""Return args for download_setuptools function from cmdline args."""
return dict(
version=options.version,
download_base=options.download_base,
downloader_factory=options.downloader_factory,
to_dir=options.to_dir,
)
def main():
    """Install or upgrade setuptools and EasyInstall.

    Parses the command line, downloads the requested setuptools archive
    and installs it; returns the installer's exit status.
    """
    options = _parse_args()
    archive = download_setuptools(**_download_args(options))
    return _install(archive, _build_install_args(options))


if __name__ == '__main__':
    # Fixed: a dataset/extraction artifact ("| PypiClean") was fused onto
    # this line, which would have raised a NameError at runtime.
    sys.exit(main())
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/command/run.py | import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import time
import logging
import logging.config
import argparse
from datetime import datetime
from apidoc import __version__
from apidoc.lib.fswatcher.observer import Observer
from apidoc.lib.fswatcher.callbackHandler import CallbackHandler
from apidoc.lib.util.serialize import json_repr
from apidoc.factory.config import Config as ConfigFactory
from apidoc.factory.source import Source as SourceFactory
from apidoc.factory.template import Template as TemplateFactory
from apidoc.object.config import Config as ConfigObject
from apidoc.service.source import Source as SourceService
from apidoc.service.config import Config as ConfigService
from apidoc.service.parser import Parser as FileParser
class ApiDoc():
    """Base command-line interface for ApiDoc
    """
    # Run-mode flags and configuration; class-level defaults are
    # overwritten per-instance by _init_config() from CLI options.
    dry_run = False
    watch = False
    traceback = False
    config = None

    def __init__(self):
        """Initialize the command: build the argparse parser describing all
        CLI options and grab the root logger.
        """
        self.parser = argparse.ArgumentParser(description=self.__doc__)
        self.parser.add_argument(
            "-c", "--config", type=str, metavar="CONFIG",
            help="configuration file"
        )
        self.parser.add_argument(
            "-i", "--input", nargs='+', type=str, metavar="DIRECTORY OR FILE",
            help="directories and/or files containing documentation\'s source files"
        )
        self.parser.add_argument(
            "-o", "--output", type=str, metavar="FILE",
            help="rendered output file"
        )
        self.parser.add_argument(
            "-v", "--version", action='version', version='%(prog)s ' + __version__
        )
        self.parser.add_argument(
            "-n", "--no-validate", action='store_const', const=True,
            help="disable validation"
        )
        self.parser.add_argument(
            "-a", "--arguments", nargs='+', type=str, metavar="ARGUMENT",
            help="documentation\'s arguments arg1=value1 arg2=value2"
        )
        self.parser.add_argument(
            "-y", "--dry-run", action='store_const', const=True,
            help="analyse config's and source's files without building the documentation"
        )
        self.parser.add_argument(
            "-w", "--watch", action='store_const', const=True,
            help="re-render the documentation each time a source's file or a template's file changes"
        )
        self.parser.add_argument(
            "-q", "--quiet", action='store_const', const=True,
            help="does not display logging information below warning level"
        )
        self.parser.add_argument(
            "-qq", "--silence", action='store_const', const=True,
            help="does not display any logging information"
        )
        self.parser.add_argument(
            "-t", "--traceback", action='store_const', const=True,
            help="display traceback when an exception raised"
        )
        self.logger = logging.getLogger()

    def _init_config(self):
        """Build ``self.config`` from the CLI arguments and set the run-mode
        flags (dry_run / watch / traceback).

        Exits with status 2 when neither --config nor --input is given;
        CLI options override values loaded from the config file.
        """
        options = self.parser.parse_args()
        if options.config is None and options.input is None:
            self.parser.print_help()
            sys.exit(2)
        if options.config is not None:
            configFactory = ConfigFactory()
            config = configFactory.load_from_file(options.config)
        else:
            config = ConfigObject()
        if options.input is not None:
            config["input"]["locations"] = [str(x) for x in options.input]
        if options.arguments is not None:
            # "key=value" strings; partition keeps any '=' in the value intact.
            config["input"]["arguments"] = dict((x.partition("=")[0], x.partition("=")[2]) for x in options.arguments)
        if options.output is not None:
            config["output"]["location"] = options.output
        if options.no_validate is not None:
            config["input"]["validate"] = not options.no_validate
        if options.dry_run is not None:
            self.dry_run = options.dry_run
        if options.watch is not None:
            self.watch = options.watch
        if options.traceback is not None:
            self.traceback = options.traceback
        if options.quiet is not None:
            self.logger.setLevel(logging.WARNING)
        if options.silence is not None:
            logging.disable(logging.CRITICAL)
        configService = ConfigService()
        configService.validate(config)
        self.config = config

    # NOTE(review): stray string literal below — it is a no-op expression
    # statement at class level, most likely a misplaced docstring.
    """Build documentation from sources each time a source or template files is modified
    """

    def main(self):
        """Run the command: dispatch to dry-run, watch or one-shot render
        depending on the flags set by _init_config().
        """
        self._init_config()
        if self.dry_run:
            return self.run_dry_run()
        elif self.watch:
            return self.run_watch()
        else:
            return self.run_render()

    def _get_sources(self):
        """Create and validate the source tree from ``self.config``.

        Logs (and re-raises) any failure; with --traceback the full stack
        trace is logged instead of a one-line error.
        """
        now = datetime.now()
        try:
            sourceService = SourceService()
            sourceFactory = SourceFactory()
            sources = sourceFactory.create_from_config(self.config)
            sourceService.validate(sources)
            self.logger.debug("Parse sources in %s." % (datetime.now() - now))
        except:
            if self.traceback:
                self.logger.exception("Failed to parse sources")
            else:
                self.logger.error("Failed to parse sources")
            raise
        return sources

    def _render_template(self, sources):
        """Render ``sources`` through the configured template.

        Logs (and re-raises) any failure, mirroring _get_sources().
        """
        now = datetime.now()
        try:
            templateFactory = TemplateFactory()
            template = templateFactory.create_from_config(self.config)
            template.render(sources, self.config)
            self.logger.debug("Render template in %s." % (datetime.now() - now))
        except:
            if self.traceback:
                self.logger.exception("Failed to render template")
            else:
                self.logger.error("Failed to render template")
            raise

    def run_dry_run(self):
        """Parse and validate the sources, then dump them as JSON without
        rendering anything."""
        try:
            sources = self._get_sources()
        except:
            pass
        # NOTE(review): if _get_sources() raised, 'sources' is unbound here
        # and this print raises NameError — confirm intended behavior.
        print(json_repr(sources))

    def run_render(self):
        """One-shot build: parse sources and render the template; errors
        were already logged by the helpers and are swallowed here."""
        try:
            sources = self._get_sources()
            self._render_template(sources)
        except:
            pass

    def run_watch(self):
        """Build once, then watch the template directory and every input
        location, rebuilding on change until interrupted (Ctrl-C)."""
        configService = ConfigService()
        self.logger.info("Start watching")
        # Initial build before entering the watch loop.
        self._watch_refresh_source(None)
        observer = Observer()
        template_handler = CallbackHandler(self._watch_refresh_template)
        source_handler = CallbackHandler(self._watch_refresh_source)
        template_path = os.path.dirname(configService.get_template_from_config(self.config))
        observer.add_handler(template_path, template_handler)
        if (self.config["input"]["locations"] is not None):
            for location in self.config["input"]["locations"]:
                observer.add_handler(location, source_handler)
        observer.start()
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()

    def _watch_refresh_source(self, event):
        """Re-parse the sources then re-render; called when a source file
        changes (``event`` is unused, None on the initial build)."""
        self.logger.info("Sources changed...")
        try:
            self.sources = self._get_sources()
            self._render_template(self.sources)
        except:
            pass

    def _watch_refresh_template(self, event):
        """Re-render using the cached ``self.sources``; called when a
        template file changes (``event`` is unused)."""
        self.logger.info("Template changed...")
        try:
            self._render_template(self.sources)
        except:
            pass
def main():
    """Program entry point: configure logging from the bundled YAML file,
    then hand control to the ApiDoc command."""
    settings_file = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'settings', 'logging.yml'
    )
    logging.config.dictConfig(FileParser().load_from_file(settings_file))
    ApiDoc().main()


if __name__ == '__main__':
    main()
// EditArea syntax-highlighting definition for ColdFusion.
// Fixed: a dataset/extraction artifact (a file path and "|" separator) was
// fused onto this first line, making the statement invalid JavaScript.
editAreaLoader.load_syntax["coldfusion"] = {
	'COMMENT_SINGLE' : {1 : '//', 2 : '#'}
	,'COMMENT_MULTI' : {'<!--' : '-->'}
	,'COMMENT_MULTI2' : {'<!---' : '--->'}
	,'QUOTEMARKS' : {1: "'", 2: '"'}
	,'KEYWORD_CASE_SENSITIVE' : false
	,'KEYWORDS' : {
		'statements' : [
			'include', 'require', 'include_once', 'require_once',
			'for', 'foreach', 'as', 'if', 'elseif', 'else', 'while', 'do', 'endwhile',
			'endif', 'switch', 'case', 'endswitch',
			'return', 'break', 'continue'
		]
		,'reserved' : [
			'AND', 'break', 'case', 'CONTAIN', 'CONTAINS', 'continue', 'default', 'do',
			'DOES', 'else', 'EQ', 'EQUAL', 'EQUALTO', 'EQV', 'FALSE', 'for', 'GE',
			'GREATER', 'GT', 'GTE', 'if', 'IMP', 'in', 'IS', 'LE', 'LESS', 'LT', 'LTE',
			'MOD', 'NEQ', 'NOT', 'OR', 'return', 'switch', 'THAN', 'TO', 'TRUE', 'var',
			'while', 'XOR'
		]
		,'functions' : [
			'Abs', 'ACos', 'ArrayAppend', 'ArrayAvg', 'ArrayClear', 'ArrayDeleteAt', 'ArrayInsertAt',
			'ArrayIsEmpty', 'ArrayLen', 'ArrayMax', 'ArrayMin', 'ArrayNew', 'ArrayPrepend', 'ArrayResize',
			'ArraySet', 'ArraySort', 'ArraySum', 'ArraySwap', 'ArrayToList', 'Asc', 'ASin', 'Atn', 'AuthenticatedContext',
			'AuthenticatedUser', 'BitAnd', 'BitMaskClear', 'BitMaskRead', 'BitMaskSet', 'BitNot', 'BitOr',
			'BitSHLN', 'BitSHRN', 'BitXor', 'Ceiling', 'Chr', 'CJustify', 'Compare', 'CompareNoCase', 'Cos',
			'CreateDate', 'CreateDateTime', 'CreateODBCDate', 'CreateODBCDateTime', 'CreateODBCTime',
			'CreateTime', 'CreateTimeSpan', 'DateAdd', 'DateCompare', 'DateConvert', 'DateDiff',
			'DateFormat', 'DatePart', 'Day', 'DayOfWeek', 'DayOfWeekAsString', 'DayOfYear', 'DaysInMonth',
			'DaysInYear', 'DE', 'DecimalFormat', 'DecrementValue', 'Decrypt', 'DeleteClientVariable',
			'DirectoryExists', 'DollarFormat', 'Duplicate', 'Encrypt', 'Evaluate', 'Exp', 'ExpandPath',
			'FileExists', 'Find', 'FindNoCase', 'FindOneOf', 'FirstDayOfMonth', 'Fix', 'FormatBaseN',
			'GetBaseTagData', 'GetBaseTagList', 'GetBaseTemplatePath', 'GetClientVariablesList',
			'GetCurrentTemplatePath', 'GetDirectoryFromPath', 'GetException', 'GetFileFromPath',
			'GetFunctionList', 'GetHttpTimeString', 'GetHttpRequestData', 'GetLocale', 'GetMetricData',
			'GetProfileString', 'GetTempDirectory', 'GetTempFile', 'GetTemplatePath', 'GetTickCount',
			'GetTimeZoneInfo', 'GetToken', 'Hash', 'Hour', 'HTMLCodeFormat', 'HTMLEditFormat', 'IIf',
			'IncrementValue', 'InputBaseN', 'Insert', 'Int', 'IsArray', 'IsAuthenticated', 'IsAuthorized',
			'IsBoolean', 'IsBinary', 'IsCustomFunction', 'IsDate', 'IsDebugMode', 'IsDefined', 'IsLeapYear',
			'IsNumeric', 'IsNumericDate', 'IsProtected', 'IsQuery', 'IsSimpleValue', 'IsStruct', 'IsWDDX',
			'JavaCast', 'JSStringFormat', 'LCase', 'Left', 'Len', 'ListAppend', 'ListChangeDelims',
			'ListContains', 'ListContainsNoCase', 'ListDeleteAt', 'ListFind', 'ListFindNoCase', 'ListFirst',
			'ListGetAt', 'ListInsertAt', 'ListLast', 'ListLen', 'ListPrepend', 'ListQualify', 'ListRest',
			'ListSetAt', 'ListSort', 'ListToArray', 'ListValueCount', 'ListValueCountNoCase', 'LJustify',
			'Log', 'Log10', 'LSCurrencyFormat', 'LSDateFormat', 'LSEuroCurrencyFormat', 'LSIsCurrency',
			'LSIsDate', 'LSIsNumeric', 'LSNumberFormat', 'LSParseCurrency', 'LSParseDateTime', 'LSParseNumber',
			'LSTimeFormat', 'LTrim', 'Max', 'Mid', 'Min', 'Minute', 'Month', 'MonthAsString', 'Now', 'NumberFormat',
			'ParagraphFormat', 'ParameterExists', 'ParseDateTime', 'Pi', 'PreserveSingleQuotes', 'Quarter',
			'QueryAddRow', 'QueryNew', 'QuerySetCell', 'QuotedValueList', 'Rand', 'Randomize', 'RandRange',
			'REFind', 'REFindNoCase', 'RemoveChars', 'RepeatString', 'Replace', 'ReplaceList', 'ReplaceNoCase',
			'REReplace', 'REReplaceNoCase', 'Reverse', 'Right', 'RJustify', 'Round', 'RTrim', 'Second', 'SetLocale',
			'SetProfileString', 'SetVariable', 'Sgn', 'Sin', 'SpanExcluding', 'SpanIncluding', 'Sqr', 'StripCR',
			'StructAppend', 'StructClear', 'StructCopy', 'StructCount', 'StructDelete', 'StructFind', 'StructFindKey',
			'StructFindValue', 'StructGet', 'StructInsert', 'StructIsEmpty', 'StructKeyArray', 'StructKeyExists',
			'StructKeyList', 'StructNew', 'StructSort', 'StructUpdate', 'Tan', 'TimeFormat', 'ToBase64', 'ToBinary',
			'ToString', 'Trim', 'UCase', 'URLDecode', 'URLEncodedFormat', 'Val', 'ValueList', 'Week', 'WriteOutput',
			'XMLFormat', 'Year', 'YesNoFormat'
		]
	}
	,'OPERATORS' :[
		'+', '-', '/', '*', '%', '!', '&&', '||'
	]
	,'DELIMITERS' :[
		'(', ')', '[', ']', '{', '}'
	]
	,'REGEXPS' : {
		'doctype' : {
			'search' : '()(<!DOCTYPE[^>]*>)()'
			,'class' : 'doctype'
			,'modifiers' : ''
			,'execute' : 'before' // before or after
		}
		,'cftags' : {
			'search' : '(<)(/cf[a-z][^ \r\n\t>]*)([^>]*>)'
			,'class' : 'cftags'
			,'modifiers' : 'gi'
			,'execute' : 'before' // before or after
		}
		,'cftags2' : {
			'search' : '(<)(cf[a-z][^ \r\n\t>]*)([^>]*>)'
			,'class' : 'cftags2'
			,'modifiers' : 'gi'
			,'execute' : 'before' // before or after
		}
		,'tags' : {
			'search' : '(<)(/?[a-z][^ \r\n\t>]*)([^>]*>)'
			,'class' : 'tags'
			,'modifiers' : 'gi'
			,'execute' : 'before' // before or after
		}
		,'attributes' : {
			'search' : '( |\n|\r|\t)([^ \r\n\t=]+)(=)'
			,'class' : 'attributes'
			,'modifiers' : 'g'
			,'execute' : 'before' // before or after
		}
	}
	,'STYLES' : {
		'COMMENTS': 'color: #AAAAAA;'
		,'QUOTESMARKS': 'color: #6381F8;'
		,'KEYWORDS' : {
			'reserved' : 'color: #48BDDF;'
			,'functions' : 'color: #0000FF;'
			,'statements' : 'color: #60CA00;'
		}
		,'OPERATORS' : 'color: #E775F0;'
		,'DELIMITERS' : ''
		,'REGEXPS' : {
			'attributes': 'color: #990033;'
			,'cftags': 'color: #990033;'
			,'cftags2': 'color: #990033;'
			,'tags': 'color: #000099;'
			,'doctype': 'color: #8DCFB5;'
			,'test': 'color: #00FF00;'
		}
	}
};
/MegEngine-1.13.1-cp37-cp37m-macosx_10_14_x86_64.whl/megengine/module/qat/module.py | from abc import abstractmethod
# avoid circular reference
from ...quantization.fake_quant import FakeQuantize
from ...quantization.observer import Observer
from ...quantization.qconfig import QConfig
from ...quantization.utils import fake_quant_bias
from ...tensor import Tensor
from ..module import Module
class QATModule(Module):
    r"""Base class of quantized-float related :class:`~.Module`, basically for QAT and Calibration.

    Use :meth:`from_float_module` to generate an instance from float :class:`~.Module`.
    Or use :func:`~.quantize.quantize_qat` to do it recursively and automatically.
    Can also be converted to :class:`~.QuantizedModule` for deployment using
    :func:`~.quantize.quantize` further.
    """

    # Subclasses may turn these off to skip weight/activation quantization.
    with_weight = True
    with_act = True

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Populated later by set_qconfig(); all None until then.
        self.weight_observer = None  # type: Observer
        self.act_observer = None  # type: Observer
        self.weight_fake_quant = None  # type: FakeQuantize
        self.act_fake_quant = None  # type: FakeQuantize

    def __repr__(self):
        # Prefix the float module's repr so QAT modules are distinguishable.
        return "QAT." + super().__repr__()

    def set_qconfig(self, qconfig: QConfig):
        r"""Set quantization related configs with ``qconfig``, including
        observer and fake_quant for weight and activation.
        """

        def safe_call(func):
            # qconfig entries are factories (or None); instantiate if present.
            return func() if func is not None else None

        if self.with_act:
            self.act_observer = safe_call(qconfig.act_observer)
            self.act_fake_quant = safe_call(qconfig.act_fake_quant)
        if self.with_weight:
            self.weight_observer = safe_call(qconfig.weight_observer)
            self.weight_fake_quant = safe_call(qconfig.weight_fake_quant)

    def _enable_exec(self, with_module, func, enable):
        # Toggle ``func`` (an observer/fake-quant) only when both the module
        # flag is set and the object actually exists.
        if not with_module or not func:
            return
        if enable:
            func.enable()
        else:
            func.disable()

    def set_fake_quant(self, enable):
        # Enable/disable fake quantization for both activation and weight.
        self._enable_exec(self.with_act, self.act_fake_quant, enable)
        self._enable_exec(self.with_weight, self.weight_fake_quant, enable)

    def set_observer(self, enable):
        # Enable/disable statistics observation for both activation and weight.
        self._enable_exec(self.with_act, self.act_observer, enable)
        self._enable_exec(self.with_weight, self.weight_observer, enable)

    def _apply_fakequant_with_observer(
        self, target: Tensor, fake_quant: FakeQuantize, observer: Observer
    ):
        # do observer
        if observer is None:
            oup = target
            qparams = None
        else:
            oup = observer(target)
            qparams = observer.get_qparams()
        # do fake quant
        if fake_quant is not None:
            oup = fake_quant(oup, qparams)
            # use qparams of fake_quant if have.
            if hasattr(fake_quant, "get_qparams"):
                qparams = fake_quant.get_qparams()
        # set to tensor qparams.
        if qparams is not None:
            oup.qparams.update(qparams)
        return oup

    def apply_quant_weight(self, target: Tensor):
        r"""Apply weight's observer and fake_quant from ``qconfig`` on ``target``."""
        return self._apply_fakequant_with_observer(
            target, self.weight_fake_quant, self.weight_observer
        )

    def apply_quant_activation(self, target: Tensor):
        r"""Apply activation's observer and fake_quant from ``qconfig`` on ``target``."""
        return self._apply_fakequant_with_observer(
            target, self.act_fake_quant, self.act_observer
        )

    def apply_quant_bias(self, target: Tensor, inp: Tensor, w_qat: Tensor):
        r"""Use :func:`~.fake_quant_bias` to process ``target``. Only valid when
        ``act_fake_quant`` and ``weight_fake_quant`` are both enabled.
        """
        # bias should have the same dtype as activation, so act_fake_quant can also
        # decide whether to do bias fakequant
        if (
            self.act_fake_quant
            and self.act_fake_quant.enabled
            and self.weight_fake_quant
            and self.weight_fake_quant.enabled
        ):
            b_qat = fake_quant_bias(target, inp, w_qat)
        else:
            b_qat = target
        return b_qat

    def _get_method_result(
        self, method: str, fake_quant: FakeQuantize, observer: Observer
    ):
        # Prefer the fake-quant's implementation of ``method``; fall back to
        # the observer; None when neither provides it.
        if hasattr(fake_quant, method):
            return getattr(fake_quant, method)()
        elif hasattr(observer, method):
            return getattr(observer, method)()
        return None

    def get_weight_dtype(self):
        r"""Get weight's quantization dtype as the method from ``qconfig``."""
        return self._get_method_result(
            "get_quantized_dtype", self.weight_fake_quant, self.weight_observer
        )

    def get_activation_dtype(self):
        r"""Get activation's quantization dtype as the method from ``qconfig``."""
        return self._get_method_result(
            "get_quantized_dtype", self.act_fake_quant, self.act_observer
        )

    def get_weight_qparams(self):
        r"""Get weight's quantization parameters."""
        return self._get_method_result(
            "get_qparams", self.weight_fake_quant, self.weight_observer
        )

    def get_activation_qparams(self):
        r"""Get activation's quantization parameters."""
        return self._get_method_result(
            "get_qparams", self.act_fake_quant, self.act_observer
        )

    @classmethod
    @abstractmethod
    def from_float_module(cls, float_module: Module):
        r"""Return a :class:`~.QATModule` instance converted from
        a float :class:`~.Module` instance.
        """
/observations-0.1.4.tar.gz/observations-0.1.4/observations/r/help_full.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def help_full(path):
"""Health Evaluation and Linkage to Primary Care
The HELP study was a clinical trial for adult inpatients recruited from
a detoxification unit. Patients with no primary care physician were
randomized to receive a multidisciplinary assessment and a brief
motivational intervention or usual care, with the goal of linking them
to primary medical care.
A data frame with 1472 observations on the following variables.
- `ID` Subject ID
- `TIME` Interview time point
- `NUM_INTERVALS` Number of 6-month intervals from previous to
current interview
- `INT_TIME1` # of months from baseline to current interview
- `DAYS_SINCE_BL` # of days from baseline to current interview
- `INT_TIME2` # of months from previous to current interview
- `DAYS_SINCE_PREV` # of days from previous to current interview
- `PREV_TIME` Previous interview time
- `DEAD` a numeric vector
- `A1` Gender (1=Male, 2=Female)
- `A9` Years of education completed
- `A10` Marital Status (1=Married, 2=Remarried, 3=Widowed, 4=
Separated, 5=Divorced, 6=Never Married
- `A11A` Do you currently have a living mother? (0=No, 1= Yes
- `A11B` Do you currently have a living father? (0=No, 1=Yes
- `A11C` Do you currently have siblings? (0=No, 1=Yes
- `A11D` Do you currently have a partner (0=No, 1=Yes)
- `A11E` Do you currently have children? (0=No, 1=Yes)
- `A12B` Hollingshead categories (1=Major profess, 2= Lesser profess,
3=Minor profess, 4=Clerical/sales, 5=Skilled manual, 6=Semi-skilled,
7=Unskilled, 8= Homemaker, 9=No occupation)
- `A13` Usual employment pattern in last 6 months (1=Full time, 2=
Part time, 3=Student, 4=Unemployed, 5=Control envir)
- `A14A` Loved alone-last 6 mos (0=No, 1=Yes)
- `A14B` Lived w/a partner-last 6 mos (0=No, 1=Yes
- `A14C` Lived with parent(s)-last 6 mos (0=No, 1=Yes)
- `A14D` Lived w/children-last 6 mos (0=No, 1=Yes)
- `A14E` Lived w/other family-last 6 mos (0=No, 1=Yes
- `A14F` Lived w/friend(s)-last 6 mos (0=No, 1=Yes)
- `A14G` Lived w/other-last 6 mos (0=No, 1=Yes)
- `A14G_T` a factor with levels `1/2 WAY HOUSE` `3/4 HOUSE`
`ANCHOR INN` `ARMY` `ASSOCIATES` `BOARDERS`
`BOYFRIENDS MOM` `CORRECTIONAL FACILIT` `CRACK HOUSE`
`DEALER` `ENTRE FAMILIA` `FENWOOD` `GAVIN HSE`
`GIRLFRIENDS DAUGHTE` `GIRLFRIENDS SON` `GIRLFRIENDS CHILDREN`
`GIRLFRIENDS DAUGHTER` `GROUP HOME` `HALF-WAY HOUSE`
`HALFWAY HOUSE` `HALFWAY HOUSES` `HALFWAY HSE` `HOLDING UNIT`
`HOME BORDER` `HOMELESS` `HOMELESS SHELTER` `IN JAIL`
`IN PROGRAMS` `INCARCERATED` `JAIL` `JAIL HALFWAY HOUSE`
`JAIL, SHELTER` `JAIL, STREET` `JAIL/PROGRAM` `JAIL/SHELTER`
`JAILS` `LANDLADY` `LANDLORD` `LODGING HOUSE`
`MERIDIAN HOUSE` `NURSING HOME` `ON THE STREET`
`PARTNERS MOTHER` `PARTNERS CHILD` `PARTNERS CHILDREN`
`PRDGRAMS` `PRISON` `PROGRAM` `PROGRAM MTHP`
`PROGRAM ROOMMATES` `PROGRAM SOBER HOUSE` `PROGRAM-RESIDENTIAL`
`PROGRAM/HALFWAY HOUS` `PROGRAM/JAIL` `PROGRAM/SHELTER`
`PROGRAM/SHELTERS` `PROGRAMS` `PROGRAMS SUBSTANCE`
`PROGRAMS/SHELTER` `PROGRAMS/SHELTERS` `PROGRAMS/SHELTERS/DE`
`PROJECT SOAR` `RESIDENTIAL FACILITY` `RESIDENTIAL PROGRAM`
`ROOMING HOUSE` `ROOMING HOUSE (RELIG` `ROOMMATE` `ROOMMATES`
`ROOMMATES AT TRANSIT` `RYAN HOUSE` `SALVATION ARMY`
`SHELTER` `SHELTER/HALFWAY HSE` `SHELTER/HOTEL`
`SHELTER/PROGRAM` `SHELTERS` `SHELTERS/HOSPITALS`
`SHELTERS/JAIL` `SHELTERS/PROGRAMS` `SHELTERS/STREETS`
`SOBER HOUSE` `SOBER HOUSING` `SOUTH BAY JAIL` `STEPSON`
`STREET` `STREETS` `SUBSTANCE ABUSE TREA`
`TRANSITIONAL HOUSE` `VA SHELTER`
- `A15A` #nights in ovrnight shelter-last 6 mos
- `A15B` # nights on street-last 6 mos
- `A15C` #months in jail-last 6 mos
- `A16A` # months in ovrnight shelter-last 5 yrs
- `A16B` #moths on street-last 5 yrs
- `A16C` #months in jail-last 5 yrs
- `A17A` Received SSI-past 6 mos (0=No, 1=Yes)
- `A17B` Received SSDI-past 6 mos (0=No, 1=Yes)
- `A17C` Received AFDC-past 6 mos (0=No, 1=Yes)
- `A17D` Received EAEDC-past 6 mos (0=No, 1=Yes)
- `A17E` Received WIC-past 6 mos (0=No, 1=Yes)
- `A17F` Received unemployment benefits-past 6 mos (0=No, 1=Yes)
- `A17G` Received Workman's Comp-past 6 mos (0=No, 1=Yes)
- `A17H` Received Child Support-past 6 mos (0=No, 1=Yes)
- `A17I` Received other income-past 6 mos (0=No, 1=Yes)
- `A17I_T` a factor with levels `DISABLED VETERAN`
`EBT (FOOD STAMPS)` `EMERGENCY FOOD STAMP` `FOOD STAMP`
`FOOD STAMPS` `FOOD STAMPS/VETERAN` `FOOD STAMPS/VETERANS`
`INSURANCE SETTLEMENT` `PENSION CHECK` `SECTION 8`
`SERVICE CONNECTED DI` `SOCIAL SECURITY` `SSDI FOR SON`
`SURVIVORS BENEFITS` `TEMPORARY DISABILITY`
`VA BENEFITS-DISABILI` `VA COMPENSATION` `VA DISABILITY PENSIO`
`VETERAN BENEFITS` `VETERANS SERVICES` `VETERANS AFFAIRS`
- `A18` Most money made in any 1 year-last 5 yrs (1=<5000,
2=5000-10000, 3=11000-19000, 4=20000-29000, 5=30000-39000,
6=40000-49000, 7=50000+
- `B1` In general, how is your health (1=Excellent, 2=Very Good,
3=Good, 4=Fair, 5=Poor)
- `B2` Comp to 1 yr ago, how is your health now (1=Much better,
2=Somewhat better, 3=About the same, 4=Somewhat worse, 5=Much worse)
- `B3A` Does health limit you in vigorous activity (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3B` Does your health limit you in moderate activity (1=Limited a
lot, 2=Limited a little, 3=Not limited)
- `B3C` Does health limit you in lift/carry groceries (1=Limited a
lot, 2=Limited a little, 3=Not limited)
- `B3D` Hlth limit you in climb sev stair flights (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3E` Health limit you in climb 1 stair flight (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3F` Health limit you in bend/kneel/stoop (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3G` Does health limit you in walking >1 mile (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3H` Hlth limit you in walking sevrl blocks (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3I` Does health limit you in walking 1 block (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B3J` Hlth limit you in bathing/dressing self (1=Limited a lot,
2=Limited a little, 3=Not limited)
- `B4A` Cut down wrk/act due to phys hlth-lst 4 wks (0=No, 1=Yes)
- `B4B` Accomplish less due to phys hlth-lst 4 wks (0=No, 1=Yes)
- `B4C` Lim wrk/act type due to phys hlth-lst 4 wks (0=No, 1=Yes)
- `B4D` Diff perf work due to phys hlth-lst 4 wks (0=No, 1=Yes)
- `B5A` Cut wrk/act time due to emot prbs-lst 4 wks (0=No, 1=Yes)
- `B5B` Accomplish ess due to emot probs-lst 4 wks (0=No, 1=Yes)
- `B5C` <carefl w/wrk/act due to em prb-lst 4 wks (0=No, 1=Yes)
- `B6` Ext phys/em intf w/norm soc act-lst 4 wk (1-Not al all,
2=Slightly, 3=Moderately, 4=Quite a bit, 5=Extremely)
- `B7` Amount of bodily pain-past 4 wks (1=None, 2=Very mild, 3=
Mild, 4=Moderate, 5= Severe, 6= Very severe)
- `B8` Amt pain interf with norm work-last 4 wks (1=Not at all, 2=A
little bit, 3=Moderately, 4=Quite a bit, 5=Extremely
- `B9A` Did you feel full of pep-past 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9B` Have you been nervous-past 4 wks (1=All of the time, 2=Most
of the time, 3 = Good bit of the time, 4=Some of the time, 5=A little
of time, 6=None of the time)
- `B9C` Felt nothing could cheer you-lst 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9D` Have you felt calm/peaceful-past 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9E` Did you have a lot of energy-past 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9F` Did you feel downhearted-past 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9G` Did you feel worn out-past 4 wks (1=All of the time, 2=Most
of the time, 3 = Good bit of the time, 4=Some of the time, 5=A little
of time, 6=None of the time)
- `B9H` Have you been a happy pers-past 4 wks (1=All of the time,
2=Most of the time, 3 = Good bit of the time, 4=Some of the time, 5=A
little of time, 6=None of the time)
- `B9I` Did you feel tired-past 4 wks (1=All of the time, 2=Most of
the time, 3 = Good bit of the time, 4=Some of the time, 5=A little of
time, 6=None of the time)
- `B10` Amyphys/em prb intf w/soc act-lst 4 wks (1All of the time,
2=Most of the time, 3=Some of the time, 4= A lttle of time, 5= Non of
the time)
- `B11A` I seem to get sick easier than oth peop (1=Definitely true,
2=Mostly True, 3=Don't know, 4=Mostly false, 5=Definitely false)
- `B11B` I am as healthy as anybody I know (1=Definitely true,
2=Mostly true, 3=Don't know, 4=Mostly false, 5=Definitely False)
- `B11C` I expect my health to get worse (1=Definitely true, 2=Mostly
true, 3=Don't know, 3=Mostly false, 5=Definitely false)
- `B11D` My health is excellent (1=Definitely true, 2=Mostly true,
3=Don't know, 4=Mostly false, 5=Definitely false)
- `C1A` Tolf by MD had seix, epil, convuls (0=No, 1=Yes)
- `C1B` Told by MD had asth, emphys, chr lung dis (0=No, 1=Yes)
- `C1C` Told by MD had MI (0=No, 1=Yes)
- `C1D` Told by MD had CHF (0=No, 1=Yes)
- `C1E` Told by MD had other heart dis (req med) (0=No, 1=Yes)
- `C1F` Told by MD had HBP (0=No, 1=Yes)
- `C1G` Told by MD had chronic liver disease (0=No, 1=Yes)
- `C1H` Told by MD had kidney failure (0=No, 1=Yes)
- `C1I` Told by MD had chronic art, osteoarth (0=No, 1=Yes)
- `C1J` Told by MD had peripheral neuropathy (0=No, 1=Yes)
- `C1K` Ever told by MD had cancer (0=No, 1=Yes)
- `C1L` Ever told by MD had diabetes (0=No, 1=Yes)
- `C1M` Ever told by MD had stroke (0=No, 1=Yes)
- `C2A1` Have you ever had skin infections (0=No, 1=Yes)
- `C2A2` Have you had skin infections-past 6 mos (0=No, 1=Yes)
- `C2B1` Have you ever had pneumonia (0=No, 1=Yes)
- `C2B2` Have you had pneumonia-past 6 mos (0=No, 1=Yes)
- `C2C1` Have you ever had septic arthritis (0=No, 1=Yes)
- `C2C2` Have you had septic arthritis-past 6 mos (0=No, 1=Yes)
- `C2D1` Have you ever had TB (0=No, 1=Yes)
- `C2D2` Have you had TB-last 6 mos (0=No, 1=Yes)
- `C2E1` Have you ever had endocarditis (0=No, 1=Yes)
- `C2E2` Have you had endocarditis-past 6 mos (0=No, 1=Yes)
- `C2F1` Have you ever had an ulcer (0=No, 1=Yes)
- `C2F2` Have you had an ulcer-past 6 mos (0=No, 1=Yes)
- `C2G1` Have you ever had pancreatitis (0=No, 1=Yes)
- `C2G2` Have you had pancreatitis-past 6 mos (0=No, 1=Yes)
- `C2H1` Ever had abdom pain req overnt hosp stay (0=No, 1=Yes)
- `C2H2` Abdom pain req ovrnt hosp stay-lst 6 mos (0=No, 1=Yes)
- `C2I1` Have you ever vomited blood (0=No, 1=Yes)
- `C2I2` Have you vomited blood-past 6 mos (0=No, 1=Yes)
- `C2J1` Have you ever had hepatitis (0=No, 1=Yes)
- `C2J2` Have you had hepatitis-past 6 mos (0=No, 1=Yes)
- `C2K1` Ever had blood clots in legs/lungs (0=No, 1=Yes)
- `C2K2` Blood clots in legs/lungs-past 6 mos (0=No, 1=Yes)
- `C2L1` Have you ever had osteomyelitis (0=No, 1=Yes)
- `C2L2` Have you had osteomyelitis-past 6 mos (0=No, 1=Yes)
- `C2M1` Chst pain using cocaine req ER/hosp (0=No, 1=Yes)
- `C2M2` Chst pain using coc req ER/hosp-lst 6 mos (0=No, 1=Yes)
- `C2N1` Have you ever had jaundice (0=No, 1=Yes)
- `C2N2` Have you had jaundice-past 6 mos (0=No, 1=Yes)
- `C2O1` Lower back pain > 3mos req med attn (0=No, 1=Yes)
- `C2O2` Lwr bck pain >3mos req med attn-last 6 mos (0=No, 1=Yes)
- `C2P1` Ever had seizures or convulsions (0=No, 1=Yes)
- `C2P2` Had seizures or convulsions-past 6 mos (0=No, 1=Yes)
- `C2Q1` Ever had drug/alc overdose req ER attn (0=No, 1=Yes)
- `C2Q2` Drug/alc overdose req ER attn (0=No, 1=Yes)
- `C2R1` Have you ever had a gunshot wound (0=No, 1=Yes)
- `C2R2` Had a gunshot wound-past 6 mos (0=No, 1=Yes)
- `C2S1` Have you ever had a stab wound (0=No, 1=Yes)
- `C2S2` Have you had a stab wound-past 6 mos (0=No, 1=Yes)
- `C2T1` Ever had accid/falls req med attn (0=No, 1=Yes)
- `C2T2` Had accid/falls req med attn-past 6 mos (0=No, 1=Yes)
- `C2U1` Ever had fract/disloc to bones/joints (0=No, 1=Yes)
- `C2U2` Fract/disloc to bones/joints-past 6 mos (0=No, 1=Yes)
- `C2V1` Ever had injury from traffic accident (0=No, 1=Yes)
- `C2V2` Had injury from traffic accid-past 6 mos (0=No, 1=Yes)
- `C2W1` Have you ever had a head injury (0=No, 1=Yes)
- `C2W2` Have you had a head injury-past 6 mos (0=No, 1=Yes)
- `C3A1` Have you ever had syphilis (0=No, 1=Yes)
- `C3A2` # times had syphilis
- `C3A3` Have you had syphilis in last 6 mos (0=No, 1=Yes)
- `C3B1` Have you ever had gonorrhea (0=No, 1=Yes)
- `C3B2` # times had gonorrhea
- `C3B3` Have you had gonorrhea in last 6 mos (0=No, 1=Yes)
- `C3C1` Have you ever had chlamydia (0=No, 1=Yes)
- `C3C2` # of times had Chlamydia
- `C3C3` Have you had chlamydia in last 6 mos (0=No, 1=Yes)
- `C3D` Have you ever had genital warts (0=No, 1=Yes)
- `C3E` Have you ever had genital herpes (0=No, 1=Yes)
- `C3F1` Have you ever had other STD's (not HIV) (0=No, 1=Yes)
- `C3F2` # of times had other STD's (not HIV)
- `C3F3` Had other STD's (not HIV)-last 6 mos (0=No, 1=Yes)
- `C3F_T` a factor with levels `7` `CRABS`
`CRABS - TRICHONOMIS` `CRABS, HEP B` `DOESNT KNOW NAME`
`HAS HAD ALL 3 ABC` `HEP B` `HEP B, TRICAMONAS` `HEP. B`
`HEPATITIS B` `HEPATITS B` `TRICHAMONAS VAGINALA`
`TRICHAMONIS` `TRICHOMONAS` `TRICHOMONIASIS` `TRICHOMONIS`
`TRICHOMONIS VAGINITI` `TRICHOMORAS` `TRICHONOMIS`
- `C3G1` Have you ever been tested for HIV/AIDS (0=No, 1=Yes)
- `C3G2` # times tested for HIV/AIDS
- `C3G3` Have you been tested for HIV/AIDS-lst 6 mos (0=No, 1=Yes)
- `C3G4` What was the result of last test (1=Positive, 2=Negative,
3=Refused, 4=Never got result, 5=Inconclusive)
- `C3H1` Have you ever had PID (0=No, 1=Yes)
- `C3H2` # of times had PID
- `C3H3` Have you had PID in last 6 mos (0=No, 1=Yes)
- `C3I` Have you ever had a Pap smear (0=No, 1=Yes)
- `C3J` Have you had a Pap smear in last 3 years (0=No, 1=Yes)
- `C3K` Are you pregnant (0=No, 1=Yes)
- `C3K_M` How many mos pregnant
- `D1` # of times hospitalized for med probs
- `D2` Take prescr med regularly for phys prob (0=No, 1=Yes)
- `D3` # days had med probs-30 days bef detox
- `D4` How bother by med prob-30days bef detox (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `D5` How import is trtmnt for these med probs (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `E2A` Detox prog for alc or drug prob-lst 6 mos (0=No, 1=Yes)
- `E2B` # times entered a detox prog-lst 6 mos
- `E2C` # nights ovrnight in detox prg-lst 6 mos
- `E3A` Holding unit for drug/alc prob-lst 6 mos (0=No, 1=Yes)
- `E3B` # times in holding unit-lst 6 mos
- `E3C` # total nights in holding unit-lst 6 mos
- `E4A` In halfway hse/resid facil-lst 6 mos (0=No, 1=Yes)
- `E4B` # times in hlfwy hse/res facil-lst 6 mos
- `E4C` Ttl nites in hlfwy hse/res fac-last 6 mos
- `E5A` In day trtmt prg for alcohol/drug-lst 6 mos (0=No, 1=Yes)
- `E5B` Total # days in day trtmt prg-lst 6 mos
- `E6` In methadone maintenance prg-lst 6 mos (0=No, 1=Yes)
- `E7A` Visit outpt prg subst ab couns-lst 6 mos (0=No, 1=Yes)
- `E7B` # visits outpt prg subst ab couns-lst 6 mos
- `E8A1` Saw MD/H care wkr re alcohol/drugs-lst 6 mos (0=No, 1=Yes)
- `E8A2` Saw Prst/Min/Rabbi re alcohol/drugs-lst 6 mos (0=No, 1=Yes)
- `E8A3` Employ Asst Prg for alcohol/drug prb-lst 6 mos (0=No, 1=Yes)
- `E8A4` Oth source cnsl for alcohol/drug prb-lst 6 mos (0=No, 1=Yes)
- `E9A` AA/NA/slf-hlp for drug/alcohol/emot-lst 6 mos (0=No, 1=Yes)
- `E9B` How often attend AA/NA/slf-hlp-lst 6 mos (1=Daily, 2=2-3
Times/week, 3=Weekly, 4=Every 2 weeks, 5=Once/month)
- `E10A` have you been to med clinic-lst 6 mos (0=No, 1=Yes)
- `E10B1` # x visit ment hlth clin/prof-lst 6 mos
- `E10B2` # x visited med clin/priv MD-lst 6 mos
- `E10C19` Visited private MD-last 6 mos (0=No, 1=Yes)
- `E11A` Did you stay ovrnite/+ in hosp-lst 6 mos (0=No, 1=Yes)
- `E11B` # times ovrnight/+ in hosp-last 6 mos
- `E11C` Total # nights in hosp-last 6 mos
- `E12A` Visited Hosp ER for med care-past 6 mos (0=No, 1=Yes)
- `E12B` # times visited hosp ER-last 6 mos
- `E13` Tlt # visits to MDs-lst 2 wks bef detox
- `E14A` Recd trtmt from acupuncturist-last 6 mos (0=No, 1=Yes)
- `E14B` Recd trtmt from chiropractor-last 6 mos (0=No, 1=Yes)
- `E14C` Trtd by hol/herb/hom med prac-lst 6 mos (0=No, 1=Yes)
- `E14D` Recd trtmt from spirit healer-lst 6 mos (0=No, 1=Yes)
- `E14E` Have you had biofeedback-last 6 mos (0=No, 1=Yes)
- `E14F` Have you underwent hypnosis-lst 6 mos (0=No, 1=Yes)
- `E14G` Received other treatment-last 6 mos (0=No, 1=Yes)
- `E15A` Tried to get subst ab services-lst 6 mos (0=No, 1=Yes)
- `E15B` Always able to get subst ab services (0=No, 1=Yes)
- `E15C1` I could not pay for services (0=No, 1=Yes)
- `E15C2` I did not know where to go for help (0=No, 1=Yes)
- `E15C3` Couldn't get to services due to transp prob (0=No, 1=Yes)
- `E15C4` The office/clinic hrs were inconvenient (0=No, 1=Yes)
- `E15C5` Didn't speak/understnd Englsh well enough (0=No, 1=Yes)
- `E15C6` Afraid other might find out about prob (0=No, 1=Yes)
- `E15C7` My substance abuse interfered (0=No, 1=Yes)
- `E15C8` Didn't have someone to watch my children (0=No, 1=Yes)
- `E15C9` I did not want to lose my job (0=No, 1=Yes)
- `E15C10` My insurance didn't cover services (0=No, 1=Yes)
- `E15C11` There were no beds available at the prog (0=No, 1=Yes)
- `E15C12` Other reason not get sub ab services (0=No, 1=Yes)
- `E16A1` I cannot pay for services (0=No, 1=Yes)
- `E16A2` I am not eligible for free care (0=No, 1=Yes)
- `E16A3` I do not know where to go (0=No, 1=Yes)
- `E16A4` Can't get to services due to trans prob (0=No, 1=Yes)
- `E16A5` Office/clinic hours are inconvenient (0=No,
1=Yes)
- `E16A6` I don't speak/understnd enough English (0=No, 1=Yes)
- `E16A7` Afraid othrs find out about my hlth prob (0=No, 1=Yes)
- `E16A8` My substance abuse interferes (0=No, 1=Yes)
- `E16A9` I don't have someone to watch my childrn (0=No, 1=Yes)
- `E16A10` I do not want to lose my job (0=No, 1=Yes)
- `E16A11` My insurance doesn't cover charges (0=No, 1=Yes)
- `E16A12` I do not feel I need a regular MD (0=No, 1=Yes)
- `E16A13` Other reasons don't have regular MD (0=No, 1=Yes)
- `E18A` I could not pay for services (0=No, 1=Yes)
- `E18B` I did not know where to go for help (0=No, 1=Yes)
- `E18C` Couldn't get to services due to transp prob (0=No, 1=Yes)
- `E18D` The office/clinic hrs were inconvenient (0=No, 1=Yes)
- `E18F` Afraid others might find out about prob (0=No, 1=Yes)
- `E18G` My substance abuse interfered (0=No, 1=Yes)
- `E18H` Didn't have someone to watch my children (0=No, 1=Yes)
- `E18I` I did not want to lose my job (0=No, 1=Yes)
- `E18J` My insurance didn't cover services (0=No, 1=Yes)
- `E18K` There were no beds available at the prog (0=No, 1=Yes)
- `E18L` I do not need substance abuse services (0=No, 1=Yes)
- `E18M` Other reason not get sub ab services (0=No, 1=Yes)
- `F1A` Bothered by thngs not gen bothered by (0=Rarely/never,
1=Some of the time, 2=Occas/moderately, 3=Most of the time)
- `F1B` My appetite was poor (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1C` Couldn't shake blues evn w/fam+frnds hlp (0=Rarely/never,
1=Some of the time, 2=Occas/moderately, 3=Most of the time)
- `F1D` Felt I was just as good as other people (0=Rarely/never,
1=Some of the time, 2=Occas/moderately, 3=Most of the time)
- `F1E` Had trouble keeping mind on what doing (0=Rarely/never,
1=Some of the time, 2=Occas/moderately, 3=Most of the time)
- `F1F` I felt depressed (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1G` I felt everything I did was an effort (0=Rarely/never, 1=Some
of the time, 2=Occas/moderately, 3=Most of the time)
- `F1H` I felt hopeful about the future (0=Rarely/never, 1=Some of
the time, 2=Occas/moderately, 3=Most of the time)
- `F1I` I thought my life had been a failure (0=Rarely/never, 1=Some
of the time, 2=Occas/moderately, 3=Most of the time)
- `F1J` I felt fearful (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1K` My sleep was restless (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1L` I was happy (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1M` I talked less than usual (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1N` I felt lonely (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1O` People were unfriendly (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1P` I enjoyed life (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1Q` I had crying spells (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1R` I felt sad (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `F1S` I felt that people dislike me (0=Rarely/never, 1=Some of the
time, 2=Occas/moderately, 3=Most of the time)
- `F1T` I could not get going (0=Rarely/never, 1=Some of the time,
2=Occas/moderately, 3=Most of the time)
- `G1A` Diff contr viol beh for sig time per evr (0=No, 1=Yes)
- `G1A_30` Diff contr viol beh-sig per lst 30 days (0=No, 1=Yes)
- `G1B` Ever had thoughts of suicide (0=No, 1=Yes)
- `G1B_30` Had thoughts of suicide-lst 30 days (0=No, 1=Yes)
- `G1C` Attempted suicide ever (0=No, 1=Yes)
- `G1C_30` Attempted suicide-lst 30 days (0=No, 1=Yes)
- `G1D` Prescr med for pst/emot prob ever (0=No, 1=Yes)
- `G1D_30` Prescr med for psy/emot prob-lst 30 days (0=No, 1=Yes)
- `H1_30` # days in past 30 bef detox used alcohol
- `H1_LT` # yrs regularly used alcohol
- `H1_RT` Route of administration use alcohol (0=N/A. 1=Oral,
2=Nasal, 3=Smoking, 4=Non-IV injection, 5=IV)
- `H2_30` # days in 30 bef detox use alc to intox
- `H2_LT` # yrs regularly used alcohol to intox
- `H2_RT` Route of admin use alcohol to intox (0=N/A. 1=Oral,
2=Nasal, 3=Smoking, 4=Non-IV injection, 5=IV)
- `H3_30` # days in past 30 bef detox used heroin
- `H3_LT` # yrs regularly used heroin
- `H3_RT` Route of administration of heroin (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H4_30` # days used methadone-lst 30 bef detox
- `H4_LT` # yrs regularly used methadone
- `H4_RT` Route of administration of methadone (0=N/A. 1=Oral,
2=Nasal, 3=Smoking, 4=Non-IV injection, 5=IV)
- `H5_30` # days used opi/analg-lst 30 bef detox
- `H5_LT` # yrs regularly used oth opiates/analg
- `H5_RT` Route of admin of oth opiates/analg (0=N/A. 1=Oral,
2=Nasal, 3=Smoking, 4=Non-IV injection, 5=IV)
- `H6_30` # days in past 30 bef detox used barbit
- `H6_LT` # yrs regularly used barbiturates
- `H6_RT` Route of admin of barbiturates (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H7_30` # days used sed/hyp/trnq-lst 30 bef det
- `H7_LT` # yrs regularly used sed/hyp/trnq
- `H7_RT` Route of admin of sed/hyp/trnq (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H8_30` # days in lst 30 bef detox used cocaine
- `H8_LT` # yrs regularly used cocaine
- `H8_RT` Route of admin of cocaine (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H9_30` # days in lst 30 bef detox used amphet
- `H9_LT` # yrs regularly used amphetamines
- `H9_RT` Route of admin of amphetamines (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H10_30` # days in lst 30 bef detox used cannabis
- `H10_LT` # yrs regularly used cannabis
- `H10_RT` Route of admin of cannabis (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H11_30` # days in lst 30 bef detox used halluc
- `H11_LT` # yrs regularly used hallucinogens
- `H11_RT` Route of admin of hallucinogens (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H12_30` # days in lst 30 bef detox used inhalant
- `H12_LT` # yrs regularly used inhalants
- `H12_RT` Route of admin of inhalants (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H13_30` # days used >1 sub/day-lst 30 bef detox
- `H13_LT` # yrs regularly used >1 subst/day
- `H13_RT` Route of admin of >1 subst/day (0=N/A. 1=Oral, 2=Nasal,
3=Smoking, 4=Non-IV injection, 5=IV)
- `H14` Accord to interview w/c subst is main prob (0=No problem,
1=Alcohol, 2=Alcohol to intox, 3=Heroin, 4=Methadone, 5=Oth
opiate/analg, 6=Barbiturates, 7=Sed/hyp/tranq, 8=Cocaine,
9=Amphetamines, 10=Marij/cannabis)
- `H15A` # times had alchol DTs
- `H15B` # times overdosed on drugs
- `H16A` $ spent on alc-lst 30 days bef detox
- `H16B` $ spent on drugs-lst 30 days bef detox
- `H17A` # days had alc prob-lst 30 days bef det
- `H17B` # days had drug prob-lst 30 days bef det
- `H18A` How troubled by alc probs-lst 30 days (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `H18B` How troubled by drug probs-lst 30 days (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `H19A` How import is trtmnt for alc probs now (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `H19B` How import is trtmnt for drug probs now (0=Not at all,
1=Slightly, 2=Moderately, 3=Considerably, 4=Extremely)
- `I1` Avg # drinks in lst 30 days bef detox
- `I2` Most drank any 1 day in lst 30 bef detox
- `I3` On days used heroin, avg # bags used
- `I4` Most bgs heroin use any 1 day-30 bef det
- `I5` Avg $ amt of heroin used per day
- `I6A` On days used cocaine, avg # bags used
- `I6B` On days used cocaine, avg # rocks used
- `I7A` Mst bgs cocaine use any 1 day-30 bef det
- `I7B` Mst rcks cocaine use any 1 day-30 bef det
- `I8` Avg $ amt of cocaine used per day
- `J1` Evr don't stop using cocaine when should (0=No, 1=Yes)
- `J2` Ever tried to cut down on cocaine (0=No, 1=Yes)
- `J3` Does cocaine take up a lot of your time (0=No, 1=Yes)
- `J4` Need use > cocaine to get some feeling (0=No, 1=Yes)
- `J5A` Get phys sick when stop using cocaine (0=No, 1=Yes)
- `J5B` Ever use cocaine to prevent getting sick (0=No, 1=Yes)
- `J6` Ever don't stop using heroin when should (0=No, 1=Yes)
- `J7` Ever tried to cut down on heroin (0=No, 1=Yes)
- `J8` Does heroin take up a lot of your time (0=No, 1=Yes)
- `J9` Need use > heroin to get some feeling (0=No, 1=Yes)
- `J10A` Get phys sick when stop using heroin (0=No, 1=Yes)
- `J10B` Ever use heroin to prevent getting sick (0=No, 1=Yes)
- `K1` Do you currently smoke cigarettes (1=Yes-every day, 2=Yes-some
days, 3=No-former smoker, 4=No-never>100 cigs)
- `K2` Avg # cigarettes smoked per day
- `K3` Considering quitting cigs w/in next 6 mo (0=No, 1=Yes)
- `L1` How often drink last time drank (1=To get high/less, 2=To get
drunk, 3=To pass out)
- `L2` Often have hangovrs Sun or Mon mornings (0=No, 1=Yes)
- `L3` Have you had the shakes when sobering (0=No, 1=Sometimes,
2=Alm evry time drink)
- `L4` Do you get phys sick as reslt of drinking (0=No, 1=Sometimes,
2=Alm evry time drink)
- `L5` Have you had the DTs (0=No, 1=Once, 2=Several times)
- `L6` When drink do you stumble/stagger/weave (0=No, 1=Sometimes,
2=Often)
- `L7` D/t drinkng felt overly hot/sweaty (0=No, 1=Once, 2=Several
times)
- `L8` As result of drinkng saw thngs not there (0=No, 1=Once,
2=Several times)
- `L9` Panic because fear not have drink if need it (0=No, 1=Yes)
- `L10` Have had blkouts as result of drinkng (0=No, never,
1=Sometimes, 2=Often, 3=Alm evry time drink)
- `L11` Do you carry bottle or keep close by (0=No, 1=Some of the
time, 2=Most of the time)
- `L12` After abstin end up drink heavily again (0=No, 1=Sometimes,
2=Almost evry time)
- `L13` Passed out due to drinking-lst 12 mos (0=No, 1=Once, 2=More
than once)
- `L14` Had convuls following period of drinkng (0=No, 1=Once,
2=Several times)
- `L15` Do you drink throughout the day (0=No, 1=Yes)
- `L16` Aftr drinkng heavily was thinkng unclear (0=No, 1=Yes, few
hrs, 2=Yes,1-2 days, 3=Yes, many days)
- `L17` D/t drinkng felt heart beat rapidly (0=No, 1=Once, 2=Several
times)
- `L18` Do you constntly think about drinkng/alc (0=No, 1=Yes)
- `L19` D/t drinkng heard things not there (0=No, 1=Once, 2= Several
times)
- `L20` Had weird/fright sensations when drinkng (0=No, 1=Once or
twice, 2=Often)
- `L21` When drinkng felt things crawl not there (0=No, 1=Once,
2=Several times)
- `L22` With respect to blackouts (0=Never had one, 1=Had for <1hr,
2=Had several hrs, 3=Had for day/+)
- `L23` Ever tried to cut down on drinking & failed (0=No, 1=Once,
2=Several times)
- `L24` Do you gulp drinks (0=No, 1=Yes)
- `L25` After taking 1 or 2 drinks can you stop (0=No, 1=Yes)
- `M1` Had hangover/felt bad aftr using alcohol/drugs (0=No, 1=Yes)
- `M2` Felt bad about self because of alcohol/drug use (0=No, 1=Yes)
- `M3` Missed days wrk/sch because of alcohol/drug use (0=No, 1=Yes)
- `M4` Fam/frinds worry/compl about alcohol/drug use (0=No, 1=Yes)
- `M5` I have enjoyed drinking/using drugs (0=No, 1=Yes)
- `M6` Qual of work suffered because of alcohol/drug use (0=No,
1=Yes)
- `M7` Parenting ability harmed by alcohol/drug use (0=No, 1=Yes)
- `M8` Trouble sleeping/nightmares aftr alcohol/drugs (0=No, 1=Yes)
- `M9` Driven motor veh while undr inf alcohol/drugs (0=No, 1=Yes)
- `M10` Using alcohol/1 drug caused > use othr drugs (0=No, 1=Yes)
- `M11` I have been sick/vomited aft alcohol/drug use (0=No, 1=Yes)
- `M12` I have been unhappy because of alcohol/drug use (0=No, 1=Yes)
- `M13` Lost weight/eaten poorly due to alcohol/drug use (0=No,
1=Yes)
- `M14` Fail to do what expected due to alcohol/drug use (0=No,
1=Yes)
- `M15` Using alcohol/drugs has helped me to relax (0=No, 1=Yes)
- `M16` Felt guilt/ashamed because of my alc drug use (0=No, 1=Yes)
- `M17` Said/done emarras thngs when on alcohol/drug (0=No, 1=Yes)
- `M18` Personality changed for worse on alcohol/drug (0=No, 1=Yes)
- `M19` Taken foolish risk when using alcohol/drugs (0=No, 1=Yes)
- `M20` Gotten into trouble because of alcohol/drug use (0=No, 1=Yes)
- `M21` Said cruel things while using alcohol/drugs (0=No, 1=Yes)
- `M22` Done impuls thngs regret due to alcohol/drug use (0=No,
1=Yes)
- `M23` Gotten in phys fights when use alcohol/drugs (0=No, 1=Yes)
- `M24` My phys health was harmed by alcohol/drug use (0=No, 1=Yes)
- `M25` Using alcohol/drug helped me have more + outlook (0=No,
1=Yes)
- `M26` I have had money probs because of my alcohol/drug use (0=No,
1=Yes)
- `M27` My love relat harmed due to my alcohol/drug use (0=No, 1=Yes)
- `M28` Smoked tobacco more when using alcohol/drugs (0=No, 1=Yes)
- `M29` My phys appearance harmed by alcohol/drug use (0=No, 1=Yes)
- `M30` My family hurt because of my alc drug use (0=No, 1=Yes)
- `M31` Close relationsp damaged due to alcohol/drug use (0=No,
1=Yes)
- `M32` Spent time in jail because of my alcohol/drug use (0=No,
1=Yes)
- `M33` My sex life suffered due to my alcohol/drug use (0=No, 1=Yes)
- `M34` Lost interst in activity due to my alcohol/drug use (0=No,
1=Yes)
- `M35` Soc life> enjoyable when using alcohol/drug (0=No, 1=Yes)
- `M36` Spirit/moral life harmed by alcohol/drug use (0=No, 1=Yes)
- `M37` Not had kind life want due to alcohol/drug use (0=No, 1=Yes)
- `M38` My alcohol/drug use in way of personal growth (0=No, 1=Yes)
- `M39` My alcohol/drug use damaged soc life/reputat (0=No, 1=Yes)
- `M40` Spent/lost too much $ because alcohol/drug use (0=No, 1=Yes)
- `M41` Arrested for DUI of alc or oth drugs (0=No, 1=Yes)
- `M42` Arrested for offenses rel to alcohol/drug use (0=No, 1=Yes)
- `M43` Lost marriage/love relat due to alcohol/drug use (0=No,
1=Yes)
- `M44` Susp/fired/left job/sch due to alcohol/drug use (0=No, 1=Yes)
- `M45` I used drugs moderately w/o having probs (0=No, 1=Yes)
- `M46` I have lost a friend due to my alcohol/drug use (0=No, 1=Yes)
- `M47` Had an accident while using alcohol/drugs (0=No, 1=Yes)
- `M48` Phys hurt/inj/burned when using alcohol/drugs (0=No, 1=Yes)
- `M49` I injured someone while using alcohol/drugs (0=No, 1=Yes)
- `M50` Damaged things/prop when using alcohol/drugs (0=No, 1=Yes)
- `N1A` My friends give me the moral support I need (0=No, 1=Yes)
- `N1B` Most people closer to friends than I am (0=No, 1=Yes)
- `N1C` My friends enjoy hearing what I think (0=No, 1=Yes)
- `N1D` I rely on my friends for emot support (0=No, 1=Yes)
- `N1E` Friend go to when down w/o feel funny later (0=No, 1=Yes)
- `N1F` Frnds and I open re what thnk about things (0=No, 1=Yes)
- `N1G` My friends sensitive to my pers needs (0=No, 1=Yes)
- `N1H` My friends good at helping me solve probs (0=No, 1=Yes)
- `N1I` have deep sharing relat w/ a # of frnds (0=No, 1=Yes)
- `N1J` When confide in frnds makes me uncomfort (0=No, 1=Yes)
- `N1K` My friends seek me out for companionship (0=No, 1=Yes)
- `N1L` Not have as int relat w/frnds as others (0=No, 1=Yes)
- `N1M` Recent good idea how to do somethng frm frnd (0=No, 1=Yes)
- `N1N` I wish my friends were much different (0=No, 1=Yes)
- `N2A` My family gives me the moral support I need (0=No, 1=Yes)
- `N2B` Good ideas of how do/make thngs from fam (0=No, 1=Yes)
- `N2C` Most peop closer to their fam than I am (0=No, 1=Yes)
- `N2D` When confide make close fam membs uncomf (0=No, 1=Yes)
- `N2E` My fam enjoys hearing about what I think (0=No, 1=Yes)
- `N2F` Membs of my fam share many of my intrsts (0=No, 1=Yes)
- `N2G` I rely on my fam for emot support (0=No, 1=Yes)
- `N2H` Fam memb go to when down w/o feel funny (0=No, 1=Yes)
- `N2I` Fam and I open about what thnk about thngs (0=No, 1=Yes)
- `N2J` My fam is sensitive to my personal needs (0=No, 1=Yes)
- `N2K` Fam memb good at helping me solve probs (0=No, 1=Yes)
- `N2L` Have deep sharing relat w/# of fam membs (0=No, 1=Yes)
- `N2M` Makes me uncomf to confide in fam membs (0=No, 1=Yes)
- `N2N` I wish my family were much different (0=No, 1=Yes)
- `O1A` # people spend tx w/who drink alc (1=None, 2= A few, 3=About
half, 4= Most, 5=All)
- `O1B` # people spend tx w/who are heavy drinkrs (1=None, 2= A few,
3=About half, 4= Most, 5=All)
- `O1C` # people spend tx w/who use drugs (1=None, 2= A few, 3=About
half, 4= Most, 5=All)
- `O1D` # peop spend tx w/who supprt your abstin (1=None, 2= A few,
3=About half, 4= Most, 5=All)
- `O2` Does live-in part/spouse drink/use drugs (0=No, 1=Yes, 2=N/A)
- `P1A` Phys abuse/assaul by fam memb/pers know (0=No, 1=Yes, 7=Not
sure)
- `P1B` Age first phys assaulted by pers know
- `P1C` Phys assaulted by pers know-last 6 mos (0=No, 1=Yes)
- `P2A` Phys abuse/assaul by stranger (0=No, 1=Yes, 7=Not sure)
- `P2B` Age first phys assaulted by stranger
- `P2C` Phys assaulted by stranger-last 6 mos (0=No, 1=Yes)
- `P3` Using drugs/alc when phys assaulted (1=Don't know, 2=Never,
3=Some cases, 4=Most cases, 5=All cases, 9=Never assaulted)
- `P4` Pers who phys assault you using alcohol/drugs (1=Don't know,
2=Never, 3=Some cases, 4=Most cases, 5=All cases, 9=Never assaulted)
- `P5A` Sex abuse/assual by fam memb/pers know (0=No, 1= Yes, 7=Not
sure)
- `P5B` Age first sex assaulted by pers know
- `P5C` Sex assaulted by pers know-last 6 mos (0=No, 1=Yes)
- `P6A` Sex abuse/assaul by stranger (0=No, 1=Yes, 7=Not sure)
- `P6B` Age first sex assaulted by stranger
- `P6C` Sex assaulted by stranger-last 6 mos (0=No, 1=Yes)
- `P7` Using drugs/alc when sex assaulted (1=Don't know, 2=Never,
3=Some cases, 4=Most cases, 5=All cases, 9=Never assaulted)
- `P8` Person who sex assaulted you using alcohol/drugs (1=Don't
know, 2=Never, 3=Some cases, 4=Most cases, 5=All cases, 9=Never
assaulted)
- `Q1A` Have you ever injected drugs (0=No, 1=Yes)
- `Q1B` Have you injected drugs-lst 6 mos (0=No, 1=Yes)
- `Q2` Have you shared needles/works-last 6 mos (0=No/Not shot up,
3=Yes)
- `Q3` # people shared needles w/past 6 mos (0=No/Not shot up, 1=1
other person, 2=2-3 diff people, 3=4/+ diff people)
- `Q4` How often been to shoot gall/hse-lst 6 mos (0=Never, 1=Few
times or less, 2= Few times/month, 3= Once or more/week)
- `Q5` How often been to crack house-last 6 mos (0=Never, 1=Few times
or less, 2=Few times/month, 3=Once or more/week)
- `Q6` How often shared rinse-water-last 6 mos (0=Nevr/Not shot up,
1=Few times or less, 2=Few times/month, 3=Once or more/week)
- `Q7` How often shared a cooker-last 6 mos (0=Nevr/Not shot up,
1=Few times or less, 2=Few times/month, 3=Once or more/week)
- `Q8` How often shared a cotton-last 6 mos (0=Nevr/Not shot up,
1=Few times or less, 2=Few times/month, 3=Once or more/week)
- `Q9` How often use syringe to div drugs-lst 6 mos (0=Nevr/Not shot
up, 1=Few times or less, 2=Few times/month, 3=Once or more/week)
- `Q10` How would you describe yourself (0=Straight, 1=Gay/bisexual)
- `Q11` # men had sex w/in past 6 months (0=0 men, 1=1 man, 2=2-3
men, 3=4+ men)
- `Q12` # women had sex w/in past 6 months (0=0 women, 1=1 woman,
2=2-3 women, 3=4+ women)
- `Q13` # times had sex In past 6 mos (0=Never, 1=Few times or less,
2=Few times/month, 3=Once or more/week)
- `Q14` How often had sex to get drugs-last 6 mos (0=Never, 1=Few
times or less, 2=Few times/month, 3=Once or more/week)
- `Q15` How often given drugs to have sex-lst 6 mos (0=Never, 1=Few
times or less, 2=Few times/month, 3=Once or more/week)
- `Q16` How often were you paid for sex-lst 6 mos (0=Never, 1=Few
times or less, 2=Few times/month, 3=Once or more/week)
- `Q17` How often you pay pers for sex-lst 6 mos (0=Never, 1=Few
times or less, 2=Few times/month, 3=Once or more/week)
- `Q18` How often use condoms during sex-lst 6 mos (0=No sex/always,
1=Most of the time, 2=Some of the time, 3=None of the time)
- `Q19` Condoms are too much of a hassle to use (1=Strongly disagree,
2=Disagree, 3= Agree, 4=Strongly agree)
- `Q20` Safer sex is always your responsibility (1=Strongly disagree,
2=Disagree, 3= Agree, 4=Strongly agree)
- `R1A` I really want to change my alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1B` Sometimes I wonder if I'm an alcohol/addict (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1C` If I don't chng alcohol/drug probs will worsen (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1D` I started making changes in alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1E` Was using too much but managed to change (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1F` I wonder if my alcohol/drug use hurting othrs (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1G` I am a prob drinker or have drug prob (1=Strongly disagree,
2=Disagree, 3= Agree, 4=Strongly agree)
- `R1H` Already doing thngs to chnge alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1I` have changed use-trying to not slip back (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1J` I have a serious problem w/ alcohol/drugs (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1K` I wonder if I'm in contrl of alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1L` My alcohol/drug use is causing a lot of harm (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1M` Actively cutting down/stopping alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1N` Want help to not go back to alcohol/drugs (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1O` I know that I have an alcohol/drug problem (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1P` I wonder if I use alcohol/drugs too much (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1Q` I am an alcoholic or drug addict (1=Strongly disagree,
2=Disagree, 3= Agree, 4=Strongly agree)
- `R1R` I am working hard to change alcohol/drug use (1=Strongly
disagree, 2=Disagree, 3= Agree, 4=Strongly agree)
- `R1S` Some changes-want help from going back (1=Strongly disagree,
2=Disagree, 3= Agree, 4=Strongly agree)
- `S1A` At interview pt obviously depressed/withdrawn (0=No, 1=Yes)
- `S1B` At interview pt obviously hostile (0=No, 1=Yes)
- `S1C` At interview pt obviously anx/nervous (0=No, 1=Yes)
- `S1D` Trouble w/real tst/thght dis/par at interview (0=No, 1=Yes)
- `S1E` At interview pt trbl w/ compr/concen/rememb (0=No, 1=Yes)
- `S1F` At interview pt had suicidal thoughts (0=No, 1=Yes)
- `T1` Have used alc since leaving River St. (0=No, 1=Yes)
- `T1B` # days in row continued to drink
- `T1C` Longest period abstain-lst 6 mos (alc)
- `T2` Have used heroin since leaving River St (0=No, 1=Yes)
- `T2B` # days in row continued to use heroin
- `T2C` Longest period abstain-lst 6 mos (heroin)
- `T3` Have used cocaine since leaving River St (0=No, 1=Yes)
- `T3B` # days in row continued to use cocaine
- `T3C` Lngest period abstain-lst 6 mos (cocaine)
- `U1` It is important to have a regular MD (1=Strongly agree,
2=Agree, 3=Uncertain, 4=Disagree, 5=Strongly Disagree)
- `U2A` I cannot pay for services (0=No, 1=Yes)
- `U2B` I am not eligible for free care (0=No, 1=Yes)
- `U2C` I do not know where to go (0=No, 1=Yes)
- `U2D` Can't get services due to transport probs (0=No, 1=Yes)
- `U2E` Office/clinic hours are inconvenient (0=No, 1=Yes)
- `U2F` I do not speak/understand English well (0=No, 1=Yes)
- `U2G` Afraid others discover hlth prb I have (0=No, 1=Yes)
- `U2H` My substance abuse interferes (0=No, 1=Yes)
- `U2I` I do not have a babysitter (0=No, 1=Yes)
- `U2J` I do not want to lose my job (0=No, 1=Yes)
- `U2K` My insurance does not cover services (0=No, 1=Yes)
- `U2L` Medical care is not important to me (0=No, 1=Yes)
- `U2M` I do not have time (0=No, 1=Yes)
- `U2N` Med staff do not treat me with respect (0=No, 1=Yes)
- `U2O` I do not trust my doctors or nurses (0=No, 1=Yes)
- `U2P` Often been unsatisfied w/my med care (0=No, 1=Yes)
- `U2Q` Other reason hard to get regular med care (0=No, 1=Yes)
- `U2Q_T` a factor with many levels
- `U2R` a factor with levels `7` `A` `B` `C` `D` `E`
`F` `G` `H` `I` `J` `K` `L` `M` `N` `O` `P`
`Q`
- `U3A` Has MD evr talked to you about drug use (0=No, 1=Yes)
- `U3B` Has MD evr talked to you about alc use (0=No, 1=Yes)
- `U4` Is there an MD you consider your regular MD (0=No, 1=Yes)
- `U5` Have you seen any MDs in last 6 mos (0=No, 1=Yes)
- `U6A` Would you go to this MD if med prb not emer (0=No, 1=Yes)
- `U6B` Think one of these could be your regular MD (0=No, 1=Yes)
- `PCP_ID` a numeric vector
- `U7A` What type of MD is your regular MD/this MD (1=OB/GYN,
2=Family medicine, 3=Pediatrician, 4=Adolescent medicine, 5=Internal
medicine, 6=AIDS doctor, 7=Asthma doctor, 8=Pulmonary doctor,
9=Cardiologist, 10=Gastroen)
- `U7A_T` a factor with levels `ARTHRITIS DOCTOR` `CHIROPRACTOR`
`COCAINE STUDY` `DETOX DOCTOR` `DO` `EAR DOCTOR`
`EAR SPECIALIST` `EAR, NOSE, & THROAT.` `EAR/NOSE/THROAT`
`ENT` `FAMILY PHYSICIAN` `GENERAL MEDICINE`
`GENERAL PRACTICE` `GENERAL PRACTIONER` `GENERAL PRACTITIONER`
`HEAD & NECK SPECIALIST` `HERBAL/HOMEOPATHIC/ACUPUNCTURE`
`ID DOCTOR` `MAYBE GENERAL PRACTITIONER` `MEDICAL STUDENT`
`NEUROLOGIST` `NURSE` `NURSE PRACTICIONER`
`NURSE PRACTITIONER` `ONCOLOGIST` `PRENATAL` `PRIMARY`
`PRIMARY CAAE` `PRIMARY CARE` `PRIMARY CARE DOCTOR`
`PRIMERY CARE` `THERAPIST` `UROLOGIST` `WOMENS CLINIC BMC`
- `U8A` Only saw this person once (=Only saw once)
- `U8B` Saw this person for <6 mos (1=<6 mos)
- `U8C` Saw tis person for 6 mos-1year (2=Betwn 6 mos & 1 yr)
- `U8D` Saw this person for 1-2 years (3=1-2 years)
- `U8E` Saw this person for 3-5 years (4=3-5 years)
- `U8F` Saw this person for more than 5 years (5=>5 years)
- `U10A` # times been to regular MDs office-pst 6 mos
- `U10B` # times saw regular MD in office-pst 6 mos
- `U10C` # times saw oth prof in office-pst 6 mos
- `U11` Rate convenience of MD office location (1=Very poor, 2=Poor,
3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U12` Rate hours MD office open for med appts (1=Very poor, 2=Poor,
3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U13` Usual wait for appt when sick (unsched) (1=Very poor, 2=Poor,
3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U14` Time wait for appt to start at MD office (1=Very poor,
2=Poor, 3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U15A` DO you pay for any/all of MD visits (0=No, 1=Yes)
- `U15B` How rate amt of $ you pay for MD visits (1=Very poor,
2=Poor, 3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U16A` Do you pay for any/all of prescript meds (0=No, 1=Yes)
- `U16B` Rate amt $ pay for meds/prescript trtmnts (1=Very poor,
2=Poor, 3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U17` Ever skip meds/trtmnts because too expensive (1=Yes, often,
2=Yes, occasionally, 3=No, never)
- `U18A` Ability to reach MC office by phone (1=Very poor, 2=Poor,
3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U18B` Ability to speak to MD by phone if need (1=Very poor,
2=Poor, 3=Fair, 4=Good, 5=Very good, 6=Excellent)
- `U19` How often see regular MD when have regular check-up
(1=Always, 2=Almost always, 3=A lot of the time, 4=Some of the time,
5=Almost never, 6=Never)
- `U20` When sick + go to MD how often see regular MD (1=Always,
2=Almost always, 3=A lot of the time, 4=Some of the time, 5=Almost
never, 6=Never)
- `U21A` How thorough MD exam to check hlth prb (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U21B` How often question if MD diagnosis right (1=Always, 2=Almost
always, 3=A lot of the time, 4=Some of the time, 5=Almost never,
6=Never)
- `U22A` Thoroughness of MD questions re symptoms (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U22B` Attn MD gives to what you have to say (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U22C` MD explanations of hlth prbs/trtmnts need (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U22D` MD instrcts re sympt report/further care (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U22E` MD advice in decisions about your care (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U23` How often leave MD office w/unanswd quests (1=Always,
2=Almost always, 3=A lot of the time, 4=Some of the time, 5=Almost
never, 6=Never)
- `U24A` Amount of time your MD spends w/you (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U24B` MDs patience w/ your questions/worries (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U24C` MDs friendliness and warmth toward you (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U24D` MDs caring and concern for you (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U24E` MDs respect for you (1=Very poor, 2= Poor, 3=Fair, 4=Good,
5= Very good, 6= Excellent)
- `U25A` Reg MD ever talked to you about smoking (0=No, 1=Yes)
- `U25B` Reg MD ever talked to you about alc use (0=No, 1=Yes)
- `U25C` Reg MD ever talk to you about seat belt use (0=No, 1=Yes)
- `U25D` Reg MD ever talked to you about diet (0=No, 1=Yes)
- `U25E` Reg Mdever talked to you about exercise (0=No, 1=Yes)
- `U25F` Reg MD ever talked to you about stress (0=No, 1=Yes)
- `U25G` Reg MD ever talked to you about safe sex (0=No, 1=Yes)
- `U25H` Reg MD ever talked to you about drug use (0=No, 1=Yes)
- `U25I` Reg MD ever talked to you about HIV testing (0=No, 1=Yes)
- `U26A` Cut/quit smoking because of MDs advice (0=No, 1=Yes)
- `U26B` Tried to drink less alcohol because of MD advice (0=No,
1=Yes)
- `U26C` Wore my seat belt more because of MDs advice (0=No, 1=Yes)
- `U26D` Changed diet because of MDs advice (0=No, 1=Yes)
- `U26E` Done more exercise because MDs advice (0=No, 1=Yes)
- `U26F` Relax/reduce stress because of MDs advice (0=No, 1=Yes)
- `U26G` Practiced safer sex because of MDs advice (0=No, 1=Yes)
- `U26H` Tried to cut down/quit drugs because MD advice (0=No,
1=Yes)"
- `U26I` Got HIV tested because of MDs advice (0=No, 1=Yes)
- `U27A` I can tell my MD anything (1=Strongly agree, 2= Agree, 3=
Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27B` My MD pretends to know thngs if not sure (1=Strongly agree,
2= Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27C` I trust my MDs judgement re my med care (1=Strongly agree,
2= Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27D` My MD cares > about < costs than my hlth (1=Strongly agree,
2= Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27E` My MD always tell truth about my health (1=Strongly agree,
2= Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27F` My MD cares as much as I about my hlth (1=Strongly agree, 2=
Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U27G` My MD would try to hide a mistake in trtmt (1=Strongly
agree, 2= Agree, 3= Not sure, 4=Disagree, 5=Strongly disagree)"
- `U28` How much to you trst this MD (0=Not at all, 1=1, 2=2, 3=3,
4=4, 5=5, 6=6, 7=7, 8=8, 9=9, 10=Completely)"
- `U29A` MDs knowledge of your entire med history (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)"
- `U29B` MD knowldg of your respons-home/work/sch (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)"
- `U29C` MD knowldg of what worries you most-hlth (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)"
- `U29D` MDs knowledge of you as a person (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)"
- `U30` MD would know what want done if unconsc (1=Strongly agree,
2=Agree, 3=Not sure, 4= Disagree, 5=Strongly disagree)"
- `U31` Oth MDs/RNs who play roel in your care (0=No, 1=Yes)" \*
- `U32A` Their knowledge of you as a person (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U32B` The quality of care they provide (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U32C` Coordination betw them and your regular MD (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U32D` Their expl of your hlth prbs/trtmts need (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U32D_T` N/A, only my regular MD does this
- `U33` Amt regular MD knows about care from others (1=Knows
everything, 2=Knows almost everything, 3=Knows some things, 4=Knows
very little, 5=Knows nothing)
- `U34` Has MD ever recommended you see MD sepcialist (0=No, 1=Yes)
- `U35A` How helpful MD in deciding on specialist (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U35B` How helpful MD getting appt w/specialist (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U35C` MDs involvmt when you trtd by specialist (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U35D` MDs communic w/your specialists/oth MDs (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U35E` MD help in explain what specialists said (1=Very poor, 2=
Poor, 3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U35F` Quality of specialists MD sent you to (1=Very poor, 2= Poor,
3=Fair, 4=Good, 5= Very good, 6= Excellent)
- `U36` How many minutes to get to MDs office (1=<15, 2=16-30.
3=31-60, 4=More than 60)
- `U37` When sick+call how long take to see you (1=Same day, 2=Next
day, 3=In 2-3 days, 4=In 4-5 days, 5=in >5 days)
- `U38` How mant minutes late appt usually begin (1=None, 2=<5
minutes, 3=6-10 minutes, 4=11-20 minutes, 5=21-30 minutes, 6=31-45
minutes, 7=>45 minutes)
- `U39` How satisfied are you w/your regular MD (1=Completely
satisfied, 2=Very satisfied, 3=Somewhat satisfied, 4=Neither,
5=Somewhat dissatisfied, 6=Very dissatisfied, 7=Completely
dissatisfied)
- `V1` Evr needed to drink much more to get effect (0=No, 1=Yes)
- `V2` Evr find alc had < effect than once did (0=No, 1=Yes)
- `Z1` Breath Alcohol Concentration:1st test
- `Z2` Breath Alcohol Concentration:2nd test
- `AGE` Age in years
- `REALM` REALM score
- `E16A_RT` Barrier to regular MD: red tape (0=No, 1=Yes)
- `E16A_IB` Barrier to regular MD: internal barriers (0=No, 1=Yes)
- `E16A_TM` Barrier to regular MD: time restrictions (0=No, 1=Yes)
- `E16A_DD` Barrier to regular MD: dislike docs/system (0=No, 1=Yes)
- `GROUP` Randomization Group (0=Control, 1=Clinic)
- `MMSEC` MMSEC
- `PRIM_SUB` First drug of choice (0=None, 1=Alcohol, 3=Cocaine,
3=Heroine, 4=Barbituates, 5=Benzos, 6=Marijuana, 7=Methadone,
8=Opiates)
- `SECD_SUB` Second drug of choice (0=None, 1=Alcohol, 3=Cocaine,
3=Heroine, 4=Barbituates, 5=Benzos, 6=Marijuana, 7=Methadone,
8=Opiates)
- `ALCOHOL` 1st/2nd drug of coice=Alcohol (0=No, 1=Yes)
- `COC_HER` 1st/2nd drug of choice=cocaine or heroine (0=No, 1=Yes)
- `REALM2` REALM score (dichotomous) (1=0-60, 2=61-66)
- `REALM3` REALM score (categorical) (1=0-44), 2=45-60), 3=61-66)
- `RACE` Race (recode) (1=Afr Amer/Black, 2=White, 3=Hispanic,
4=Other)
- `RACE2` Race (recode) (1=White, 2=Minority)
- `BIRTHPLC` Where born (recode) (0=USA, 1=Foreign)
- `PRIMLANG` First language (recode) (0=English, 1=Other lang)
- `MD_LANG` Lang prefer to speak to MD (recode) (0=English, 1=Other
lang)
- `HS_GRAD` High school graduate (0=No, 1=Yes)
- `MAR_STAT` Marital status (recode) (0=Married, 1=Not married)
- `A12B_REC` Hollingshead category (recode) (0=Cat 1,2,3, 1=Cat
4,5,6, 2=Cat 7,8,9)
- `UNEMPLOY` Usually unemployed last 6m (0=No, 1=Yes)
- `ALONE6M` Usually lived alone past 6m y/n (0=No, 1=Yes)
- `HOMELESS` Homeless-shelter/street past 6 m (0=No, 1=Yes)
- `JAIL_MOS` Total months in jail past 5 years
- `JAIL_5YR` Any jail time past 5 years y/n (0=No, 1=Yes)
- `GOV_SUPP` Received government support past 6 m (0=No, 1=Yes)
- `A18_REC1` Most money made in 1 yr (recode) (0=$19,000 or less,
1=$20,000-$49,000, 2=$50,000 or more)
- `A18_REC2` Most money made-continuous recode
- `STD_EVER` Ever had an STD y/n (0=No, 1=Yes)
- `STD_6M` Had an STD past 6m y/n (0=No, 1=Yes)
- `CHR_SUM` Sum chronic medican conds/HIV ever
- `CHR_EVER` Chronic medical conds/HIV-ever y/n (0=No, 1=Yes)
- `EPI_SUM` Sum episodic (C2A-C2O, C2R-C2U, STD)-6m
- `EPI_6M` Episodic (C2A-C2O,C2R-C2U, STD)-6m y/n (0=No, 1=Yes)
- `EPI_6M2B` Episodic(C2A-C2O)-6m y/n (0=No, 1=Yes)
- `SER_INJ` Recent (6m) serious injury y/n (0=No, 1=Yes)
- `D3_REC` Any medical problems past 30d y/n (0=No, 1=Yes)
- `D4_REC` Bothered by medical problems y/n (0=No, 1=Yes)
- `D5_REC` Medical trtmt is important y/n (0=No, 1=Yes)
- `ANY_INS` Did you have health insurance past 6 m (0=No, 1=Yes)
- `FRML_SAT` Formal substance abuse treatment y/n (0=No, 1=Yes)
- `E10B1_R` Mental health treatment past 6m y/n (0=No, 1=Yes)
- `E10B2_R` Med clinic/private MD past 6m y/n (0=No, 1=Yes)
- `ALT_TRT` Alternative tratments y/n (0=No, 1=Yes)
- `ANY_UTIL` Any recent health utilization (0=No, 1=Yes)
- `NUM_BARR` # of perceived barriers to linkage
- `G1B_REC` Suicidal thoughs past 30 days y/n (0=No, 1=Yes)
- `G1D_REC` Prescribed psych meds past 30 daus y/n (0=No, 1=Yes)
- `PRIMSUB2` First drug of choice (no marijuana) (0=None, 1=Alcohol,
2=Cocaine, 3=Heroin, 4=Barbituates, 5=Benzos, 6=Marijuana,
7=Methadone, 8=Opiates)
- `ALCQ_30` Total number drinks past 30 days
- `H2_PRB` Problem sub: alc to intox (0=No, 1=Yes)
- `H3_PRB` Problem sub: heroin (0=No, 1=Yes)
- `H4_PRB` Problem sub: methadone (0=No, 1=Yes)
- `H5_PRB` Problem sub: oth opiates/analg (0=No, 1=Yes)
- `H6_PRB` Problem sub: barbituates (0=No, 1=Yes)
- `H7_PRB` Problem sub: sedat/hyp/tranq (0=No, 1=Yes)
- `H8_PRB` Problem sub: cocaine (0=No, 1=Yes)
- `H9_PRB` Problem sub: amphetamines (0=No, 1=Yes)
- `H10_PRB` Problem sub: marijuana, cannabis (0=No, 1=Yes)
- `H11_PRB` Problem sub: hallucinogens (0=No, 1=Yes)
- `H12_PRB` Problem sub: inhalants (0=No, 1=Yes)
- `POLYSUB` Polysubstance abuser y/n (0=No, 1=Yes)
- `SMOKER` Current smoker (every/some days) y/n (0=No, 1=Yes)
- `O1B_REC` Family/friends heavy drinkers y/n (0=No, 1=Yes)
- `O1C_REC` Family/friends use drugs y/n (0=No, 1=Yes)
- `O1D_REC` Family/fiends support abst. y/n (0=No, 1=Yes)
- `O2_REC` Live-in partner drinks/drugs y/n (0=No, 1=Yes)
- `PHYABUSE` Physical abuse-stranger or family (0=No, 1=Yes)
- `SEXABUSE` Sexual abuse-stranger or family (0=No, 1=Yes)
- `PHSXABUS` Any abuse (0=No, 1=Yes)
- `ABUSE2` Type of abuse (0=No abuse, 1=Physical only, 2=Sexual only,
3=Physical and sexual)
- `ABUSE3` Type of abuse (0=No abuse, 1=Physical only, 2=Sexual +/-
physical (0=No, 1=Yes)
- `CURPHYAB` Current abuse-physical (0=No, 1=Yes)
- `CURSEXAB` Current abuse-sexual (0=No, 1=Yes)
- `CURPHYSEXAB` Curent abuse-physical or sexual (0=No abuse,
1=Physical only, 2=Sexual +/- physical)
- `FAMABUSE` Family abuse-physical or sexual (0=No, 1=Yes)
- `STRABUSE` Stranger abuse-physical or sexual (0=No, 1=Yes)
- `ABUSE` Abuse-physical or sexual (0=No abuse, 1= Family abuse, 2=
Stranger only abuse)
- `RAWPF` Raw SF-36 physical functioning
- `PF` SF-36 physical functioning (0-100)
- `RAWRP` Raw SF-36 role-physical
- `RP` SF-36 role physical (0-100)
- `RAWBP` Raw SF-36 pain index
- `BP` SF-36 pain index (0-100)
- `RAWGH` Raw SF-36 general health perceptions
- `GH` SF-36 general health perceptions (0-100)
- `RAWVT` Raw SF-36 vitality
- `VT` SF-36 vitality 0-100)
- `RAWSF` Raw SF-36 social functioning
- `SF` SF-36 social functioning (0-100)
- `RAWRE` Raw SF-36 role-emotional
- `RE` SF-36 role-emotional (0-100)
- `RAWMH` Raw SF-36 mental health index
- `MH` SF-36 mental health index (0-100)
- `HT` Raw SF-36 health transition item
- `PCS` Standardized physical component scale-00
- `MCS` Standardized mental component scale-00
- `CES_D` CES-D score, measure of depressive symptoms, high scores
are worse
- `CESD_CUT` CES-D score > 21 y/n (0=No, 1=Yes)
- `C_MS` ASI-Composite medical status
- `C_AU` ASI-Composite score for alcohol use
- `C_DU` ASI-Composite score for drug use
- `CUAD_C` CUAD-Cocaine
- `CUAD_H` CUAD-Heroin
- `RAW_RE` SOCRATES-Rocognition-Raw
- `DEC_RE` SOCRATES-Recognition-Decile
- `RAW_AM` SOCRATES-Ambivalence-Raw
- `DEC_AM` SOCRATES-Ambivalence-Decile
- `RAW_TS` SOCRATES-Taking steps-Raw
- `DEC_TS` SOCRATES-Taking steps-Decile
- `RAW_ADS` ADS score
- `PHYS` InDUC-2L-Physical-Raw
- `PHYS2` InDUC-2L-Physical 9Raw (w/o M48)
- `INTER` InDUC-2L-Interpersonal-Raw
- `INTRA` InDUC-2L-Intrapersonal-Raw
- `IMPUL` InDUL-2L-Impulse control-Raw
- `IMPUL2` InDUC-2L-Impulse control-Raw (w/0 M23)
- `SR` InDUC-2L-Social responsibility-Raw
- `CNTRL` InDUC-2L-Control score
- `INDTOT` InDUC-2LTotal drlnC sore-Raw
- `INDTOT2` InDUC-2L-Total drlnC-Raw- w/o M23 and M48
- `PSS_FR` Perceived social support-friends
- `PSS_FA` Perceived social support-family
- `DRUGRISK` RAB-Drug risk total
- `SEXRISK` RAB-Sex risk total
- `TOTALRAB` RAB-Total RAB sore
- `RABSCALE` RAB scale sore
- `CHR_6M` Chronic medical conds/HIV-past 6m y/n (0=No, 1=Yes)
- `RCT_LINK` Did subject link to primary care (RCT)–This time point
(0=No, 1=Yes)
- `REG_MD` Did subject report having regular doctor–This time point
(0=No, 1=Yes)
- `ANY_VIS` # visits to regular doctor's office–This time point
- `ANY_VIS_CUMUL` Cumulative # visits to regular doctor's office
- `PC_REC` Primary care received: Linked & #visits (0=Not linked,
1=Linked, 1 visit, 2=Linked, 2+ visits)
- `PC_REC7` Primary cared received: linked & # visits (0=Not linked,
1=Linked, 1 visit, 2=Linked, 2 visits, 3=Linked, 3 visits, 4=Linked,
4 visits, 5= Linked, 5 visits, 6=Linked, 6+visits)
- `SATREAT` Any BSAS substance abuse this time point (0=No, 1=Yes)
- `DRINKSTATUS` Drank alcohol since leaving detox-6m
- `DAYSDRINK` Time (days) from baseline to first drink since leaving
detox-6m
- `ANYSUBSTATUS` Used alcohol, heroin, or cocaine since leaving
detox-6m
- `DAYSANYSUB` time (days) from baseline to first alcohol, heroin, or
cocaine since leaving detox-6m
- `LINKSTATUS` Linked to primary care within 12 months (by
administrative record)
- `DAYSLINK` Time (days) to linkage to primary care within 12 months
(by administrative record)
http://www.math.smith.edu/help
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `help_full.csv`.
Returns:
Tuple of np.ndarray `x_train` with 1472 rows and 788 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'help_full.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/mosaicData/HELPfull.csv'
maybe_download_and_extract(path, url,
save_file_name='help_full.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata | PypiClean |
/CombatWiz-0.12.tar.gz/CombatWiz-0.12/scripts/combatwiz_runner.py |
from __future__ import division
import sys
import csv
import fileinput
import math
import random
import argparse
from pprint import pprint as pp
#--- gristle modules -------------------
sys.path.append('../') # allows running from project structure
sys.path.append('../../') # allows running from project structure
import combatwiz.randomizer as randomizer
import pdb
def main():
    """ Runs all processes:
          - gets opts & args from the command line
          - runs the requested number of simulated battles
          - prints a per-creature analysis of the results

        Returns 0 so the caller can pass it to sys.exit().

        Note: the prior docstring described csv analysis & process_cols,
        which belong to a different program and do not exist here.
    """
    args = get_args()
    my_simmer = Simulator(args.battles, args.charfile, args.verbose,
                          args.critters, args.sidea, args.sideb)
    my_simmer.run_all_battles()
    my_simmer.analysis()
    return 0
class Simulator(object):
    """ Runs a series of battles and prints per-creature win statistics.

    Args:
        battles:  number of battles to simulate
        charfile: path to the csv file of creature definitions
        verbose:  when True, print a blow-by-blow account of each battle
        critters: creature ids to fight, sides assigned by creature class
        sidea:    creature ids explicitly placed on side-a
        sideb:    creature ids explicitly placed on side-b
    """

    def __init__(self, battles, charfile, verbose, critters, sidea, sideb):
        self.battles = battles
        self.verbose = verbose
        self.critters = critters
        self.critters_sidea = sidea
        self.critters_sideb = sideb
        self.critter_results = []   # one (name, id, rounds, hp, curr_hp) per win
        self.char_sum = {}          # per-creature totals, keyed by name
        self.charfile = charfile
        self.battle_results = []    # one (rounds, creatures dict) per battle

    def run_all_battles(self):
        """ Simulates self.battles battles and collects their results. """
        # range (not py2-only xrange) keeps this importable on python3
        for battle in range(self.battles):
            if self.verbose:
                print('')
                print('==== Battle: %d ========================================' % battle)
            my_creatures = CreatureManager(self.charfile, self.critters,
                                           self.critters_sidea, self.critters_sideb)
            if battle == 0:
                my_creatures.print_creature_summary()
            one_arena = ArenaManager(my_creatures, self.verbose)
            self.battle_results.append(one_arena.runner())

    def analysis(self):
        """ Summarizes battle outcomes and prints stats for each winner.

        input: self.battle_results - tuples consisting of rounds followed
               by a creatures dictionary.
        """
        for battle in self.battle_results:
            rounds = battle[0]
            creatures = battle[1]
            for key in creatures:
                # any creature still standing when the battle ended is a winner
                # (the unused local 'winner_name' was removed)
                if creatures[key].curr_hp > 0:
                    self.critter_results.append((creatures[key].name,
                                                 creatures[key].critter_id,
                                                 int(rounds),
                                                 creatures[key].hp,
                                                 creatures[key].curr_hp))
        self._create_char_summary()
        print('')
        for char in self.char_sum:
            if 'critter_id' in self.char_sum[char]:
                print('')
                print('For: %s' % self.char_sum[char]['name'])
                print('Battles: %d' % self.battles)
                print('Total Wins: %d' % self.char_sum[char]['tot_wins'])
                print('Total Damage Taken: %d' % self.char_sum[char]['tot_damage'])
                print('Total Rounds Required: %d' % self.char_sum[char]['tot_rounds'])
                print('Mean Rounds Required: %2.1f' % (self.char_sum[char]['tot_rounds'] /
                                                       self.char_sum[char]['tot_wins']))
                print('Percentage of Wins: %d' %
                      ((self.char_sum[char]['tot_wins'] / self.battles) * 100))
                print('Mean PCT HP Taken: %d%%' % ((self.char_sum[char]['tot_damage'] /
                      self.char_sum[char]['tot_wins']) / self.char_sum[char]['hp'] * 100))
        #print sum(critter_results[1])
        #print sum(critter_results[1]) / self.battles

    def _create_char_summary(self):
        """ Creates summary dict of creatures - using their name as the key.

        Accumulates tot_rounds, tot_wins and tot_damage across all entries
        in self.critter_results; the KeyError path initializes a creature's
        entry the first time it appears.
        """
        for crit in self.critter_results:
            try:
                self.char_sum[crit[0]]['tot_rounds'] += crit[2]
                self.char_sum[crit[0]]['tot_wins'] += 1
                self.char_sum[crit[0]]['tot_damage'] += crit[3] - crit[4]
            except KeyError:
                self.char_sum[crit[0]] = {}
                self.char_sum[crit[0]]['name'] = crit[0]
                self.char_sum[crit[0]]['critter_id'] = crit[1]
                self.char_sum[crit[0]]['hp'] = crit[3]
                self.char_sum[crit[0]]['tot_rounds'] = crit[2]
                self.char_sum[crit[0]]['tot_wins'] = 1
                self.char_sum[crit[0]]['tot_damage'] = crit[3] - crit[4]
def get_args():
    """ Collects and validates command-line options.

    Input:
        - command line args & options
    Output:
        - args namespace
    """
    parser = argparse.ArgumentParser(description='Simulate combat between critters')
    parser.add_argument('critters', nargs='*', type=int, default=[],
                        help='Specifies the critters to fight by id. All monsters will be on Side-A, all humanoids on Side-B.')
    parser.add_argument('--sidea', nargs='*', type=int, default=[],
                        help='explicitly list critters on Side-A')
    parser.add_argument('--sideb', nargs='*', type=int, default=[],
                        help='Explicitly list critters on Side-B')
    parser.add_argument('--charfile', default='~/.config/combatwiz/creatures.csv',
                        help='Specifies the file with character details')
    parser.add_argument('--battles', default=1, type=int,
                        help='Specifies the number of battles to run')
    parser.add_argument('--verbose', action='store_true', default=False,
                        help='Specifies whether or not to print details. Default is False.')

    args = parser.parse_args()

    # cross-field validation - argparse cannot express these rules on its own
    any_side = args.sidea or args.sideb
    if not args.critters and not any_side:
        parser.error('Please provide critter ids for the battle')
    if len(args.critters) == 1:
        parser.error('provide more than one critter to fight')
    if args.critters and any_side:
        parser.error('provide either critters argument or both side options')
    if bool(args.sidea) != bool(args.sideb):
        parser.error('when specifying critters by side - provide both sides')

    return args
class CreatureManager(object):
    """ Loads creatures from the character file and tracks both sides.

    Args:
        charfile:       path to the csv file of creature definitions
        critters:       creature ids fighting, sides assigned by class
        critters_sidea: creature ids explicitly placed on side-a
        critters_sideb: creature ids explicitly placed on side-b
    """

    def __init__(self, charfile, critters, critters_sidea, critters_sideb):
        self.creatures = {}          # keyed 'fighter1', 'fighter2', ...
        self.critters = critters
        self.critters_sidea = critters_sidea
        self.critters_sideb = critters_sideb
        self.load_creatures(charfile)

    def is_one_side_dead(self):
        """ output:
               - True if the members of either side are completely dead
               - False otherwise
        """
        side_a_dead = True
        side_b_dead = True
        for key in self.creatures.keys():
            if self.creatures[key].side == 'side-a':
                if self.creatures[key].curr_hp >= 1:
                    side_a_dead = False
            else:
                if self.creatures[key].curr_hp >= 1:
                    side_b_dead = False
        return (side_a_dead or side_b_dead)

    def _get_side_and_count(self, critter_id, critter_class):
        """ Determines a creature's side and how many copies were requested.

        Returns:
            (side, count) - side is 'side-a', 'side-b' or None when the
            id was not requested at all; count is the number of copies.
        """
        side = None
        count = 0
        assert(int(critter_id))
        if int(critter_id) in self.critters_sidea:
            count = self.critters_sidea.count(int(critter_id))
            side = 'side-a'
        elif int(critter_id) in self.critters_sideb:
            count = self.critters_sideb.count(int(critter_id))
            side = 'side-b'
        elif int(critter_id) in self.critters:
            count = self.critters.count(int(critter_id))
            # NOTE(review): the --help text says monsters go on Side-A,
            # but this places them on side-b - confirm which is intended.
            if critter_class == 'monster':
                side = 'side-b'
            else:
                side = 'side-a'
        return (side, count)

    def load_creatures(self, charfile):
        """ Reads from file, transforms data, inserts into dict.
        Input:
            - charfile - the name of the character file.
        Transforms:
            - adds a side to each creature record
            - adds an incrementing suffix if the same creature
              is in combat more than once.
        """
        fighter_num = 1
        for record in csv.reader(fileinput.input(charfile)):
            # check for a blank row *before* indexing into it - the
            # original tested record[0] first, which raised an uncaught
            # IndexError on empty rows
            if not record:
                break
            orig_record = list(record)
            try:
                if record[0] in ['id', '', ' ']:
                    continue   # header-record
                (side, count) = self._get_side_and_count(record[0], record[4])
                record.append(side)
                while count:
                    # disambiguate duplicate names with a numeric suffix
                    if self.critters.count(int(record[0])) > 1:
                        record[1] = orig_record[1] + '-' + str(count)
                    self.creatures['fighter%d' % fighter_num] = OneCreature(record)
                    fighter_num += 1
                    count -= 1
            except ValueError:
                pass   # skipping over header row or any empty rows

    def print_creature_summary(self):
        """ Prints a formatted card for every loaded creature. """
        for key in self.creatures.keys():
            print(key)
            print('')
            print('----------------------------------------------------------------')
            print('fighter_num: %-8.8s side: %-10.10s' %
                  (key, self.creatures[key].side))
            print('critter_id: %-4.4s name: %-20.20s' %
                  (self.creatures[key].critter_id, self.creatures[key].name))
            print('hd: %-4.4s hp: %-4.4s ' %
                  (self.creatures[key].hd, self.creatures[key].hp))
            print('ac: %-4.4s race: %-20.20s' %
                  (self.creatures[key].ac, self.creatures[key].race))
            print('class: %-10.10s class_level: %-4.4s ' %
                  (self.creatures[key].critter_class, self.creatures[key].class_level))
            print('attack_thaco: %-4.4s attack_damage: %-5.5s ' %
                  (self.creatures[key].attack_thaco, self.creatures[key].attack_damage))
            print('vision: %-10.10s move: %-4.4s ' %
                  (self.creatures[key].vision, self.creatures[key].move))
            print('----------------------------------------------------------------')

    def __repr__(self):
        result = 'my creatures:\n'
        for key in self.creatures.keys():
            result += 'critter_id: %-8.8s side: %-8.8s \n' % (key, self.creatures[key].side)
            result += 'id: %-4.4s config: %-20.20s\n' % (self.creatures[key].critter_id, self.creatures[key].name)
            result += 'hd: %-4.4s hp: %-4.4s \n' % (self.creatures[key].hd, self.creatures[key].hp)
        return result
class OneCreature(object):
    """ One combatant built from a single row of the character file. """

    def __init__(self, creature_record):
        # Columns: id, name, config, race, class, level, hd, hp, ac,
        #          thaco, damage, vision, move, side (appended by loader).
        self.critter_id = string2int(creature_record[0])
        self.name = creature_record[1]
        self.config = creature_record[2]
        self.race = creature_record[3]
        self.critter_class = creature_record[4]
        self.class_level = string2int(creature_record[5])
        self.hd = string2int(creature_record[6])
        self.hp = string2int(creature_record[7])
        self.ac = string2int(creature_record[8])
        self.attack_thaco = string2int(creature_record[9])
        self.attack_damage = creature_record[10]
        self.vision = creature_record[11]
        self.move = string2int(creature_record[12])
        self.side = creature_record[13]
        self.attack_distance = 2
        self.attack_this_seg = False
        if self.hp == 0:
            # no hp supplied - roll d8 per hit die
            self.hp = randomizer.roll_dice(8, self.hd)
        self.curr_hp = self.hp
        self.curr_loc = None
        self.last_round = None   # last round that creature moved
        self.last_seg = None     # last seg that creature moved

    def moved_this_seg(self, curr_round, curr_seg):
        """ True when the creature already moved in this round+segment. """
        return (self.last_round == curr_round
                and self.last_seg == curr_seg)

    def change_loc(self, new_loc, curr_round, curr_seg):
        """ Records a new location, timestamping only an actual move. """
        if new_loc != self.curr_loc:
            self.last_round = curr_round
            self.last_seg = curr_seg
        self.curr_loc = new_loc

    def in_range(self, enemy_loc):
        """ True when the enemy is within this creature's attack reach. """
        #print '%s, %s, %s' % (self.curr_loc, enemy_loc, get_distance(self.curr_loc, enemy_loc))
        return get_distance(self.curr_loc, enemy_loc) <= self.attack_distance

    def __repr__(self):
        parts = ['critter_id: %-8.8s side: %-8.8s \n' % (self.critter_id, self.side),
                 'id: %-4.4s config: %-20.20s\n' % (self.critter_id, self.name),
                 'hd: %-4.4s hp: %-4.4s \n' % (self.hd, self.hp)]
        return ''.join(parts)
class ArenaManager(object):
    """ Runs one battle on a 100x100 grid until one side is wiped out.

    Args:
        creature_manager: a CreatureManager with the combatants loaded
        verbose:          when True, narrate each move and attack
    """

    def __init__(self, creature_manager, verbose):
        self.x_max = 100
        self.y_max = 100
        self.verbose = verbose
        self.creature_man = creature_manager
        self.creatures = creature_manager.creatures
        self.rounds = 0
        self.assign_creature_starting_locations()

    def my_print(self, val=''):
        """ Prints val only when running in verbose mode. """
        if self.verbose:
            print(val)

    def assign_creature_starting_locations(self):
        """ Places up to eight fighters at corners and edge midpoints. """
        # should be randomly distributed
        self.creatures['fighter1'].curr_loc = [0, 0]
        self.creatures['fighter2'].curr_loc = [self.x_max, self.y_max]
        if 'fighter3' in self.creatures:
            self.creatures['fighter3'].curr_loc = [0, self.y_max]
        if 'fighter4' in self.creatures:
            self.creatures['fighter4'].curr_loc = [self.x_max, 0]
        if 'fighter5' in self.creatures:
            self.creatures['fighter5'].curr_loc = [(self.x_max/2), 0]
        if 'fighter6' in self.creatures:
            self.creatures['fighter6'].curr_loc = [(self.x_max/2), self.y_max]
        if 'fighter7' in self.creatures:
            self.creatures['fighter7'].curr_loc = [0, (self.y_max/2)]
        if 'fighter8' in self.creatures:
            self.creatures['fighter8'].curr_loc = [(self.x_max), (self.y_max/2)]

    def runner(self):
        """ Runs the battle loop: up to 100 rounds of 10 segments each.

        Returns:
            tuple of (rounds fought, creatures dict)
        """
        for self.curr_round in range(1, 101):
            self.rounds += 1
            self.my_print()
            self.my_print('------------round: %d---------------' % self.curr_round)
            for self.curr_seg in range(1, 11):
                self.my_print('   ------------segment: %d---------------' % self.curr_seg)
                for subject in self.creatures.keys():
                    if self.creatures[subject].curr_hp > 0:
                        enemy = self.get_enemy(subject)
                        enemy_loc = self.creatures[enemy].curr_loc
                        self.move_subject_towards_enemy(subject, enemy)
                        # a creature may only attack in a segment it did not move
                        if (not self.creatures[subject].moved_this_seg(self.curr_round,
                                                                       self.curr_seg)):
                            if self.creatures[subject].in_range(enemy_loc):
                                self.attack(subject, enemy)
                    if self.creature_man.is_one_side_dead():
                        break
                if self.creature_man.is_one_side_dead():
                    break
            if self.creature_man.is_one_side_dead():
                break
        return (self.rounds, self.creatures)

    def move_subject_towards_enemy(self, subject, enemy):
        """ Moves subject up to its movement rate, one block at a time. """
        enemy_loc = self.creatures[enemy].curr_loc
        for move in range(1, (self.creatures[subject].move + 1)):
            self.creatures[subject].change_loc(self.move_subject_one_block(self.creatures[subject].curr_loc,
                                               enemy_loc),
                                               self.curr_round,
                                               self.curr_seg)
            #print '      %s moves to %s' % (self.creatures[subject].name, self.creatures[subject].curr_loc)
        if (self.creatures[subject].moved_this_seg(self.curr_round, self.curr_seg)):
            self.my_print('      %-20.20s moved to location: %s' %
                          (self.creatures[subject].name, self.creatures[subject].curr_loc))

    def move_subject_one_block(self, subject_loc, enemy_loc):
        """ Returns new location up to 1 block away from currrent location
            that is closer to the enemy location.
            Note that once the battle begins it will generally indicate to
            stay in the same position.
        """
        X = 0
        Y = 1
        # first get distance for all 9 movement possibilities:
        relative_moves = {}
        for xmove in range(-1, 2):
            for ymove in range(-1, 2):
                sub_loc_adj = (subject_loc[X] + xmove, subject_loc[Y] + ymove)
                if sub_loc_adj[X] < 0 or sub_loc_adj[Y] < 0:
                    relative_moves[(xmove, ymove)] = 9999   # make going off board too expensive
                else:
                    relative_moves[(xmove, ymove)] = get_distance(sub_loc_adj, enemy_loc)
        best_relative_move = get_key_with_least_value(relative_moves)
        best_absolute_new_loc = (subject_loc[X] + best_relative_move[X],
                                 subject_loc[Y] + best_relative_move[Y])
        return best_absolute_new_loc

    def get_enemy(self, subject):
        """ Returns the key of the first living creature on the other side.

        Raises:
            ValueError: when no living enemy remains.
        """
        for enemy in self.creatures.keys():
            if enemy != subject:
                if self.creatures[subject].side != self.creatures[enemy].side:
                    if self.creatures[enemy].curr_hp > 0:
                        return enemy
        # py3-compatible raise (was: raise ValueError, 'no enemy found')
        raise ValueError('no enemy found')

    def attack(self, subject, enemy):
        """ Resolves one attack attempt by subject against enemy.

        attacks_per_round attacks are spread randomly across the 10
        segments of a round; a miss of the opportunity roll means no
        attack this segment.
        """
        attacks_per_round = 1.00
        segments_per_round = 10.00
        if random.random() > (attacks_per_round / segments_per_round):
            self.my_print('      %s fails to get an attack opportunity against %s' %
                          (self.creatures[subject].name, self.creatures[enemy].name))
            return

        roll = randomizer.roll_dice(20, 1)
        ac_hit = self.creatures[subject].attack_thaco - roll
        if ac_hit <= self.creatures[enemy].ac:
            damage = randomizer.roll_range(self.creatures[subject].attack_damage)
            self.creatures[enemy].curr_hp -= damage
            self.my_print('      %s hits %s for %d damage with a to-hit roll of %d' %
                          (self.creatures[subject].name, self.creatures[enemy].name, damage, roll))
            if self.creatures[enemy].curr_hp < 1:
                self.my_print('      %s dies!' % self.creatures[enemy].name)
        else:
            self.my_print('      %s misses %s with a to-hit roll of %d' %
                          (self.creatures[subject].name, self.creatures[enemy].name, roll))
def get_distance(loc_a, loc_b):
    """Euclidean distance between two points on the (non-negative) board.

    inputs:
        - loc_a  coordinates [positive x, positive y]
        - loc_b  coordinates [positive x, positive y]
    outputs:
        - distance - float
    """
    X, Y = 0, 1
    assert loc_a[X] >= 0 and loc_a[Y] >= 0
    assert loc_b[X] >= 0 and loc_b[Y] >= 0
    # math.hypot is the idiomatic (and numerically safer) Euclidean norm.
    return math.hypot(loc_a[X] - loc_b[X], loc_a[Y] - loc_b[Y])
def string2int(val):
    """Best-effort conversion of *val* to an int.

    Returns 0 for the empty string and for anything int() cannot convert
    (e.g. None or non-numeric strings) instead of raising.
    """
    if val == '':
        return 0
    try:
        return int(val)
    except (TypeError, ValueError):
        # ValueError covers non-numeric strings such as 'abc'; the original
        # caught only TypeError, letting those crash.
        return 0
def get_key_with_least_value(source_dict):
    """Return the key whose value is smallest (None for an empty dict).

    Unlike the original sentinel-based scan, this works for values of any
    magnitude: the old 9999999 sentinel silently returned None whenever
    every value exceeded it.  Ties are broken by iteration order, matching
    the original's first-strictly-smaller behaviour.
    """
    if not source_dict:
        return None
    return min(source_dict, key=source_dict.get)
if __name__ == '__main__':
sys.exit(main()) | PypiClean |
/MikeT_messenger_server-0.4.1.tar.gz/MikeT_messenger_server-0.4.1/server/server.py | import sys
import os
import argparse
import logging
import configparser
import logs.config_server_log
from common.utils import *
from common.decos import log
from server.core import MessageProcessor
from server.database import ServerStorage
from server.main_window import MainWindow
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import Qt
# Module-level logger; handlers are configured by the
# logs.config_server_log import above.
logger = logging.getLogger('server')
@log
def arg_parser(default_port, default_address):
    """Command-line argument parser.

    Recognizes -p (port), -a (listen address) and --no_gui, falling back
    to the supplied defaults.  Returns (listen_address, listen_port,
    no_gui_flag).
    """
    logger.debug(
        f'Инициализация парсера аргументов коммандной строки: {sys.argv}')
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', default=default_port, type=int, nargs='?')
    parser.add_argument('-a', default=default_address, nargs='?')
    parser.add_argument('--no_gui', action='store_true')
    args = parser.parse_args(sys.argv[1:])
    logger.debug('Аргументы успешно загружены.')
    return args.a, args.p, args.no_gui
@log
def config_load():
    """Load the server.ini configuration file located next to this module.

    If the file has no SETTINGS section (or does not exist), a default
    section is created in memory; nothing is written back to disk.
    """
    config = configparser.ConfigParser()
    dir_path = os.path.dirname(os.path.realpath(__file__))
    config.read(f"{dir_path}/{'server.ini'}")
    if 'SETTINGS' in config:
        return config
    # Fall back to in-memory defaults.
    config.add_section('SETTINGS')
    config.set('SETTINGS', 'Default_port', str(DEFAULT_PORT))
    config.set('SETTINGS', 'Listen_Address', '')
    config.set('SETTINGS', 'Database_path', '')
    config.set('SETTINGS', 'Database_file', 'server_database.db3')
    return config
@log
def main():
    """Entry point: load the configuration, start the message-processing
    server thread, then run either a console loop (--no_gui) or the Qt GUI."""
    # Load server.ini (or in-memory defaults).
    config = config_load()
    # Command-line values fall back to the config-file settings.
    listen_address, listen_port, gui_flag = arg_parser(
        config['SETTINGS']['Default_port'], config['SETTINGS']['Listen_Address'])
    database = ServerStorage(
        os.path.join(
            config['SETTINGS']['Database_path'],
            config['SETTINGS']['Database_file']))
    # MessageProcessor runs as a daemon thread so it dies with the process.
    server = MessageProcessor(listen_address, listen_port, database)
    server.daemon = True
    server.start()
    if gui_flag:
        # --no_gui mode: block on a console prompt until 'exit' is typed,
        # then stop the server thread and wait for it to finish.
        while True:
            command = input('Введите exit для завершения работы сервера.')
            if command == 'exit':
                server.running = False
                server.join()
                break
    else:
        # GUI mode: run the Qt event loop until the main window closes.
        server_app = QApplication(sys.argv)
        server_app.setAttribute(Qt.AA_DisableWindowContextHelpButton)
        main_window = MainWindow(database, server, config)
        server_app.exec_()
        # Signal the server thread to stop once the GUI exits.
        server.running = False
if __name__ == '__main__':
main() | PypiClean |
/BCPy2000-1.6.tar.gz/BCPy2000-1.6/src/LangTools/TextPrediction.py | __all__ = [
'FixupDistribution',
'Decoder',
'LoadLanguageModel', 'LanguageModel',
'PPM', 'NGram', 'MKN',
'pdict'
]
import numpy
import os,sys
import codecs
from . import TreeStructure
from .ProgressMonitoring import progress
import pickle, gzip, time
import md5
##############################################################################################
def FixupDistribution(p, minprob=None, exponent=1.0):
    """Clean up a probability distribution.

    Normalizes *p*, floors every entry at *minprob* (default 0.5/len(p)),
    renormalizes, then sharpens (exponent > 1) or flattens (exponent < 1)
    and normalizes once more.  *p* may be a sequence or a dict (e.g. a
    pdict); the input container class is preserved in the return value.
    """
    cl = p.__class__
    isdict = isinstance(p, dict)
    if isdict: k, p = list(zip(*list(p.items())))
    n = float(len(p))
    if minprob is None: minprob = 0.5 / n
    p = numpy.asarray(p, dtype=numpy.float64)
    # Normalize; an all-zero input is left as-is and handled by the floor.
    if p.sum(): p = p / p.sum()
    # Floor at minprob: clip, subtract the floor, rescale the remainder to
    # total mass 1 - n*minprob, then add the floor back.
    p = numpy.clip(p, minprob, 1.0) - minprob
    div = p.sum()
    if div == 0.0: div = 1.0
    p = minprob + p / div * (1.0 - n * minprob)
    p = p / p.sum()
    # Sharpen/flatten and renormalize.
    p = p ** exponent
    p = p / p.sum()
    if isdict: p = cl(dict(list(zip(k, p))))
    return p
##############################################################################################
def stringify(k):
    """Render a key (scalar, or arbitrarily nested list/tuple) as a
    readable string; other objects get a repr-like placeholder."""
    if isinstance(k, (str, int, float, bool)):
        return str(k)
    if isinstance(k, list):
        return '[%s]' % ','.join(stringify(item) for item in k)
    if isinstance(k, tuple):
        inner = ','.join(stringify(item) for item in k)
        if len(k) == 1:
            inner += ','  # single-element tuples keep their trailing comma
        return '(%s)' % inner
    return "<%s.%s instance at 0x%08X>" % (
        k.__class__.__module__, k.__class__.__name__, id(k))
##############################################################################################
class pdict(dict):
    """A dict of label -> value that renders as an ASCII bar chart, with
    min/max/total summary lines.  Used for probability distributions."""
    def __repr__(self):
        width = 50  # bar length awarded to the largest value
        rows = sorted([(stringify(key), val) for key, val in self.items()])
        label_w = max([len(label) for label, _ in rows] + [5])
        values = [val for _, val in rows]
        vmin = float(min(values))
        vmax = float(max(values))
        vsum = float(sum(values))
        lines = []
        for label, val in rows:
            bars = 0 if vmax == 0 else int(round(width * val / vmax))
            lines.append('%s : %s \n' % (label.rjust(label_w), '*' * bars))
        lines.append('%s = %g \n' % ('min'.rjust(label_w), vmin))
        lines.append('%s = %g \n' % ('max'.rjust(label_w), vmax))
        lines.append('%s = %g \n' % ('total'.rjust(label_w), vsum))
        return ''.join(lines)
##############################################################################################
##############################################################################################
class Decoder(object):
    """Bayesian decoder for a brain-computer spelling interface.

    Combines a language-model prior over N user-selectable choices with a
    stream of binary-classifier outputs (one probability per epoch),
    accumulating log-likelihoods until one choice is confident enough or
    an epoch limit forces an answer.
    """
    ##########################################################################################
    def __init__(self, choices, mapping=None, labels=None,
                       context='', model=None, verbose=None,
                       threshold=1.0, min_epochs=1, max_epochs=None,
                       minprob=None, exponent=1.0, cromwell=0.01):
        """
        <choices> is a list of N possible choices. The element values are
        arbitrary here, having meaning only to the interface which calls this.

        <mapping> is a list of N items corresponding to the items of <choices>.
        Each item is a sequence (a string, list or tuple) of the symbols of the
        alphabet that the corresponding choice might lead to: that is, the user
        should choose option choices[i] if he wants to transmit any of the
        symbols in mapping[i].
        """###
        self.choices = choices
        if mapping is None: mapping = [(c,) for c in choices]
        if labels is None: labels = [' '.join(map(str, sorted(x))) for x in mapping]
        self.mapping = mapping
        self.labels = labels
        self.threshold = threshold    # posterior mass required to commit to an answer
        self.min_epochs = min_epochs
        self.max_epochs = max_epochs
        self.context = context
        self.model = model  # will contain the alphabet
        self.N = len(self.choices)
        self.L = 0          # number of epochs (transmissions) consumed so far
        self.minprob = minprob
        self.exponent = exponent
        self.cromwell = cromwell  # evidence floor ("Cromwell's rule")
        self.verbose = verbose
        if self.verbose is None: self.verbose = getattr(self.model, 'verbose', 0)
        self.loglikelihood = numpy.zeros((self.N,), dtype=numpy.float64)
        # Default prior is uniform; replaced by the language model below.
        self.logprior = numpy.log(numpy.ones((self.N,), dtype=numpy.float64) / self.N)
        if self.model is not None:
            dist = self.model.predict_letter(context)  # over symbols of the alphabet
            prior = numpy.zeros((self.N,), dtype=numpy.float64)
            for choice_index in range(self.N):
                symbols = mapping[choice_index]
                for s in symbols: prior[choice_index] += dist.get(s, 0.0)
            prior = FixupDistribution(prior, minprob=self.minprob, exponent=self.exponent)
            self.logprior = numpy.log(prior)
        self.cols = []   # queued codebook columns, one per pending epoch
        self.probs = []  # queued classifier probabilities, one per pending epoch
        self.done = False
        if self.verbose:
            context = getattr(self.model, 'last_used_context', context)
            print('\n\n\n"' + context + '"\n')
            print(self.prior())
    ##########################################################################################
    def prior(self):
        """Prior over choices as a label->probability pdict."""
        return pdict(list(zip(self.labels, numpy.exp(self.logprior).flat)))
    ##########################################################################################
    def likelihood(self):
        """Accumulated evidence likelihood as a label->value pdict."""
        return pdict(list(zip(self.labels, numpy.exp(self.loglikelihood).flat)))
    ##########################################################################################
    def posterior(self):
        """Normalized posterior over choices as a label->probability pdict."""
        posterior = numpy.exp(self.loglikelihood + self.logprior)
        posterior /= posterior.sum()
        # BUGFIX: <posterior> is already a probability vector at this point;
        # the original exponentiated it a *second* time before reporting
        # (numpy.exp(posterior)), unlike prior()/likelihood() which
        # exponentiate log-values exactly once.
        return pdict(list(zip(self.labels, posterior.flat)))
    ##########################################################################################
    def new_column(self, col):
        """Queue the codebook column (length-N 0/1 vector) describing which
        choices are flashed during the next epoch."""
        if col.size != self.N: raise RuntimeError('wrong-sized codebook passed to Decoder (expected %d, got %d)' % (self.N, col.size))
        self.cols.append(col)
    ##########################################################################################
    def new_transmission(self, p, force_answer=False):
        """Fold in one classifier output <p> (probability that the attended
        choice was flashed this epoch).  Returns the winning element of
        self.choices once a decision is reached, else None."""
        p = max(self.cromwell, min(1.0 - self.cromwell, p))
        self.probs.append(p)
        while len(self.cols) and len(self.probs) and (self.max_epochs is None or self.L < self.max_epochs):
            pj = self.probs.pop(0)
            C_j = self.cols.pop(0)
            self.L += 1
            # Flashed choices (1) gain log(pj); unflashed (0) gain log(1-pj).
            loglike = numpy.log([1-pj, pj])[[int(c) for c in C_j.flat]]
            self.loglikelihood += loglike
            #self.eachcol = getattr(self, 'eachcol', []); self.eachloglike = getattr(self, 'eachloglike', []) # &&&
            #self.eachcol.append(C_j); self.eachloglike.append(loglike) # &&&
            if self.verbose:
                print('got probability', p)
                print('likelihood:')
                print(self.likelihood())
                print('posterior:')
                print(self.posterior())
        posterior = numpy.exp(self.loglikelihood + self.logprior)
        posterior /= posterior.sum()
        if self.done: return
        if self.max_epochs is not None and self.L >= self.max_epochs: force_answer = True
        if self.min_epochs is not None and self.L < self.min_epochs and not force_answer: return
        if force_answer or max(posterior) > self.threshold:
            atMAP = (posterior == max(posterior))
            if sum(atMAP) == 1: answer = numpy.argmax(posterior) # if there's a single maximum-a-posteriori candidate, use it...
            else:
                posterior = self.loglikelihood * atMAP # ...otherwise, judge among the MAP candidates according to their likelihood (uninfluenced by the prior)
                atMAP = (posterior == max(posterior))
                if sum(atMAP) == 1: answer = numpy.argmax(posterior) # ...and if there's one winner of *that*, choose it...
                else:
                    posterior = self.logprior * atMAP # ...otherwise judge among the winners according to their prior probabilities
                    answer = numpy.argmax(posterior) # ...and if there's one winner of *that*, choose it, otherwise choose an arbitrary one of the winners
            self.done = True
            if self.verbose:
                print("chosen", stringify(self.choices[answer]))
            return self.choices[answer]
    ##########################################################################################
    def evolution(self, n=None, flash=False, log=False, **kwargs): # &&&
        # Debugging aid: plot how likelihood/posterior evolved over epochs.
        # Requires the '&&&' bookkeeping lines in new_transmission to be
        # uncommented (self.eachloglike / self.eachcol), plus SigTools.
        e = numpy.array(self.eachloglike).T
        if n is None: n = e.shape[1]
        c = numpy.array(self.eachcol).T
        c = numpy.c_[c[:,1]*0, c[:, :n]]
        ll = numpy.concatenate((numpy.expand_dims(self.logprior,1)*0+1, e[:, :n]), axis=1)
        lp = numpy.concatenate((numpy.expand_dims(self.logprior,1)*1+0, e[:, :n]), axis=1)
        ll = numpy.exp(numpy.cumsum(ll, axis=1)); ll = ll / numpy.expand_dims(ll.sum(axis=0),0)
        lp = numpy.exp(numpy.cumsum(lp, axis=1)); lp = lp / numpy.expand_dims(lp.sum(axis=0),0)
        lab = self.labels
        if log:
            lp,ll = numpy.log(lp),numpy.log(ll)
            vmin,vmax = numpy.log(0.01), 0.0
        else:
            vmin,vmax = 0.0,1.0
        import SigTools.Plotting
        pylab = SigTools.Plotting.load_pylab()
        pylab.clf();
        sp = {True:3, False:2}[flash]
        if 1: ax = pylab.subplot(1,sp,1); pylab.imshow(ll, vmin=vmin, vmax=vmax, interpolation='nearest', aspect='auto'); ax.set_yticks(list(range(self.N))); ax.set_yticklabels(lab) ; ax.set_title('%s likelihood' % kwargs.get('title', ''))
        if 1: ax = pylab.subplot(1,sp,2); pylab.imshow(lp, vmin=vmin, vmax=vmax, interpolation='nearest', aspect='auto'); ax.set_yticks(list(range(self.N))); ax.set_yticklabels(lab) ; ax.set_title('%s posterior' % kwargs.get('title', ''))
        if flash: ax = pylab.subplot(1,sp,3); pylab.imshow(c, vmin=0, vmax=1, interpolation='nearest', aspect='auto'); ax.set_yticks(list(range(self.N))); ax.set_yticklabels(lab) ; ax.set_title('%s flash' % kwargs.get('title', ''))
        return ll,lp
##############################################################################################
##############################################################################################
# Candidate directories in which bundled resource files (corpora,
# translation tables, pickled language models) may live.
rsrcpath = [
    os.path.join(os.path.dirname(__file__), 'rsrc'),
    #os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', '..', '..', '..', '..', '..', 'coding', 'rsrc'),
]
# Keep only the directories that actually exist, resolved to real paths.
rsrcpath = [os.path.realpath(d) for d in rsrcpath if os.path.isdir(d)]
def FindFile(filename):
    """Resolve *filename* to an absolute path.

    Search order: as given (absolute, or relative to the cwd), then the
    resource directories in rsrcpath, then every entry of sys.path.  If
    the file is not found anywhere, the name is returned unchanged so
    the caller fails with its own error on open.  None passes through.
    """
    if filename is None: return None
    f = os.path.abspath(filename)
    if os.path.isfile(f): return f
    p = ['.'] + rsrcpath + sys.path
    for d in p:
        f = os.path.realpath(os.path.join(d, filename))
        if os.path.isfile(f): return f
    return filename
def ReadUTF8(filename):
    """Read a text file as UTF-8 and return its content as a str.

    A leading byte-order mark is stripped and all line endings are
    normalized to '\\n'.  (The original opened the file in text mode and
    then called str.decode on the result -- a Python-2 leftover that
    raises AttributeError on Python 3; reading bytes and decoding
    explicitly works correctly.)
    """
    with open(filename, 'rb') as fh:
        txt = fh.read().decode('utf-8')
    # Drop the BOM, if present, from the decoded text.
    txt = txt.lstrip(str(codecs.BOM_UTF8, 'utf-8'))
    txt = txt.replace('\r\n', '\n').replace('\r', '\n')
    return txt
def LoadLanguageModel(filename, **kwargs):
    """Unpickle a saved language model (optionally gzip-compressed, by
    '.gz' extension) and set any keyword arguments as attributes on the
    loaded object.

    NOTE: pickle.load can execute arbitrary code from the file, so only
    load models from trusted sources.
    """
    openfn = gzip.open if filename.lower().endswith('.gz') else open
    filename = FindFile(filename)
    print('loading language model from %s ...' % filename)
    obj = pickle.load(openfn(filename, 'rb'))
    for key, value in list(kwargs.items()):
        setattr(obj, key, value)
    print('done')
    return obj
##############################################################################################
# Default symbol inventories and resource-file names, used when the
# caller does not supply their own.
default_alphabet36 = 'abcdefghijklmnopqrstuvwxyz -.,;:"\'?!'  # 36-symbol set
default_alphabet = ' -.,;:"\'?!()0123456789abcdefghijklmnopqrstuvwxyz'
german_alphabet = ' -.,;:"\'?!()0123456789abcdefghijklmnopqrstuvwxyzäöüß'
default_corpus = 'dasher-english-corpus.txt'
default_translations = 'translation-table-utf8.txt'
default_test_text = 'obama.txt'
##############################################################################################
##############################################################################################
class LanguageModel(object):
    """Character-level language model backed by a prefix trie built from a
    text corpus.  Subclasses (PPM, NGram, MKN) override predict_letter()
    with different smoothing strategies."""
    ##########################################################################################
    def __init__(self, filename=None, alphabet=None, translation_file=None, space_delimited=False, max_chars=16, max_words=3, trim=True, verbose=False):
        """Optionally load a prebuilt trie from <filename>; otherwise start
        with an empty trie (call build() to populate it from a corpus)."""
        if alphabet is None: alphabet = default_alphabet
        self.alphabet = alphabet
        self.space_delimited = space_delimited
        self.translation_file = FindFile(translation_file)
        self.max_chars = max_chars   # maximum n-gram length in characters
        self.max_words = max_words   # maximum n-gram length in words
        self.trim = trim             # trim unseen contexts (see trim_until_found)
        self.verbose = verbose
        self.corpus = None
        self.timestamp = None
        self.prefixtree = TreeStructure.trie()
        if filename is not None: self.loadtrie(filename)
        self.translations = self.read_translations(self.translation_file)
        # Attributes that determine the model-content hash used in filenames.
        self.hashattr = ['space_delimited', 'max_words', 'max_chars', 'trim', 'translations']
    ##########################################################################################
    def clean(self, text):
        """Normalize raw corpus text: case-fold to match the alphabet, map
        out-of-alphabet characters via the translation table, collapse
        repeated spaces, and (optionally) re-tokenize punctuation for
        space-delimited mode.  A sentence-start '. ' is prepended and a
        trailing space appended."""
        text = self.match_case(text)
        if '\n' not in self.alphabet: text = text.replace('\n', ' ')
        text = ['. '] + list(text) + [' ']
        pr = progress(len(text), 'cleaning')
        for i in range(len(text)):
            if i > 0: prevx = text[i-1]
            else: prevx = ''
            if i+1 < len(text): nextx = text[i+1]
            else: nextx = ''
            x = text[i]
            if not x in self.alphabet: x = self.translations.get(x, '')
            if self.space_delimited:
                if x in '.,?!;:' and nextx == ' ': x = ' ' + x
                elif x in '"()[]{}' and prevx == ' ': x = x + ' '
                # In space_delimited mode, space now unambiguously denotes the end of a word.  Ending the sentence
                # is a separate issue (only likely to happen after the end of a word).  So effectively, each punctuation
                # concept ("sentence ends now",  "sentence ends now as a question", etc...) has been made a "word".  For
                # now, the default is *not* to do things in space_delimited mode, but in future it would be desirable.
                # However, TODO: how to undo this at the prediction stage??
            if prevx.endswith(' ') or prevx.endswith('\n'): x = x.lstrip(' ') # collapse multiple spaces
            text[i] = x
            if i % 5000 == 0: pr.update(i)
        pr.done()
        text = ''.join(text)
        return text
    ##########################################################################################
    def gethash(self, length=16):
        """Hex digest (truncated to <length> chars) summarizing the settings
        that affect model content, so saved files are keyed by configuration."""
        attr = tuple(sorted([
            (p, getattr(self, p)) for p in self.hashattr
        ]))
        # Local import: the top-level 'md5' module is Python-2-only and no
        # longer exists; hashlib provides the same digest on both versions.
        import hashlib
        m = hashlib.md5()
        m.update(pickle.dumps(attr, 0))
        return m.hexdigest()[:length]
    ##########################################################################################
    def pre_save(self):
        # Subclasses append extra info (e.g. model order) to the filename stem.
        return self.__class__.__name__
    ##########################################################################################
    def save(self, filestem='', compress=True):
        """Pickle this model to disk.  The filename encodes the class name,
        a settings hash and a trie-content hash; gzip-compressed by default.
        The in-memory state is restored afterwards (pack() is only applied
        to the saved copy)."""
        if self.timestamp is None: self.timestamp = time.localtime()[:6]
        dsaved = dict(self.__dict__)
        desc = self.pre_save()
        if self.prefixtree.isondisk():
            self.prefixtree = self.prefixtree.read()
        self.pack()
        filename = '-'.join([x for x in [
            filestem,
            desc,
            '%s' % self.gethash(8),
            '%s' % self.prefixtree.gethash(8),
            # '%04d%02d%02d%02d%02d%02d' % time.localtime()[:6],
        ] if len(x)])
        filename += '.pk'
        if compress:
            openfn = gzip.open
            filename += '.gz'
        else:
            openfn = open
        print("saving language model as %s ..." % filename)
        pickle.dump(self, openfn(filename, 'wb'), -1)
        # Restore the pre-pack in-memory state.
        self.__dict__.update(dsaved)
        print('done')
    ##########################################################################################
    def loadtrie(self, filename):
        """Load a trie from disk and infer the alphabet from its root node."""
        filename = FindFile(filename) # TODO: @@@ more graceful behaviour if not found?
        self.prefixtree = TreeStructure.trie(filename)
        self.alphabet = sorted(self.prefixtree.dist('').keys())
    ##########################################################################################
    def cases(self):
        """
        Return (loweronly, upperonly, both) which are counts of the number of elements in
        self.alphabet for which different .upper() and .lower() case versions theoretically
        exist, and...
            loweronly:  only the lower-case version is present
            upperonly:  only the upper-case version is present
            both:       both versions are present
        """###
        lowers = set()
        uppers = set()
        for c in self.alphabet:
            if not hasattr(c, 'lower') or not hasattr(c, 'upper'): continue
            up, lo = c.upper(), c.lower()
            if up == lo: continue
            if c == up: uppers.add(lo)
            if c == lo: lowers.add(lo)
        loweronly = len(lowers - uppers)
        upperonly = len(uppers - lowers)
        both = len(lowers.intersection(uppers))
        return loweronly, upperonly, both
    ##########################################################################################
    def read_corpus(self, filename=None):
        """
        Read a given text file, "clean" it according to the specified alphabet, and
        return the resulting content. The text file is assumed to be either ASCII
        or UTF-8 encoded.
        """###
        if filename is None: filename = default_corpus
        filename = FindFile(filename) # TODO: @@@ more graceful behaviour if not found?
        t = ReadUTF8(filename)
        return self.clean(t)
    ##########################################################################################
    def read_translations(self, filename=None):
        """Read the character-translation table that maps out-of-alphabet
        characters to in-alphabet replacement strings.  Each line of the
        file is 'src <ignored> candidate [candidate ...]'; the first
        candidate composed entirely of in-alphabet characters wins."""
        if filename is None:
            filename = FindFile(default_translations)
            if not os.path.isfile(filename): return {}
            # TODO: @@@ is this really satisfactory? fail silently if using default, and default not found
        else:
            filename = FindFile(filename)
        t = ReadUTF8(filename)
        t = t.split('\n')
        d = {}
        for line in t:
            w = line.split(' ')
            if len(w) == 0: continue
            src, cand = w[0], w[2:]
            if src in self.alphabet: continue
            cand = [c for c in cand if len(c) and False not in [ci in self.alphabet for ci in c]]
            if len(cand): d[src] = cand[0]
        return d
    ##########################################################################################
    def build(self, text=None):
        """
        Take text (for example as returned by corpus() ) and build a trie. The
        maximum n-gram length is self.max_chars characters or self.max_words
        words, whichever is shorter.
        """###
        if text is None: text = self.read_corpus()
        t = TreeStructure.trie()
        istop = len(text)
        pr = progress(istop, 'building language model')
        for i in range(istop):
            substr = ''; j = i; nwords = 0; lastchar = ' '
            while j < istop:
                char = text[j]
                substr += char
                endword = (char == ' ' and lastchar != ' ')
                lastchar = char
                j += 1
                if endword:
                    nwords += 1
                if len(substr) >= self.max_chars or nwords >= self.max_words: break
            #print '"'+substr+'"'
            t.add(substr)
            if i % 5000 == 0: pr.update(i, '(%d nodes)' % len(t.nodes))
        self.prefixtree = t
        self.corpus = text
        self.timestamp = time.localtime()[:6]
        return self
    ##########################################################################################
    def pack(self):
        # Convert the trie to its compact packed representation (idempotent).
        if not self.prefixtree.ispacked(): self.prefixtree = self.prefixtree.pack()
        return self
    ##########################################################################################
    def match_case(self, txt):
        """If the alphabet is effectively single-case, fold <txt> to that case."""
        loweronly, upperonly, both = self.cases()
        if hasattr(txt, 'lower') and loweronly > 0 and upperonly == 0 and both == 0: txt = txt.lower()
        if hasattr(txt, 'upper') and upperonly > 0 and loweronly == 0 and both == 0: txt = txt.upper()
        return txt
    ##########################################################################################
    def trim_until_found(self, txt):
        """
        Trim the context from the left until it *is* actually found in the trie.
        (actually, grow it from the right and stop at the longest string that is found).
        without this, the algorithm seems to return a flat distribution whenever a context
        is used that does not appear as an exact substring in the corpus.  Clearly this is
        crude:  more sophisticated language models will not require this.
        """###
        found = ''
        for i in range(1, len(txt)+1):
            query = txt[-i:]
            if self.prefixtree.retrieve(query) is None: break
            else: found = query
        txt = found
        return txt
    ##########################################################################################
    def prepend_context(self, txt, prefix='. '):
        """Prepend a sentence-start prefix (or as much of it as the alphabet
        supports), avoiding duplication if <txt> already starts with it."""
        baseline = self.prefixtree.dist('')
        cc, prefix = prefix[::-1], ''
        # Keep only the trailing run of prefix characters present in the alphabet.
        for c in cc:
            if c not in baseline: break
            prefix = c + prefix
        if len(prefix) == 0: return txt
        rm = 0
        for i in range(1, len(prefix)+1):
            if txt.startswith(prefix[-i:]): rm = i
        if rm: prefix = prefix[:-rm]
        if len(prefix): txt = prefix + txt
        return txt
    ##########################################################################################
    def prepare_context(self, context):
        """Truncate <context> to the limits the trie was built with and
        optionally trim it to the longest suffix actually in the trie."""
        # do not exceed the character limit used while building the trie (including the next char)
        context = context[-self.max_chars+1:]
        # on the assumption that the next character is a non-space, do not exceed the word limit
        # used while building the trie
        backwards, context = context[::-1], ''
        nwords, wasspace = 0, False
        for c in backwards:
            isspace = (c == ' ')
            if isspace and not wasspace: nwords += 1
            if not isspace and wasspace and nwords >= self.max_words: break
            context = c + context
            wasspace = isspace
        # model option: trim until found
        if self.trim: context = self.trim_until_found(context)
        # store
        self.last_used_context = context
        return context
    ##########################################################################################
    def predict_letter(self, context):
        """Maximum-likelihood distribution over the next symbol given <context>."""
        context = self.prepare_context(context)
        return pdict(self.prefixtree.dist(context))
    ##########################################################################################
    def predict_word_completion(self, wordstem, prevwords=''):
        """Return (sorted [(relative_frequency, completion)] list, stemcount)
        of word completions for <wordstem> following <prevwords>."""
        startword = ' '  # may only be one
        endword = '.,?!;:\'"[](){}\n\t '
        if startword not in self.prefixtree.dist(''):
            raise RuntimeError('the word-delimiter is not in the language-model\'s alphabet')
        words, stemcount = [], 0
        prevwords = startword + prevwords.lstrip(endword)
        if prevwords[-1] not in endword: prevwords = prevwords + startword
        wordstem = wordstem.split(' ')[-1]
        nodenum, substr = self.prefixtree.walk(prevwords + wordstem)
        if substr == prevwords + wordstem:
            stemcount = self.prefixtree.nodes[nodenum].count
            # Depth-first walk of the subtree below the stem, counting each
            # completion's terminations (word-ending characters).
            t = [(nodenum, wordstem)]
            while len(t):
                nodenum, substr = t.pop()
                d = self.prefixtree.nodes[nodenum].children
                wordcount = 0
                for k, v in list(d.items()):
                    if k in endword: wordcount += self.prefixtree.nodes[v].count
                    else: t.append((v, substr+k))
                wordcount = float(wordcount)/float(stemcount)
                if wordcount: words.append((wordcount, substr))
        return sorted(words), stemcount
##############################################################################################
##############################################################################################
class PPM(LanguageModel):
    """
    Provides the classical PPM (Prediction by partial matching) distributions behavior
    by combining the n-grams with the method C. Returns the distribution for the next
    letter, based on the given context.
    """###
    ##########################################################################################
    def __init__(self, *pargs, **kwargs):
        # Nothing PPM-specific to configure; delegate to the base class.
        #super(PPM, self).__init__(*pargs, **kwargs)
        LanguageModel.__init__(self, *pargs, **kwargs)
    ##########################################################################################
    def predict_letter(self, context):
        """Return a pdict over next symbols using PPM with escape method C."""
        baseline = self.prefixtree.dist('')
        context = self.prepare_context(context)
        # With no usable context, the unigram counts are the best estimate.
        # (Wrapped in pdict for consistency with the contextual branch and
        # the other models' predict_letter methods, which the original
        # omitted here.)
        if len(context) == 0: return pdict(baseline)
        counts = dict()
        preFactor = 1.0
        # Combine n-grams: blend orders from the longest matching context
        # downward, each weighted by the escape mass accumulated so far.
        for i in range(len(context)):
            newCounts = self.prefixtree.dist(context[i:])
            if len(newCounts) == 0: break
            # Method C: escape mass equals the number of distinct symbols seen.
            normFactor = float(len(newCounts) + sum(newCounts.values()))
            # update distribution
            for k in list(newCounts.keys()):
                counts[k] = counts.get(k, 0.0) + preFactor * newCounts[k] / normFactor # normalize by Method C
            preFactor *= len(newCounts) / normFactor
        # Whatever escape probability remains goes to a uniform distribution
        # over the whole alphabet.
        alphabet = list(baseline.keys())
        for k in alphabet:
            counts[k] = counts.get(k, 0.0) + preFactor / len(alphabet)
        return pdict(counts)
##############################################################################################
##############################################################################################
class NGram(LanguageModel):
    """Plain n-gram model: each prediction conditions on at most N-1
    preceding characters, so the predicted character completes an N-gram."""
    def __init__(self, *pargs, **kwargs):
        """
        e.g. to construct a trigram model:  NGram(3, ...) or NGram(..., N=3)
        where ... are the other input arguments to LanguageModel.
        """###
        pargs = list(pargs)
        if len(pargs) and isinstance(pargs[0], int) and 'N' not in kwargs: self.N = pargs.pop(0)
        else: self.N = kwargs.pop('N', None)
        if self.N is None: raise TypeError('argument N was not supplied')
        LanguageModel.__init__(self, *pargs, **kwargs)
        self.hashattr += ['N']
    def pre_save(self):
        # Include the model order in saved filenames, e.g. "NGram3".
        return '%s%d' % (self.__class__.__name__, self.N)
    def predict_letter(self, context):
        # Condition on the last N-1 characters only: the predicted character
        # is the Nth element of the n-gram.  (The original kept N characters,
        # which effectively queried (N+1)-grams; cf. MKN.predict_letter,
        # which correctly truncates to maxn-1.)  For a unigram model (N=1)
        # the context is empty.
        context = context[-(self.N - 1):] if self.N > 1 else ''
        context = self.prepare_context(context)
        return pdict(self.prefixtree.dist(context))
##############################################################################################
##############################################################################################
class MKN(LanguageModel):
    """Interpolated modified Kneser-Ney language model (see __init__)."""
    def __init__(self, *pargs, **kwargs):
        """
        A language model which is

          - Interpolated:  meaning all context lengths are combined for all
            predictions, irrespective of whether the predicted string appears
            in the corpus (in contrast to a backoff model---see [1], pp15--16).

          - Kneser-Ney-based: so that absolute discounting of n-gram frequency
            estimates is used,  and lower-than-maximum-order models use a
            strange way of counting (extended context counts as opposed to
            n-gram occurrence counts).  This is what was used in [1] and
            corresponds to the variant called MODKN-EXTEND in [2] (pp 3--4).

          - Modified:  meaning that the modification proposed in [1] is used,
            namely that the discounting value has a dependency on the value of
            the count it is discounting.

        [1] Chen, S. F. and Goodman, J. (1998): An Empirical Study of Smoothing
            Techniques for Language Modeling. Technical Report TR-10-98, Center
            for Research in Computing Technology (Harvard University), August 1998.

        [2] James, F. (2000): Modified Kneser-Ney Smoothing of n-gram Models.
            Technical Report 00.07,  Research Institute for Advanced Computer
            Science (USRA/NASA), October 2000.
        """###
        pargs = list(pargs)
        if len(pargs) and isinstance(pargs[0], int) and 'N' not in kwargs: self.maxn = pargs.pop(0)
        else: self.maxn = kwargs.pop('N', None)
        if self.maxn is None: raise TypeError('argument N was not supplied')
        self.maxc = kwargs.pop('maxc', 3)  # counts above this share one discount value
        self.discounts = None              # computed lazily by SetupDiscounts()
        self.suffixtree = None             # optional; speeds up kndist() when built
        self.slow = False                  # force the slow trie-based kndist()
        LanguageModel.__init__(self, *pargs, **kwargs)
        self.hashattr += ['maxn', 'maxc']
    def pre_save(self):
        # Make sure the suffix tree is in memory before pickling.
        if getattr(self, 'suffixtree', None) is not None:
            if self.suffixtree.isondisk(): self.suffixtree = self.suffixtree.read()
        return '%s%d' % (self.__class__.__name__, self.maxn)
    def CountNGrams(self):
        """Return Q where Q[n][c] is the number of distinct n-grams occurring
        exactly c times (c capped at maxc+1; larger counts go to 'more')."""
        Q = dict([(
            n,
            dict([ (i, 0) for i in list(range(1, self.maxc+2))+['more'] ])
        ) for n in range(1, self.maxn+1)])
        t = self.prefixtree.nodes
        stack = [list(t[0].children.values())]
        while len(stack):
            nodes = stack[-1]
            if len(nodes) == 0: stack.pop(); continue
            node = t[nodes.pop()]
            n = len(stack)  # depth in the trie == n-gram length
            c = node.count
            if c > self.maxc+1: c = 'more'
            Q[n][c] += 1
            if n < self.maxn: stack.append(list(node.children.values()))
        return Q
    def SetupDiscounts(self, maxc=None, maxn=None):
        """
        Y[n]    = Q[n][1] / (Q[n][1] + 2.0 * Q[n][2])
        D[n][1] = 1.0 - 2.0 * Y[n] * Q[2] / Q[1]
        D[n][2] = 2.0 - 3.0 * Y[n] * Q[3] / Q[2]
        D[n][3] = 3.0 - 4.0 * Y[n] * Q[4] / Q[3]

        where Q[n][c] is the number of strings of length n
        that appear exactly c times.
        """###
        if maxn is not None: self.maxn = maxn
        if maxc is not None: self.maxc = maxc
        self.discounts = [None]
        print("counting n-grams up to length %d..." % self.maxn)
        Q = self.counts = self.CountNGrams()
        for n in range(1, self.maxn+1):
            Y = Q[n][1] / max(1.0, Q[n][1] + 2.0 * Q[n][2])
            disc = [0]
            for c in range(1, self.maxc+1):
                D = c - (c + 1.0) * Y * float(Q[n][c+1]) / max(1.0, float(Q[n][c]))
                disc.append(D)
            self.discounts.append(disc)
        return self
    def N(self, d, criterion, exact=False):
        """
        N_criterion  ( string . )    if exact==True
        N_criterion+ ( string . )    if exact==False

        where string is the context that led us to d, an
        unnormalized dictionary of children's counts.
        """###
        if exact: return sum([c == criterion for wi, c in list(d.items())])
        else: return sum([c >= criterion for wi, c in list(d.items())])
    def D(self, c, n):
        """
        Discount as a function of count (adjusted so as never to go negative).
        """###
        if n < len(self.discounts): disc = self.discounts[n]
        else: disc = self.discounts[-1]
        ind = min(len(disc)-1, c)
        d = disc[ind]
        d = min(c, d)
        return d
    def predict_letter(self, context):
        """Interpolated modified-Kneser-Ney distribution over the next symbol."""
        #import SigTools; return SigTools.unpickle('firstprior') # &&&
        if self.discounts is None: self.SetupDiscounts()
        # Condition on at most maxn-1 characters (the prediction completes
        # an n-gram of length maxn).
        context = context[-(self.maxn-1):]
        context = self.prepare_context(context)
        p = {}
        fac = 1.0       # interpolation weight accumulated from the escape masses
        nz = False
        nstart = n = len(context) + 1
        highest = True  # highest order uses raw counts; lower orders use KN counts
        while True:
            ttt = time.time()
            if highest:
                counts = self.prefixtree.dist(context)
                denom = float(sum(counts.values()))
                denom = max(1.0, denom)
                gamma = 0.0
                if n < len(self.discounts): disc = self.discounts[n]
                else: disc = self.discounts[-1]
                for i in range(1, len(disc)):
                    exact = i < len(disc) - 1
                    gamma += disc[i] * self.N(counts, i, exact)
                gamma /= denom
            else:
                counts = self.kndist(context)
                v = list(counts.values())
                denom = max(1.0, float(sum(v)))
                gamma = sum([self.D(c, n) for c in v]) / denom
            for k, v in list(counts.items()):
                p[k] = p.get(k, 0.0) + fac * max(0, v - self.D(v, n)) / denom
                if p[k]: nz = True
            fac *= gamma
            ttt = time.time() - ttt
            #if self.verbose: print "n=%d: %g sec" % (n, ttt)
            if len(context) == 0: break
            context = context[1:]
            n -= 1
            highest = False
        return pdict(p)
    def pack(self, *pargs, **kwargs):
        LanguageModel.pack(self, *pargs, **kwargs)
        if getattr(self, 'suffixtree', None) is not None:
            if not self.suffixtree.ispacked(): self.suffixtree = self.suffixtree.pack()
        # Make sure discounts match the current maxn before saving/use.
        if self.discounts is None or len(self.discounts) <= self.maxn: self.SetupDiscounts()
        return self
    def kndist(self, string):
        """Kneser-Ney 'extended context' counts for the successors of <string>:
        the number of distinct left-extensions in which each successor occurs."""
        if self.slow or self.suffixtree is None:
            return self.prefixtree.kndist(string) # slow
        # equivalent but much faster algorithm using the reversed-text trie
        d = self.prefixtree.retrieve(string)
        if d is None: successors = []
        else: successors = list(d.children.keys())
        gnirts = string[::-1]
        p = {}
        for successor in successors:
            d = self.suffixtree.dist(successor+gnirts)
            p[successor] = self.N(d, 1, exact=False)
        return p
    def build_suffixtree(self, text=None):
        """Build the reversed-text trie used by the fast kndist() path,
        preserving the forward prefix tree and corpus."""
        if text is None: text = self.corpus
        if text is None: text = self.read_corpus()
        prev = self.prefixtree, self.corpus, self.timestamp
        self.build(text[::-1])
        self.suffixtree = self.prefixtree
        self.prefixtree, self.corpus, self.timestamp = prev
        return self
##############################################################################################
##############################################################################################
# TODO:
# nasty bug: when a language model is used which takes up a lot of memory,
# pyspeller likelihoods somehow get corrupted.... is not a function of the prior itself
# but somehow a side-effect of using the trees... parts marked &&& here and in BciApplication.py are for debugging this
# flat prior results from MKN with trim=False when string gets too long. Can that be right?
#
# factor in word-completions and backspace to predictive distribution
# implement word-completions
# model the backspace key
##############################################################################################
############################################################################################## | PypiClean |
/ECmean4-0.1.4.tar.gz/ECmean4-0.1.4/ecmean/libs/files.py | import os
import re
import logging
from pathlib import Path
from glob import glob
import yaml
import xarray as xr
loggy = logging.getLogger(__name__)
##################
# FILE FUNCTIONS #
##################
def inifiles_priority(inidict):
    """
    For areas dictionary and remap dictionary, provides a priority of which
    files to be used for interpolation and area computation
    Areas files comes first, then gridfile and finally land-sea mask.
    Provides flexibility for multiple models with different data access
    """
    # scan the candidates in priority order; first truthy entry wins
    for key in ('areafile', 'gridfile', 'maskfile'):
        if inidict[key]:
            return inidict[key]
    return None
def var_is_there(flist, var, face):
    """
    Check if a variable is available in the input file and provide its units.
    Args:
        flist: either a list of file paths (from glob) or an already-open
            xarray DataArray/Dataset.
        var: cmorname of the variable (may be a derived one).
        face: interface dictionary describing variables and their units.
    Returns:
        isavail (bool): if the variable is found or not
        varunit (string): if the variable is there, its unit (None otherwise)
    """
    # we expect a list obtained by glob
    if not isinstance(flist, (xr.DataArray, xr.Dataset)):
        # available only when the glob matched something and every hit exists
        isavail = all(os.path.isfile(f) for f in flist) and len(flist) > 0
    else:
        # isavail = True if var in flist else False
        isavail = True
    if isavail:
        # no need of preproc here
        if not isinstance(flist, (xr.DataArray, xr.Dataset)):
            xfield = xr.open_mfdataset(flist)
        else:
            xfield = flist
        # if variable is derived, extract required vars
        var_req = _get_variables_to_load(var, face)
        # check if all required variables are in model output
        if set(var_req).issubset(set(xfield.data_vars)):
            units_avail = {}
            # if I don't know the unit, assuming is a fraction
            for i in xfield.data_vars:
                units_avail[i] = getattr(xfield[i], 'units', 'frac')
                # this is because I can't get rid of this unit
                if units_avail[i] == '(0 - 1)':
                    units_avail[i] = 'frac'
        else:
            isavail = False
            varunit = None
            # report exactly which required components are missing
            x = [e for e in var_req if e not in xfield.data_vars]
            loggy.warning("Variable %s requires %s which is not "
                          "available in the model output. Ignoring it.", var, ' '.join(x))
            return isavail, varunit
        # get units: an explicit unit in the interface file takes precedence
        varunit = face['variables'][var].get('units', None)
        if not varunit:
            # fall back to the unit of the first component of the expression
            varunit = units_avail.get(var_req[0])
            if len(var_req) > 1:
                loggy.info('%s is a derived var, assuming unit '
                           'as the first of its term', var)
    else:
        varunit = None
        # print(f'Not available: {var} File: {flist}')
        loggy.error("No file found for variable %s. Ignoring it.", var)
    return isavail, varunit
def get_clim_files(piclim, var, diag, season):
    """Extract the climatology (mean) and variance file names for a variable.
    Args:
        piclim: climatology configuration (from pi_climatology.yml), keyed
            by variable name; provides dataset/dataname and optional years.
        var: variable name to look up.
        diag: diagnostic object providing ``climatology``, ``resclmdir``
            and (for EC23) ``resolution``.
        season: 'ALL' selects the annual climatology, anything else the
            seasonal files (EC23 only).
    Returns:
        Tuple ``(clim, vvvv)`` of mean and variance file paths as strings.
    Raises:
        ValueError: if ``diag.climatology`` is not a known climatology.
    """
    # extract info from pi_climatology.yml:
    # reference dataset and reference varname, plus years when available
    dataref = piclim[var]['dataset']
    datayear1 = piclim[var].get('year1', None)
    datayear2 = piclim[var].get('year2', None)
    # get files for climatology
    if diag.climatology == 'RK08':
        dataname = piclim[var]['dataname']
        clim = str(diag.resclmdir / f'climate_{dataref}_{dataname}.nc')
        vvvv = str(diag.resclmdir / f'variance_{dataref}_{dataname}.nc')
    elif diag.climatology == 'EC23':
        # BUGFIX: was `diag.climatology in 'EC23'`, a substring test that
        # also matched e.g. 'E' or 'C2'; equality is what is meant here.
        if season == 'ALL':
            stringname = 'climate'
        else:
            stringname = 'seasons'
        clim = str(
            diag.resclmdir /
            f'{stringname}_average_{var}_{dataref}_{diag.resolution}_{datayear1}-{datayear2}.nc')
        vvvv = str(
            diag.resclmdir /
            f'{stringname}_variance_{var}_{dataref}_{diag.resolution}_{datayear1}-{datayear2}.nc')
    else:
        # previously an unknown climatology fell through and raised
        # UnboundLocalError on the return statement below
        raise ValueError(f'Unknown climatology: {diag.climatology}')
    return clim, vvvv
def get_inifiles(face, diag):
    """Return the inifiles from the interface, needs the component dictionary.
    Check if inifiles exist.
    Args:
        face: the interface dictionary
        diag: the diagnostic object
    Returns:
        a dictionary with the different initial files
    """
    component_map = face['model']['component']
    ifiles = {}
    for comp, compname in component_map.items():
        ifiles[comp] = {}
        for name, inifile in face['component'][compname].items():
            if not inifile:
                # nothing configured for this entry
                ifiles[comp][name] = ''
                continue
            # relative paths are resolved against the experiment base dir
            if inifile[0] != '/':
                inifile = Path(diag.ecedir) / \
                    Path(face['model']['basedir']) / \
                    Path(inifile)
            resolved = str(_expand_filename(inifile, '', diag))
            ifiles[comp][name] = resolved
            loggy.info('%s for component %s is: %s', name, comp, resolved)
            # safe check if inifile exist
            if not glob(resolved):
                loggy.warning('Inifile %s cannot be found!', resolved)
                ifiles[comp][name] = ''
    return ifiles
def _expand_filename(filenames, var, diag):
"""Expands a path (filename or dir) for var, expname, frequency, ensemble etc.
and environment variables. Years are set as a wildcard and filtered by _filter_by_year"""
return Path(str(os.path.expandvars(filenames)).format(
expname=diag.expname,
year1='*',
year2='*',
var=var,
frequency=diag.frequency,
ensemble=diag.ensemble,
grid=diag.grid,
model=diag.modelname,
version=diag.version
))
def _filter_filename_by_year(template, filenames, year):
    """Find filenames whose year range (encoded at the end of the name,
    e.g. 199001-199012.nc or 1990-1991.nc) contains the given year."""
    if 'year1' not in template:
        # template carries no year information: nothing can be filtered
        filternames = filenames
    else:
        two_years = 'year2' in template

        def _bounds(fname):
            # assumes the file name ends with 199001-199012.nc or 1990-1991.nc
            pieces = fname.split('_')[-1].split('-')
            first = int(pieces[0][0:4])
            last = int(pieces[1][0:4]) if two_years else first
            return first, last

        filternames = []
        for fname in filenames:
            first, last = _bounds(fname)
            if first <= year <= last:
                filternames.append(fname)
    # safety warning if something is missing
    if not filternames and len(filenames) > 0:
        loggy.warning('Data for year %s has not been found!', str(year))
    loggy.info('Filtered filenames: %s', filternames)
    return filternames
def load_yaml(infile):
    """Load generic yaml file.
    Args:
        infile: path of the YAML file to parse.
    Returns:
        The parsed configuration object.
    Raises:
        IOError: if the file cannot be opened.
    """
    try:
        with open(infile, 'r', encoding='utf-8') as file:
            cfg = yaml.load(file, Loader=yaml.FullLoader)
    except IOError as err:
        # chain the original error so the real OS failure is preserved
        raise IOError(f'ERROR: {infile} not found: you need to have this configuration file!') from err
    return cfg
def _create_filepath(cmorname, face, diag):
    """Create filepath with wildcards"""
    # pick the filetype entry the variable belongs to, then assemble
    # base experiment dir / model dir / filetype dir / filename template
    ftype = face['variables'][cmorname]['filetype']
    ftype_def = face['filetype'][ftype]
    filepath = (Path(diag.ecedir)
                / Path(face['model']['basedir'])
                / Path(ftype_def['dir'])
                / Path(ftype_def['filename']))
    loggy.info('Filepath: %s', filepath)
    return filepath
def make_input_filename(cmorname, face, diag):
    """Create full input filepaths for the required variable and a given year
    Returns:
        a list of files to be loaded by xarray
    """
    # if a dataarray is provided, no file lookup is needed
    if diag.xdataset is not None:
        return diag.xdataset
    # detect if it is a derived var and get the list of vars to be loaded
    varname = _get_variables_to_load(cmorname, face)
    # create filepath template according to the interface file
    filepath = _create_filepath(cmorname, face, diag)
    filename = []
    for year in diag.years_joined:
        yearly = []
        for var in varname:
            expandfile = _expand_filename(filepath, var, diag)
            matches = glob(str(expandfile))
            yearly += _filter_filename_by_year(str(filepath), matches, year)
        filename += yearly
    # keep unique entries while preserving order
    filename = list(dict.fromkeys(filename))
    loggy.info("Filenames: %s", filename)
    return filename
def _get_variables_to_load(var, face):
"""Function to extract from the interface file the list of derived variable,
i.e. the real variables to be loaded, for each of the cmorname introduced in the
interface file
Args:
var: the cmorname variable of the data to be loaded
face: the interface file
"""
if 'derived' in face['variables'][var].keys():
cmd = face['variables'][var]['derived']
dervars = [x for x in re.split('[*+-/]', cmd) if not x.isdigit()]
else:
dervars = [var]
return dervars | PypiClean |
/Annalist-0.5.18.tar.gz/Annalist-0.5.18/annalist_root/annalist/layout.py | from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function
__author__ = "Graham Klyne (GK@ACM.ORG)"
__copyright__ = "Copyright 2014-2016, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import os.path
import logging
log = logging.getLogger(__name__)
# Annalist configuration and metadata files
#
# Directory layout:
#
# $BASE_DATA_DIR
# annalist-site/
# c/
# _annalist-site/
# d/
# coll_meta.json_ld
# coll_prov.json_ld
# coll_context.json_ld
# _type/
# :
# <collection-id>/
# d/
# coll_meta.jsonld
# coll_prov.jsonld
# _type/
# <type-id>/
# type_meta.jsonld
# type_prov.jsonld
# :
# _view/
# <view-id>/
# view_meta.jsonld
# view_prov.jsonld
# :
# _list/
# <list-id>/
# list_meta.jsonld
# list_prov.jsonld
# :
# (etc.)
# :
# <type-id>/
# <entity-id>/
# entity-data.jsonld
# entity-prov.jsonld
# :
# :
# <collection-id>/
# :
COLL_TYPEID = "_coll"
COLL_BASE_DIR = "d"
COLL_PAGE_DIR = "p"
COLL_ROOT_CONF_OLD_DIR = "_annalist_collection"
COLL_BASE_CONF_OLD_DIR = "../" + COLL_ROOT_CONF_OLD_DIR
COLL_META_FILE = "coll_meta.jsonld"
COLL_META_TURTLE = "coll_meta.ttl"
COLL_PROV_FILE = "coll_prov.jsonld"
COLL_BASE_REF = COLL_BASE_DIR + "/"
COLL_PAGE_REF = COLL_PAGE_DIR + "/"
COLL_META_REF = COLL_BASE_REF + COLL_META_FILE
COLL_PROV_REF = COLL_BASE_REF + COLL_PROV_FILE
COLL_TURTLE_REF = COLL_BASE_REF + COLL_META_TURTLE
META_COLL_REF = "../"
META_COLL_BASE_REF = "./"
COLL_CONTEXT_FILE = "coll_context.jsonld"
# COLL_CONTEXT_REF = COLL_BASE_REF + COLL_CONTEXT_FILE
SITE_TYPEID = "_site"
SITEDATA_ID = "_annalist_site"
SITEDATA_DIR = "c/%(id)s"%{'id': SITEDATA_ID}
SITEDATA_OLD_DIR1 = "_annalist_site"
SITEDATA_OLD_DIR2 = SITEDATA_DIR+"/"+COLL_ROOT_CONF_OLD_DIR
SITE_META_PATH = ""
SITE_META_REF = "."
SITE_META_FILE = "site_meta.jsonld" # Currently not used except to store description data
META_SITE_REF = "./"
SITE_COLL_VIEW = "c/%(id)s/"
SITE_COLL_PATH = "c/%(id)s"
SITE_CONTEXT_FILE = "site_context.jsonld"
SITE_DATABASE_FILE = "db.sqlite3"
SITEDATA_BASE_DIR = SITEDATA_DIR + "/" + COLL_BASE_DIR # used in tests
SITEDATA_META_FILE = COLL_META_FILE # used in views
SITEDATA_PROV_FILE = COLL_PROV_FILE # used in views
SITEDATA_CONTEXT_PATH = "./" # used in models
BIBDATA_ID = "Bibliography_defs" # used for testing
# -------------------------
# Entities of various types
# -------------------------
#
# NOTE: definitive entity URIs are *without* trailing "/".
# Redirection to a URI with the trailing "/" retrieves a representation of the entity,
# generally an HTML form view. Redirection to other forms is used for alternative
# representations.
#
# Type records
TYPE_TYPEID = "_type" # type id for type records, used in URL
TYPE_DIR = "_type" # collection directory in file system
TYPE_DIR_PREV = "types" # collection directory in file system
TYPE_META_FILE = "type_meta.jsonld" # type metadata file name
TYPE_META_TURTLE = "type_meta.ttl" # reference type metadata as Turtle
TYPE_PROV_FILE = "type_prov.jsonld" # type provenance file name
COLL_BASE_TYPE_REF = TYPE_TYPEID + "/%(id)s" # ref type relative to collection base URL
COLL_TYPE_VIEW = COLL_BASE_REF + COLL_BASE_TYPE_REF + "/" # ref type view relative to collection entity
COLL_TYPE_PATH = COLL_BASE_REF + TYPE_DIR + "/%(id)s"
# type dir relative to collection root dir
# List description records
LIST_TYPEID = "_list" # list type id, used in URL
LIST_DIR = "_list" # collection directory in file system
LIST_DIR_PREV = "lists" # collection directory in file system
LIST_META_FILE = "list_meta.jsonld" # list metadata file name
LIST_META_TURTLE = "list_meta.ttl" # reference list metadata as Turtle
LIST_PROV_FILE = "list_prov.jsonld" # list provenance file name
COLL_BASE_LIST_REF = LIST_TYPEID + "/%(id)s" # ref list relative to collection base URL
COLL_LIST_VIEW = COLL_BASE_REF + COLL_BASE_LIST_REF + "/" # ref list view relative to collection entity
COLL_LIST_PATH = COLL_BASE_REF + LIST_DIR + "/%(id)s"
# list dir relative to collection root dir
# View description records
VIEW_TYPEID = "_view" # view type id, used in URL
VIEW_DIR = "_view" # collection directory in file system
VIEW_DIR_PREV = "views" # previous collection directory
VIEW_META_FILE = "view_meta.jsonld" # view metadata file name
VIEW_META_TURTLE = "view_meta.ttl" # reference view metadata as turtle
VIEW_PROV_FILE = "view_prov.jsonld" # view provenance file name
COLL_BASE_VIEW_REF = VIEW_TYPEID + "/%(id)s" # ref view relative to collection base URL
COLL_VIEW_VIEW = COLL_BASE_REF + COLL_BASE_VIEW_REF + "/" # ref view relative to collection entity
COLL_VIEW_PATH = COLL_BASE_REF + VIEW_DIR + "/%(id)s"
# view dir relative to collection root dir
# Field-group description records
GROUP_TYPEID = "_group" # group type id, used in URL
GROUP_DIR = "_group" # collection directory in file system
GROUP_DIR_PREV = "groups" # previous collection directory
GROUP_META_FILE = "group_meta.jsonld" # group metadata file name
GROUP_PROV_FILE = "group_prov.jsonld" # group provenance file name
COLL_BASE_GROUP_REF = GROUP_TYPEID + "/%(id)s" # ref group relative to collection base URL
COLL_GROUP_VIEW = COLL_BASE_REF + COLL_BASE_GROUP_REF + "/" # ref group view relative to collection entity
COLL_GROUP_PATH = COLL_BASE_REF + GROUP_DIR + "/%(id)s"
# group dir relative to collection root dir
# Field description records
FIELD_TYPEID = "_field" # field type id, used in URL
FIELD_DIR = "_field" # collection directory in file system
FIELD_DIR_PREV = "fields" # previous collection directory
FIELD_META_FILE = "field_meta.jsonld" # field metadata file name
FIELD_META_TURTLE = "field_meta.ttl" # reference field metadata as turtle
FIELD_PROV_FILE = "field_prov.jsonld" # field provenance file name
COLL_BASE_FIELD_REF = FIELD_TYPEID + "/%(id)s" # ref field relative to collection base URL
COLL_FIELD_VIEW = COLL_BASE_REF + COLL_BASE_FIELD_REF + "/" # ref field view relative to collection entity
COLL_FIELD_PATH = COLL_BASE_REF + FIELD_DIR + "/%(id)s"
# field dir relative to collection root dir
# User permission records
USER_TYPEID = "_user" # type id, used in URL
USER_DIR = "_user" # collection directory in file system
USER_DIR_PREV = "users" # previous collection directory
USER_META_FILE = "user_meta.jsonld" # user metadata file name
USER_META_TURTLE = "user_meta.ttl" # reference user metadata as turtle
USER_PROV_FILE = "user_prov.jsonld" # user provenance file name
COLL_BASE_USER_REF = USER_TYPEID + "/%(id)s" # ref user relative to collection base URL
COLL_USER_VIEW = COLL_BASE_REF + COLL_BASE_USER_REF + "/" # ref user relative to collection entity
COLL_USER_PATH = COLL_BASE_REF + USER_DIR + "/%(id)s"
# user dir relative to collection root dir
# Vocabulary namespace records
VOCAB_TYPEID = "_vocab" # type id, used in URL
VOCAB_DIR = "_vocab" # collection directory in file system
VOCAB_DIR_PREV = "vocabs" # previous collection directory
VOCAB_META_FILE = "vocab_meta.jsonld" # vocab metadata file name
VOCAB_META_TURTLE = "vocab_meta.ttl" # reference vocab metadata as Turtle
VOCAB_PROV_FILE = "vocab_prov.jsonld" # vocab provenance file name
COLL_BASE_VOCAB_REF = VOCAB_TYPEID + "/%(id)s" # ref vocab relative to collection base URL
COLL_VOCAB_VIEW = COLL_BASE_REF + COLL_BASE_VOCAB_REF + "/" # ref vocab view relative to collection entity
COLL_VOCAB_PATH = COLL_BASE_REF + VOCAB_DIR + "/%(id)s"
# vocab dir relative to collection root dir
# General information records
# Used for holding application information for display; e.g., for the `about` link.
INFO_TYPEID = "_info" # info type id
INFO_DIR = "_info" # collection directory in file system
INFO_DIR_PREV = None # previous directory for migration
INFO_META_FILE = "info_meta.jsonld" # info data file name
INFO_META_TURTLE = "info_meta.ttl" # reference info data as Turtle
INFO_PROV_FILE = "info_prov.jsonld" # info provenance file name
COLL_BASE_INFO_REF = INFO_TYPEID + "/%(id)s" # ref info relative to collection base URL
COLL_INFO_VIEW = COLL_BASE_REF + COLL_BASE_INFO_REF + "/" # ref info view relative to collection entity
COLL_INFO_PATH = COLL_BASE_REF + INFO_DIR + "/%(id)s"
# info dir relative to collection root dir
# Enumerated value descriptions
ENUM_FIELD_PLACEMENT_ID = "_enum_field_placement" # Field placement options
ENUM_LIST_TYPE_ID = "_enum_list_type" # List type (list, grid)
ENUM_RENDER_TYPE_ID = "_enum_render_type" # Field render type
ENUM_VALUE_MODE_ID = "_enum_value_mode" # Field value mode (direct, entity, upload, etc.)
ENUM_VALUE_TYPE_ID = "_enum_value_type" # Field value type (text, longtext, etc.)
ENUM_FIELD_PLACEMENT_DIR = ENUM_FIELD_PLACEMENT_ID # Field placement options
ENUM_LIST_TYPE_DIR = ENUM_LIST_TYPE_ID # List type (list, grid)
ENUM_RENDER_TYPE_DIR = ENUM_RENDER_TYPE_ID # Field render type
ENUM_VALUE_MODE_DIR = ENUM_VALUE_MODE_ID # Field value mode (direct, entity, upload, etc.)
ENUM_VALUE_TYPE_DIR = ENUM_VALUE_TYPE_ID # Field value type (text, longtext, etc.)
ENUM_FIELD_PLACEMENT_DIR_PREV1 = "enums/Enum_field_placement"
ENUM_LIST_TYPE_DIR_PREV1 = "enums/Enum_list_type"
ENUM_RENDER_TYPE_DIR_PREV1 = "enums/Enum_render_type"
ENUM_VALUE_MODE_DIR_PREV1 = "enums/Enum_value_mode"
ENUM_VALUE_TYPE_DIR_PREV1 = "enums/Enum_value_type"
ENUM_FIELD_PLACEMENT_DIR_PREV2 = "_enum/Enum_field_placement"
ENUM_LIST_TYPE_DIR_PREV2 = "_enum/Enum_list_type"
ENUM_RENDER_TYPE_DIR_PREV2 = "_enum/Enum_render_type"
ENUM_VALUE_MODE_DIR_PREV2 = "_enum/Enum_value_mode"
ENUM_VALUE_TYPE_DIR_PREV2 = "_enum/Enum_value_type"
ENUM_META_FILE = "enum_meta.jsonld" # enum metadata file name
ENUM_META_TURTLE = "enum_meta.ttl" # reference enum metadata as Turtle
ENUM_PROV_FILE = "enum_prov.jsonld" # enum provenance file name
COLL_BASE_ENUM_REF = "%(type_id)s/%(id)s" # ref enum relative to collection base URL
COLL_ENUM_PATH = COLL_BASE_REF + "%(type_id)s/%(id)s"
COLL_ENUM_VIEW = COLL_ENUM_PATH + "/" # ref enum view relative to collection entity
# Record type data records (these act as parents for Entity data records)
TYPEDATA_TYPEID = "_entitytypedata" # typedata id
TYPEDATA_META_FILE = "type_data_meta.jsonld" # type data metadata file name
TYPEDATA_PROV_FILE = "type_data_prov.jsonld" # type data provenance file name
COLL_BASE_TYPEDATA_REF = "%(id)s" # ref type data relative to collection base URL
TYPEDATA_COLL_BASE_REF = "../" # ref collection base from record type data
TYPEDATA_CONTEXT_FILE = TYPEDATA_COLL_BASE_REF + COLL_CONTEXT_FILE # ref collection context file
COLL_TYPEDATA_PATH = "d/%(id)s" # dir type data relative to collection root dir
COLL_TYPEDATA_VIEW = "d/%(id)s/" # ref type data view relative to collection entity
# Entity data records (these contain user data, organized by record type)
# Entity data layout information...
TYPEDATA_ENTITY_VIEW = "%(id)s/"
TYPEDATA_ENTITY_PATH = "%(id)s"
COLL_ENTITY_VIEW = "d/%(type_id)s/%(id)s/"
COLL_ENTITY_PATH = "d/%(type_id)s/%(id)s"
SITE_ENTITY_VIEW = "c/%(coll_id)s/d/%(type_id)s/%(id)s/"
SITE_ENTITY_PATH = "c/%(coll_id)s/d/%(type_id)s/%(id)s"
ENTITY_BASE_REF = ""
ENTITY_DATA_FILE = "entity_data.jsonld"
ENTITY_DATA_TURTLE = "entity_data.ttl"
ENTITY_PROV_FILE = "entity_prov.jsonld"
ENTITY_LIST_FILE = "entity_list.jsonld" # Entity list as JSON resource
ENTITY_LIST_TURTLE = "entity_list.ttl" # Entity list as Turtle resource
COLL_BASE_ENTITY_REF = "%(type_id)s/%(id)s"
ENTITY_COLL_BASE_REF = "../../"
#@@ NOTE: @base ignored when loading external context - is this correct?
#@@ ENTITY_CONTEXT_FILE = COLL_CONTEXT_FILE
ENTITY_CONTEXT_FILE = ENTITY_COLL_BASE_REF + COLL_CONTEXT_FILE
ENTITY_OLD_DATA_FILE = "entity-data.jsonld"
# Other symbols
TASK_TYPEID = "_task" # task id
INITIAL_VALUES_ID = "_initial_values" # reserved id used for initial values of new entity
# Lists of directory names for collection migration, etc:
DATA_DIRS_CURR_PREV = (
[ (TYPE_DIR, TYPE_DIR_PREV)
, (LIST_DIR, LIST_DIR_PREV)
, (VIEW_DIR, VIEW_DIR_PREV)
, (GROUP_DIR, GROUP_DIR_PREV)
, (FIELD_DIR, FIELD_DIR_PREV)
, (ENUM_FIELD_PLACEMENT_DIR, ENUM_FIELD_PLACEMENT_DIR_PREV1)
, (ENUM_LIST_TYPE_DIR, ENUM_LIST_TYPE_DIR_PREV1)
, (ENUM_RENDER_TYPE_DIR, ENUM_RENDER_TYPE_DIR_PREV1)
, (ENUM_VALUE_MODE_DIR, ENUM_VALUE_MODE_DIR_PREV1)
, (ENUM_VALUE_TYPE_DIR, ENUM_VALUE_TYPE_DIR_PREV1)
, (ENUM_FIELD_PLACEMENT_DIR, ENUM_FIELD_PLACEMENT_DIR_PREV2)
, (ENUM_LIST_TYPE_DIR, ENUM_LIST_TYPE_DIR_PREV2)
, (ENUM_RENDER_TYPE_DIR, ENUM_RENDER_TYPE_DIR_PREV2)
, (ENUM_VALUE_MODE_DIR, ENUM_VALUE_MODE_DIR_PREV2)
, (ENUM_VALUE_TYPE_DIR, ENUM_VALUE_TYPE_DIR_PREV2)
])
DATA_DIRS = [ p[0] for p in DATA_DIRS_CURR_PREV ]
# map(lambda pair:pair[0], DATA_DIRS_CURR_PREV)
DATA_DIRS_PREV = [ p[1] for p in DATA_DIRS_CURR_PREV ]
# map(lambda pair:pair[1], DATA_DIRS_CURR_PREV)
DATA_VOCAB_DIRS = DATA_DIRS + [VOCAB_DIR]
COLL_DIRS_CURR_PREV = (
DATA_DIRS_CURR_PREV +
[ (USER_DIR, USER_DIR_PREV)
, (VOCAB_DIR, VOCAB_DIR_PREV)
, (INFO_DIR, INFO_DIR_PREV)
])
COLL_DIRS = [ p[0] for p in COLL_DIRS_CURR_PREV ]
COLL_DIRS_PREV = [ p[1] for p in COLL_DIRS_CURR_PREV if p[1] ]
# Name generation suffixes for tasks that generate new records
SUFFIX_LIST = ""
SUFFIX_VIEW = ""
SUFFIX_TYPE = ""
SUFFIX_SUBTYPE = "_subtype"
SUFFIX_SUBPROPERTY = "_subproperty"
SUFFIX_REPEAT = "_many"
SUFFIX_REPEAT_P = "_many"
SUFFIX_SEQUENCE = "_list"
SUFFIX_SEQUENCE_P = "_list"
SUFFIX_REF_FIELD = "_ref" # Reference field name...
SUFFIX_REF_FIELD_P = "_ref" # Reference field property ...
class Layout(object):
    """
    A dynamically created layout value with paths that are dynamically constructed
    using a supplied base directory.
    Instances snapshot the relevant module-level layout constants so callers
    can work from a single object instead of the module namespace.
    """
    def __init__(self, base_data_dir, site_dir_name):
        """
        Dynamically initialize a layout value
        base_data_dir   root directory under which all site data is stored.
        site_dir_name   name of the site directory beneath `base_data_dir`.
        """
        self.BASE_DIR           = base_data_dir
        self.SITE_DIR_NAME      = site_dir_name
        # copies of module-level constants, exposed on the instance
        self.SITEDATA_ID        = SITEDATA_ID
        self.SITEDATA_DIR       = SITEDATA_DIR
        self.SITEDATA_OLD_DIR1  = SITEDATA_OLD_DIR1
        self.SITEDATA_OLD_DIR2  = SITEDATA_OLD_DIR2
        self.SITEDATA_BASE_DIR  = SITEDATA_BASE_DIR     # e.g. c/_annalist_site/d
        # absolute path of the site directory on disk
        self.SITE_PATH          = os.path.join(base_data_dir, site_dir_name)
        self.SITE_META_FILE     = SITE_META_FILE
        self.SITE_DATABASE_FILE = SITE_DATABASE_FILE
        return
# End. | PypiClean |
/Flask-DataTables-peewee-0.1.2.tar.gz/Flask-DataTables-peewee-0.1.2/flask_datatables/model.py | import functools
import traceback
from typing import TYPE_CHECKING
import flask
from peewee import Field
from peewee import Metadata as _Metadata
from peewee import Model as _Model
from .utils import parse_request
if TYPE_CHECKING:
from typing import Callable, Dict, List, Optional, Union
from peewee import AutoField, Expression, ModelSelect, Ordering
from .fields import Field
from .typing import ArrayData, ObjectData, Query, Response
# factory function to convert records
Factory = Callable[['Model'], Union[ArrayData, ObjectData]]
__all__ = ['Model', 'Metadata']
class Metadata(_Metadata):
    """Basic metadata for data models.
    ``Flask-DataTables`` extends the original metadata record from :mod:`peewee`
    with a :attr:`~flask_datatables.model.Metadata.datatables` switch to indicate
    if current data model supports and/or enables `DataTables`_ server-side
    processing integration.
    """
    #: `DataTables`_ integration indicator flag.
    #: Defaults to :data:`False`; enable it on a model to activate the
    #: server-side processing hooks (see ``Model.validate_model``).
    datatables: bool = False
class Model(_Model):
    """Extends :class:`peewee.Model` with `DataTables`_ support."""
    id: 'AutoField'
    _meta: 'Metadata'
    #: `DataTables`_ orderable fields, keyed by the original field name.
    dt_orderable: 'Dict[str, Field]'
    #: `DataTables`_ searchable fields, keyed by the original field name.
    dt_searchable: 'Dict[str, Field]'
    @classmethod
    def validate_model(cls) -> None:
        """Validates data model and dynamically insert fields.
        If `DataTables`_ integration is enabled for the data model, this method
        will insert fields (database columns) for both *order* and *search*
        operations respectively on each defined field according to the original
        field type definition.
        By default, each field is *orderable* and/or *searchable* as long as the
        :attr:`~flask_datatables.model.Metadata.datatables` switch is enabled.
        When the :attr:`~flask_datatables.fields.Field.orderable` and/or
        :attr:`~flask_datatables.fields.Field.searchable` attributes are set to
        an instance of a :class:`~peewee.Field`, ``Flask-DataTables`` will insert
        additional fields of such type with ``_dt_order`` and/or ``_dt_search``
        suffix as the field names accordingly.
        """
        cls.dt_orderable = {}
        cls.dt_searchable = {}
        metaclass = cls._meta
        if getattr(metaclass, 'datatables', False):
            for key, value in metaclass.fields.copy().items():
                # ordering support: optionally shadow the column with a
                # dedicated ``<name>_dt_order`` field
                orderable = getattr(value, 'orderable', True)
                if orderable:
                    target = value
                    if isinstance(orderable, Field):
                        metaclass.add_field(f'{key}_dt_order', orderable)
                        target = metaclass.fields[f'{key}_dt_order']
                        setattr(cls, f'{key}_dt_order', target)
                    cls.dt_orderable[key] = target
                # searching support: same pattern with ``<name>_dt_search``
                searchable = getattr(value, 'searchable', True)
                if searchable:
                    target = value
                    if isinstance(searchable, Field):
                        metaclass.add_field(f'{key}_dt_search', searchable)
                        target = metaclass.fields[f'{key}_dt_search']
                        # BUGFIX: previously assigned the ``_dt_order``
                        # attribute here (copy-paste error), clobbering the
                        # ordering shadow attribute instead of exposing the
                        # search one.
                        setattr(cls, f'{key}_dt_search', target)
                    cls.dt_searchable[key] = target
        return super().validate_model()
def save(self, force_insert: bool = False, only: 'Optional[List[Field]]' = None) -> int:
"""Save the data in the model instance.
The method extends the original :meth:`peewee.Model.save` method by automatically
update the *searching* and *ordering* field data with the actual data.
Args:
force_insert: Force ``INSERT`` query.
only: Only save the given :class:`~peewee.Field` instances.
Returns:
Number of rows modified.
"""
metaclass = self._meta
for key, target in self.dt_orderable.items():
if key == target.name:
continue
value = getattr(self, key)
source = metaclass.fields[key]
if hasattr(source, 'dt_order'):
value = source.dt_order(value)
setattr(self, target.name, value)
for key, target in self.dt_searchable.items():
if key == target.name:
continue
value = getattr(self, key)
source = metaclass.fields[key]
if hasattr(source, 'dt_search'):
value = source.dt_search(value)
setattr(self, target.name, value)
return super().save(force_insert, only)
    @classmethod
    def search(cls, query: 'Optional[Query]' = None,
               factory: 'Optional[Factory]' = None) -> 'Response':
        """Server-side processing integration with `DataTables`_.
        Args:
            query: Query parameters sent from the client-side.
            factory: Factory function to prepare the server-side data.
        Returns:
            Selected information from the database in format to
            be sent to `DataTables`_.
        See Also:
            The ``factory`` function takes exactly one parameter, the data
            record returned from :mod:`peewee` selection, and returns
            the converted data of fields. See
            :func:`flask_datatables.utils.prepare_response` for an example.
        """
        # fall back to the current Flask request's query string
        if query is None:
            query = parse_request(flask.request.args)
        # non-fatal problems are collected and reported in the response
        errors = [] # type: List[BaseException]
        # the draw counter is echoed back; keep the raw value on failure
        try:
            draw = int(query['draw'])
        except ValueError as error:
            draw = query['draw']
            errors.append(error)
        global_search_info = query['search']
        global_search_value = global_search_info['value']
        global_search_regex = global_search_info['regex']
        field_list = [] # type: List[Field]
        extra_field_list = [cls.id] # type: List[Field]
        where_query_list = [] # type: List[Expression]
        # build one filter expression per searchable column
        for column in query['columns']:
            field_name = column['data']
            try:
                source_field = cls._meta.fields[field_name] # type: Field
            except KeyError as error:
                errors.append(error)
                continue
            field_list.append(source_field)
            if not column['searchable']:
                continue
            # prefer the dedicated ``_dt_search`` shadow column when present
            try:
                field = cls._meta.fields[f'{source_field.name}_dt_search'] # type: Field
                extra_field_list.append(field)
            except KeyError:
                field = source_field
            # per-column search overrides the global search term
            search_info = column['search']
            search_value = search_info['value']
            if search_value:
                search_regex = search_info['regex']
            else:
                search_value = global_search_value
                search_regex = global_search_regex
            if not search_value:
                continue
            #field = field.collate('utf8mb4_unicode_ci') # case-insensitive search
            if search_regex:
                where_query = field.iregexp(search_value)
            else:
                where_query = field.contains(search_value)
            where_query_list.append(where_query)
        order_by_list = [] # type: List[Ordering]
        # build the ORDER BY terms from the requested column indices
        # NOTE(review): a non-integer 'column' raises ValueError here (only
        # IndexError is caught) -- confirm whether that is intended.
        for order_info in query['order']:
            try:
                column_index = int(order_info['column'])
                source_field = field_list[column_index]
            except IndexError as error:
                errors.append(error)
                continue
            # prefer the dedicated ``_dt_order`` shadow column when present
            try:
                field = cls._meta.fields[f'{source_field.name}_dt_order']
                extra_field_list.append(field)
            except KeyError:
                field = source_field
            order_dir = order_info['dir'].casefold()
            if order_dir == 'asc':
                order_by_list.append(field.asc())
            elif order_dir == 'desc':
                order_by_list.append(field.desc())
            else:
                errors.append(ValueError(f'unknown ordering direction: {order_dir}'))
        select_query = cls.select(*field_list, *extra_field_list) # type: ModelSelect
        # filters are OR-combined, matching DataTables search semantics
        if where_query_list:
            select_query = select_query.where(functools.reduce(
                lambda p0, p1: p0 | p1, where_query_list,
            ))
        select_query = select_query.order_by(*order_by_list)
        records_total = cls.select().count() # pylint: disable=no-value-for-parameter
        records_filtered = select_query.count()
        # pagination window requested by the client
        start = query['start']
        length = query['length']
        data = [] # type: List[Union[ArrayData, ObjectData]]
        for record in select_query.offset(start).limit(length).objects():
            if factory is not None:
                row = factory(record)
            else:
                row = [record.__data__[field.name] for field in field_list]
            data.append(row)
        # only expose error details in debug mode
        error_msg = None
        if flask.current_app.debug and errors:
            error_msg = 'Error processing query...\n'
            for exc in errors:
                error_msg += '-' * 80 + '\n'
                error_msg += ''.join(traceback.format_exception(type(exc), exc, exc.__traceback__))
        return {
            'draw': draw,
            'recordsTotal': records_total,
            'recordsFiltered': records_filtered,
            'data': data,
            'error': error_msg,
        }
/Finance-Ultron-1.0.8.1.tar.gz/Finance-Ultron-1.0.8.1/ultron/strategy/experimental/single_factor.py | import code
from tokenize import group
from ultron.factor.experimental.normalize import rolling_groups
from ultron.factor.analysis.quantile_analysis import er_quantile_analysis
from ultron.factor.data.quantile import quantile
from ultron.tradingday import *
import numpy as np
import pandas as pd
import copy
class SingleFactor(object):
    """Single-factor analysis helper: normalizes factor data, computes
    forward returns and runs per-factor quantile analysis."""
    def __init__(self, factor_data, market_data, codes, columns):
        # long-format frame with 'trade_date', 'code' and factor columns
        self._factor_data = factor_data
        # long-format frame with 'trade_date', 'code' and 'closePrice'
        self._market_data = market_data
        # instrument universe the analysis is restricted to
        self._codes = codes
        # default factor column names used when run() receives none
        self._columns = columns
def returns(self, market_data, period):
price_tb = market_data['closePrice'].unstack()
price_tb.fillna(method='pad', inplace=True)
return_tb = np.log(price_tb.shift(-period) / price_tb)
return_tb = return_tb.replace([np.inf, -np.inf], np.nan)
return_tb = return_tb.stack().reindex(market_data.index)
return_tb.name = 'nxt1_ret'
return return_tb.reset_index()
def normalize(self, factor_data, windows, columns):
#normalize_data = factor_data.set_index('code').groupby(level=['code']).apply(
# lambda x: rolling_groups(x,columns, windows))
#normalize_data = normalize_data.reset_index().drop(['level_1'],axis=1)
rolling_data = factor_data.set_index(['trade_date', 'code'
]).unstack().rolling(windows)
current_data = factor_data.set_index(['trade_date', 'code']).unstack()
normalize_data = (current_data -
rolling_data.mean()) / rolling_data.std()
normalize_data = normalize_data.stack(dropna=True)
normalize_data = normalize_data.sort_values(
by=['trade_date', 'code']).fillna(0)
return normalize_data.reset_index()
    def _transformer(self, normalize_data, returns, columns, period):
        """Subsample the factor data onto a ``period``-business-day rebalance
        schedule, merge it with forward returns, and forward-fill factor
        values per code."""
        begin_date = normalize_data.trade_date.min()
        end_date = normalize_data.trade_date.max()
        # business-day schedule on the Shanghai Stock Exchange calendar
        dates = makeSchedule(begin_date, end_date,
                             str(period) + 'b', 'china.sse',
                             BizDayConventions.Preceding)
        dates = [d.strftime('%Y-%m-%d') for d in dates]
        dt = normalize_data.trade_date.dt.strftime(
            '%Y-%m-%d').unique().tolist()
        # keep only schedule dates that actually occur in the data
        dates = set(dt) & set(dates)
        normalize_data = normalize_data.set_index(
            'trade_date').loc[dates].sort_values(
                by=['trade_date']).reset_index()
        normalize_data['trade_date'] = pd.to_datetime(
            normalize_data['trade_date'])
        returns['trade_date'] = pd.to_datetime(returns['trade_date'])
        # left-merge so every return observation is retained
        total_data = returns.merge(normalize_data,
                                   on=['trade_date', 'code'],
                                   how='left')
        # pad missing factor values forward in time for each code
        total_data = total_data.set_index([
            'trade_date', 'code'
        ])[columns].unstack().fillna(method='pad').stack().reset_index()
        return total_data
def quantile_analysis(self,
normalize_data,
factor_name,
n_bins,
de_trend=False):
df = pd.DataFrame(columns=['q' + str(i) for i in range(1, n_bins + 1)])
grouped = normalize_data.groupby('trade_date')
for k, g in grouped:
er = g[factor_name].values
dx_return = g['nxt1_ret'].values
res = er_quantile_analysis(er,
n_bins=n_bins,
dx_return=dx_return,
de_trend=de_trend)
df.loc[k, :] = res
df['q'] = df['q' + str(n_bins)] - df['q1']
return df
def quantile(self, normalize_data, factor_name, n_bins):
    """Assign each code to a factor-quantile bucket, per trade date.

    Rows whose factor value is NaN receive no bucket (NaN 'group') but are
    kept in the output via the left merge.

    Args:
        normalize_data: DataFrame with 'code', 'trade_date' and the factor
            column.
        factor_name: name of the factor column to bucket.
        n_bins: number of quantile buckets; groups are numbered 1..n_bins.

    Returns:
        DataFrame indexed by ('trade_date', 'code') with a 'group' column.
    """
    res = []
    for k, g in normalize_data.groupby('trade_date'):
        # PERF: the original deep-copied each group twice via
        # ``copy.deepcopy``; ``dropna`` already returns a new frame and a
        # plain ``.copy()`` avoids the SettingWithCopy warning on the
        # 'group' assignment.  ``merge`` never mutates ``g``, so no copy of
        # the full group is needed at all.
        valid = g.dropna(subset=[factor_name]).copy()
        valid['group'] = quantile(valid[factor_name], n_bins) + 1
        merged = g.merge(valid[['code', 'trade_date', 'group']],
                         on=['trade_date', 'code'],
                         how='left')
        res.append(merged.set_index(['trade_date', 'code']))
    return pd.concat(res, axis=0)
def run(self,
        codes=None,
        columns=None,
        windows=10,
        period=1,
        n_bins=5,
        normalize_data=None,
        returns_data=None):
    """Run the single-factor analysis pipeline.

    Normalises the factor data (unless pre-computed data is supplied),
    computes period returns, restricts the universe to ``codes`` and runs
    both the per-factor quantile bucketing and the quantile return
    analysis.

    Args:
        codes: instrument codes to keep; defaults to ``self._codes``.
        columns: factor columns to analyse; defaults to ``self._columns``.
            (BUGFIX: the original used a shared mutable default ``[]``.)
        windows: rolling window size for normalisation.
        period: return horizon in business days.
        n_bins: number of quantile buckets.
        normalize_data: optional pre-normalised factor data.
        returns_data: optional pre-computed returns data.

    Returns:
        Tuple ``(qt_res, qa_res)`` of lists of ``{'name', 'qdata'}`` dicts.
    """
    print('start single factor analysis...')
    if columns is None or len(columns) == 0:
        columns = self._columns
    if normalize_data is None:
        print('start data normalize...')
        normalize_data = self.normalize(factor_data=self._factor_data,
                                        windows=windows,
                                        columns=columns)
        # NOTE(review): this line (present in the original code) immediately
        # replaces the freshly normalised frame with the raw factor data,
        # discarding the normalize() result above -- confirm whether the
        # normalisation is intentionally disabled here.
        normalize_data = self._factor_data
    if returns_data is None:
        print('start data returns...')
        returns_data = self.returns(
            self._market_data.set_index(['trade_date', 'code']), period)
    if codes is None:
        codes = self._codes
    normalize_data['trade_date'] = pd.to_datetime(
        normalize_data['trade_date'])
    returns_data['trade_date'] = pd.to_datetime(returns_data['trade_date'])
    total_data = normalize_data.merge(returns_data,
                                      on=['trade_date', 'code'])
    ### restrict to the requested instrument codes
    codes = list(set(total_data['code'].unique().tolist()) & set(codes))
    total_data = total_data.set_index('code').loc[codes].reset_index()
    qt_res = []
    for col in columns:
        print("start {0} quantile ...".format(col))
        df = self.quantile(normalize_data[['code', 'trade_date', col]],
                           col, n_bins)
        qt_res.append({'name': col, "qdata": df})
    qa_res = []
    for col in columns:
        print("start {0} quantile analysis...".format(col))
        df = self.quantile_analysis(normalize_data=total_data,
                                    factor_name=col,
                                    n_bins=n_bins)
        qa_res.append({'name': col, "qdata": df})
return qt_res, qa_res | PypiClean |
/DSNS-SDK-0.1.1.tar.gz/DSNS-SDK-0.1.1/dsns/services.py | from __future__ import absolute_import
import json
from dsns.connection import BaseConnection
class ChannelAPIService(BaseConnection):
    """
    Channel service to serve various api endpoints
    (policy, listing, creation, detail lookup and update).
    """
    def __init__(self, **kwargs):
        # Request JSON-formatted responses before the base connection
        # finishes its setup.  NOTE(review): ``self.config`` is presumably
        # provided as a class-level attribute by BaseConnection -- verify.
        self.config.resp_format = 'json'
        super(ChannelAPIService, self).__init__(**kwargs)
    def policy(self):
        """
        Description:
            returns default policy for the channel
        returns:
            instance of response formatter class
            can be used like:
                response_dict = r.response_dict()
                status_code = r.get_status_code()
        """
        self.config.uri = '/channel/api/policy/'
        return self.execute()
    def list_all(self):
        """
        listing of channels
        """
        self.config.uri = '/channel/api/list/'
        return self.execute()
    def create(self, data, create=True):
        """
        method is being used to create a channel
        data:
            {
                "name": "<channel name(unique)>",
                "display_name": "<channel display name>",
                "priority": "<channel priority>"
            }
        create:
            when True (default), mirror the remote channel into the local
            ``Channel`` model after a successful API call.
        """
        self.config.uri = '/channel/api/create/'
        self.method = "POST"
        response = self.execute(data=data)
        if response.get_status_code() == 200:
            # Imported lazily, presumably to avoid a circular import at
            # module load time.
            from dsns.models import Channel
            resp_dict = response.response_dict()
            r = resp_dict['info']
            channel_drn = r.get(
                "channel_drn", None)
            # Attach the server-assigned drn to the caller's payload.
            data.update(**{"channel_drn": channel_drn})
            if create:
                Channel.objects.get_or_create(
                    name=data["name"], channel_drn=data["channel_drn"],
                    defaults=data)
        return response
    def get_by_channel_drn(self, drn):
        """
        get the channel details
        drn:
            channel drn identifying the channel
        """
        self.config.uri = "/channel/api/detail/{drn}/".format(drn=drn)
        return self.execute()
    def update_details(self, drn, data):
        """
        method is being used to update details
        of a channel
        data:
            {
                "display_name": <display name>,
                "priority": <priority>,
                "policy": <valid json policy>
            }
        """
        self.config.uri = "/channel/api/update/{drn}/".format(
            drn=drn)
        self.method = "PUT"
        return self.execute(data=data)
class SubscriptionAPIService(BaseConnection):
    """
    Subscription service to serve various api endpoints
    (policy, listing, creation and update of channel subscriptions).
    """
    def __init__(self, **kwargs):
        # Request JSON-formatted responses before the base connection
        # finishes its setup.
        self.config.resp_format = 'json'
        super(SubscriptionAPIService, self).__init__(**kwargs)
    def policy(self):
        """
        returns the default subscription policy
        """
        self.config.uri = '/subscription/api/policy/'
        return self.execute()
    def list_all(self, channel_drn):
        """
        listing of all subscriptions
        against a channel
        """
        self.query_params = {
            "channel_drn": channel_drn}
        self.config.uri = '/subscription/api/list/'
        return self.execute()
    def list_all_active(self, channel_drn):
        """
        listing of all active subscriptions
        against a channel
        """
        self.query_params = {
            "channel_drn": channel_drn,
            "active": True}
        self.config.uri = '/subscription/api/list/'
        return self.execute()
    def list_all_inactive(self, channel_drn):
        """
        listing of all inactive subscriptions
        against a channel
        """
        self.query_params = {
            "channel_drn": channel_drn,
            "active": False}
        self.config.uri = '/subscription/api/list/'
        return self.execute()
    def subscribe(self, data):
        """
        method is being used to create a subscription
        data:
            {
                "channel_drn": <channel_drn (Mandatory)>,
                "target_drn": <target_drn (Mandatory)>,
                "protocol": <protocol (Mandatory)>,
                "token": <token to validate request to endpoint (Optional)>,
                "username": <username (Optional)>,
                "password": <password (Optional)>
                "endpoint": <endpoint (Optional)>
            }
        On success, the subscription is also mirrored into the local
        ``Channel`` model's ``subscriptions`` list.
        """
        self.config.uri = '/subscription/api/create/'
        self.method = "POST"
        response = self.execute(data=data)
        if response.get_status_code() == 200:
            # Imported lazily, presumably to avoid a circular import.
            from dsns.models import Channel
            resp_dict = response.response_dict()
            try:
                channel = Channel.objects.get(
                    channel_drn=resp_dict['info']['channel_drn'])
                pre_subs = channel.subscriptions
                # Entries look like "id-<n>#target_drn-<drn>#end_point-<url>".
                ids = [int(
                    each.split('#')[0].split('-')[1]) for each in pre_subs]
            # BUGFIX: the original used the Python-2-only ``except Exc, e``
            # syntax (a SyntaxError on Python 3); the bound exception was
            # never used, so no ``as`` binding is needed.
            except (Channel.DoesNotExist, KeyError, IndexError):
                pass
            else:
                sub = resp_dict['info']
                if sub['id'] not in ids:
                    subscription = "id-%s#target_drn-%s#end_point-%s" % (
                        sub['id'], sub['target_drn'], sub['endpoint'])
                    subs = channel.subscriptions
                    subs.append(subscription)
                    channel.subscriptions = subs
                    channel.save()
        return response
    def update_details(self, channel_drn, subs_id, data):
        """
        method is being used to update details
        of a subscription
        data:
            {
                "endpoint": <endpoint url>,
                "policy": <policy subscriber>,
                "active": <True/False>,
                "token": <token to validate request to endpoint>,
                "username": <username>,
                "password": <password>
            }
        # In case BasicAuthentication is selected then username
        and password are mandatory to be passed
        """
        if "policy" in data:
            # The API expects the policy as a JSON string.
            data['policy'] = json.dumps(data['policy'])
        self.query_params = {"channel_drn": channel_drn}
        self.config.uri = "/subscription/api/update/{subs_id}/".format(
            subs_id=subs_id)
        self.method = "PUT"
        return self.execute(data=data)
class PublishAPIService(BaseConnection):
    # Service wrapper for publishing messages into a channel.
    def __init__(self, **kwargs):
        # Request JSON-formatted responses before the base connection
        # finishes its setup.
        self.config.resp_format = 'json'
        super(PublishAPIService, self).__init__(**kwargs)
    def publish(self, data):
        """
        method to publish data into a channel
        data:
            {
                "channel_drn": "test:2",
                "target_drn": ["1"],
                "message": '{
                    "orderId": "ORDER123",
                    "suborderId": "OL123",
                    "status": "SHIPPED"}',
                "subject": "REST API TEST",
                "meta": {
                    "headers": {
                        "X-EBAY-API-DEV-NAME":"6a6d1553-018d-4864-871b",
                        "query_params": {"DEVID": "AIQPV12"}
                    }}
            }
        """
        self.method = "POST"
        self.config.uri = "/channel/api/publish/"
        # ``meta`` must be serialised to a JSON string before transport.
        if "meta" in data:
            data["meta"] = json.dumps(data["meta"])
return self.execute(data=data) | PypiClean |
/Flask-Execute-0.1.6.tar.gz/Flask-Execute-0.1.6/flask_execute/managers.py |
# imports
# -------
import re
import sys
import json
import subprocess
from celery.schedules import crontab
from .cli import cli
# classes
# -------
class TaskManager(object):
    """
    Object for managing registered celery tasks, providing
    users a way of submitting tasks via the celery API when
    using the factory pattern for configuring a flask application.
    This proxy for the @celery.task decorator is designed to
    manage two things:
    1. For applications set up with the flask application directly,
    register tasks with the celery application directly. This
    has the same effect as the original mechanism for configuring
    celery alongside a Flask application.
    2. For applications set up using the factory pattern,
    store all registered tasks internally so they can be
    registered with the celery application once the plugin
    has been initialized with a flask application instance.
    """
    def __init__(self):
        # Celery application (set by ``init_celery``); ``None`` until then.
        self.__app__ = None
        # Deferred registrations, keyed by function name.
        self.__registered__ = {}
        # Registered celery tasks, keyed by full task name.
        self.__tasks__ = {}
        # Registered celery tasks, keyed by plain function name.
        self.__funcs__ = {}
        return
    def __call__(self, *args, **kwargs):
        """
        Internal decorator logic for ``celery.task``.
        """
        # plugin hasn't been initialized
        if self.__app__ is None:
            def _(func):
                # Defer: remember the function and decorator arguments until
                # ``init_celery`` provides the celery application.
                self.__registered__[func.__name__] = {'func': func, 'args': args, 'kwargs': kwargs}
                return func
        # plugin has been initialized
        else:
            def _(func):
                # Register with celery immediately and cache the task object.
                func = self.__app__.task(*args, **kwargs)(func)
                if func.name not in self.__tasks__:
                    self.__tasks__[func.name] = func
                    self.__funcs__[func.__name__] = func
                return func
        # return decorated function if called directly
        # (i.e. used as ``@celery.task`` without parentheses)
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            # NOTE: rebinding ``args`` here also updates the closure seen by
            # ``_`` above, so the callable itself is not forwarded as a
            # decorator argument.
            func, args = args[0], args[1:]
            return _(func)
        # return inner decorator
        else:
            return _
    def __getattr__(self, key):
        # Look up a task by full celery name first, then by function name.
        if key not in self.__tasks__:
            if key not in self.__funcs__:
                raise AttributeError('Task ``{}`` has not been registered'.format(key))
            return self.__funcs__[key]
        return self.__tasks__[key]
    def __getitem__(self, key):
        return self.__getattr__(key)
    def init_celery(self, controller):
        """
        Initialize the task manager with a celery controller. This
        will register all decorated tasks with the specified
        ``controller`` (celery application).
        Args:
            controller (Celery): Celery application instance to
                register tasks for.
        """
        self.__app__ = controller
        # Replay all deferred registrations against the real application.
        for key, item in self.__registered__.items():
            if not len(item['args']) and not len(item['kwargs']):
                self(item['func'])
            else:
                self(*item['args'], **item['kwargs'])(item['func'])
        return
class ScheduleManager(object):
    """
    Object for managing scheduled celery tasks, providing
    users a way of scheduling tasks via the celery API when
    using the factory pattern for configuring a flask application.
    This proxy for the @celery.schedule decorator is designed to
    manage two things:
    1. For applications set up with the flask application directly,
    schedule tasks with the celery application directly. This
    has the same effect as the original mechanism for configuring
    celery alongside a Flask application.
    2. For applications set up using the factory pattern,
    store all scheduled tasks internally so they can be
    registered with the celery application once the plugin
    has been initialized with a flask application instance.
    """
    def __init__(self):
        # Celery application (set by ``init_celery``); ``None`` until then.
        self.__app__ = None
        # Deferred schedule registrations, keyed by schedule name.
        self.__registered__ = {}
        # Scheduled celery tasks, keyed by full task name.
        self.__tasks__ = {}
        # Scheduled celery tasks, keyed by plain function name.
        self.__funcs__ = {}
        return
    def __call__(self, schedule=None, name=None, args=tuple(), kwargs=dict(), options=dict(), **skwargs):
        """
        Internal decorator logic for ``celery.schedule``.
        The schedule is given either as a ``schedule`` object/number or as
        crontab keyword arguments (``skwargs``), but not both.
        """
        # handle ambiguous schedule input
        if schedule is not None and len(skwargs):
            raise AssertionError(
                'Invalid schedule arguments - please see documentation for '
                'how to use @celery.schedule'
            )
        # handle crontab input
        if schedule is None and len(skwargs):
            schedule = crontab(**skwargs)
        # handle missing schedule input
        if schedule is None:
            raise AssertionError('Schedule for periodic task must be defined, either via numeric arguments or crontab keywords. See documentation for details.')
        # plugin hasn't been initialized
        if self.__app__ is None:
            def _(func):
                key = name or func.__module__ + '.' + func.__name__
                self.__registered__[key] = {
                    'func': func,
                    'schedule': schedule,
                    'args': args,
                    'kwargs': kwargs,
                    'options': options,
                }
                return func
        # plugin has been initialized
        else:
            def _(func):
                if not hasattr(func, 'name'):
                    func = self.__app__.task(func)
                # BUGFIX: fall back to the module-qualified function name
                # when no explicit ``name`` is given, mirroring the
                # pre-initialization branch above (previously a ``None`` key
                # could be written into the beat schedule).
                key = name or func.__module__ + '.' + func.__name__
                # add schedule to beat manager
                self.__app__.conf['CELERYBEAT_SCHEDULE'][key] = {
                    'task': func.name,
                    'schedule': schedule,
                    'args': args,
                    'kwargs': kwargs,
                    'options': options
                }
                # save in scheduled registry
                if func.name not in self.__tasks__:
                    self.__tasks__[func.name] = func
                    self.__funcs__[func.__name__] = func
                return func
        # return inner decorator
        return _
    def __getattr__(self, key):
        # Look up a task by full celery name first, then by function name.
        if key not in self.__tasks__:
            if key not in self.__funcs__:
                raise AttributeError('Task {} has not been registered'.format(key))
            return self.__funcs__[key]
        return self.__tasks__[key]
    def __getitem__(self, key):
        return self.__getattr__(key)
    def init_celery(self, controller):
        """
        Initialize the schedule manager with a celery controller. This
        will register all decorated tasks with the specified
        ``controller`` (celery application).
        Args:
            controller (Celery): Celery application instance to
                register tasks for.
        """
        self.__app__ = controller
        # Replay all deferred schedule registrations.
        for key, item in self.__registered__.items():
            self(
                schedule=item['schedule'],
                args=item['args'],
                kwargs=item['kwargs'],
                options=item['options'],
                name=key,
            )(item['func'])
        return
class CommandManager(object):
    """
    Manager for issuing celery ``inspect`` or ``control`` calls
    to the celery API.
    Example:
        .. code-block:: python
            >>> inspect = CommandManager('inspect')
            # no options
            >>> inspect.active()
            # options
            >>> inspect.active(timeout=5, destination=['w1@e.com', 'w2@e.com'])
    This tool is primarily used alongside the ``Celery`` plugin
    object, allowing developers to issue celery commands via
    property.
    Examples:
        .. code-block:: python
            >>> celery = Celery(app)
            # ``inspect`` command manager.
            >>> celery.inspect.ping()
            {'worker@localhost': {'ok': 'pong'}}
            # ``control`` command manager.
            >>> celery.control.pool_shrink(1)
            {'worker@localhost': {'ok': 'pool will shrink'}}
            >>> celery.control.shutdown()
            Shutdown signal sent to workers.
    Use ``celery.inspect.help()`` and ``celery.control.help()`` to see
    available celery commands.
    """
    def __init__(self, name):
        # ``name`` is the celery command group, e.g. 'inspect' or 'control'.
        self.name = name
        return
    def __getattr__(self, key):
        # Any attribute access becomes a subcommand call, e.g.
        # ``manager.ping()`` -> ``celery inspect ping``.
        def _(*args, **kwargs):
            return self.call(self.name + ' ' + key, *args, **kwargs)
        return _
    def __getitem__(self, key):
        return self.__getattr__(key)
    def help(self):
        """
        Return help message for specific command.
        """
        output = cli.output(self.name + ' --help', stderr=None)
        sys.stderr.write('\n>>> celery.' + self.name + '.command()\n\n')
        sys.stderr.write('Issue celery command to {} workers.\n\n'.format(self.name))
        sys.stderr.write('Commands:')
        # Only echo the command-table rows (lines starting with '|').
        for line in output.split('\n'):
            if line and line[0] == '|':
                sys.stderr.write(line + '\n')
        return
    def call(self, cmd, timeout=None, destination=None, quiet=False):
        """
        Issue celery subcommand and return output.
        Args:
            cmd (str): Command to call.
            timeout (float): Timeout in seconds (float) waiting for reply.
            destination (str, list): List of destination node names.
            quiet (bool): Suppress error output when the call fails.
        Returns:
            dict: Parsed JSON reply (worker name -> payload), or ``None``
            when the subprocess call failed.
        """
        cmd += ' --json'
        # parse timeout
        if timeout is not None:
            cmd += ' --timeout={}'.format(timeout)
        # parse destination
        if destination is not None:
            if isinstance(destination, str):
                destination = destination.split(',')
            cmd += ' --destination={}'.format(','.join(destination))
        # make call accounting for forced error
        try:
            output = cli.output(cmd)
        except subprocess.CalledProcessError as err:
            if not quiet:
                if 'shutdown' in cmd:
                    # ``celery control shutdown`` exits non-zero by design.
                    print('Shutdown signal sent to workers.')
                else:
                    print(err.stdout.decode('utf-8'))
            return
        # parse the first line of the result as JSON
        output = output.split('\n')[0]
        try:
            data = json.loads(output)
        except json.JSONDecodeError:
            data = {}
        # Some commands return a list of single-entry dicts; flatten them
        # into one mapping of worker name -> payload.
        if isinstance(data, (list, tuple)):
            result = {}
            for item in data:
                result.update(item)
            data = result
return data | PypiClean |
/BiologicalProcessNetworks-1.0a3.tar.gz/BiologicalProcessNetworks-1.0a3/bpn/mcmc/sabpn.py |
# Copyright (c) 2011 Chris D. Lasher & Phillip Whisenhunt
#
# This software is released under the MIT License. Please see
# LICENSE.txt for details.
"""A program to detect Process Linkage Networks using
Simulated Annealing.
"""
import collections
import itertools
import sys
from convutils import convutils
import bpn.cli
import bpn.structures
from defaults import (
SUPERDEBUG,
SUPERDEBUG_MODE,
LINKS_FIELDNAMES,
PARAMETERS_FIELDNAMES,
TRANSITIONS_FIELDNAMES,
DETAILED_TRANSITIONS_FIELDNAMES
)
# Configure all the logging stuff
import logging
logger = logging.getLogger('bpn.sabpn')
if SUPERDEBUG_MODE:
    # A logging level below logging.DEBUG
    logging.addLevelName(SUPERDEBUG, 'SUPERDEBUG')
    logger.setLevel(SUPERDEBUG)
    #stream_handler.setLevel(SUPERDEBUG)
# Imported after the logging setup so the modules pick up the configuration.
import simulatedannealing
import states
import recorders
def main(argv=None):
    """Entry point: parse options, build the annotated-interaction data
    structures, run the simulated annealing, and set up the CSV writers for
    the results.

    Args:
        argv: optional argument list; defaults to ``sys.argv`` inside the
            CLI parser.
    """
    cli_parser = bpn.cli.SaCli()
    input_data = cli_parser.parse_args(argv)
    logger.info("Constructing supporting data structures; this may "
            "take a while...")
    annotated_interactions = bpn.structures.AnnotatedInteractionsArray(
            input_data.interactions_graph,
            input_data.annotations_dict
    )
    logger.info("Considering %d candidate links in total." %
            annotated_interactions.calc_num_links())
    logger.info("Constructing Simulated Annealing")
    # Select the parameter-transition strategy.
    if input_data.free_parameters:
        logger.info("Using free parameter transitions.")
        parameters_state_class = states.RandomTransitionParametersState
    else:
        parameters_state_class = states.PLNParametersState
    # Select the links-state implementation (with or without swap moves).
    if input_data.disable_swaps:
        logger.info("Disabling swap transitions.")
        links_state_class = states.NoSwapArrayLinksState
    else:
        links_state_class = states.ArrayLinksState
    if input_data.detailed_transitions:
        logger.info("Recording extra information for each state.")
        transitions_csvfile = convutils.make_csv_dict_writer(
                input_data.transitions_outfile,
                DETAILED_TRANSITIONS_FIELDNAMES
        )
    else:
        transitions_csvfile = convutils.make_csv_dict_writer(
                input_data.transitions_outfile,
                TRANSITIONS_FIELDNAMES
        )
    sa = simulatedannealing.ArraySimulatedAnnealing(
            annotated_interactions,
            input_data.activity_threshold,
            input_data.transition_ratio,
            num_steps=input_data.steps,
            temperature=input_data.temperature,
            end_temperature=input_data.end_temperature,
            parameters_state_class=parameters_state_class,
            links_state_class=links_state_class
    )
    logger.info("Beginning to Anneal. This may take a while...")
    sa.run()
    logger.info("Run completed.")
    # NOTE(review): the writers below (and ``transitions_csvfile`` above)
    # are created but never written to in this function -- confirm whether
    # a recorder is expected to consume them or whether output is missing.
    logger.info("Writing link results to %s" %
            input_data.links_outfile.name)
    links_out_csvwriter = convutils.make_csv_dict_writer(
            input_data.links_outfile, LINKS_FIELDNAMES)
    logger.info("Writing parameter results to %s" % (
            input_data.parameters_outfile.name))
    parameters_out_csvwriter = convutils.make_csv_dict_writer(
            input_data.parameters_outfile, PARAMETERS_FIELDNAMES)
    logger.info("Writing transitions data to %s." % (
            input_data.transitions_outfile.name))
    logger.info("Finished.")
if __name__ == '__main__':
main() | PypiClean |
/HydPy-5.0.1-cp38-cp38-win_amd64.whl/hydpy/cythons/modelutils.py | # import...
# ...from standard library
from __future__ import annotations
import copy
import distutils.core
import distutils.extension
# pylint: enable=no-name-in-module
# pylint: enable=import-error
import functools
import importlib
import inspect
import math
import os
import platform
import shutil
import sys
import types
from typing import *
# ...third party modules
import numpy
from numpy import inf # pylint: disable=unused-import
from numpy import nan # pylint: disable=unused-import
# ...from HydPy
import hydpy
from hydpy import config
from hydpy import cythons
from hydpy.core import exceptiontools
from hydpy.core import importtools
from hydpy.core import modeltools
from hydpy.core import objecttools
from hydpy.core import parametertools
from hydpy.core import sequencetools
from hydpy.core import testtools
from hydpy.core import typingtools
if TYPE_CHECKING:
import Cython.Build as build
else:
build = exceptiontools.OptionalImport("build", ["Cython.Build"], locals())
def get_dllextension() -> str:
    """Return the DLL file extension for the current operating system.

    The result is derived from the response of function |platform.system| of
    module |platform|: `.pyd` when it reports "windows" (case-insensitively)
    and `.so` for every other system:

    >>> from hydpy.cythons.modelutils import get_dllextension
    >>> import platform
    >>> from unittest import mock
    >>> with mock.patch.object(
    ...     platform, "system", side_effect=lambda: "Windows") as mocked:
    ...     get_dllextension()
    '.pyd'
    >>> with mock.patch.object(
    ...     platform, "system", side_effect=lambda: "Linux") as mocked:
    ...     get_dllextension()
    '.so'
    """
    system_name = platform.system().lower()
    return ".pyd" if system_name == "windows" else ".so"
# DLL suffix of the current platform, cached once at import time.
_dllextension = get_dllextension()
# Cython spelling of numpy's default integer type on this platform.
_int = "numpy." + str(numpy.array([1]).dtype) + "_t"
TYPE2STR: Dict[Union[Type[Any], str, None], str] = {  # pylint: disable=duplicate-key
    bool: "bint",
    "bool": "bint",
    int: _int,
    "int": _int,
    parametertools.IntConstant: _int,
    "parametertools.IntConstant": _int,
    "IntConstant": _int,
    float: "double",
    "float": "double",
    str: "str",
    "str": "str",
    None: "void",
    "None": "void",
    type(None): "void",
    typingtools.Vector: "double[:]",  # to be removed as soon as possible
    "typingtools.Vector": "double[:]",
    "Vector": "double[:]",
    typingtools.Vector[float]: "double[:]",  # This works because the `__getitem__`
    # of `_ProtocolMeta` is decorated by `_tp_cache`. I don't know if this caching
    # is documented behaviour, so this might cause (little) trouble in the future.
    "typingtools.Vector[float]": "double[:]",
    "Vector[float]": "double[:]",
}
"""Maps Python types to Cython compatible type declarations.
The Cython type belonging to Python's |int| is selected to agree with numpy's default
integer type on the current platform/system.
"""
# Collect the "real" (instantiable) types among the TYPE2STR keys; string
# keys and subscripted generics raise TypeError in isinstance() and are
# skipped.
_checkable_types: List[Type[Any]] = []
for maybe_a_type in TYPE2STR:
    try:
        isinstance(1, maybe_a_type)  # type: ignore[arg-type]
    except TypeError:
        continue
    assert isinstance(maybe_a_type, type)
    _checkable_types.append(maybe_a_type)
CHECKABLE_TYPES: Tuple[Type[Any], ...] = tuple(_checkable_types)
""""Real types" of |TYPE2STR| allowed as second arguments of function |isinstance|."""
del _checkable_types
# Cython memory-view suffixes for 0- to 3-dimensional arrays.
NDIM2STR = {0: "", 1: "[:]", 2: "[:,:]", 3: "[:,:,:]"}
# ``nogil`` qualifier, only applied in fast (optimised) mode.
_nogil = " nogil" if config.FASTCYTHON else ""
class Lines(List[str]):
    """Container for the code lines of a `.pyx` file."""

    def __init__(self, *args: str) -> None:
        super().__init__(args)

    def add(self, indent: int, line: typingtools.Mayberable1[str]) -> None:
        """Append the given text line (or each line of the given iterable),
        prefixed with four spaces per indentation level.
        """
        prefix = " " * (4 * indent)
        sublines = [line] if isinstance(line, str) else line
        for subline in sublines:
            self.append(prefix + subline)

    def __repr__(self) -> str:
        text = "\n".join(self)
        return text + "\n"
def get_methodheader(methodname: str, nogil: bool = False, idxarg: bool = False) -> str:
    """Returns the Cython method header for methods without arguments except`self`.

    Note the influence of the configuration flag `FASTCYTHON`:

    >>> from hydpy.cythons.modelutils import get_methodheader
    >>> from hydpy import config
    >>> config.FASTCYTHON = False
    >>> print(get_methodheader(methodname="test", nogil=True, idxarg=False))
    cpdef inline void test(self):
    >>> config.FASTCYTHON = True
    >>> print(get_methodheader(methodname="test", nogil=True, idxarg=True))
    cpdef inline void test(self, int idx) nogil:
    """
    # ``nogil`` is only honoured in fast mode.
    use_nogil = nogil and config.FASTCYTHON
    arguments = "self, int idx" if idxarg else "self"
    suffix = " nogil:" if use_nogil else ":"
    return f"cpdef inline void {methodname}({arguments}){suffix}"
def decorate_method(wrapped: Callable[[PyxWriter], Iterator[str]]) -> property:
    """The decorated method returns a |Lines| object including a method header.
    However, the |Lines| object is empty if the respective model does not implement a
    method with the same name as the wrapped method.
    """
    def wrapper(self: PyxWriter) -> Lines:
        lines = Lines()
        # Only emit code when the model actually implements the method.
        if hasattr(self.model, wrapped.__name__):
            print(f" . {wrapped.__name__}")
            lines.add(1, get_methodheader(wrapped.__name__, nogil=True))
            # The wrapped generator yields the body lines, indented one
            # level deeper than the header.
            for line in wrapped(self):
                lines.add(2, line)
        return lines
    # Preserve the wrapped method's metadata, then give the resulting
    # property a descriptive docstring of its own.
    functools.update_wrapper(wrapper, wrapped)
    wrapper.__doc__ = f"Lines of model method {wrapped.__name__}."
    return property(wrapper)
class Cythonizer:
    """Handles the writing, compiling and initialisation of Cython models."""

    Model: Type[modeltools.Model]
    Parameters: Type[parametertools.Parameters]
    Sequences: Type[sequencetools.Sequences]
    tester: testtools.Tester
    pymodule: str
    _cymodule: Optional[types.ModuleType]

    def __init__(self) -> None:
        self._cymodule = None
        # Inspect the caller's frame (the model module instantiating this
        # Cythonizer) to capture its module name and its local definitions
        # (``Model``, ``Parameters``, ``Sequences``, ``tester``, ...).
        frame = inspect.currentframe()
        assert frame is not None
        frame = frame.f_back
        assert frame is not None
        self.pymodule = frame.f_globals["__name__"]
        for (key, value) in frame.f_locals.items():
            setattr(self, key, value)

    def cythonize(self) -> None:
        """Translate Python source code of the relevant model first into Cython and
        then into C, compile it, and move the resulting dll file to the `autogen`
        subfolder of subpackage `cythons`."""
        print(f"Translate module/package {self.pyname}.")
        self.pyxwriter.write()
        print(f"Compile module {self.cyname}.")
        self.compile_()
        self.move_dll()

    @property
    def pyname(self) -> str:
        """Name of the original Python module or package.
        >>> from hydpy.models.hland import cythonizer
        >>> cythonizer.pyname
        'hland'
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> cythonizer.pyname
        'hland_v1'
        """
        return self.pymodule.split(".")[-1]

    @property
    def cyname(self) -> str:
        """Name of the compiled module.
        >>> from hydpy.models.hland import cythonizer
        >>> cythonizer.cyname
        'c_hland'
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> cythonizer.cyname
        'c_hland_v1'
        """
        return "c_" + self.pyname

    @property
    def cydirpath(self) -> str:
        """The absolute path of the directory containing the compiled modules.
        >>> from hydpy.models.hland import cythonizer
        >>> from hydpy import repr_
        >>> repr_(cythonizer.cydirpath)  # doctest: +ELLIPSIS
        '.../hydpy/cythons/autogen'
        >>> import os
        >>> os.path.exists(cythonizer.cydirpath)
        True
        """
        return cythons.autogen.__path__[0]

    @property
    def cymodule(self) -> types.ModuleType:
        """The compiled module.
        Property |Cythonizer.cymodule| returns the relevant DLL module:
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> from hydpy.cythons.autogen import c_hland_v1
        >>> c_hland_v1 is cythonizer.cymodule
        True
        However, if this module is missing for some reasons, it tries to create the
        module first and returns it afterwards. For demonstration purposes, we define
        a wrong |Cythonizer.cyname|:
        >>> from hydpy.cythons.modelutils import Cythonizer
        >>> cyname = Cythonizer.cyname
        >>> Cythonizer.cyname = "wrong"
        >>> cythonizer._cymodule = None
        >>> from unittest import mock
        >>> with mock.patch.object(Cythonizer, "cythonize") as mock:
        ...     cythonizer.cymodule
        Traceback (most recent call last):
        ...
        ModuleNotFoundError: No module named 'hydpy.cythons.autogen.wrong'
        >>> mock.call_args_list
        [call()]
        >>> Cythonizer.cyname = cyname
        """
        cymodule = self._cymodule
        if cymodule:
            return cymodule
        modulepath = f"hydpy.cythons.autogen.{self.cyname}"
        try:
            self._cymodule = importlib.import_module(modulepath)
        except ModuleNotFoundError:
            # Build the module on demand, then retry the import.
            self.cythonize()
            self._cymodule = importlib.import_module(modulepath)
        return self._cymodule

    @property
    def pyxfilepath(self) -> str:
        """The absolute path of the compiled module.
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> from hydpy import repr_
        >>> repr_(cythonizer.pyxfilepath)  # doctest: +ELLIPSIS
        '.../hydpy/cythons/autogen/c_hland_v1.pyx'
        >>> import os
        >>> os.path.exists(cythonizer.pyxfilepath)
        True
        """
        return os.path.join(self.cydirpath, f"{self.cyname}.pyx")

    @property
    def dllfilepath(self) -> str:
        """The absolute path of the compiled module.
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> from hydpy import repr_
        >>> repr_(cythonizer.dllfilepath)  # doctest: +ELLIPSIS
        '.../hydpy/cythons/autogen/c_hland_v1...'
        >>> import os
        >>> os.path.exists(os.path.split(cythonizer.dllfilepath)[0])
        True
        """
        return os.path.join(self.cydirpath, f"{self.cyname}{_dllextension}")

    @property
    def buildpath(self) -> str:
        """The absolute path for temporarily build files.
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> from hydpy import repr_
        >>> repr_(cythonizer.buildpath)  # doctest: +ELLIPSIS
        '.../hydpy/cythons/autogen/_build'
        """
        return os.path.join(self.cydirpath, "_build")

    @property
    def pyxwriter(self) -> PyxWriter:
        """A new |PyxWriter| instance.
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> pyxwriter = cythonizer.pyxwriter
        >>> from hydpy import classname
        >>> classname(pyxwriter)
        'PyxWriter'
        >>> cythonizer.pyxwriter is pyxwriter
        False
        """
        # Prepare a fresh model instance with its parameters and sequences
        # so the writer can reflect on the complete model structure.
        model = self.Model()
        dict_ = vars(self)
        dict_["model"] = model
        model.parameters = importtools.prepare_parameters(dict_)
        model.sequences = importtools.prepare_sequences(dict_)
        return PyxWriter(self, model, self.pyxfilepath)

    def compile_(self) -> None:
        """Translate Cython code to C code and compile it."""
        # ``distutils.core.setup`` reads ``sys.argv``; temporarily replace
        # it with the build commands and restore the original afterwards.
        argv = copy.deepcopy(sys.argv)
        sys.argv = [
            sys.argv[0],
            "build_ext",
            "--build-lib=" + self.buildpath,
            "--build-temp=" + self.buildpath,
        ]
        exc_modules = [
            distutils.extension.Extension(
                "hydpy.cythons.autogen." + self.cyname,
                [self.pyxfilepath],
                extra_compile_args=["-O2"],
            )
        ]
        distutils.core.setup(
            ext_modules=build.cythonize(exc_modules), include_dirs=[numpy.get_include()]
        )
        sys.argv = argv

    def move_dll(self) -> None:
        """Try to find the DLL file created my method |Cythonizer.compile_| and to move
        it into the `autogen` folder of the `cythons` subpackage.
        Usually, one does not need to apply the |Cythonizer.move_dll| method directly.
        However, if you are a model developer, you might see one of the following error
        messages from time to time:
        >>> from hydpy.models.hland_v1 import cythonizer
        >>> cythonizer.move_dll()  # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        OSError: After trying to cythonize model `hland_v1`, the resulting file \
`c_hland_v1...` could not be found in directory `.../hydpy/cythons/autogen/_build` \
nor any of its subdirectories. The distutil report should tell whether the file has \
been stored somewhere else, is named somehow else, or could not be build at all.
        >>> import os
        >>> from unittest import mock
        >>> from hydpy import TestIO
        >>> with TestIO():  # doctest: +ELLIPSIS
        ...     with mock.patch.object(
        ...         type(cythonizer), "buildpath", new_callable=mock.PropertyMock
        ...     ) as mocked_buildpath:
        ...         mocked_buildpath.return_value = "_build"
        ...         os.makedirs("_build/subdir", exist_ok=True)
        ...         filepath = f"_build/subdir/c_hland_v1{get_dllextension()}"
        ...         with open(filepath, "w"):
        ...             pass
        ...         with mock.patch(
        ...             "shutil.move",
        ...             side_effect=PermissionError("Denied!")):
        ...             cythonizer.move_dll()
        Traceback (most recent call last):
        ...
        PermissionError: After trying to cythonize module `hland_v1`, when trying to \
move the final cython module `c_hland_v1...` from directory `_build` to directory \
`.../hydpy/cythons/autogen`, the following error occurred: Denied! A likely error \
cause is that the cython module `c_hland_v1...` does already exist in this directory \
and is currently blocked by another Python process. Maybe it helps to close all \
Python processes and restart the cythonization afterwards.
        """
        dirinfos = os.walk(self.buildpath)
        system_dependent_filename = None
        for dirinfo in dirinfos:
            # Search each directory for a file named ``c_<model><dllext>``.
            for filename in dirinfo[2]:
                if filename.startswith(self.cyname) and filename.endswith(
                    _dllextension
                ):
                    system_dependent_filename = filename
                    break
            if system_dependent_filename:
                try:
                    shutil.move(
                        os.path.join(dirinfo[0], system_dependent_filename),
                        os.path.join(self.cydirpath, self.cyname + _dllextension),
                    )
                    break
                except BaseException:
                    objecttools.augment_excmessage(
                        f"After trying to cythonize module `{self.pyname}`, "
                        f"when trying to move the final cython module "
                        f"`{system_dependent_filename}` from directory "
                        f"`{self.buildpath}` to directory "
                        f"`{objecttools.repr_(self.cydirpath)}`",
                        f"A likely error cause is that the cython module "
                        f"`{self.cyname}{_dllextension}` does already exist "
                        f"in this directory and is currently blocked by "
                        f"another Python process. Maybe it helps to close "
                        f"all Python processes and restart the cythonization "
                        f"afterwards.",
                    )
        else:
            # The ``for`` loop completed without ``break``: no DLL found.
            raise IOError(
                f"After trying to cythonize model `{self.pyname}`, the resulting file "
                f"`{self.cyname}{_dllextension}` could not be found in directory "
                f"`{objecttools.repr_(self.buildpath)}` nor any of its "
                f"subdirectories. The distutil report should tell whether the file "
                f"has been stored somewhere else, is named somehow else, or could not "
                f"be build at all."
            )
class PyxWriter:
    """Translates the source code of Python models into Cython source code.
    Method |PyxWriter| serves as a master method, which triggers the complete writing
    process. The other properties and methods supply the required code lines. Their
    names are selected to match the names of the original Python models as close as
    possible.
    """

    # The cythonizer that owns this writer; its upper-case attributes become
    # exported constants (see property `constants`).
    cythonizer: Cythonizer
    # The Python model instance to translate into Cython code.
    model: modeltools.Model
    # Target path of the Cython extension file ("pyx") to be written.
    pyxpath: str
def __init__(
    self, cythonizer: Cythonizer, model: modeltools.Model, pyxpath: str
) -> None:
    """Store the owning cythonizer, the model to translate, and the target
    path of the pyx file to be written."""
    self.pyxpath = pyxpath
    self.model = model
    self.cythonizer = cythonizer
def write(self) -> None:
    """Collect the source code and write it into a Cython extension file ("pyx")."""
    # Each entry pairs the progress messages to print with the name of the
    # property supplying the corresponding source-code section.  The
    # properties are resolved lazily via `getattr`, so their own progress
    # output still appears after the section message, exactly as before.
    sections = (
        ((" * cython options",), "cythondistutilsoptions"),
        ((" * C imports",), "cimports"),
        ((" * constants (if defined)",), "constants"),
        ((" * parameter classes",), "parameters"),
        ((" * sequence classes",), "sequences"),
        ((" * numerical parameters",), "numericalparameters"),
        ((" * submodel classes",), "submodels"),
        ((" * model class", " - model attributes"), "modeldeclarations"),
        ((" - standard functions",), "modelstandardfunctions"),
        ((" - numeric functions",), "modelnumericfunctions"),
        ((" - additional functions",), "modeluserfunctions"),
    )
    with open(self.pyxpath, "w", encoding=config.ENCODING) as pxf:
        for messages, propertyname in sections:
            for message in messages:
                print(message)
            pxf.write(repr(getattr(self, propertyname)))
@property
def cythondistutilsoptions(self) -> List[str]:
    """Cython and Distutils option lines.
    Use the configuration options "FASTCYTHON" and "PROFILECYTHON" to configure the
    cythonization processes as follows:
    >>> from hydpy.cythons.modelutils import PyxWriter
    >>> pyxwriter = PyxWriter(None, None, None)
    >>> pyxwriter.cythondistutilsoptions
    #!python
    # cython: language_level=3
    # cython: boundscheck=False
    # cython: wraparound=False
    # cython: initializedcheck=False
    # cython: cdivision=True
    <BLANKLINE>
    >>> from hydpy import config
    >>> config.FASTCYTHON = False
    >>> config.PROFILECYTHON = True
    >>> pyxwriter.cythondistutilsoptions
    #!python
    # cython: language_level=3
    # cython: boundscheck=True
    # cython: wraparound=True
    # cython: initializedcheck=True
    # cython: cdivision=False
    # cython: linetrace=True
    # distutils: define_macros=CYTHON_TRACE=1
    # distutils: define_macros=CYTHON_TRACE_NOGIL=1
    <BLANKLINE>
    >>> config.FASTCYTHON = True
    >>> config.PROFILECYTHON = False
    """
    # "FASTCYTHON" disables all runtime checks and enables C division; the
    # two flag groups are exact opposites of each other.
    checks = "False" if config.FASTCYTHON else "True"
    cdivision = "True" if config.FASTCYTHON else "False"
    lines = Lines(
        "#!python",
        "# cython: language_level=3",
        f"# cython: boundscheck={checks}",
        f"# cython: wraparound={checks}",
        f"# cython: initializedcheck={checks}",
        f"# cython: cdivision={cdivision}",
    )
    if config.PROFILECYTHON:
        lines.add(0, "# cython: linetrace=True")
        lines.add(0, "# distutils: define_macros=CYTHON_TRACE=1")
        lines.add(0, "# distutils: define_macros=CYTHON_TRACE_NOGIL=1")
    return lines
@property
def cimports(self) -> List[str]:
    """Import command lines."""
    # Kept in a plain tuple so that adding or removing an import is a
    # one-line change; the order is preserved in the generated file.
    statements = (
        "import numpy",
        "cimport numpy",
        "from libc.math cimport exp, fabs, log, "
        "sin, cos, tan, asin, acos, atan, isnan, isinf",
        "from libc.math cimport NAN as nan",
        "from libc.math cimport INFINITY as inf",
        "import cython",
        "from cpython.mem cimport PyMem_Malloc",
        "from cpython.mem cimport PyMem_Realloc",
        "from cpython.mem cimport PyMem_Free",
        "from hydpy.cythons.autogen cimport configutils",
        "from hydpy.cythons.autogen cimport interputils",
        "from hydpy.cythons.autogen import pointerutils",
        "from hydpy.cythons.autogen cimport pointerutils",
        "from hydpy.cythons.autogen cimport quadutils",
        "from hydpy.cythons.autogen cimport rootutils",
        "from hydpy.cythons.autogen cimport smoothutils",
    )
    return Lines(*statements)
@property
def constants(self) -> List[str]:
    """Constants declaration lines."""
    lines = Lines()
    for name, member in vars(self.cythonizer).items():
        # Only upper-case, non-class attributes of checkable types are
        # exported as public constants of the Cython module.
        if not name.isupper():
            continue
        if inspect.isclass(member) or not isinstance(member, CHECKABLE_TYPES):
            continue
        ndim = numpy.array(member).ndim
        ctype = TYPE2STR[type(member)] + NDIM2STR[ndim]
        lines.add(0, f"cdef public {ctype} {name} = {member}")
    return lines
@property
def parameters(self) -> List[str]:
    """Parameter declaration lines."""
    lines = Lines()
    if not self.model.parameters:
        return lines
    # First, the composite `Parameters` class referencing all subgroups.
    lines.add(0, "@cython.final")
    lines.add(0, "cdef class Parameters:")
    for subpars in self.model.parameters:
        lines.add(1, f"cdef public {type(subpars).__name__} {subpars.name}")
    # Second, one extension class per parameter subgroup.
    for subpars in self.model.parameters:
        print(f" - {subpars.name}")
        lines.add(0, "@cython.final")
        lines.add(0, f"cdef class {type(subpars).__name__}:")
        for par in subpars:
            # Fall back to the raw TYPE string for types without a
            # registered Cython equivalent.
            basetype = TYPE2STR.get(par.TYPE, par.TYPE)
            lines.add(1, f"cdef public {basetype + NDIM2STR[par.NDIM]} {par.name}")
    return lines
@property
def sequences(self) -> List[str]:
    """Sequence declaration lines."""
    lines = Lines()
    # Composite `Sequences` class referencing all subgroups (plus old/new
    # state shadows when the model owns state sequences).
    lines.add(0, "@cython.final")
    lines.add(0, "cdef class Sequences:")
    for subseqs in self.model.sequences:
        lines.add(1, f"cdef public {type(subseqs).__name__} {subseqs.name}")
    if self.model.sequences.states:
        lines.add(1, "cdef public StateSequences old_states")
        lines.add(1, "cdef public StateSequences new_states")
    # One extension class per sequence subgroup.
    for subseqs in self.model.sequences:
        print(f" - {subseqs.name}")
        lines.add(0, "@cython.final")
        lines.add(0, f"cdef class {type(subseqs).__name__}:")
        for seq in subseqs:
            ctype = f"double{NDIM2STR[seq.NDIM]}"
            if isinstance(subseqs, sequencetools.LinkSequences):
                # Link sequences hold raw C pointers instead of values.
                if seq.NDIM == 0:
                    lines.add(1, f"cdef double *{seq.name}")
                elif seq.NDIM == 1:
                    lines.add(1, f"cdef double **{seq.name}")
                    lines.add(1, f"cdef public int len_{seq.name}")
                    lines.add(
                        1, f"cdef public {TYPE2STR[int]}[:] _{seq.name}_ready"
                    )
            else:
                lines.add(1, f"cdef public {ctype} {seq.name}")
            lines.add(1, f"cdef public int _{seq.name}_ndim")
            lines.add(1, f"cdef public int _{seq.name}_length")
            for idx in range(seq.NDIM):
                lines.add(1, f"cdef public int _{seq.name}_length_{idx}")
            if seq.NUMERIC:
                # Extra buffers for the numerical integration algorithm.
                ctype_numeric = "double" + NDIM2STR[seq.NDIM + 1]
                lines.add(1, f"cdef public {ctype_numeric} _{seq.name}_points")
                lines.add(1, f"cdef public {ctype_numeric} _{seq.name}_results")
                if isinstance(subseqs, sequencetools.FluxSequences):
                    lines.add(
                        1, f"cdef public {ctype_numeric} " f"_{seq.name}_integrals"
                    )
                    lines.add(1, f"cdef public {ctype} _{seq.name}_sum")
            if isinstance(seq, sequencetools.IOSequence):
                lines.extend(self.iosequence(seq))
        # Subgroup-level helper methods.
        if isinstance(subseqs, sequencetools.IOSequences):
            lines.extend(self.load_data(subseqs))
            lines.extend(self.save_data(subseqs))
        if isinstance(subseqs, sequencetools.LinkSequences):
            lines.extend(self.set_pointer(subseqs))
            lines.extend(self.get_value(subseqs))
            lines.extend(self.set_value(subseqs))
        if isinstance(
            subseqs,
            (
                sequencetools.InputSequences,
                sequencetools.OutputSequences,
            ),
        ):
            lines.extend(self.set_pointer(subseqs))
        if isinstance(subseqs, sequencetools.OutputSequences):
            lines.extend(self.update_outputs(subseqs))
    return lines
@staticmethod
def iosequence(seq: sequencetools.IOSequence) -> List[str]:
    """Declaration lines for the given |IOSequence| object."""
    name = seq.name
    ctype = f"double{NDIM2STR[seq.NDIM + 1]}"
    lines = Lines()
    # Flags and buffers shared by all IO sequences.
    for declaration in (
        f"cdef public bint _{name}_ramflag",
        f"cdef public {ctype} _{name}_array",
        f"cdef public bint _{name}_diskflag_reading",
        f"cdef public bint _{name}_diskflag_writing",
        f"cdef public double[:] _{name}_ncarray",
    ):
        lines.add(1, declaration)
    # Input and output sequences additionally carry an exchange pointer.
    if isinstance(seq, sequencetools.InputSequence):
        lines.add(1, f"cdef public bint _{name}_inputflag")
        lines.add(1, f"cdef double *_{name}_inputpointer")
    elif isinstance(seq, sequencetools.OutputSequence):
        lines.add(1, f"cdef public bint _{name}_outputflag")
        lines.add(1, f"cdef double *_{name}_outputpointer")
    return lines
@staticmethod
def _get_index(ndim: int) -> str:
    """Return the comma-separated loop-index names `jdx0, jdx1, ...`."""
    names = [f"jdx{position}" for position in range(ndim)]
    return ", ".join(names)
@staticmethod
def _add_cdef_jdxs(
    lines: Lines, subseqs: sequencetools.IOSequences[Any, Any, Any]
) -> None:
    """Add one `cdef int jdx0, jdx1, ...` declaration covering the highest
    dimensionality of the given subsequences (nothing for 0-dimensional)."""
    maxndim = max(seq.NDIM for seq in subseqs)
    if not maxndim:
        return
    jdxs = ", ".join(f"jdx{ndim}" for ndim in range(maxndim))
    lines.add(2, f"cdef int {jdxs}")
@classmethod
def load_data(cls, subseqs: sequencetools.IOSequences[Any, Any, Any]) -> List[str]:
    """Load data statements."""
    print(" . load_data")
    lines = Lines()
    lines.add(1, f"cpdef inline void load_data(self, int idx) {_nogil}:")
    cls._add_cdef_jdxs(lines, subseqs)
    lines.add(2, "cdef int k")
    for seq in subseqs:
        # Per sequence, prefer the input pointer, then the NetCDF buffer,
        # then the RAM array (generated as an if/elif chain).
        if isinstance(seq, sequencetools.InputSequence):
            lines.add(2, f"if self._{seq.name}_inputflag:")
            lines.add(3, f"self.{seq.name} = self._{seq.name}_inputpointer[0]")
            if_or_elif = "elif"
        else:
            if_or_elif = "if"
        lines.add(2, f"{if_or_elif} self._{seq.name}_diskflag_reading:")
        if seq.NDIM == 0:
            lines.add(3, f"self.{seq.name} = self._{seq.name}_ncarray[0]")
        else:
            # The flat NetCDF array is unpacked via counter `k`.
            lines.add(3, "k = 0")
            for idx in range(seq.NDIM):
                lines.add(
                    3 + idx,
                    f"for jdx{idx} in range(self._{seq.name}_length_{idx}):",
                )
            lines.add(
                3 + seq.NDIM,
                f"self.{seq.name}[{cls._get_index(seq.NDIM)}] "
                f"= self._{seq.name}_ncarray[k]",
            )
            lines.add(3 + seq.NDIM, "k += 1")
        lines.add(2, f"elif self._{seq.name}_ramflag:")
        if seq.NDIM == 0:
            lines.add(3, f"self.{seq.name} = self._{seq.name}_array[idx]")
        else:
            for idx in range(seq.NDIM):
                lines.add(
                    3 + idx,
                    f"for jdx{idx} in " f"range(self._{seq.name}_length_{idx}):",
                )
            index = cls._get_index(seq.NDIM)
            lines.add(
                3 + seq.NDIM,
                f"self.{seq.name}[{index}] = self._{seq.name}_array[idx, {index}]",
            )
    return lines
@classmethod
def save_data(cls, subseqs: sequencetools.IOSequences[Any, Any, Any]) -> List[str]:
    """Save data statements."""
    print(" . save_data")
    lines = Lines()
    lines.add(1, f"cpdef inline void save_data(self, int idx) {_nogil}:")
    cls._add_cdef_jdxs(lines, subseqs)
    lines.add(2, "cdef int k")
    for seq in subseqs:
        # Unlike `load_data`, writing to disk and to RAM are independent
        # `if` branches: both targets may be active at once.
        lines.add(2, f"if self._{seq.name}_diskflag_writing:")
        if seq.NDIM == 0:
            lines.add(3, f"self._{seq.name}_ncarray[0] = self.{seq.name}")
        else:
            # Pack the values into the flat NetCDF array via counter `k`.
            lines.add(3, "k = 0")
            for idx in range(seq.NDIM):
                lines.add(
                    3 + idx,
                    f"for jdx{idx} in " f"range(self._{seq.name}_length_{idx}):",
                )
            index = cls._get_index(seq.NDIM)
            lines.add(
                3 + seq.NDIM,
                f"self._{seq.name}_ncarray[k] = self.{seq.name}[{index}]",
            )
            lines.add(3 + seq.NDIM, "k += 1")
        lines.add(2, f"if self._{seq.name}_ramflag:")
        if seq.NDIM == 0:
            lines.add(3, f"self._{seq.name}_array[idx] = self.{seq.name}")
        else:
            for idx in range(seq.NDIM):
                lines.add(
                    3 + idx,
                    f"for jdx{idx} in " f"range(self._{seq.name}_length_{idx}):",
                )
            index = cls._get_index(seq.NDIM)
            lines.add(
                3 + seq.NDIM,
                f"self._{seq.name}_array[idx, {index}] = self.{seq.name}[{index}]",
            )
    return lines
def set_pointer(
    self,
    subseqs: Union[
        sequencetools.InputSequences,
        sequencetools.OutputSequences[Any],
        sequencetools.LinkSequences[Any],
    ],
) -> List[str]:
    """Set pointer statements for all input, output, and link sequences."""
    lines = Lines()
    if isinstance(subseqs, sequencetools.InputSequences):
        lines.extend(self.set_pointerinput(subseqs))
        return lines
    if isinstance(subseqs, sequencetools.OutputSequences):
        lines.extend(self.set_pointeroutput(subseqs))
        return lines
    # Link sequences: scalar and vector pointers need different helpers.
    if any(seq.NDIM == 0 for seq in subseqs):
        lines.extend(self.set_pointer0d(subseqs))
    if any(seq.NDIM == 1 for seq in subseqs):
        lines.extend(self.alloc(subseqs))
        lines.extend(self.dealloc(subseqs))
        lines.extend(self.set_pointer1d(subseqs))
    return lines
@staticmethod
def set_pointer0d(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Set pointer statements for 0-dimensional link sequences."""
    print(" . set_pointer0d")
    lines = Lines()
    lines.add(
        1,
        "cpdef inline set_pointer0d(self, str name, pointerutils.Double value):",
    )
    lines.add(2, "cdef pointerutils.PDouble pointer = pointerutils.PDouble(value)")
    for seq in subseqs:
        if seq.NDIM:
            continue
        lines.add(2, f'if name == "{seq.name}":')
        lines.add(3, f"self.{seq.name} = pointer.p_value")
    return lines
@staticmethod
def get_value(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Get value statements for link sequences."""
    print(" . get_value")
    lines = Lines()
    lines.add(1, "cpdef get_value(self, str name):")
    lines.add(2, "cdef int idx")
    for seq in subseqs:
        seqname = seq.name
        lines.add(2, f'if name == "{seqname}":')
        if seq.NDIM == 0:
            # Scalars dereference their pointer directly.
            lines.add(3, f"return self.{seqname}[0]")
        elif seq.NDIM == 1:
            # Vectors collect all pointer targets into a fresh numpy array,
            # with safety checks on each access.
            lines.add(3, f"values = numpy.empty(self.len_{seqname})")
            lines.add(3, f"for idx in range(self.len_{seqname}):")
            PyxWriter._check_pointer(lines, seq)
            lines.add(4, f"values[idx] = self.{seqname}[idx][0]")
            lines.add(3, "return values")
    return lines
@staticmethod
def set_value(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Set value statements for link sequences."""
    print(" . set_value")
    lines = Lines()
    lines.add(1, "cpdef set_value(self, str name, value):")
    for seq in subseqs:
        seqname = seq.name
        lines.add(2, f'if name == "{seqname}":')
        if seq.NDIM == 0:
            lines.add(3, f"self.{seqname}[0] = value")
        elif seq.NDIM == 1:
            # Assign element-wise through the pointer array, with safety
            # checks on each access.
            lines.add(3, f"for idx in range(self.len_{seqname}):")
            PyxWriter._check_pointer(lines, seq)
            lines.add(4, f"self.{seqname}[idx][0] = value[idx]")
    return lines
@staticmethod
def _check_pointer(lines: Lines, seq: sequencetools.LinkSequence) -> None:
    """Add the safety checks guarding one 1-dimensional pointer access."""
    seqname = seq.name
    lines.add(4, f"pointerutils.check0(self._{seqname}_length_0)")
    lines.add(4, f"if self._{seqname}_ready[idx] == 0:")
    lines.add(5, f"pointerutils.check1(self._{seqname}_length_0, idx)")
    lines.add(5, f"pointerutils.check2(self._{seqname}_ready, idx)")
@staticmethod
def alloc(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Allocate memory statements for 1-dimensional link sequences."""
    print(" . setlength")
    lines = Lines()
    lines.add(1, f"cpdef inline alloc(self, name, {TYPE2STR[int]} length):")
    # Loop-invariant: the numpy dtype name for the platform int type.
    dtype = TYPE2STR[int].split("_")[0]
    for seq in subseqs:
        if seq.NDIM != 1:
            continue
        lines.add(2, f'if name == "{seq.name}":')
        lines.add(3, f"self._{seq.name}_length_0 = length")
        lines.add(
            3,
            f"self._{seq.name}_ready = numpy.full(length, 0, dtype={dtype})",
        )
        lines.add(
            3,
            f"self.{seq.name} = <double**> PyMem_Malloc(length * sizeof(double*))",
        )
    return lines
@staticmethod
def dealloc(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Deallocate memory statements for 1-dimensional link sequences."""
    print(" . dealloc")
    lines = Lines()
    lines.add(1, "cpdef inline dealloc(self, name):")
    for seq in subseqs:
        if seq.NDIM != 1:
            continue
        lines.add(2, f'if name == "{seq.name}":')
        lines.add(3, f"PyMem_Free(self.{seq.name})")
    return lines
@staticmethod
def set_pointer1d(subseqs: sequencetools.LinkSequences[Any]) -> List[str]:
    """Set_pointer statements for 1-dimensional link sequences."""
    print(" . set_pointer1d")
    lines = Lines()
    lines.add(
        1,
        "cpdef inline set_pointer1d"
        "(self, str name, pointerutils.Double value, int idx):",
    )
    lines.add(2, "cdef pointerutils.PDouble pointer = pointerutils.PDouble(value)")
    for seq in subseqs:
        if seq.NDIM != 1:
            continue
        lines.add(2, f'if name == "{seq.name}":')
        lines.add(3, f"self.{seq.name}[idx] = pointer.p_value")
        # Mark this slot as connected so the generated safety checks pass.
        lines.add(3, f"self._{seq.name}_ready[idx] = 1")
    return lines
@staticmethod
def set_pointerinput(subseqs: sequencetools.InputSequences) -> List[str]:
    """Set pointer statements for input sequences."""
    print(" . set_pointerinput")
    lines = Lines()
    lines.add(
        1,
        "cpdef inline set_pointerinput"
        "(self, str name, pointerutils.PDouble value):",
    )
    for seq in subseqs:
        seqname = seq.name
        lines.add(2, f'if name == "{seqname}":')
        lines.add(3, f"self._{seqname}_inputpointer = value.p_value")
    return lines
def set_pointeroutput(
    self, subseqs: sequencetools.OutputSequences[Any]
) -> List[str]:
    """Set pointer statements for output sequences."""
    print(" . set_pointeroutput")
    lines = Lines()
    lines.add(
        1,
        "cpdef inline set_pointeroutput"
        "(self, str name, pointerutils.PDouble value):",
    )
    relevant = self._filter_outputsequences(subseqs)
    if not relevant:
        # Cython requires a non-empty function body.
        lines.add(2, "pass")
        return lines
    for seq in relevant:
        lines.add(2, f'if name == "{seq.name}":')
        lines.add(3, f"self._{seq.name}_outputpointer = value.p_value")
    return lines
@staticmethod
def _filter_outputsequences(
    subseqs: sequencetools.OutputSequences[Any],
) -> List[sequencetools.OutputSequence]:
    """Return only the 0-dimensional output sequences."""
    return [seq for seq in subseqs if seq.NDIM == 0]
@property
def numericalparameters(self) -> List[str]:
    """Numeric parameter declaration lines."""
    lines = Lines()
    # Only solver models need the `NumConsts`/`NumVars` helper classes for
    # the numerical integration algorithm.
    if isinstance(self.model, modeltools.SolverModel):
        lines.add(0, "@cython.final")
        lines.add(0, "cdef class NumConsts:")
        for name in ("nmb_methods", "nmb_stages"):
            lines.add(1, f"cdef public {TYPE2STR[int]} {name}")
        for name in ("dt_increase", "dt_decrease"):
            lines.add(1, f"cdef public {TYPE2STR[float]} {name}")
        lines.add(1, "cdef public configutils.Config pub")
        lines.add(1, "cdef public double[:, :, :] a_coefs")
        lines.add(0, "cdef class NumVars:")
        lines.add(1, "cdef public bint use_relerror")
        for name in ("nmb_calls", "idx_method", "idx_stage"):
            lines.add(1, f"cdef public {TYPE2STR[int]} {name}")
        for name in (
            "t0",
            "t1",
            "dt",
            "dt_est",
            "abserror",
            "relerror",
            "last_abserror",
            "last_relerror",
            "extrapolated_abserror",
            "extrapolated_relerror",
        ):
            lines.add(1, f"cdef public {TYPE2STR[float]} {name}")
        lines.add(1, f"cdef public {TYPE2STR[bool]} f0_ready")
    return lines
@property
def submodels(self) -> List[str]:
    """Submodel declaration lines."""
    lines = Lines()
    for submodel in self.model.SUBMODELS:
        base = submodel.CYTHONBASECLASS
        basemodule = base.__module__.split(".")[-1]
        lines.add(0, "@cython.final")
        lines.add(0, f"cdef class {submodel.__name__}({basemodule}.{base.__name__}):")
        # Each submodel keeps a back-reference to its master model.
        lines.add(1, "cdef public Model model")
        lines.add(1, "def __init__(self, Model model):")
        lines.add(2, "self.model = model")
        # One `apply_method<i>` wrapper per submodel method.
        for idx, method in enumerate(submodel.METHODS):
            lines.add(1, f"cpdef double apply_method{idx}(self, double x) nogil:")
            lines.add(2, f"return self.model.{method.__name__.lower()}(x)")
    return lines
@property
def modeldeclarations(self) -> List[str]:
    """The attribute declarations of the model class."""
    submodels = getattr(self.model, "SUBMODELS", ())
    lines = Lines()
    lines.add(0, "@cython.final")
    lines.add(0, "cdef class Model:")
    # Index properties (e.g. `idx_sim`) defined anywhere in the MRO become
    # public int attributes of the Cython model class.
    # NOTE(review): an index property overridden in a subclass would be
    # declared once per defining class — confirm this cannot occur.
    for cls in inspect.getmro(type(self.model)):
        for name, member in vars(cls).items():
            if isinstance(member, modeltools.IndexProperty):
                lines.add(1, f"cdef public int {name}")
    if self.model.parameters:
        lines.add(1, "cdef public Parameters parameters")
    lines.add(1, "cdef public Sequences sequences")
    for submodel in submodels:
        lines.add(1, f"cdef public {submodel.__name__} {submodel.name}")
    if hasattr(self.model, "numconsts"):
        lines.add(1, "cdef public NumConsts numconsts")
    if hasattr(self.model, "numvars"):
        lines.add(1, "cdef public NumVars numvars")
    if submodels:
        # Submodels require a constructor wiring them back to the model.
        lines.add(1, "def __init__(self):")
        for submodel in submodels:
            lines.add(2, f"self.{submodel.name} = {submodel.__name__}(self)")
    return lines
@property
def modelstandardfunctions(self) -> List[str]:
    """The standard functions of the model class."""
    lines = Lines()
    # The section order determines the order within the generated module.
    lines.extend(self.simulate)
    lines.extend(self.iofunctions)
    lines.extend(self.new2old)
    if isinstance(self.model, modeltools.RunModel):
        lines.extend(self.run(self.model))
    lines.extend(self.update_inlets)
    lines.extend(self.update_outlets)
    lines.extend(self.update_receivers)
    lines.extend(self.update_senders)
    lines.extend(self.update_outputs_model)
    return lines
@property
def modelnumericfunctions(self) -> List[str]:
    """Numerical integration functions of the model class."""
    lines = Lines()
    # Only solver models need numerical integration machinery.
    if not isinstance(self.model, modeltools.SolverModel):
        return lines
    lines.extend(self.solve)
    lines.extend(self.calculate_single_terms(self.model))
    lines.extend(self.calculate_full_terms(self.model))
    lines.extend(self.get_point_states)
    lines.extend(self.set_point_states)
    lines.extend(self.set_result_states)
    lines.extend(self.get_sum_fluxes)
    lines.extend(self.set_point_fluxes)
    lines.extend(self.set_result_fluxes)
    lines.extend(self.integrate_fluxes)
    lines.extend(self.reset_sum_fluxes)
    lines.extend(self.addup_fluxes)
    lines.extend(self.calculate_error)
    lines.extend(self.extrapolate_error)
    return lines
@property
def simulate(self) -> List[str]:
    """Simulation statements."""
    print(" . simulate")
    lines = Lines()
    add = lines.add
    add(1, f"cpdef inline void simulate(self, int idx) {_nogil}:")
    add(2, "self.idx_sim = idx")
    sequences = self.model.sequences
    if sequences.inputs:
        add(2, "self.load_data()")
    if self.model.INLET_METHODS:
        add(2, "self.update_inlets()")
    # Solver models integrate numerically; all others just run once.
    if isinstance(self.model, modeltools.SolverModel):
        add(2, "self.solve()")
    else:
        add(2, "self.run()")
    if sequences.states:
        add(2, "self.new2old()")
    if self.model.OUTLET_METHODS:
        add(2, "self.update_outlets()")
    if sequences.factors or sequences.fluxes or sequences.states:
        add(2, "self.update_outputs()")
    return lines
@property
def iofunctions(self) -> List[str]:
    """Input/output functions of the model class.
    The result of property |PyxWriter.iofunctions| depends on the availability of
    different types of sequences. So far, the models implemented in *HydPy* do not
    reflect all possible combinations, which is why we modify the |hland_v1|
    application model in the following examples:
    >>> from hydpy.models.hland_v1 import cythonizer
    >>> pyxwriter = cythonizer.pyxwriter
    >>> pyxwriter.iofunctions
    . load_data
    . save_data
    cpdef inline void load_data(self) nogil:
    self.sequences.inputs.load_data(self.idx_sim)
    cpdef inline void save_data(self, int idx) nogil:
    self.sequences.inputs.save_data(self.idx_sim)
    self.sequences.factors.save_data(self.idx_sim)
    self.sequences.fluxes.save_data(self.idx_sim)
    self.sequences.states.save_data(self.idx_sim)
    <BLANKLINE>
    >>> pyxwriter.model.sequences.factors = None
    >>> pyxwriter.model.sequences.fluxes = None
    >>> pyxwriter.model.sequences.states = None
    >>> pyxwriter.iofunctions
    . load_data
    . save_data
    cpdef inline void load_data(self) nogil:
    self.sequences.inputs.load_data(self.idx_sim)
    cpdef inline void save_data(self, int idx) nogil:
    self.sequences.inputs.save_data(self.idx_sim)
    <BLANKLINE>
    >>> pyxwriter.model.sequences.inputs = None
    >>> pyxwriter.iofunctions
    <BLANKLINE>
    <BLANKLINE>
    """
    lines = Lines()
    seqs = self.model.sequences
    if not (seqs.inputs or seqs.factors or seqs.fluxes or seqs.states):
        return lines
    for func in ("load_data", "save_data"):
        # `load_data` only makes sense when input sequences exist.
        if (func == "load_data") and not seqs.inputs:
            continue
        print(f" . {func}")
        # Both generated methods are nogil; the original recomputed this
        # from a membership test over exactly the two iterated names,
        # which was always true.
        idx_as_arg = func == "save_data"
        lines.add(1, get_methodheader(func, nogil=True, idxarg=idx_as_arg))
        # Hoisted out of the subsequences loop: the applicable subsequence
        # groups depend only on `func`.
        if func == "load_data":
            applyfuncs: Tuple[str, ...] = ("inputs",)
        else:
            applyfuncs = ("inputs", "factors", "fluxes", "states")
        for subseqs in seqs:
            if subseqs.name in applyfuncs:
                lines.add(
                    2, f"self.sequences.{subseqs.name}." f"{func}(self.idx_sim)"
                )
    return lines
@property
def new2old(self) -> List[str]:
    """Old states to new states statements."""
    lines = Lines()
    if self.model.sequences.states:
        print(" . new2old")
        lines.add(1, get_methodheader("new2old", nogil=True))
        self._add_cdef_jdxs(lines, self.model.sequences.states)
        for seq in self.model.sequences.states:
            if seq.NDIM == 0:
                # Scalar states: plain assignment.
                lines.add(
                    2,
                    f"self.sequences.old_states.{seq.name} = "
                    f"self.sequences.new_states.{seq.name}",
                )
            else:
                # Multidimensional states: one nested loop per dimension,
                # collecting the index expression ("jdx0,jdx1,...") on the way.
                indexing = ""
                for idx in range(seq.NDIM):
                    lines.add(
                        2 + idx,
                        f"for jdx{idx} in range(self.sequences.states."
                        f"_{seq.name}_length_{idx}):",
                    )
                    indexing += f"jdx{idx},"
                # Drop the trailing comma.
                indexing = indexing[:-1]
                lines.add(
                    2 + seq.NDIM,
                    f"self.sequences.old_states.{seq.name}[{indexing}] = "
                    f"self.sequences.new_states.{seq.name}[{indexing}]",
                )
    return lines
def _call_methods(
    self,
    name: str,
    methods: Tuple[Type[modeltools.Method], ...],
    idx_as_arg: bool = False,
) -> Lines:
    """Generate a model method called `name` invoking the given methods in
    order (or a bare `pass` when none are given); optionally, the method
    accepts an `idx` argument and stores it as `idx_sim` first."""
    lines = Lines()
    if not hasattr(self.model, name):
        return lines
    lines.add(1, get_methodheader(name, nogil=True, idxarg=idx_as_arg))
    if idx_as_arg:
        lines.add(2, "self.idx_sim = idx")
    if methods:
        for method in methods:
            lines.add(2, f"self.{method.__name__.lower()}()")
    else:
        # Cython requires a non-empty function body.
        lines.add(2, "pass")
    return lines
def _call_runmethods_segmentwise(
    self,
    methods: Tuple[Type[modeltools.Method], ...],
) -> Lines:
    """Generate a `run` method executing the given methods once per river
    segment and per internal run (for |SegmentModel| subclasses)."""
    lines = Lines()
    if hasattr(self.model, "run"):
        lines.add(1, get_methodheader("run", nogil=True, idxarg=False))
        lines.add(2, f"cdef {TYPE2STR[int]} idx_segment, idx_run")
        lines.add(
            2,
            "for idx_segment in range(self.parameters.control.nmbsegments):",
        )
        lines.add(3, "self.idx_segment = idx_segment")
        lines.add(3, "for idx_run in range(self.parameters.solver.nmbruns):")
        lines.add(4, "self.idx_run = idx_run")
        for method in methods:
            lines.add(4, f"self.{method.__name__.lower()}()")
    return lines
@property
def update_receivers(self) -> List[str]:
    """Lines of the model method with the same name."""
    methods = self.model.RECEIVER_METHODS
    return self._call_methods("update_receivers", methods, idx_as_arg=True)
@property
def update_inlets(self) -> List[str]:
    """Lines of the model method with the same name."""
    methods = self.model.INLET_METHODS
    return self._call_methods("update_inlets", methods)
def run(self, model: modeltools.RunModel) -> List[str]:
    """Return the lines of the model method with the same name."""
    methods = model.RUN_METHODS
    # Segment models execute their run methods segment by segment.
    if isinstance(model, modeltools.SegmentModel):
        return self._call_runmethods_segmentwise(methods)
    return self._call_methods("run", methods)
@property
def update_outlets(self) -> List[str]:
    """Lines of the model method with the same name."""
    methods = self.model.OUTLET_METHODS
    return self._call_methods("update_outlets", methods)
@property
def update_senders(self) -> List[str]:
    """Lines of the model method with the same name."""
    methods = self.model.SENDER_METHODS
    return self._call_methods("update_senders", methods, idx_as_arg=True)
@property
def update_outputs_model(self) -> List[str]:
    """Lines of the model method with the same name (except the `_model` suffix)."""
    lines = Lines()
    lines.add(1, get_methodheader("update_outputs", nogil=True, idxarg=False))
    # Delegate to each subsequence group that owns at least one
    # 0-dimensional output sequence, keeping the factors/fluxes/states order.
    any_relevant = False
    for groupname in ("factors", "fluxes", "states"):
        group = getattr(self.model.sequences, groupname)
        if self._filter_outputsequences(group):
            lines.add(2, f"self.sequences.{groupname}.update_outputs()")
            any_relevant = True
    if not any_relevant:
        # Cython requires a non-empty function body.
        lines.add(2, "pass")
    return lines
def update_outputs(self, subseqs: sequencetools.OutputSequences[Any]) -> List[str]:
    """Lines of the subsequences method with the same name."""
    lines = Lines()
    lines.add(1, get_methodheader("update_outputs", nogil=True, idxarg=False))
    relevant = self._filter_outputsequences(subseqs)
    if not relevant:
        # Cython requires a non-empty function body.
        lines.add(2, "pass")
        return lines
    for seq in relevant:
        seqname = seq.name
        lines.add(2, f"if self._{seqname}_outputflag:")
        lines.add(3, f"self._{seqname}_outputpointer[0] = self.{seqname}")
    return lines
def calculate_single_terms(self, model: modeltools.SolverModel) -> List[str]:
    """Return the lines of the model method with the same name."""
    lines = self._call_methods("calculate_single_terms", model.PART_ODE_METHODS)
    if lines:
        # Insert the call counter directly after the method header.
        # NOTE(review): the leading whitespace inside this literal must match
        # the indentation of the surrounding generated code; it looks
        # collapsed here — confirm against the upstream file.
        lines.insert(
            1, (" self.numvars.nmb_calls = self.numvars.nmb_calls + 1")
        )
    return lines
def calculate_full_terms(self, model: modeltools.SolverModel) -> List[str]:
    """Return the lines of the model method with the same name."""
    methods = model.FULL_ODE_METHODS
    return self._call_methods("calculate_full_terms", methods)
@property
def listofmodeluserfunctions(self) -> List[Tuple[str, Callable[..., Any]]]:
    """User functions of the model class."""
    # Collect all bound methods explicitly marked for cythonization via
    # their underlying function's `CYTHONIZE` flag.
    return [
        (name, member)
        for name, member in vars(self.model).items()
        if getattr(getattr(member, "__func__", None), "CYTHONIZE", False)
    ]
@property
def modeluserfunctions(self) -> List[str]:
    """Model-specific functions."""
    lines = Lines()
    for name, func in self.listofmodeluserfunctions:
        print(f" . {name}")
        converter = FuncConverter(self.model, name, func)
        lines.extend(converter.pyxlines)
    return lines
@property
def solve(self) -> List[str]:
    """Lines of the model method with the same name."""
    lines = Lines()
    solve = getattr(self.model, "solve", None)
    if solve is None:
        return lines
    print(" . solve")
    lines.extend(FuncConverter(self.model, "solve", solve).pyxlines)
    return lines
@classmethod
def _assign_seqvalues(
    cls,
    subseqs: Iterable[sequencetools.IOSequence],
    subseqs_name: str,
    target: str,
    index: Optional[str],
    load: bool,
) -> Iterator[str]:
    """Yield the Cython statements that copy sequence values between the
    current attributes and a target buffer (e.g. `points`, `results`, `sum`);
    `index` optionally selects a buffer entry (e.g. `idx_stage`), and
    `load=True` reverses the copy direction (buffer -> current values)."""
    subseqs = list(subseqs)
    # `%s` placeholders are filled with each sequence's name below.
    from1 = f"self.sequences.{subseqs_name}.%s"
    to1 = f"self.sequences.{subseqs_name}._%s_{target}"
    if index is not None:
        to1 += f"[self.numvars.{index}]"
    if load:
        # Loading swaps source and destination.
        from1, to1 = to1, from1
    yield from cls._declare_idxs(subseqs)
    for seq in subseqs:
        from2 = from1 % seq.name
        to2 = to1 % seq.name
        if seq.NDIM == 0:
            yield f"{to2} = {from2}"
        elif seq.NDIM == 1:
            yield (
                f"for idx0 in range(self.sequences."
                f"{subseqs_name}._{seq.name}_length):"
            )
            yield f" {to2}[idx0] = {from2}[idx0]"
        elif seq.NDIM == 2:
            yield (
                f"for idx0 in range(self.sequences."
                f"{subseqs_name}._{seq.name}_length0):"
            )
            yield (
                f" for idx1 in range(self.sequences."
                f"{subseqs_name}._{seq.name}_length1):"
            )
            yield f" {to2}[idx0, idx1] = {from2}[idx0, idx1]"
        else:
            raise NotImplementedError(
                f"NDIM of sequence `{seq.name}` is higher than expected."
            )
@staticmethod
def _declare_idxs(subseqs: Iterable[sequencetools.IOSequence]) -> Iterator[str]:
    """Yield the single `cdef int` declaration covering the highest
    dimensionality among the given sequences (nothing for 0-dimensional)."""
    maxdim = max((seq.NDIM for seq in subseqs), default=0)
    if maxdim == 1:
        yield "cdef int idx0"
    elif maxdim == 2:
        yield "cdef int idx0, idx1"
@decorate_method
def get_point_states(self) -> Iterator[str]:
    """Get point statements for state sequences."""
    states = self.model.sequences.states
    return self._assign_seqvalues(states, "states", "points", "idx_stage", True)
@decorate_method
def set_point_states(self) -> Iterator[str]:
    """Set point statements for state sequences."""
    states = self.model.sequences.states
    return self._assign_seqvalues(states, "states", "points", "idx_stage", False)
@decorate_method
def set_result_states(self) -> Iterator[str]:
    """Get results statements for state sequences."""
    states = self.model.sequences.states
    return self._assign_seqvalues(states, "states", "results", "idx_method", False)
@decorate_method
def get_sum_fluxes(self) -> Iterator[str]:
    """Get sum statements for flux sequences."""
    numerics = self.model.sequences.fluxes.numericsequences
    return self._assign_seqvalues(numerics, "fluxes", "sum", None, True)
@decorate_method
def set_point_fluxes(self) -> Iterator[str]:
    """Set point statements for flux sequences."""
    numerics = self.model.sequences.fluxes.numericsequences
    return self._assign_seqvalues(numerics, "fluxes", "points", "idx_stage", False)
@decorate_method
def set_result_fluxes(self) -> Iterator[str]:
    """Set result statements for flux sequences."""
    numerics = self.model.sequences.fluxes.numericsequences
    return self._assign_seqvalues(numerics, "fluxes", "results", "idx_method", False)
@decorate_method
def integrate_fluxes(self) -> Iterator[str]:
    """Integrate statements for flux sequences."""
    # Declare the loop indices needed for the highest dimensionality
    # (max_ndim stays -1 when there are no numeric flux sequences).
    max_ndim = -1
    for seq in self.model.sequences.fluxes.numericsequences:
        max_ndim = max(max_ndim, seq.NDIM)
    if max_ndim == 0:
        yield "cdef int jdx"
    elif max_ndim == 1:
        yield "cdef int jdx, idx0"
    elif max_ndim == 2:
        yield "cdef int jdx, idx0, idx1"
    # NOTE(review): the leading whitespace inside the yielded code strings
    # appears collapsed in this copy — confirm against the upstream file.
    for seq in self.model.sequences.fluxes.numericsequences:
        to_ = f"self.sequences.fluxes.{seq.name}"
        from_ = f"self.sequences.fluxes._{seq.name}_points"
        # Runge-Kutta-style weighted sum over the stage points.
        coefs = (
            "self.numvars.dt * self.numconsts.a_coefs"
            "[self.numvars.idx_method-1, self.numvars.idx_stage, jdx]"
        )
        if seq.NDIM == 0:
            yield f"{to_} = 0."
            yield "for jdx in range(self.numvars.idx_method):"
            yield f" {to_} = {to_} +{coefs}*{from_}[jdx]"
        elif seq.NDIM == 1:
            yield (
                f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length):"
            )
            yield f" {to_}[idx0] = 0."
            yield " for jdx in range(self.numvars.idx_method):"
            yield (
                f" {to_}[idx0] = "
                f"{to_}[idx0] + {coefs}*{from_}[jdx, idx0]"
            )
        elif seq.NDIM == 2:
            yield (
                f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length0):"
            )
            yield (
                f" for idx1 in range("
                f"self.sequences.fluxes._{seq.name}_length1):"
            )
            yield f" {to_}[idx0, idx1] = 0."
            yield " for jdx in range(self.numvars.idx_method):"
            yield (
                f" {to_}[idx0, idx1] = "
                f"{to_}[idx0, idx1] + {coefs}*{from_}[jdx, idx0, idx1]"
            )
        else:
            raise NotImplementedError(
                f"NDIM of sequence `{seq.name}` is higher than expected."
            )
    @decorate_method
    def reset_sum_fluxes(self) -> Iterator[str]:
        """Reset sum statements for flux sequences.

        Yields Cython source lines that zero the `_sum` accumulator of each
        numerical flux sequence before a new integration step.
        """
        subseqs = list(self.model.sequences.fluxes.numericsequences)
        yield from PyxWriter._declare_idxs(subseqs)
        for seq in subseqs:
            to_ = f"self.sequences.fluxes._{seq.name}_sum"
            if seq.NDIM == 0:
                yield f"{to_} = 0."
            elif seq.NDIM == 1:
                yield (
                    f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length):"
                )
                yield f"    {to_}[idx0] = 0."
            elif seq.NDIM == 2:
                yield (
                    f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length0):"
                )
                yield (
                    f"    for idx1 in "
                    f"range(self.sequences.fluxes._{seq.name}_length1):"
                )
                yield f"        {to_}[idx0, idx1] = 0."
            else:
                raise NotImplementedError(
                    f"NDIM of sequence `{seq.name}` is higher than expected."
                )
    @decorate_method
    def addup_fluxes(self) -> Iterator[str]:
        """Add up statements for flux sequences.

        Yields Cython source lines that add the current flux values to the
        corresponding `_sum` accumulators.
        """
        subseqs = list(self.model.sequences.fluxes.numericsequences)
        yield from PyxWriter._declare_idxs(subseqs)
        for seq in subseqs:
            to_ = f"self.sequences.fluxes._{seq.name}_sum"
            from_ = f"self.sequences.fluxes.{seq.name}"
            if seq.NDIM == 0:
                yield f"{to_} = {to_} + {from_}"
            elif seq.NDIM == 1:
                yield (
                    f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length):"
                )
                yield f"    {to_}[idx0] = {to_}[idx0] + {from_}[idx0]"
            elif seq.NDIM == 2:
                yield (
                    f"for idx0 in " f"range(self.sequences.fluxes._{seq.name}_length0):"
                )
                yield (
                    f"    for idx1 in "
                    f"range(self.sequences.fluxes._{seq.name}_length1):"
                )
                yield (
                    f"        {to_}[idx0, idx1] = "
                    f"{to_}[idx0, idx1] + {from_}[idx0, idx1]"
                )
            else:
                raise NotImplementedError(
                    f"NDIM of sequence `{seq.name}` is higher than expected."
                )
    @decorate_method
    def calculate_error(self) -> Iterator[str]:
        """Calculate error statements.

        Yields Cython source lines that estimate the numerical error by
        comparing the flux results of the current integration method with
        those of the previous one (absolute and, optionally, relative).
        """
        subseqs = list(self.model.sequences.fluxes.numericsequences)
        assert isinstance(self.model, modeltools.ELSModel)
        if self.model.SOLVERSEQUENCES:
            # Restrict the error estimate to the explicitly declared solver
            # sequences, if the model defines any.
            subseqs = [
                seq for seq in subseqs if isinstance(seq, self.model.SOLVERSEQUENCES)  # type: ignore[arg-type] # pylint: disable=line-too-long
            ]
        yield from self._declare_idxs(subseqs)
        # Note: `userel` deliberately includes the trailing colon of the
        # generated `if` statement.
        userel = "self.numvars.use_relerror:"
        abserror = "self.numvars.abserror"
        relerror = "self.numvars.relerror"
        index = "self.numvars.idx_method"
        yield "cdef double abserror"
        yield f"{abserror} = 0."
        yield f"if {userel}"
        yield f"    {relerror} = 0."
        yield "else:"
        yield f"    {relerror} = inf"
        for seq in subseqs:
            results = f"self.sequences.fluxes._{seq.name}_results"
            if seq.NDIM == 0:
                yield f"abserror = fabs(" f"{results}[{index}]-{results}[{index}-1])"
                yield f"{abserror} = max({abserror}, abserror)"
                yield f"if {userel}"
                yield f"    if {results}[{index}] == 0.:"
                yield f"        {relerror} = inf"
                yield "    else:"
                yield (
                    f"        {relerror} = max("
                    f"{relerror}, fabs(abserror/{results}[{index}]))"
                )
            elif seq.NDIM == 1:
                yield (
                    f"for idx0 in range(" f"self.sequences.fluxes._{seq.name}_length):"
                )
                yield (
                    f"    abserror = fabs("
                    f"{results}[{index}, idx0]-{results}[{index}-1, idx0])"
                )
                yield f"    {abserror} = max({abserror}, abserror)"
                yield f"    if {userel}"
                yield f"        if {results}[{index}, idx0] == 0.:"
                yield f"            {relerror} = inf"
                yield "        else:"
                yield (
                    f"            {relerror} = max("
                    f"{relerror}, fabs(abserror/{results}[{index}, idx0]))"
                )
            elif seq.NDIM == 2:
                yield (
                    f"for idx0 in range(" f"self.sequences.fluxes._{seq.name}_length0):"
                )
                yield (
                    f"    for idx1 in range("
                    f"self.sequences.fluxes._{seq.name}_length1):"
                )
                yield (
                    f"        abserror = fabs({results}[{index}, "
                    f"idx0, idx1]-{results}[{index}-1, idx0, idx1])"
                )
                yield f"        {abserror} = max({abserror}, abserror)"
                yield f"        if {userel}"
                yield f"            if {results}[{index}, idx0, idx1] == 0.:"
                yield f"                {relerror} = inf"
                yield "            else:"
                yield (
                    f"                {relerror} = max("
                    f"{relerror}, "
                    f"fabs(abserror/{results}[{index}, idx0, idx1]))"
                )
            else:
                raise NotImplementedError(
                    f"NDIM of sequence `{seq.name}` is higher than expected."
                )
    @property
    def extrapolate_error(self) -> List[str]:
        """Extrapolate error statements.

        Returns the Cython lines for the model's `extrapolate_error` method,
        or an empty |Lines| object when the model does not define one.
        """
        lines = Lines()
        extrapolate_error = getattr(self.model, "extrapolate_error", None)
        if extrapolate_error:
            # Progress message mirroring the other code-generation steps.
            print(" . extrapolate_error")
            funcconverter = FuncConverter(
                self.model, "extrapolate_error", extrapolate_error
            )
            lines.extend(funcconverter.pyxlines)
        return lines
    def write_stubfile(self) -> None:
        """Write a stub file for the actual base or application model.
        At the moment, *HydPy* creates model objects quite dynamically. In many
        regards, this comes with lots of conveniences. However, there are two critical
        drawbacks compared to more static approaches: some amount of additional
        initialisation time and, more important, much opaqueness for code inspection
        tools. In this context, we experiment with "stub files" at the moment. These
        could either contain typing information only or define statically predefined
        model classes. The following example uses method |PyxWriter.write_stubfile| to
        write a (far from perfect) prototype stub file for base model |hland|:
        >>> from hydpy.models.hland import *
        >>> cythonizer.pyxwriter.write_stubfile()
        This is the path to the written file:
        >>> import os
        >>> import hydpy
        >>> filepath = os.path.join(hydpy.__path__[0], "hland.py")
        >>> os.path.exists(filepath)
        True
        However, it's just an experimental prototype, so we better remove it:
        >>> os.remove(filepath)
        >>> os.path.exists(filepath)
        False
        """
        # The stub is placed directly inside the installed `hydpy` package,
        # named after the model (e.g. `hydpy/hland.py`).
        hydpypath: str = hydpy.__path__[0]
        filepath = os.path.join(hydpypath, f"{self.model.name}.py")
        base = ".".join(self.model.__module__.split(".")[:3])
        with open(filepath, "w", encoding=config.ENCODING) as stubfile:
            # File header: star-import the model module and the fast-access
            # base classes used below.
            stubfile.write(
                f"# -*- coding: utf-8 -*-\n\n"
                f"import hydpy\n"
                f"from {base} import *\n"
                f"from hydpy.core.parametertools import (\n"
                f"    FastAccess,)\n"
                f"from hydpy.core.parametertools import (\n"
                f"    Parameters, FastAccessParameter)\n"
                f"from hydpy.core.sequencetools import (\n"
                f"    Sequences,)\n\n"
            )
            # One FastAccess class per parameter subgroup, annotating each
            # parameter type.
            for subpars in self.model.parameters:
                classname = f"FastAccess{subpars.name.capitalize()}Parameters"
                stubfile.write(f"\n\nclass {classname}(FastAccessParameter):\n")
                for partype in subpars.CLASSES:
                    stubfile.write(
                        f"    {partype.__name__.lower()}: "
                        f"{partype.__module__}.{partype.__name__}\n"
                    )
            # One subclass per parameter subgroup, referencing its FastAccess
            # counterpart.
            for subpars in self.model.parameters:
                classname = f"{subpars.name.capitalize()}Parameters"
                stubfile.write(f"\n\nclass {classname}({classname}):\n")
                stubfile.write(f"    fastaccess: FastAccess{classname}\n")
                for partype in subpars.CLASSES:
                    stubfile.write(
                        f"    {partype.__name__.lower()}: "
                        f"{partype.__module__}.{partype.__name__}\n"
                    )
            stubfile.write("\n\nclass Parameters(Parameters):\n")
            for subpars in self.model.parameters:
                classname = f"{subpars.name.capitalize()}Parameters"
                stubfile.write(f"    {subpars.name}: {classname}\n")
            # Analogous classes for the sequence subgroups.
            for subseqs in self.model.sequences:
                classname = f"FastAccess{type(subseqs).__name__}"
                stubfile.write(f"\n\nclass {classname}(FastAccess):\n")
                for seqtype in subseqs.CLASSES:
                    stubfile.write(
                        f"    {seqtype.__name__.lower()}: "
                        f"{seqtype.__module__}.{seqtype.__name__}\n"
                    )
            for subseqs in self.model.sequences:
                classname = type(subseqs).__name__
                stubfile.write(f"\n\nclass {classname}({classname}):\n")
                stubfile.write(f"    fastaccess: FastAccess{classname}\n")
                # State sequences additionally expose old/new fast-access
                # views for the previous and current time step.
                if classname == "StateSequences":
                    stubfile.write(f"    fastaccess_old: FastAccess{classname}\n")
                    stubfile.write(f"    fastaccess_new: FastAccess{classname}\n")
                for seqtype in subseqs.CLASSES:
                    stubfile.write(
                        f"    {seqtype.__name__.lower()}: "
                        f"{seqtype.__module__}.{seqtype.__name__}\n"
                    )
            stubfile.write("\n\nclass Sequences(Sequences):\n")
            for group in self.model.sequences:
                classname = type(group).__name__
                stubfile.write(f"    {group.name}: {classname}\n")
            # Model class plus one annotation per model method.
            stubfile.write(
                "\n\nclass Model(Model):\n"
                "    parameters: Parameters\n"
                "    sequences: Sequences\n"
            )
            for methodgroup in self.model.METHOD_GROUPS:
                for method in getattr(self.model, methodgroup):
                    stubfile.write(
                        f"    {method.__name__.lower()}: hydpy.core.modeltools.Method\n"
                    )
            # Module-level names star-imported from the model module.
            stubfile.write("\n\nmodel: Model\n")
            stubfile.write("parameters: Parameters\n")
            stubfile.write("sequences: Sequences\n")
            for subpars in self.model.parameters:
                classname = f"{subpars.name.capitalize()}Parameters"
                stubfile.write(f"{subpars.name}: {classname}\n")
            for subseqs in self.model.sequences:
                classname = type(subseqs).__name__
                stubfile.write(f"{subseqs.name}: {classname}\n")
            if self.model.parameters.control:
                for partype in self.model.parameters.control.CLASSES:
                    stubfile.write(
                        f"{partype.__name__.lower()}: "
                        f"{partype.__module__}.{partype.__name__}\n"
                    )
class FuncConverter:
    """Helper class for class |PyxWriter| that analyses Python functions and provides
    the required Cython code via property |FuncConverter.pyxlines|."""
    # The model instance owning the function, the target name of the
    # generated Cython function, and the Python function to convert.
    model: modeltools.Model
    funcname: str
    func: Callable[..., Any]
    def __init__(
        self, model: modeltools.Model, funcname: str, func: Callable[..., Any]
    ) -> None:
        self.model = model
        self.funcname = funcname
        self.func = func  # type: ignore[assignment]
    @property
    def argnames(self) -> List[str]:
        """The argument names of the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).argnames
        ['model']
        """
        return inspect.getargs(self.func.__code__)[0]
    @property
    def varnames(self) -> Tuple[str, ...]:
        """The variable names of the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).varnames
        ('self', 'con', 'inp', 'fac', 'k')
        """
        # `model` is renamed to `self` because the generated code is a
        # method of the Cython model class.
        return tuple(
            vn if vn != "model" else "self" for vn in self.func.__code__.co_varnames
        )
    @property
    def locnames(self) -> List[str]:
        """The variable names of the handled function except for the argument names.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).locnames
        ['self', 'con', 'inp', 'fac', 'k']
        """
        return [vn for vn in self.varnames if vn not in self.argnames]
    @property
    def subgroupnames(self) -> List[str]:
        """The complete names of the subgroups relevant for the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).subgroupnames
        ['parameters.control', 'sequences.inputs', 'sequences.factors']
        """
        names = []
        for groupname in ("parameters", "sequences"):
            for subgroup in getattr(self.model, groupname):
                # Subgroups are referenced in method bodies via three-letter
                # shortcuts (e.g. `con` for `control`).
                if subgroup.name[:3] in self.varnames:
                    names.append(groupname + "." + subgroup.name)
        if "old" in self.varnames:
            names.append("sequences.old_states")
        if "new" in self.varnames:
            names.append("sequences.new_states")
        return names
    @property
    def subgroupshortcuts(self) -> List[str]:
        """The abbreviated names of the subgroups relevant for the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).subgroupshortcuts
        ['con', 'inp', 'fac']
        """
        return [name.split(".")[-1][:3] for name in self.subgroupnames]
    @property
    def untypedvarnames(self) -> List[str]:
        """The names of the untyped variables used in the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).untypedvarnames
        ['k']
        """
        return [
            name
            for name in self.varnames
            if name not in self.subgroupshortcuts + ["self"]
        ]
    @property
    def untypedarguments(self) -> List[str]:
        """The names of the untyped arguments used by the current function.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).untypedarguments
        []
        """
        # An argument is recognised by its occurrence in the (cleaned)
        # `def` line, either followed by a comma or the closing bracket.
        defline = self.cleanlines[0]
        return [
            name
            for name in self.untypedvarnames
            if ((f", {name}," in defline) or (f", {name})" in defline))
        ]
    @property
    def untypedinternalvarnames(self) -> List[str]:
        """The names of the untyped variables used in the current function except for
        those of the arguments.
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        >>> FuncConverter(model, None, model.calc_tc_v1).untypedinternalvarnames
        ['k']
        """
        return [
            name for name in self.untypedvarnames if name not in self.untypedarguments
        ]
    @property
    def cleanlines(self) -> List[str]:
        """The cleaned code lines of the current function.
        The implemented cleanups:
        * eventually, remove method version
        * remove all docstrings
        * remove all comments
        * remove all empty lines
        * remove line breaks within brackets
        * remove the phrase `modelutils`
        * remove all lines containing the phrase `fastaccess`
        * replace all shortcuts with complete reference names
        * replace "model." with "self."
        """
        code = inspect.getsource(self.func)
        # Splitting on triple quotes and keeping every second part drops the
        # docstring segments.
        code = "\n".join(code.split('"""')[::2])
        code = code.replace("modelutils.", "")
        code = code.replace("model.", "self.")
        for (name, shortcut) in zip(self.subgroupnames, self.subgroupshortcuts):
            code = code.replace(f"{shortcut}.", f"self.{name}.")
        code = self.remove_linebreaks_within_equations(code)
        lines = code.splitlines()
        self.remove_imath_operators(lines)
        del lines[0]  # remove @staticmethod
        lines = [line[4:] for line in lines]  # unindent
        argnames = self.argnames
        argnames[0] = "self"
        lines[0] = f"def {self.funcname}({', '.join(argnames)}):"
        # Strip comments, `fastaccess` shortcut assignments, and empty lines.
        lines = [line.split("#")[0] for line in lines]
        lines = [line for line in lines if "fastaccess" not in line]
        lines = [line.rstrip() for line in lines if line.rstrip()]
        return Lines(*lines)
    @staticmethod
    def remove_linebreaks_within_equations(code: str) -> str:
        r"""Remove line breaks within equations.
        The following example is not an exhaustive test but shows how the method works
        in principle:
        >>> code = "asdf = \\\n(a\n+b)"
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> FuncConverter.remove_linebreaks_within_equations(code)
        'asdf = (a+b)'
        """
        code = code.replace("\\\n", "")
        chars = []
        # `counter` tracks the bracket nesting depth; newlines inside
        # brackets are dropped.
        counter = 0
        for char in code:
            if char in ("(", "[", "{"):
                counter += 1
            elif char in (")", "]", "}"):
                counter -= 1
            if not (counter and (char == "\n")):
                chars.append(char)
        return "".join(chars)
    @staticmethod
    def remove_imath_operators(lines: List[str]) -> None:
        """Remove mathematical expressions that require Python's global interpreter
        locking mechanism.
        The following example is not an exhaustive test but shows how the method works
        in principle:
        >>> lines = ["    x += 1*1"]
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> FuncConverter.remove_imath_operators(lines)
        >>> lines
        ['    x = x + (1*1)']
        """
        for idx, line in enumerate(lines):
            for operator_ in ("+=", "-=", "**=", "*=", "//=", "/=", "%="):
                sublines = line.split(operator_)
                if len(sublines) > 1:
                    # Total spaces minus non-leading spaces gives the width
                    # of the leading indentation.
                    indent = line.count(" ") - line.lstrip().count(" ")
                    sublines = [sl.strip() for sl in sublines]
                    line = (
                        f"{indent*' '}{sublines[0]} = "
                        f"{sublines[0]} {operator_[:-1]} ({sublines[1]})"
                    )
                    lines[idx] = line
    @property
    def pyxlines(self) -> List[str]:
        """Cython code lines of the current function.
        Assumptions:
        * The function shall be a method.
        * The method shall be inlined.
        * Annotations specify all argument and return types.
        * Local variables are generally of type `int` but of type `double` when their
          name starts with `d_`.
        We import some classes and prepare a pure-Python instance of application model
        |hland_v1|:
        >>> from types import MethodType
        >>> from hydpy.core.modeltools import Method, Model
        >>> from hydpy.core.typingtools import Vector
        >>> from hydpy.cythons.modelutils import FuncConverter
        >>> from hydpy import prepare_model, pub
        >>> with pub.options.usecython(False):
        ...     model = prepare_model("hland_v1")
        First, we show an example on a standard method without additional arguments and
        returning nothing but requiring two local variables:
        >>> class Calc_Test_V1(Method):
        ...     @staticmethod
        ...     def __call__(model: Model) -> None:
        ...         con = model.parameters.control.fastaccess
        ...         flu = model.sequences.fluxes.fastaccess
        ...         inp = model.sequences.inputs.fastaccess
        ...         for k in range(con.nmbzones):
        ...             d_pc = con.kg[k]*inp.p[k]
        ...             flu.pc[k] = d_pc
        >>> model.calc_test_v1 = MethodType(Calc_Test_V1.__call__, model)
        >>> FuncConverter(model, "calc_test_v1", model.calc_test_v1).pyxlines
        cpdef inline void calc_test_v1(self) nogil:
            cdef double d_pc
            cdef int k
            for k in range(self.parameters.control.nmbzones):
                d_pc = self.parameters.control.kg[k]*self.sequences.inputs.p[k]
                self.sequences.fluxes.pc[k] = d_pc
        <BLANKLINE>
        The second example shows that `float` and `Vector` annotations
        translate into `double` and `double[:]` types, respectively:
        >>> class Calc_Test_V2(Method):
        ...     @staticmethod
        ...     def __call__(
        ...             model: Model, value: float, values: Vector) -> float:
        ...         con = model.parameters.control.fastaccess
        ...         return con.kg[0]*value*values[1]
        >>> model.calc_test_v2 = MethodType(Calc_Test_V2.__call__, model)
        >>> FuncConverter(model, "calc_test_v2", model.calc_test_v2).pyxlines
        cpdef inline double calc_test_v2(self, double value, double[:] values) \
nogil:
            return self.parameters.control.kg[0]*value*values[1]
        <BLANKLINE>
        """
        annotations_ = get_type_hints(self.func)
        lines = ["    " + line for line in self.cleanlines]
        lines[0] = lines[0].lower()
        # Turn the `def` line into a `cpdef inline` declaration with the
        # translated return type and the (possibly empty) `nogil` marker.
        lines[0] = lines[0].replace(
            "def ", f"cpdef inline {TYPE2STR[annotations_['return']]} "
        )
        lines[0] = lines[0].replace("):", f") {_nogil}:")
        for name in self.untypedarguments:
            type_ = TYPE2STR[annotations_[name]]
            lines[0] = lines[0].replace(f", {name},", f", {type_} {name},")
            lines[0] = lines[0].replace(f", {name})", f", {type_} {name})")
        # Declare remaining local variables; the `d_` prefix marks doubles.
        for name in self.untypedinternalvarnames:
            type_ = "double" if name.startswith("d_") else "int"
            lines.insert(1, f"    cdef {type_} {name}")
        return Lines(*lines)
def exp(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.exp| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import exp
    >>> from unittest import mock
    >>> with mock.patch("numpy.exp") as func:
    ...     _ = exp(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.exp(double)
    return value
def log(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.log| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import log
    >>> from unittest import mock
    >>> with mock.patch("numpy.log") as func:
    ...     _ = log(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.log(double)
    return value
def fabs(double: float) -> float:
    """Cython wrapper for the |math.fabs| function of module |math| applied on a single
    |float| object.
    >>> from hydpy.cythons.modelutils import fabs
    >>> from unittest import mock
    >>> with mock.patch("math.fabs") as func:
    ...     _ = fabs(123.4)
    >>> func.call_args
    call(123.4)
    """
    # Fix: the docstring previously referred to |math.exp| although the
    # wrapped function is |math.fabs|.
    return math.fabs(double)
def sin(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.sin| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import sin
    >>> from unittest import mock
    >>> with mock.patch("numpy.sin") as func:
    ...     _ = sin(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.sin(double)
    return value
def cos(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.cos| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import cos
    >>> from unittest import mock
    >>> with mock.patch("numpy.cos") as func:
    ...     _ = cos(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.cos(double)
    return value
def tan(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.tan| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import tan
    >>> from unittest import mock
    >>> with mock.patch("numpy.tan") as func:
    ...     _ = tan(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.tan(double)
    return value
def asin(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.arcsin| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import asin
    >>> from unittest import mock
    >>> with mock.patch("numpy.arcsin") as func:
    ...     _ = asin(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.arcsin(double)
    return value
def acos(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.arccos| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import acos
    >>> from unittest import mock
    >>> with mock.patch("numpy.arccos") as func:
    ...     _ = acos(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.arccos(double)
    return value
def atan(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.arctan| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import atan
    >>> from unittest import mock
    >>> with mock.patch("numpy.arctan") as func:
    ...     _ = atan(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.arctan(double)
    return value
def isnan(double: float) -> float:
    """Pure-Python substitute for the Cython-level wrapper around the
    |numpy.isnan| function, applied on a single |float| object.
    >>> from hydpy.cythons.modelutils import isnan
    >>> from unittest import mock
    >>> with mock.patch("numpy.isnan") as func:
    ...     _ = isnan(123.4)
    >>> func.call_args
    call(123.4)
    """
    value = numpy.isnan(double)
    return value
def isinf(double: float) -> float:
    """Cython wrapper for the |numpy.isinf| function of module |numpy| applied on a
    single |float| object.
    >>> from hydpy.cythons.modelutils import isinf
    >>> from unittest import mock
    >>> with mock.patch("numpy.isinf") as func:
    ...     _ = isinf(123.4)
    >>> func.call_args
    call(123.4)
    """
    # Fix: the doctest previously imported `isnan` (copy-paste slip) although
    # the function under test is `isinf`.
    return numpy.isinf(double)
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/web-animations-js/src/animation.js |
(function(shared, scope, testing) {

  // Monotonically increasing counter giving each Animation a stable
  // creation-order id (used elsewhere for tie-breaking).
  shared.sequenceNumber = 0;

  // Minimal stand-in for the native AnimationPlaybackEvent delivered to
  // 'finish' listeners.
  var AnimationEvent = function(target, currentTime, timelineTime) {
    this.target = target;
    this.currentTime = currentTime;
    this.timelineTime = timelineTime;
    this.type = 'finish';
    this.bubbles = false;
    this.cancelable = false;
    this.currentTarget = target;
    this.defaultPrevented = false;
    this.eventPhase = Event.AT_TARGET;
    this.timeStamp = Date.now();
  };

  // Polyfill Animation: drives a single effect along the shared timeline.
  scope.Animation = function(effect) {
    this.id = '';
    if (effect && effect._id) {
      this.id = effect._id;
    }
    this._sequenceNumber = shared.sequenceNumber++;
    this._currentTime = 0;
    this._startTime = null;
    this._paused = false;
    this._playbackRate = 1;
    this._inTimeline = true;
    this._finishedFlag = true;
    this.onfinish = null;
    this._finishHandlers = [];
    this._effect = effect;
    this._inEffect = this._effect._update(0);
    this._idle = true;
    this._currentTimePending = false;
  };

  scope.Animation.prototype = {
    // Re-apply the effect at the current time and re-register with the
    // timeline when the animation becomes relevant again.
    _ensureAlive: function() {
      // If an animation is playing backwards and is not fill backwards/both
      // then it should go out of effect when it reaches the start of its
      // active interval (currentTime == 0).
      if (this.playbackRate < 0 && this.currentTime === 0) {
        this._inEffect = this._effect._update(-1);
      } else {
        this._inEffect = this._effect._update(this.currentTime);
      }
      if (!this._inTimeline && (this._inEffect || !this._finishedFlag)) {
        this._inTimeline = true;
        scope.timeline._animations.push(this);
      }
    },
    // Advance the internal clock, clamping at the active-interval limits
    // unless explicitly told to overshoot.
    _tickCurrentTime: function(newTime, ignoreLimit) {
      if (newTime != this._currentTime) {
        this._currentTime = newTime;
        if (this._isFinished && !ignoreLimit)
          this._currentTime = this._playbackRate > 0 ? this._totalDuration : 0;
        this._ensureAlive();
      }
    },
    get currentTime() {
      if (this._idle || this._currentTimePending)
        return null;
      return this._currentTime;
    },
    set currentTime(newTime) {
      newTime = +newTime;
      if (isNaN(newTime))
        return;
      scope.restart();
      if (!this._paused && this._startTime != null) {
        this._startTime = this._timeline.currentTime - newTime / this._playbackRate;
      }
      this._currentTimePending = false;
      if (this._currentTime == newTime)
        return;
      if (this._idle) {
        this._idle = false;
        this._paused = true;
      }
      this._tickCurrentTime(newTime, true);
      scope.applyDirtiedAnimation(this);
    },
    get startTime() {
      return this._startTime;
    },
    set startTime(newTime) {
      newTime = +newTime;
      if (isNaN(newTime))
        return;
      // Setting a start time on a paused or idle animation has no effect.
      if (this._paused || this._idle)
        return;
      this._startTime = newTime;
      this._tickCurrentTime((this._timeline.currentTime - this._startTime) * this.playbackRate);
      scope.applyDirtiedAnimation(this);
    },
    get playbackRate() {
      return this._playbackRate;
    },
    set playbackRate(value) {
      if (value == this._playbackRate) {
        return;
      }
      // Preserve the current time across the rate change by re-assigning it
      // afterwards (which recomputes the start time).
      var oldCurrentTime = this.currentTime;
      this._playbackRate = value;
      this._startTime = null;
      if (this.playState != 'paused' && this.playState != 'idle') {
        this._finishedFlag = false;
        this._idle = false;
        this._ensureAlive();
        scope.applyDirtiedAnimation(this);
      }
      if (oldCurrentTime != null) {
        this.currentTime = oldCurrentTime;
      }
    },
    // An animation is finished once it has run past either end of its
    // active interval in its current direction.
    get _isFinished() {
      return !this._idle && (this._playbackRate > 0 && this._currentTime >= this._totalDuration ||
          this._playbackRate < 0 && this._currentTime <= 0);
    },
    get _totalDuration() { return this._effect._totalDuration; },
    get playState() {
      if (this._idle)
        return 'idle';
      if ((this._startTime == null && !this._paused && this.playbackRate != 0) || this._currentTimePending)
        return 'pending';
      if (this._paused)
        return 'paused';
      if (this._isFinished)
        return 'finished';
      return 'running';
    },
    // Move the current time to the start of the active interval for the
    // current playback direction.
    _rewind: function() {
      if (this._playbackRate >= 0) {
        this._currentTime = 0;
      } else if (this._totalDuration < Infinity) {
        this._currentTime = this._totalDuration;
      } else {
        throw new DOMException(
            'Unable to rewind negative playback rate animation with infinite duration',
            'InvalidStateError');
      }
    },
    play: function() {
      this._paused = false;
      if (this._isFinished || this._idle) {
        this._rewind();
        this._startTime = null;
      }
      this._finishedFlag = false;
      this._idle = false;
      this._ensureAlive();
      scope.applyDirtiedAnimation(this);
    },
    pause: function() {
      if (!this._isFinished && !this._paused && !this._idle) {
        this._currentTimePending = true;
      } else if (this._idle) {
        this._rewind();
        this._idle = false;
      }
      this._startTime = null;
      this._paused = true;
    },
    // Jump straight to the end (or start, for negative rates) of the
    // active interval.
    finish: function() {
      if (this._idle)
        return;
      this.currentTime = this._playbackRate > 0 ? this._totalDuration : 0;
      this._startTime = this._totalDuration - this.currentTime;
      this._currentTimePending = false;
      scope.applyDirtiedAnimation(this);
    },
    cancel: function() {
      if (!this._inEffect)
        return;
      this._inEffect = false;
      this._idle = true;
      this._paused = false;
      // Fix: a previous revision also assigned `this._isFinished = true`
      // here, but `_isFinished` is a getter-only accessor, so the
      // assignment could never take effect (and throws in strict mode).
      this._finishedFlag = true;
      this._currentTime = 0;
      this._startTime = null;
      this._effect._update(null);
      // effects are invalid after cancellation as the animation state
      // needs to un-apply.
      scope.applyDirtiedAnimation(this);
    },
    reverse: function() {
      this.playbackRate *= -1;
      this.play();
    },
    // Only 'finish' events are supported by this polyfill.
    addEventListener: function(type, handler) {
      if (typeof handler == 'function' && type == 'finish')
        this._finishHandlers.push(handler);
    },
    removeEventListener: function(type, handler) {
      if (type != 'finish')
        return;
      var index = this._finishHandlers.indexOf(handler);
      if (index >= 0)
        this._finishHandlers.splice(index, 1);
    },
    // Dispatch the 'finish' event (asynchronously, per spec) exactly once
    // per transition into the finished state.
    _fireEvents: function(baseTime) {
      if (this._isFinished) {
        if (!this._finishedFlag) {
          var event = new AnimationEvent(this, this._currentTime, baseTime);
          var handlers = this._finishHandlers.concat(this.onfinish ? [this.onfinish] : []);
          setTimeout(function() {
            handlers.forEach(function(handler) {
              handler.call(event.target, event);
            });
          }, 0);
          this._finishedFlag = true;
        }
      } else {
        this._finishedFlag = false;
      }
    },
    // Called by the timeline on every frame; resolves a pending start time
    // and advances the current time.
    _tick: function(timelineTime, isAnimationFrame) {
      if (!this._idle && !this._paused) {
        if (this._startTime == null) {
          if (isAnimationFrame) {
            this.startTime = timelineTime - this._currentTime / this.playbackRate;
          }
        } else if (!this._isFinished) {
          this._tickCurrentTime((timelineTime - this._startTime) * this.playbackRate);
        }
      }
      if (isAnimationFrame) {
        this._currentTimePending = false;
        this._fireEvents(timelineTime);
      }
    },
    get _needsTick() {
      return (this.playState in {'pending': 1, 'running': 1}) || !this._finishedFlag;
    },
    // Book-keeping of which animations currently affect a given target.
    _targetAnimations: function() {
      var target = this._effect._target;
      if (!target._activeAnimations) {
        target._activeAnimations = [];
      }
      return target._activeAnimations;
    },
    _markTarget: function() {
      var animations = this._targetAnimations();
      if (animations.indexOf(this) === -1) {
        animations.push(this);
      }
    },
    _unmarkTarget: function() {
      var animations = this._targetAnimations();
      var index = animations.indexOf(this);
      if (index !== -1) {
        animations.splice(index, 1);
      }
    },
  };

  if (WEB_ANIMATIONS_TESTING) {
    testing.webAnimations1Animation = scope.Animation;
  }

})(webAnimationsShared, webAnimations1, webAnimationsTesting);
/Lantz-0.3.zip/Lantz-0.3/lantz/drivers/kentech/hri.py | from lantz import Feat, Action
from lantz.errors import InstrumentError
from lantz.messagebased import MessageBasedDriver
def between(s, before, after):
    """Return the part of `s` located between the first occurrence of
    `before` and the first occurrence of `after` (both searched from the
    beginning of `s`).

    Raises ValueError (via str.index) if either marker is missing.
    """
    start = s.index(before) + len(before)
    stop = s.index(after)
    return s[start:stop]
class HRI(MessageBasedDriver):
    """Kentech High Repetition Rate Image Intensifier.

    Message-based driver: commands are terminated with a carriage return
    and replies with a line feed (see DEFAULTS below).
    """
    # Communication defaults shared by all resource types.
    DEFAULTS = {'COMMON': {'write_termination': '\r',
                           'read_termination': '\n'}}
    def query(self, command, *, send_args=(None, None), recv_args=(None, None)):
        """Send query to the instrument and return the answer.
        Set remote mode if needed.

        :param command: command string to send.
        :param send_args: (termination, encoding) forwarded to the write.
        :param recv_args: (termination, encoding) forwarded to the read.
        """
        # The instrument ignores commands while in local mode; `recall`
        # checks the cached feat value without extra device traffic.
        if command and not self.recall('remote'):
            self.log_info('Setting Remote.')
            self.remote = True
        return super().query(command, send_args=send_args, recv_args=recv_args)
def query_expect(self, command, read_termination=None, expected='ok'):
"""Send a query and check that the answer contains the string.
:type command: str
:type read_termination: str | None
:type expected: str | None
"""
if command and not self.recall('remote'):
self.log_info('Setting Remote.')
self.remote = True
self.resource.write(command)
ans = self.read(read_termination)
if expected and not expected in ans:
raise InstrumentError("'{}' not in '{}'".format(expected, ans))
return ans
    @Action()
    def clear(self):
        """Clear the buffer.
        """
        # Two bare carriage returns presumably flush any partially entered
        # command from the device input buffer -- TODO confirm with manual.
        self.write('\r\r')
    @Feat(None, values={True, False})
    def remote(self, value):
        """Remote or local.
        """
        if value:
            # A bare carriage return switches the device into remote mode;
            # the echoed reply line is read and discarded.
            #self.query_expect('', None, None)
            self.query_expect('\r', expected=None)
            self.read()
        else:
            # chr(0) appears to act as the read termination for the LOCAL
            # reply -- TODO confirm against the instrument manual.
            return self.query_expect('LOCAL', chr(0), None)
@Feat(read_once=True)
def revision(self):
"""Revision.
"""
ans = self.query_expect('.REV', expected=None)
print(ans)
if 'UNDEFINED' in ans:
ans = '1.0'
else:
ans = self.read()
ans = ans.split()[1]
return ans
    @Feat(None, values={'ecl': 'ECLTRIG', 'ttl': 'TTLTRIG'})
    def trigger_logic(self, value):
        """Trigger logic.
        """
        # The mapped command string ('ECLTRIG' or 'TTLTRIG') is sent as-is.
        self.query_expect(value)
@Feat(None, values={'high': 'HITRIG', '50ohm': '50TRIG}'})
def trigger_ttl_termination(self, value):
"""Trigger termination for TTL logic (for ECL is fixed to 50 ohm).
"""
if self.recall('trigger_type') == 'ecl':
raise InstrumentError('Level triggering only with ECL')
self.query_expect(value)
@Feat(None, values={'rising': '+VETRIG', 'falling': '-VETRIG}'})
def trigger_edge(self, value):
"""Trigger on rising or falling edge.
"""
self.query_expect(value)
@Feat(None, values={'level': 'LVLTRIG', 'log': 'LOGTRIG}'})
def trigger_ecl_mode(self, value):
"""Trigger mode for ECL logic.
"""
if self.recall('trigger_type') == 'ttl':
raise InstrumentError('Level triggering only with ECL')
self.query_expect(value)
@Feat(units='centivolt', limits=(-40, 40, 1))
def trigger_ecl_level(self):
"""Trigger level for ECL logic, mode level.
"""
if self.revision >= 2.0:
ans = self.query_expect('THRESHV ?')
ans = between(ans, 'THRESHV ?', 'ok')
return float(ans.strip())
else:
ans = self.query_expect('THRESHV @ .')[8:]
try:
pos = ans.index('.')
except ValueError:
raise InstrumentError('Unsupported operation.')
return float(ans[pos+2:pos+7])
@trigger_ecl_level.setter
def trigger_ecl_level(self, value):
if self.revision >= 2.0:
self.query_expect('{:d} !THRESH'.format(value))
else:
value = 40 * value + 2000.0
self.query_expect('{:d} THRESH ! TRIG+RF>HW'.format(value))
@Feat(units='volt', limits=(-50, 50))
def clamp_voltage(self):
    """Most negative value of the gate pulse.

    Revision >= 2.0 firmware supports a direct query; older firmware
    is read via a memory peek (value reported in tenths of a volt).
    """
    # self.revision is a string; compare numerically to avoid a
    # str/float TypeError.
    if float(self.revision) >= 2.0:
        ans = self.query_expect('CLAMP ?')
        ans = between(ans, 'CLAMP ?', 'ok').strip()
        return float(ans)
    else:
        ans = self.query_expect('CLAMP @ .')
        try:
            pos = ans.index('.')
        except ValueError:
            raise InstrumentError('Unsupported operation.')
        return float(ans[pos+2:pos+7]) / 10.0
@clamp_voltage.setter
def clamp_voltage(self, value):
    average = self.recall('average_voltage')
    # Valid clamp values lie within 60 V below the average voltage.
    mn, mx = average - Q_(60, volt), average
    # Fixed: the original raised when the value WAS in range, which
    # rejected every valid setting (the message "Not in range" makes
    # the intent clear).
    if not (mn < value < mx):
        raise ValueError('Invalid clamp voltage. Not in range {}-{}'.format(mn, mx))
    # Hardware expects tenths of a volt.
    # NOTE(review): '{:d}' requires an int; confirm ``value * 10``
    # cannot be a float here.
    self.query_expect('{:d} CLAMP ! CLAMP>HW'.format(value * 10))
@Feat(units='volt', limits=(-50, 50))
def average_voltage(self):
    """Cathode potential bias with respect of MCP.

    Reported by the hardware in tenths of a volt.
    """
    # self.revision is a string; compare numerically to avoid a
    # str/float TypeError.
    if float(self.revision) >= 2.0:
        ans = self.query_expect('AVE ?')
        ans = between(ans, 'AVE ?', 'ok')
        return float(ans.strip()) / 10.
    else:
        # NOTE(review): this peeks THRESHV, not AVE, on old firmware;
        # looks like a copy/paste slip -- confirm against the manual.
        ans = self.query_expect('THRESHV @ .')[8:]
        try:
            pos = ans.index('.')
        except ValueError:
            raise InstrumentError('Unsupported operation.')
        return float(ans[pos+2:pos+7]) / 10.
@average_voltage.setter
def average_voltage(self, value):
    # Hardware expects tenths of a volt.
    # NOTE(review): '{:d}' requires an int; confirm ``value * 10``
    # cannot be a float here (limits are (-50, 50) with no step).
    self.query_expect('{:d} AVE ! AVE>HW'.format(value * 10))
@Feat()
def status(self):
    """Get status.

    Returns the raw status text; the instrument terminates the reply
    with a NUL character (chr(0)).
    """
    return self.query_expect(".STATUS", chr(0))
@Feat(None, units='volt', limits=(0, 1700))
def mcp(self, value):
    """MCP Voltage.
    """
    # Compare the revision numerically: the original lexicographic
    # string comparison (>= '2.0') breaks for revisions like '10.0'
    # and is inconsistent with the sibling revision checks.
    if float(self.revision) >= 2.0:
        return self.query_expect('{} !MCP'.format(value))
    else:
        return self.query_expect('{} !MCPVOLTS'.format(value))
@Feat(None, values={'inhibit': 0, 'rf': 21, 'ldc': 22, 'hdc': 23, 'dc': 24,
                    'user1': 25, 'user2': 26, 'user3': 27, 'user4': 28})
def mode(self, mode):
    """Gain modulation mode.

    HRI Machine Modes and Mode Indices
    None Mode
    0 INHIBIT
    2-10 COMB modes 200 ps to 1 ns inclusive (High rate operation)
    11-20 COMB modes 100 ps to 3 ns inclusive (Low rate (+GOI) operation)
    21 RF
    22 logic low duty cycle (LDC)
    23 logic high duty cycle
    24 DC
    25-28 user modes 1 to 4
    """
    #TODO: Modes [11-20] not available in rev < 2.0
    return self.query_expect("{} !MODE".format(mode))
@Feat(None)
def rfgain(self, value):
    """RF Gain.
    """
    # NOTE(review): uses query() while the sibling setters use
    # query_expect(); confirm the instrument echoes no confirmation
    # for !RFGAIN.
    return self.query("{} !RFGAIN".format(value))
@Feat()
def temperature(self):
    """Temperature.

    Only revision >= 2.0 firmware reports a temperature; older
    firmware returns 0.
    """
    # self.revision is a string, so the original ``== 2.0`` float
    # comparison was always False and this Feat always returned 0.
    # Compare numerically; >= matches the other revision gates.
    if float(self.revision) >= 2.0:
        return self.query("@TEMP .")
    return 0
@Feat(None, values={True, False})
def enabled(self, value):
    """MCP Enabled

    Old firmware uses dedicated +M/-M commands; revision >= 2.0
    firmware is toggled by switching the gain-modulation mode between
    0 (inhibit) and the previously active mode.
    """
    # self.revision is a string; compare numerically to avoid a
    # str/int TypeError.
    if float(self.revision) < 2.0:
        if value:
            self.query_expect('+M')
        else:
            self.query_expect('-M')
    else:
        if value:
            # Restore the mode that was active before disabling
            # (default: RF, mode index 21).
            self.mode = self.__dict__.get('_last_mode', 21)
        else:
            self._last_mode = self.recall('mode')
            self.mode = 0
if __name__ == '__main__':
    # Smoke-test entry point: exercises the HRI driver over a serial
    # port, either interactively (GUI) or with a scripted sequence.
    import argparse
    import lantz.log

    parser = argparse.ArgumentParser(description='Test Kentech HRI')
    parser.add_argument('-i', '--interactive', action='store_true',
                        default=False, help='Show interactive GUI')
    parser.add_argument('-p', '--port', type=str, default='17',
                        help='Serial port to connect to')
    args = parser.parse_args()
    lantz.log.log_to_socket(lantz.log.DEBUG)
    with HRI.from_serial_port(args.port, baudrate=9600) as inst:
        if args.interactive:
            from lantz.ui.app import start_test_app
            start_test_app(inst)
        else:
            # Scripted sequence: go remote, report revision, park in
            # inhibit while setting voltages, then switch to RF.
            #inst.clear()
            inst.remote = True
            print(inst.revision)
            inst.mode = "inhibit"
            inst.mcp = 350
            inst.rfgain = 99
            #print(inst.status)
            inst.mode = "rf"
            #print(inst.status)
            inst.remote = False
/Flask_Static_Digest-0.4.0-py3-none-any.whl/flask_static_digest/digester.py | import functools
import glob
import gzip
import hashlib
import json
import os.path
import re
import shutil
DIGESTED_FILE_REGEX = r"-[a-f\d]{32}"
CHUNK_SIZE = 1024 * 1024
def compile(
    input_path, output_path, digest_blacklist_filter, gzip_files, brotli_files
):
    """
    Generate md5 tagged static files compressed with gzip and brotli.

    :param input_path: The source path of your static files
    :type input_path: str
    :param output_path: The destination path of your static files
    :type output_path: str
    :param digest_blacklist_filter: Ignore compiling these file types
    :type digest_blacklist_filter: list
    :param gzip_files: Whether or not gzipped files will be generated
    :type gzip_files: bool
    :param brotli_files: Whether or not brotli files will be generated
    :type brotli_files: bool

    :return: None
    """
    # Bail out early when either directory is missing (input checked first).
    for label, path in (("input", input_path), ("output", output_path)):
        if not os.path.exists(path):
            print(f"The {label} path '{path}' does not exist")
            return None

    eligible_files = _filter_files(input_path, digest_blacklist_filter)
    manifest = _generate_manifest(
        eligible_files, gzip_files, brotli_files, output_path
    )
    _save_manifest(manifest, output_path)

    print(f"Check your digested files at '{output_path}'")
    return None
def clean(output_path, digest_blacklist_filter, gzip_files, brotli_files):
    """
    Delete the generated md5 tagged and gzipped static files.

    :param output_path: The path containing your digested static files
    :type output_path: str
    :param digest_blacklist_filter: Ignore cleaning these file types
    :type digest_blacklist_filter: list
    :param gzip_files: Whether or not gzipped files will be cleaned
    :type gzip_files: bool
    :param brotli_files: Whether or not brotli files will be cleaned
    :type brotli_files: bool

    :return: None
    """
    for item in glob.iglob(output_path + "**/**", recursive=True):
        if os.path.isfile(item):
            _, file_extension = os.path.splitext(item)
            basename = os.path.basename(item)

            # Remove digested copies (basename carries an md5 suffix)
            # unless the extension is blacklisted.
            if (
                re.search(DIGESTED_FILE_REGEX, basename)
                and file_extension not in digest_blacklist_filter
            ):
                if os.path.exists(item):
                    os.remove(item)

            # Remove compressed artifacts when their generation is enabled.
            if gzip_files and file_extension == ".gz":
                if os.path.exists(item):
                    os.remove(item)

            if brotli_files and file_extension == ".br":
                if os.path.exists(item):
                    os.remove(item)

    # Finally drop the manifest itself.
    manifest_path = os.path.join(output_path, "cache_manifest.json")

    if os.path.exists(manifest_path):
        os.remove(manifest_path)

    print(f"Check your cleaned files at '{output_path}'")
    return None
def _filter_files(input_path, digest_blacklist_filter):
    """Return every file under ``input_path`` that still needs digesting."""
    return [
        entry
        for entry in glob.iglob(input_path + "**/**", recursive=True)
        if os.path.isfile(entry)
        and not _is_compiled_file(entry, digest_blacklist_filter)
    ]
def _is_compiled_file(file_path, digest_blacklist_filter):
    """Return a truthy value when ``file_path`` must be skipped by compile.

    A file counts as already compiled when its basename carries an md5
    digest suffix, its extension is blacklisted, it is a compressed
    artifact (.gz / .br), or it is the manifest itself.
    """
    # The name half of splitext was never used; keep only the extension.
    _, file_extension = os.path.splitext(file_path)
    basename = os.path.basename(file_path)

    return (
        re.search(DIGESTED_FILE_REGEX, basename)
        or file_extension in digest_blacklist_filter
        or file_extension == ".gz"
        or file_extension == ".br"
        or basename == "cache_manifest.json"
    )
def _generate_manifest(files, gzip_files, brotli_files, output_path):
    """Digest each file and map its relative path to the digested name.

    Writes the digested (and optionally compressed) copies to disk as a
    side effect and returns the {original: digested} mapping.
    """
    manifest = {}

    for source_file in files:
        # Manifest keys always use forward slashes, even on Windows.
        relative_path = os.path.relpath(source_file, output_path).replace("\\", "/")
        name, extension = os.path.splitext(relative_path)
        digested_name = f"{name}-{_generate_digest(source_file)}{extension}"

        manifest[relative_path] = digested_name
        _write_to_disk(
            source_file, digested_name, gzip_files, brotli_files, output_path
        )

    return manifest
def _generate_digest(file):
digest = None
with open(file, "rb") as f:
digest = hashlib.md5(f.read()).hexdigest()
return digest
def _save_manifest(manifest, output_path):
manifest_content = json.dumps(manifest)
manifest_path = os.path.join(output_path, "cache_manifest.json")
with open(manifest_path, "w") as f:
f.write(manifest_content)
return None
def _write_to_disk(
file, digested_file_path, gzip_files, brotli_files, input_path
):
full_digested_file_path = os.path.join(input_path, digested_file_path)
# Copy file while preserving permissions and meta data if supported.
shutil.copy2(file, full_digested_file_path)
if gzip_files:
with open(file, "rb") as f_in:
with gzip.open(f"{file}.gz", "wb") as f_out:
shutil.copyfileobj(f_in, f_out)
shutil.copy2(f"{file}.gz", f"{full_digested_file_path}.gz")
if brotli_files:
import brotli
compressor = brotli.Compressor(quality=11)
with open(file, "rb") as f_in:
with open(f"{file}.br", "wb") as f_out:
read_chunk = functools.partial(f_in.read, CHUNK_SIZE)
for data in iter(read_chunk, b""):
f_out.write(compressor.process(data))
f_out.write(compressor.finish())
shutil.copy2(f"{file}.br", f"{full_digested_file_path}.br")
return None | PypiClean |
/GeneticEngine-0.7.3.tar.gz/GeneticEngine-0.7.3/geneticengine/algorithms/gp/simplegp.py | from __future__ import annotations
from typing import Any
from typing import Callable
from typing import TypeVar
from geneticengine.algorithms.callbacks.callback import Callback
from geneticengine.algorithms.callbacks.callback import DebugCallback
from geneticengine.algorithms.callbacks.callback import PrintBestCallback
from geneticengine.algorithms.callbacks.callback import ProgressCallback
from geneticengine.algorithms.callbacks.csv_callback import CSVCallback
from geneticengine.algorithms.callbacks.pge import PGECallback
from geneticengine.algorithms.gp.gp import GP
from geneticengine.algorithms.gp.operators.combinators import ExclusiveParallelStep
from geneticengine.algorithms.gp.operators.combinators import ParallelStep
from geneticengine.algorithms.gp.operators.combinators import SequenceStep
from geneticengine.algorithms.gp.operators.crossover import GenericCrossoverStep
from geneticengine.algorithms.gp.operators.elitism import ElitismStep
from geneticengine.algorithms.gp.operators.initializers import StandardInitializer
from geneticengine.algorithms.gp.operators.mutation import GenericMutationStep
from geneticengine.algorithms.gp.operators.novelty import NoveltyStep
from geneticengine.algorithms.gp.operators.selection import LexicaseSelection
from geneticengine.algorithms.gp.operators.selection import TournamentSelection
from geneticengine.algorithms.gp.operators.stop import (
AllFitnessTargetStoppingCriterium,
AnyOfStoppingCriterium,
SingleFitnessTargetStoppingCriterium,
GenerationStoppingCriterium,
)
from geneticengine.algorithms.gp.operators.stop import TimeStoppingCriterium
from geneticengine.algorithms.gp.structure import GeneticStep
from geneticengine.algorithms.gp.structure import PopulationInitializer
from geneticengine.algorithms.gp.structure import StoppingCriterium
from geneticengine.core.grammar import Grammar
from geneticengine.core.evaluators import SequentialEvaluator
from geneticengine.core.problems import MultiObjectiveProblem
from geneticengine.core.problems import Problem
from geneticengine.core.problems import SingleObjectiveProblem
from geneticengine.core.problems import wrap_depth_minimization
from geneticengine.core.random.sources import RandomSource
from geneticengine.core.representations.api import Representation
from geneticengine.core.representations.tree.operators import (
FullInitializer,
GrowInitializer,
InjectInitialPopulationWrapper,
RampedHalfAndHalfInitializer,
)
from geneticengine.core.representations.tree.treebased import DefaultTBCrossover
from geneticengine.core.representations.tree.treebased import DefaultTBMutation
from geneticengine.core.representations.tree.treebased import TreeBasedRepresentation
from geneticengine.core.representations.tree.treebased import TypeSpecificTBCrossover
from geneticengine.core.representations.tree.treebased import TypeSpecificTBMutation
from geneticengine.core.evaluators import Evaluator
P = TypeVar("P")
class SimpleGP(GP):
    """A simpler API to create GP instances.

    Defaults as given in A Field Guide to GP, p.17, by Poli and Mcphee.

    Args:
        grammar (Grammar): The grammar used to guide the search.
        representation (Representation): The individual representation used by the GP program. The default is
            TreeBasedRepresentation.
        problem (Problem): The problem we are solving. Either a SingleObjectiveProblem or a MultiObjectiveProblem.
        evaluation_function (Callable[[Any], float]): The fitness function. Should take in any valid individual and
            return a float. The default is that the higher the fitness, the more applicable is the solution to the
            problem. Turn on the parameter minimize to switch it around.
        minimize (bool): When switch on, the fitness function is reversed, so that a higher result from the fitness
            function corresponds to a less fit solution (default = False).
        target_fitness (Optional[float]): Sets a target fitness. When this fitness is reached, the algorithm stops
            running (default = None).
        favor_less_complex_trees (bool): If set to True, this gives a tiny penalty to deeper trees to favor simpler
            trees (default = False).
        randomSource (Callable[[int], RandomSource]): The random source function used by the program. Should take in an
            integer, representing the seed, and return a RandomSource.
        population_size (int): The population size (default = 200). Apart from the first generation, each generation
            the population is made up of the elites, novelties, and transformed individuals from the previous
            generation. Note that population_size > (n_elites + n_novelties + 1) must hold.
        n_elites (int): Number of elites, i.e. the number of best individuals that are preserved every generation
            (default = 5).
        n_novelties (int): Number of novelties, i.e. the number of newly generated individuals added to the population
            each generation. (default = 10).
        number_of_generations (int): Number of generations (default = 100).
        max_depth (int): The maximum depth a tree can have (default = 15).
        max_init_depth (int): The maximum depth a tree can have in the initialisation population (default = 15).
        selection_method (Tuple[str, int]): Allows the user to define the method to choose individuals for the next
            population (default = ("tournament", 5)).
        probability_mutation (float): probability that an individual is mutated (default = 0.01).
        probability_crossover (float): probability that an individual is chosen for cross-over (default = 0.9).
        either_mut_or_cro (float | None): Switch evolution style to do either a mutation or a crossover. The given float
            defines the chance of a mutation. Otherwise a crossover is performed. (default = None),
        hill_climbing (bool): Allows the user to change the standard mutation operations to the hill-climbing mutation
            operation, in which an individual is mutated to 5 different new individuals, after which the best is chosen
            to survive (default = False).
        specific_type_mutation (type): Specify a type that is given preference when mutation occurs (default = None),
        specific_type_crossover (type): Specify a type that is given preference when crossover occurs (default = None),
        depth_aware_mut (bool): If chosen, mutations are depth-aware, giving preference to operate on nodes closer to
            the root. (default = True).
        depth_aware_co (bool): If chosen, crossovers are depth-aware, giving preference to operate on nodes closer to
            the root. (default = True).
        force_individual (Any): Allows the incorporation of an individual in the first population (default = None).
        seed (int): The seed of the RandomSource (default = 123).
        timer_stop_criteria (bool): If set to True, the algorithm is stopped after the time limit
            (default = 60 seconds). Then the fittest individual is returned (default = False).
        timer_limit (int): The time limit of the timer.
        save_to_csv (str): Saves a CSV file with the details of all the individuals of all generations.
        save_genotype_as_string (bool): Turn this off if you don't want to safe all the genotypes as strings. This
            saves memory and a bit of time.
        test_data (Callable[[Any], Any]): Give test data (format: (X_test, y_test)) to test the individuals on test data
            during training and save that to the csv (default = None).
        only_record_best_inds (bool): Specify whether one or all individuals are saved to the csv files (default = True)
        callbacks (List[Callback]): The callbacks to define what is done with the returned prints from the algorithm
            (default = []).
        parallel_evaluation (bool): Performs evaluation of fitness in multiprocessing (default = False).
    """

    def __init__(
        self,
        grammar: Grammar,
        representation: type | None = None,
        problem: Problem | None = None,
        evaluation_function: Callable[
            [Any],
            float,
        ]
        | None = None,  # DEPRECATE in the next version
        minimize: bool = False,  # DEPRECATE in the next version
        target_fitness: float | None = None,  # DEPRECATE in the next version
        favor_less_complex_trees: bool = False,  # DEPRECATE in the next version
        source_generator: Callable[[int], RandomSource] = RandomSource,
        seed: int = 123,
        population_size: int = 200,
        initialization_method: str = "ramped",
        n_elites: int = 5,  # Shouldn't this be a percentage of population size?
        n_novelties: int = 10,
        number_of_generations: int = 100,
        max_depth: int = 15,
        # now based on depth, maybe on number of nodes?
        # selection-method is a tuple because tournament selection needs to receive the tournament size
        # but lexicase does not need a tuple
        selection_method: tuple[str, int | bool] = ("tournament", 5),
        # -----
        # As given in A Field Guide to GP, p.17, by Poli and Mcphee
        probability_mutation: float = 0.01,
        probability_crossover: float = 0.9,
        either_mut_or_cro: float | None = None,
        hill_climbing: bool = False,  # TODO
        specific_type_mutation: type | None = None,
        specific_type_crossover: type | None = None,
        depth_aware_mut: bool = False,
        depth_aware_co: bool = False,
        # -----
        force_individual: Any = None,
        # -----
        timer_stop_criteria: bool = False,
        timer_limit: int = 60,
        # ---
        evolve_grammar: bool = False,
        evolve_learning_rate: float = 0.01,
        # -----
        save_to_csv: str | None = None,
        save_genotype_as_string: bool = True,
        test_data: None
        | (
            Callable[
                [Any],
                float,
            ]
        ) = None,
        only_record_best_inds: bool = True,
        # -----
        verbose=1,
        parallel_evaluation=False,
        callbacks: list[Callback] | None = None,
        **kwargs,
    ):
        # --- Representation and population initialization -------------
        representation_class = representation or TreeBasedRepresentation
        representation_instance: Representation = representation_class(
            grammar,
            max_depth,
        )
        population_initializer: PopulationInitializer
        if representation_class != TreeBasedRepresentation:
            population_initializer = StandardInitializer()
        else:
            # Tree-based representations support grow/full/ramped init.
            population_initializer = {
                "grow": GrowInitializer,
                "full": FullInitializer,
                "ramped": RampedHalfAndHalfInitializer,
            }[
                initialization_method
            ]()  # type: ignore
        if force_individual:
            # Seed the initial population with the provided phenotype.
            population_initializer = InjectInitialPopulationWrapper(
                [representation_instance.phenotype_to_genotype(force_individual)],
                population_initializer,
            )
        # --- Problem (supports both the new and deprecated APIs) ------
        processed_problem: Problem = self.wrap_depth(
            self.process_problem(
                problem,
                evaluation_function,
                minimize,
            ),
            favor_less_complex_trees,
        )
        random_source = source_generator(seed)
        # --- Variation operators (mutation + crossover) ---------------
        step: GeneticStep
        mutation_operator = representation_instance.get_mutation()
        crossover_operator = representation_instance.get_crossover()
        if isinstance(representation_instance, TreeBasedRepresentation):
            if specific_type_mutation:
                mutation_operator = TypeSpecificTBMutation(specific_type_mutation, depth_aware=depth_aware_mut)
            else:
                mutation_operator = DefaultTBMutation(depth_aware=depth_aware_mut)
            if specific_type_crossover:
                crossover_operator = TypeSpecificTBCrossover(specific_type_crossover, depth_aware=depth_aware_co)
            else:
                crossover_operator = DefaultTBCrossover(depth_aware=depth_aware_co)
        if either_mut_or_cro is not None:
            # Exclusive mode: each individual gets EITHER a mutation
            # (probability either_mut_or_cro) OR a crossover.
            mutation_step = GenericMutationStep(
                1,
                operator=mutation_operator,
            )
            crossover_step = GenericCrossoverStep(1, operator=crossover_operator)
            step = ExclusiveParallelStep(
                [mutation_step, crossover_step],
                [either_mut_or_cro, 1 - either_mut_or_cro],
            )
        else:
            # Classic mode: crossover then mutation, each applied with
            # its own independent probability.
            mutation_step = GenericMutationStep(
                probability_mutation,
                operator=mutation_operator,
            )
            crossover_step = GenericCrossoverStep(probability_crossover, operator=crossover_operator)
            step = SequenceStep(mutation_step, crossover_step)
        # --- Selection -------------------------------------------------
        selection_step: GeneticStep
        if selection_method[0] == "tournament":
            selection_step = TournamentSelection(selection_method[1])
        elif selection_method[0] == "lexicase":
            # Second tuple slot (optional) toggles epsilon-lexicase.
            ep: bool = bool(selection_method[1]) if len(selection_method) > 1 else False
            selection_step = LexicaseSelection(epsilon=ep)
        else:
            raise ValueError(
                f"selection_method ({selection_method}) requires either tournament or lexicase",
            )
        step = SequenceStep(selection_step, step)
        # Final per-generation pipeline: elites + novelties + evolved rest.
        step = ParallelStep(
            [ElitismStep(), NoveltyStep(), step],
            [n_elites, n_novelties, population_size - n_novelties - n_elites],
        )
        # --- Stopping criteria -----------------------------------------
        stopping_criterium: StoppingCriterium
        if timer_stop_criteria:
            stopping_criterium = TimeStoppingCriterium(timer_limit)
        else:
            stopping_criterium = GenerationStoppingCriterium(number_of_generations)
        if target_fitness is not None:
            tg: StoppingCriterium
            if isinstance(processed_problem, SingleObjectiveProblem):
                tg = SingleFitnessTargetStoppingCriterium(target_fitness)
            else:
                tg = AllFitnessTargetStoppingCriterium([target_fitness])
            stopping_criterium = AnyOfStoppingCriterium(stopping_criterium, tg)
        # --- Callbacks and evaluation ----------------------------------
        self.callbacks: list[Callback] = []
        self.callbacks.extend(callbacks or [])
        evaluator: Evaluator = SequentialEvaluator()
        if parallel_evaluation:
            # Imported lazily to avoid multiprocessing setup costs when unused.
            from geneticengine.core.parallel_evaluation import ParallelEvaluator

            evaluator = ParallelEvaluator()
        if evolve_grammar:
            self.callbacks.append(PGECallback(evolve_learning_rate))
        if verbose > 2:
            self.callbacks.append(DebugCallback())
        if verbose > 1:
            self.callbacks.append(PrintBestCallback())
        if verbose == 1:
            self.callbacks.append(ProgressCallback())
        if save_to_csv:
            extra_columns = {}
            if test_data is not None:
                tf: Callable[[Any], float] = test_data
                extra_columns["test_data"] = lambda gen, pop, time, gp, ind: str(
                    tf(ind.get_phenotype()),
                )
            if save_genotype_as_string:
                extra_columns["genotype_as_str"] = lambda gen, pop, time, gp, ind: str(
                    ind.genotype,
                )
            c = CSVCallback(
                save_to_csv,
                only_record_best_ind=only_record_best_inds,
                extra_columns=extra_columns,
            )
            self.callbacks.append(c)
        super().__init__(
            representation_instance,
            processed_problem,
            random_source,
            population_size,
            population_initializer,
            step,
            stopping_criterium,
            self.callbacks,
            evaluator=lambda: evaluator,
        )

    def process_problem(
        self,
        problem: Problem | None,
        evaluation_function: Callable[[P], float] | None = None,
        minimize: bool = False,
    ) -> Problem:
        """This function is a placeholder until we deprecate all the old usage
        of GP class."""
        # A list-valued `minimize` (despite the bool annotation) selects
        # the multi-objective path -- legacy behavior kept on purpose.
        if problem:
            return problem
        elif isinstance(minimize, list) and evaluation_function:
            return MultiObjectiveProblem(minimize, evaluation_function)
        elif isinstance(minimize, bool) and evaluation_function:
            return SingleObjectiveProblem(evaluation_function, minimize)
        else:
            raise NotImplementedError(
                "This combination of parameters to define the problem is not valid",
            )

    def wrap_depth(self, problem: Problem, favor_less_complex_trees: bool = False):
        # Depth penalties only apply to single-objective problems;
        # multi-objective problems pass through unchanged.
        if isinstance(problem, SingleObjectiveProblem):
            if favor_less_complex_trees:
                return wrap_depth_minimization(problem)
            else:
                return problem
        else:
            assert isinstance(problem, MultiObjectiveProblem)
            return problem
/CHIP_IO-0.7.1.tar.gz/CHIP_IO-0.7.1/README.rst | CHIP_IO
============================
A CHIP GPIO library
Debian File Installation:
There are now pre-compiled binary deb files for the CHIP that do not require any build tools on a CHIP/CHIP Pro.
Go to this page: https://github.com/xtacocorex/CHIP_IO/releases/latest
Or
Go to this page: https://xtacocorex.github.io/chip_io_releases/index.html
Download the .deb file for the version of Python you are running.
Then install with dpkg, like the following example:
sudo dpkg -i python-chip-io_0.5.9-1_armhf.deb
Manual Installation::
For Python2.7::
sudo apt-get update
sudo apt-get install git build-essential python-dev python-pip flex bison chip-dt-overlays -y
git clone git://github.com/xtacocorex/CHIP_IO.git
cd CHIP_IO
sudo python setup.py install
cd ..
For Python3::
sudo apt-get update
sudo apt-get install git build-essential python3-dev python3-pip flex bison chip-dt-overlays -y
git clone git://github.com/xtacocorex/CHIP_IO.git
cd CHIP_IO
sudo python3 setup.py install
cd ..
PyPi Installation::
For Python2.7::
sudo apt-get update
sudo apt-get install git build-essential python-dev python-pip flex bison chip-dt-overlays -y
sudo pip install CHIP-IO
For Python3::
sudo apt-get update
sudo apt-get install git build-essential python3-dev python3-pip flex bison chip-dt-overlays -y
sudo pip3 install CHIP-IO
**Usage**
Using the library is very similar to the excellent RPi.GPIO library used on the Raspberry Pi. Below are some examples.
All scripts that require GPIO, PWM (HW and/or SW), and Overlay Manager need to be run with super user permissions!
**Allowable Pin Names for the Library**
The following "table" is the allowable pin names that are able to be used by the library. The Name column is the normal name used on the CHIP Headers, the Alt Name column is the value used by the PocketCHIP header (if it's broken out), and the Key is the Header and Pin Number the the Pin is physically located. Either of these 3 means is able to specify a pin in CHIP_IO.
+------------------+--------------------------+----------------+-----------------+-----------------+
| CHIP (Main Name) | PocketCHIP/CHIP Pro Name | Key (Alt Name) | HW Support | Edge Detect |
+------------------+--------------------------+----------------+-----------------+-----------------+
| TWI1-SDA | KPD-I2C-SDA | U13_9 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| TWI1-SCK | KPD-I2C-SCL | U13_11 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D2 | UART2-TX | U13_17 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| PWM0 | PWM0 | U13_18 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| PWM1 | PWM1 | EINT13 | CHIP PRO | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D4 | UART2-CTS | U13_19 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D3 | UART2-RX | U13_20 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D6 | LCD-D6 | U13_21 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D5 | UART2-RTS | U13_22 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D10 | LCD-D10 | U13_23 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D7 | LCD-D7 | U13_24 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D12 | LCD-D12 | U13_25 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D11 | LCD-D11 | U13_26 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D14 | LCD-D14 | U13_27 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D13 | LCD-D13 | U13_28 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D18 | LCD-D18 | U13_29 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D15 | LCD-D15 | U13_30 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D20 | LCD-D20 | U13_31 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D19 | LCD-D19 | U13_32 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D22 | LCD-D22 | U13_33 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D21 | LCD-D21 | U13_34 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-CLK | LCD-CLK | U13_35 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-D23 | LCD-D23 | U13_36 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-VSYNC | LCD-VSYNC | U13_37 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-HSYNC | LCD-HSYNC | U13_38 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LCD-DE | LCD-DE | U13_40 | CHIP | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| UART1-TX | UART-TX | U14_3 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| UART1-RX | UART-RX | U14_5 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| LRADC | ADC | U14_11 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P0 | XIO-P0 | U14_13 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P1 | XIO-P1 | U14_14 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P2 | GPIO1 | U14_15 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P3 | GPIO2 | U14_16 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P4 | GPIO3 | U14_17 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P5 | GPIO4 | U14_18 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P6 | GPIO5 | U14_19 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| XIO-P7 | GPIO6 | U14_20 | CHIP | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| AP-EINT1 | KPD-INT | U14_23 | CHIP/CHIP PRO | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| AP-EINT3 | AP-INT3 | U14_24 | CHIP/CHIP PRO | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| TWI2-SDA | I2C-SDA | U14_25 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| TWI2-SCK | I2C-SCL | U14_26 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSIPCK | SPI-SEL | U14_27 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSICK | SPI-CLK | U14_28 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSIHSYNC | SPI-MOSI | U14_29 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSIVSYNC | SPI-MISO | U14_30 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID0 | D0 | U14_31 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID1 | D1 | U14_32 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID2 | D2 | U14_33 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID3 | D3 | U14_34 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID4 | D4 | U14_35 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID5 | D5 | U14_36 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID6 | D6 | U14_37 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| CSID7 | D7 | U14_38 | CHIP/CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| I2S-MCLK | EINT19 | 21 | CHIP PRO | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
| I2S-BCLK | I2S-BCLK | 22 | CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| I2S-LCLK | I2S-LCLK | 23 | CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| I2S-DO | EINT19 | 24 | CHIP PRO | NO |
+------------------+--------------------------+----------------+-----------------+-----------------+
| I2S-DI | EINT24 | 25 | CHIP PRO | YES |
+------------------+--------------------------+----------------+-----------------+-----------------+
**GPIO Setup**
Import the library, and setup as GPIO.OUT or GPIO.IN::
import CHIP_IO.GPIO as GPIO
GPIO.setup("CSID0", GPIO.OUT)
You can also refer to the pin number::
GPIO.setup("U14_31", GPIO.OUT)
You can also refer to the pin based upon its alternate name::
GPIO.setup("GPIO1", GPIO.IN)
**GPIO Miscellaneous**
Debug can be enabled/disabled by the following command::
# Enable Debug
GPIO.toggle_debug()
You can determine if the hardware is a CHIP/CHIP Pro using the following::
# Determine hardware
# 0 For CHIP
# 1 For CHIP Pro
GPIO.is_chip_pro()
**GPIO Output**
Setup the pin for output, and write GPIO.HIGH or GPIO.LOW. Or you can use 1 or 0.::
import CHIP_IO.GPIO as GPIO
GPIO.setup("CSID0", GPIO.OUT)
GPIO.output("CSID0", GPIO.HIGH)
**GPIO Input**
Inputs work similarly to outputs.::
import CHIP_IO.GPIO as GPIO
GPIO.setup("CSID0", GPIO.IN)
Other options when setting up pins::
# Specify pull up/pull down settings on a pin
GPIO.setup("CSID0", GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Specify initial value for an output
GPIO.setup("CSID0", GPIO.OUT, initial=1)
Pull Up/Down values are only for pins that are provided by the R8, the XIO are not capable of this. The allowable values are: PUD_OFF, PUD_UP, and PUD_DOWN.
Polling inputs::
if GPIO.input("CSID0"):
print("HIGH")
else:
print("LOW")
Read lots of data::
# Get 8 bits of data in one shot
mybyte = GPIO.read_byte("LCD-D3")
# Get 16 bits of data in one shot
myword = GPIO.read_word("XIO-P4")
This code was initially added by brettcvz and I cleaned it up and expanded it.
You can quickly change a pins direction::
GPIO.direction("XIO-P3", GPIO.OUT)
GPIO.direction("XIO-P3", GPIO.IN)
You can also re-setup a pin in order to change direction, note that this is a slower operation::
GPIO.setup("XIO-P3", GPIO.OUT)
GPIO.setup("XIO-P3", GPIO.IN)
The edge detection code below only works for the AP-EINT1, AP-EINT3, and XIO Pins on the CHIP.
Waiting for an edge (GPIO.RISING, GPIO.FALLING, or GPIO.BOTH)::
GPIO.wait_for_edge(channel, GPIO.RISING)
Detecting events::
GPIO.setup("XIO-P0", GPIO.IN)
GPIO.add_event_detect("XIO-P0", GPIO.FALLING)
#your amazing code here
#detect wherever:
if GPIO.event_detected("XIO-P0"):
print "event detected!"
CHIP_IO can also handle adding callback functions on any pin that supports edge detection. Note that only one callback function can be specified per Pin, if you try to set more, an exception will be thrown.::
def mycallback(channel):
print("we hit the edge we want")
GPIO.setup("GPIO3", GPIO.IN)
# Add Event Detect and Callback Separately for Falling Edge
GPIO.add_event_detect("GPIO3", GPIO.FALLING)
GPIO.add_event_callback("GPIO3", mycallback)
# Add Event Detect and Callback Separately for Rising Edge
GPIO.add_event_detect("GPIO3", GPIO.RISING)
GPIO.add_event_callback("GPIO3", mycallback)
# Add Callback for Both Edges using the add_event_detect() method
GPIO.add_event_detect("GPIO3", GPIO.BOTH, mycallback)
# Remove callback with the following
GPIO.remove_event_detect("GPIO3")
# bouncetime is also able to be set for both GPIO.add_event_detect() and GPIO.add_event_callback()
GPIO.add_event_detect("GPIO3", GPIO.FALLING, bouncetime=300)
GPIO.add_event_callback("GPIO3", GPIO.RISING, mycallback, bouncetime=300)
**GPIO Cleanup**
To clean up the GPIO when done, do the following::
# Clean up every exported GPIO Pin
GPIO.cleanup()
# Clean up a single pin (keeping everything else intact)
GPIO.cleanup("XIO-P0")
**PWM**::
Hardware PWM requires a DTB Overlay loaded on the CHIP to allow the kernel to know there is a PWM device available to use.
::
import CHIP_IO.PWM as PWM
# Determine hardware
# 0 For CHIP
# 1 For CHIP Pro
PWM.is_chip_pro()
# Enable/Disable Debug
PWM.toggle_debug()
#PWM.start(channel, duty, freq=2000, polarity=0)
#duty values are valid 0 (off) to 100 (on)
PWM.start("PWM0", 50)
PWM.set_duty_cycle("PWM0", 25.5)
PWM.set_frequency("PWM0", 10)
# To stop PWM
PWM.stop("PWM0")
PWM.cleanup()
#For specific polarity: this example sets polarity to 1 on start:
PWM.start("PWM0", 50, 2000, 1)
**SOFTPWM**::
import CHIP_IO.SOFTPWM as SPWM
# Determine hardware
# 0 For CHIP
# 1 For CHIP Pro
SPWM.is_chip_pro()
# Enable/Disable Debug
SPWM.toggle_debug()
#SPWM.start(channel, duty, freq=2000, polarity=0)
#duty values are valid 0 (off) to 100 (on)
#you can choose any pin
SPWM.start("XIO-P7", 50)
SPWM.set_duty_cycle("XIO-P7", 25.5)
SPWM.set_frequency("XIO-P7", 10)
# To Stop SPWM
SPWM.stop("XIO-P7")
# Cleanup
SPWM.cleanup()
#For specific polarity: this example sets polarity to 1 on start:
SPWM.start("XIO-P7", 50, 2000, 1)
Use SOFTPWM at low speeds (hundreds of Hz) for the best results. Do not use for anything that needs high precision or reliability.
If using SOFTPWM and PWM at the same time, import CHIP_IO.SOFTPWM as SPWM or something different than PWM as to not confuse the library.
**SERVO**::
import CHIP_IO.SERVO as SERVO
# Determine hardware
# 0 For CHIP
# 1 For CHIP Pro
SERVO.is_chip_pro()
# Enable/Disable Debug
SERVO.toggle_debug()
#SERVO.start(channel, angle=0, range=180)
#angle values are between +/- range/2
#you can choose any pin except the XIO's
SERVO.start("CSID4", 50)
SERVO.set_angle("CSID4", 25.5)
SERVO.set_range("CSID4", 90)
# To Stop Servo
SERVO.stop("CSID4")
# Cleanup
SERVO.cleanup()
The Software Servo control only works on the LCD and CSI pins. The XIO is too slow to control.
**LRADC**::
The LRADC was enabled in the 4.4.13-ntc-mlc. This is a 6 bit ADC that is 2 Volt tolerant.
Sample code below details how to talk to the LRADC.::
import CHIP_IO.LRADC as ADC
# Enable/Disable Debug
ADC.toggle_debug()
# Check to see if the LRADC Device exists
# Returns True/False
ADC.get_device_exists()
# Setup the LRADC
# Specify a sampling rate if needed
ADC.setup(rate)
# Get the Scale Factor
factor = ADC.get_scale_factor()
# Get the allowable Sampling Rates
sampleratestuple = ADC.get_allowable_sample_rates()
# Set the sampling rate
ADC.set_sample_rate(rate)
# Get the current sampling rate
currentrate = ADC.get_sample_rate()
# Get the Raw Channel 0 or 1 data
raw = ADC.get_chan0_raw()
raw = ADC.get_chan1_raw()
# Get the factored ADC Channel data
fulldata = ADC.get_chan0()
fulldata = ADC.get_chan1()
**SPI**::
SPI requires a DTB Overlay to access. CHIP_IO does not contain any SPI specific code as the Python spidev module works when it can see the SPI bus.
**Overlay Manager**::
The Overlay Manager enables you to quickly load simple Device Tree Overlays. The options for loading are:
PWM0, SPI2, CUST. The Overlay Manager is smart enough to determine if you are trying to load PWM on a CHIP Pro and will fail due to the base DTB for the CHIP Pro supporting PWM0/1 out of the box.
Only one of each type of overlay can be loaded at a time, but all three options can be loaded simultaneously. So you can have SPI2 without PWM0, but you cannot have SPI2 loaded twice.
::
import CHIP_IO.OverlayManager as OM
# The toggle_debug() function turns on/off debug printing
OM.toggle_debug()
# To load an overlay, feed in the name to load()
OM.load("PWM0")
# To verify the overlay was properly loaded, the get_ functions return booleans
OM.get_pwm_loaded()
OM.get_spi_loaded()
# To unload an overlay, feed in the name to unload()
OM.unload("PWM0")
To use a custom overlay, you must build and compile it properly per the DIP Docs: http://docs.getchip.com/dip.html#development-by-example
There is no verification that the Custom Overlay is setup properly, it's fire and forget
::
import CHIP_IO.OverlayManager as OM
# The full path to the dtbo file needs to be specified
OM.load("CUST","/home/chip/projects/myfunproject/overlays/mycustomoverlay.dtbo")
# You can check for loading like above, but it's really just there for sameness
OM.get_custom_loaded()
# To unload, just call unload()
OM.unload("CUST")
**OverlayManager requires a 4.4 kernel with the CONFIG_OF_CONFIGFS option enabled in the kernel config.**
**Utilities**::
CHIP_IO now supports the ability to enable and disable the 1.8V port on U13. This voltage rail isn't enabled during boot.
To use the utilities, here is sample code::
import CHIP_IO.Utilities as UT
# Enable/Disable Debug
UT.toggle_debug()
# Enable 1.8V Output
UT.enable_1v8_pin()
# Set 2.0V Output
UT.set_1v8_pin_voltage(2.0)
# Set 2.6V Output
UT.set_1v8_pin_voltage(2.6)
# Set 3.3V Output
UT.set_1v8_pin_voltage(3.3)
# Disable 1.8V Output
UT.disable_1v8_pin()
# Get currently-configured voltage (returns False if the pin is not enabled as output)
UT.get_1v8_pin_voltage()
# Unexport Everything
UT.unexport_all()
# Determine if you are running a CHIP/CHIP Pro
# This returns True if the computer is a CHIP Pro and False if it is a CHIP
UT.is_chip_pro()
**Running tests**
Install py.test to run the tests. You'll also need the python compiler package for py.test.::
# Python 2.7
sudo apt-get install python-pytest
# Python 3
sudo apt-get install python3-pytest
To run the tests, do the following.::
# If only one version of Python is installed
# Python 2
sudo make pytest2
# Python 3
sudo make pytest3
# If more than one version of Python, run through both
sudo make test
**Credits**
The CHIP IO Python library was originally forked from the Adafruit Beaglebone IO Python Library.
The BeagleBone IO Python library was originally forked from the excellent MIT Licensed [RPi.GPIO](https://code.google.com/p/raspberry-gpio-python) library written by Ben Croston.
**License**
CHIP IO port by Robert Wolterman, released under the MIT License.
Beaglebone IO Library Written by Justin Cooper, Adafruit Industries. BeagleBone IO Python library is released under the MIT License.
| PypiClean |
/Nitrous-0.9.3-py3-none-any.whl/turbogears/i18n/data/ar_SA.py |
languages={'el': u'\u0627\u0644\u064a\u0648\u0646\u0627\u0646\u064a\u0629', 'gu': u'\u0627\u0644\u063a\u0648\u062c\u0627\u0631\u0627\u062a\u064a\u0629', 'en': u'\u0627\u0644\u0627\u0646\u062c\u0644\u064a\u0632\u064a\u0629', 'zh': u'\u0627\u0644\u0635\u064a\u0646\u064a\u0629', 'sw': u'\u0627\u0644\u0633\u0648\u0627\u062d\u0644\u064a\u0629', 'ca': u'\u0627\u0644\u0643\u0627\u062a\u0627\u0644\u0648\u064a\u0646\u064a\u0629', 'it': u'\u0627\u0644\u0627\u064a\u0637\u0627\u0644\u064a\u0629', 'ar': u'\u0627\u0644\u0639\u0631\u0628\u064a\u0629', 'id': u'\u0627\u0644\u0627\u0646\u062f\u0648\u0646\u064a\u0633\u064a\u0629', 'es': u'\u0627\u0644\u0627\u0633\u0628\u0627\u0646\u064a\u0629', 'ru': u'\u0627\u0644\u0631\u0648\u0633\u064a\u0629', 'nl': u'\u0627\u0644\u0647\u0648\u0644\u0646\u062f\u064a\u0629', 'pt': u'\u0627\u0644\u0628\u0631\u062a\u063a\u0627\u0644\u064a\u0629', 'tr': u'\u0627\u0644\u062a\u0631\u0643\u064a\u0629', 'ne': u'\u0627\u0644\u0646\u064a\u0628\u0627\u0644\u064a\u0629', 'lt': u'\u0627\u0644\u0644\u062a\u0648\u0627\u0646\u064a\u0629', 'pa': u'\u0627\u0644\u0628\u0646\u062c\u0627\u0628\u064a\u0629', 'th': u'\u0627\u0644\u062a\u0627\u064a\u0644\u0627\u0646\u062f\u064a\u0629', 'vi': u'\u0627\u0644\u0641\u064a\u062a\u0646\u0627\u0645\u064a\u0629', 'ro': u'\u0627\u0644\u0631\u0648\u0645\u0627\u0646\u064a\u0629', 'be': u'\u0627\u0644\u0628\u064a\u0644\u0648\u0631\u0648\u0633\u064a\u0629', 'fr': u'\u0627\u0644\u0641\u0631\u0646\u0633\u064a\u0629', 'bg': u'\u0627\u0644\u0628\u0644\u063a\u0627\u0631\u064a\u0629', 'uk': u'\u0627\u0644\u0627\u0648\u0643\u0631\u0627\u0646\u064a\u0629', 'hr': u'\u0627\u0644\u0643\u0631\u0648\u0627\u062a\u064a\u0629', 'bn': u'\u0627\u0644\u0628\u0646\u063a\u0627\u0644\u064a\u0629', 'bo': u'\u0627\u0644\u062a\u0628\u062a\u064a\u0629', 'da': u'\u0627\u0644\u062f\u0627\u0646\u0645\u0627\u0631\u0643\u064a\u0629', 'fa': u'\u0627\u0644\u0641\u0627\u0631\u0633\u064a\u0629', 'hi': u'\u0627\u0644\u0647\u0646\u062f\u064a\u0629', 'dz': 
u'\u0627\u0644\u0632\u0648\u0646\u062e\u0627\u064a\u0629', 'dv': u'\u0627\u0644\u0645\u0627\u0644\u062f\u064a\u0641\u064a\u0629', 'fi': u'\u0627\u0644\u0641\u0646\u0644\u0646\u062f\u064a\u0629', 'ja': u'\u0627\u0644\u064a\u0627\u0628\u0627\u0646\u064a\u0629', 'he': u'\u0627\u0644\u0639\u0628\u0631\u064a\u0629', 'tl': u'\u0627\u0644\u062a\u0627\u063a\u0627\u0644\u0648\u063a\u064a\u0629', 'sr': u'\u0627\u0644\u0635\u0631\u0628\u064a\u0629', 'sq': u'\u0627\u0644\u0627\u0644\u0628\u0627\u0646\u064a\u0629', 'mn': u'\u0627\u0644\u0645\u0646\u063a\u0648\u0644\u064a\u0629', 'ko': u'\u0627\u0644\u0643\u0648\u0631\u064a\u0629', 'km': u'\u0627\u0644\u062e\u0645\u064a\u0631\u064a\u0629', 'ur': u'\u0627\u0644\u0627\u0631\u062f\u064a\u0629', 'de': u'\u0627\u0644\u0627\u0644\u0645\u0627\u0646\u064a\u0629', 'ms': u'\u0644\u063a\u0629 \u0627\u0644\u0645\u0644\u0627\u064a\u0648', 'ug': u'\u0627\u0644\u0627\u063a\u0648\u0631\u064a\u0629', 'my': u'\u0627\u0644\u0628\u0648\u0631\u0645\u064a\u0629'}
countries={'BD': u'\u0628\u0646\u063a\u0644\u0627\u062f\u064a\u0634', 'BE': u'\u0628\u0644\u062c\u064a\u0643\u0627', 'BF': u'\u0628\u0648\u0631\u0643\u064a\u0646\u0627 \u0641\u0627\u0633\u0648', 'BG': u'\u0628\u0644\u063a\u0627\u0631\u064a\u0627', 'BA': u'\u0627\u0644\u0628\u0648\u0633\u0646\u0629 \u0648\u0627\u0644\u0647\u0631\u0633\u0643', 'BB': u'\u0628\u0631\u0628\u0627\u062f\u0648\u0633', 'BN': u'\u0628\u0631\u0648\u0646\u0627\u064a', 'BO': u'\u0628\u0648\u0644\u064a\u0641\u064a\u0627', 'BH': u'\u0627\u0644\u0628\u062d\u0631\u064a\u0646', 'BI': u'\u0628\u0648\u0631\u0648\u0646\u062f\u064a', 'BJ': u'\u0628\u0646\u064a\u0646', 'BT': u'\u0628\u0648\u062a\u0627\u0646', 'JM': u'\u062c\u0627\u0645\u0627\u064a\u0643\u0627', 'BW': u'\u0628\u0648\u062a\u0633\u0648\u0627\u0646\u0627', 'WS': u'\u0633\u0627\u0645\u0648\u0627', 'BR': u'\u0627\u0644\u0628\u0631\u0627\u0632\u064a\u0644', 'BS': u'\u0627\u0644\u0628\u0647\u0627\u0645\u0627', 'BY': u'\u0631\u0648\u0633\u064a\u0627 \u0627\u0644\u0628\u064a\u0636\u0627\u0621', 'BZ': u'\u0628\u0644\u064a\u0632', 'RU': u'\u0631\u0648\u0633\u064a\u0627', 'RW': u'\u0631\u0648\u0627\u0646\u062f\u0627', 'TM': u'\u062a\u0631\u0643\u0645\u0627\u0646\u0633\u062a\u0627\u0646', 'TJ': u'\u062a\u0627\u062c\u064a\u0643\u0633\u062a\u0627\u0646', 'RO': u'\u0631\u0648\u0645\u0627\u0646\u064a\u0627', 'GW': u'\u063a\u064a\u0646\u064a\u0627 \u0628\u064a\u0633\u0627\u0648', 'GT': u'\u063a\u0648\u0627\u062a\u064a\u0645\u0627\u0644\u0627', 'GR': u'\u0627\u0644\u064a\u0648\u0646\u0627\u0646', 'GQ': u'\u063a\u064a\u0646\u064a\u0627 \u0627\u0644\u0627\u0633\u062a\u0648\u0627\u0626\u064a\u0629', 'JP': u'\u0627\u0644\u064a\u0627\u0628\u0627\u0646', 'GY': u'\u063a\u0648\u0627\u064a\u0627\u0646\u0627', 'GE': u'\u062c\u0648\u0631\u062c\u064a\u0627', 'GD': u'\u063a\u0631\u064a\u0646\u0627\u062f\u0627', 'GB': u'\u0627\u0644\u0645\u0645\u0644\u0643\u0629 \u0627\u0644\u0645\u062a\u062d\u062f\u0629', 'GA': u'\u063a\u0627\u0628\u0648\u0646', 'SV': 
u'\u0627\u0644\u0633\u0644\u0641\u0627\u062f\u0648\u0631', 'GN': u'\u063a\u064a\u0646\u064a\u0627', 'GM': u'\u063a\u0627\u0645\u0628\u064a\u0627', 'GH': u'\u063a\u0627\u0646\u0627', 'OM': u'\u0639\u0645\u0627\u0646', 'TN': u'\u062a\u0648\u0646\u0633', 'JO': u'\u0627\u0644\u0627\u0631\u062f\u0646', 'HR': u'\u0643\u0631\u0648\u0627\u062a\u064a\u0627', 'HT': u'\u0647\u0627\u064a\u062a\u064a', 'HU': u'\u0647\u0646\u063a\u0627\u0631\u064a\u0627', 'HN': u'\u0647\u0646\u062f\u0648\u0631\u0627\u0633', 'VE': u'\u0641\u0646\u0632\u0648\u064a\u0644\u0627', 'PW': u'\u0628\u0627\u0644\u0627\u0648', 'PT': u'\u0627\u0644\u0628\u0631\u062a\u063a\u0627\u0644', 'PY': u'\u0628\u0627\u0631\u0627\u063a\u0648\u0627\u064a', 'IQ': u'\u0627\u0644\u0639\u0631\u0627\u0642', 'PA': u'\u0628\u0646\u0645\u0627', 'PG': u'\u0628\u0627\u0628\u0648\u0627 \u063a\u064a\u0646\u064a\u0627 \u0627\u0644\u062c\u062f\u064a\u062f\u0629', 'PE': u'\u0628\u064a\u0631\u0648', 'PK': u'\u0627\u0644\u0628\u0627\u0643\u0633\u062a\u0627\u0646', 'PH': u'\u0627\u0644\u0641\u064a\u0644\u0628\u064a\u0646', 'PL': u'\u0628\u0648\u0644\u0646\u062f\u0627', 'ZM': u'\u0632\u0627\u0645\u0628\u064a\u0627', 'EH': u'\u0627\u0644\u0635\u062d\u0631\u0627\u0621 \u0627\u0644\u063a\u0631\u0628\u064a\u0629', 'EE': u'\u0627\u0633\u062a\u0648\u0646\u064a\u0627', 'EG': u'\u0645\u0635\u0631', 'ZA': u'\u062c\u0646\u0648\u0628 \u0627\u0641\u0631\u064a\u0642\u064a\u0627', 'EC': u'\u0627\u0643\u0648\u0627\u062f\u0648\u0631', 'VN': u'\u0641\u064a\u062a\u0646\u0627\u0645', 'SB': u'\u062c\u0632\u0631 \u0633\u0644\u064a\u0645\u0627\u0646', 'ET': u'\u0627\u062b\u064a\u0648\u0628\u064a\u0627', 'SO': u'\u0627\u0644\u0635\u0648\u0645\u0627\u0644', 'ZW': u'\u0632\u064a\u0645\u0628\u0627\u0628\u0648\u064a', 'ES': u'\u0627\u0633\u0628\u0627\u0646\u064a\u0627', 'ER': u'\u0627\u0631\u062a\u064a\u0631\u064a\u0627', 'MD': u'\u0645\u0648\u0644\u062f\u0648\u0641\u0627', 'MG': u'\u0645\u062f\u063a\u0634\u0642\u0631', 'MA': 
u'\u0627\u0644\u0645\u063a\u0631\u0628', 'MC': u'\u0645\u0648\u0646\u0627\u0643\u0648', 'UZ': u'\u0627\u0632\u0628\u0643\u0633\u062a\u0627\u0646', 'MM': u'\u0645\u064a\u0627\u0646\u0645\u0627\u0631', 'ML': u'\u0645\u0627\u0644\u064a', 'MN': u'\u0645\u0646\u063a\u0648\u0644\u064a\u0627', 'MH': u'\u062c\u0632\u0631 \u0627\u0644\u0645\u0627\u0631\u0634\u0627\u0644', 'MK': u'\u0645\u0642\u062f\u0648\u0646\u064a\u0627', 'MU': u'\u0645\u0648\u0631\u064a\u0634\u0648\u0633', 'MT': u'\u0645\u0627\u0644\u0637\u0629', 'MW': u'\u0645\u0644\u0627\u0648\u064a', 'MV': u'\u0645\u0627\u0644\u062f\u064a\u0641', 'MR': u'\u0645\u0648\u0631\u064a\u062a\u0627\u0646\u064a\u0627', 'UG': u'\u0627\u0648\u063a\u0646\u062f\u0627', 'MY': u'\u0645\u0627\u0644\u064a\u0632\u064a\u0627', 'MX': u'\u0627\u0644\u0645\u0643\u0633\u064a\u0643', 'IL': u'\u0627\u0633\u0631\u0627\u0626\u064a\u0644', 'FR': u'\u0641\u0631\u0646\u0633\u0627', 'FI': u'\u0641\u0646\u0644\u0646\u062f\u0627', 'FJ': u'\u0641\u064a\u062c\u064a', 'FM': u'\u0645\u064a\u0643\u0631\u0648\u0646\u064a\u0632\u064a\u0627', 'NI': u'\u0646\u064a\u0643\u0627\u0631\u0627\u063a\u0648\u0627', 'NL': u'\u0647\u0648\u0644\u0646\u062f\u0627', 'NO': u'\u0627\u0644\u0646\u0631\u0648\u064a\u062c', 'NA': u'\u0646\u0627\u0645\u064a\u0628\u064a\u0627', 'VU': u'\u0641\u0627\u0646\u0648\u0622\u062a\u0648', 'NE': u'\u0627\u0644\u0646\u064a\u062c\u0631', 'NG': u'\u0646\u064a\u062c\u064a\u0631\u064a\u0627', 'NZ': u'\u0632\u064a\u0644\u0646\u062f\u0627 \u0627\u0644\u062c\u062f\u064a\u062f\u0629', 'NP': u'\u0627\u0644\u0646\u064a\u0628\u0627\u0644', 'NR': u'\u0646\u0627\u0648\u0631\u0648', 'CH': u'\u0633\u0648\u064a\u0633\u0631\u0627', 'CO': u'\u0643\u0648\u0644\u0648\u0645\u0628\u064a\u0627', 'CN': u'\u0627\u0644\u0635\u064a\u0646', 'CM': u'\u0627\u0644\u0643\u0627\u0645\u064a\u0631\u0648\u0646', 'CL': u'\u062a\u0634\u064a\u0644\u064a', 'CA': u'\u0643\u0646\u062f\u0627', 'CG': u'\u0627\u0644\u0643\u0648\u0646\u063a\u0648', 'CF': 
u'\u062c\u0645\u0647\u0648\u0631\u064a\u0629 \u0627\u0641\u0631\u064a\u0642\u064a\u0627 \u0627\u0644\u0648\u0633\u0637\u0649', 'CZ': u'\u062c\u0645\u0647\u0648\u0631\u064a\u0629 \u0627\u0644\u062a\u0634\u064a\u0643', 'CY': u'\u0642\u0628\u0631\u0635', 'CR': u'\u0643\u0648\u0633\u062a\u0627\u0631\u064a\u0643\u0627', 'CV': u'\u0627\u0644\u0631\u0623\u0633 \u0627\u0644\u0627\u062e\u0636\u0631', 'CU': u'\u0643\u0648\u0628\u0627', 'SZ': u'\u0633\u0648\u0627\u0632\u064a\u0644\u0627\u0646\u062f', 'SY': u'\u0633\u0648\u0631\u064a\u0629', 'KG': u'\u0642\u064a\u0631\u063a\u064a\u0632\u0633\u062a\u0627\u0646', 'KE': u'\u0643\u064a\u0646\u064a\u0627', 'SR': u'\u0633\u0648\u0631\u064a\u0646\u0627\u0645', 'KI': u'\u0643\u064a\u0631\u064a\u0628\u0627\u062a\u064a', 'KH': u'\u0643\u0645\u0628\u0648\u062f\u064a\u0627', 'KN': u'\u0633\u0627\u0646\u062a \u0643\u064a\u062a\u0633 \u0648\u0646\u064a\u0641\u064a\u0633', 'KM': u'\u062c\u0632\u0631 \u0627\u0644\u0642\u0645\u0631', 'ST': u'\u0633\u0627\u0646 \u062a\u0648\u0645\u064a \u0648\u0628\u0631\u064a\u0646\u0633\u064a\u0628\u064a', 'SK': u'\u0633\u0644\u0648\u0641\u0627\u0643\u064a\u0627', 'KR': u'\u0643\u0648\u0631\u064a\u0627 \u0627\u0644\u062c\u0646\u0648\u0628\u064a\u0629', 'SI': u'\u0633\u0644\u0648\u0641\u064a\u0646\u064a\u0627', 'KP': u'\u0643\u0648\u0631\u064a\u0627 \u0627\u0644\u0634\u0645\u0627\u0644\u064a\u0629', 'KW': u'\u0627\u0644\u0643\u0648\u064a\u062a', 'SN': u'\u0627\u0644\u0633\u0646\u063a\u0627\u0644', 'SM': u'\u0633\u0627\u0646 \u0645\u0627\u0631\u064a\u0646\u0648', 'SL': u'\u0633\u064a\u0631\u0627\u0644\u064a\u0648\u0646', 'SC': u'\u0633\u064a\u0634\u0644', 'KZ': u'\u0643\u0627\u0632\u0627\u062e\u0633\u062a\u0627\u0646', 'SA': u'\u0627\u0644\u0639\u0631\u0628\u064a\u0629 \u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629', 'SG': u'\u0633\u0646\u063a\u0627\u0641\u0648\u0631\u0629', 'SE': u'\u0627\u0644\u0633\u0648\u064a\u062f', 'SD': u'\u0627\u0644\u0633\u0648\u062f\u0627\u0646', 'DO': 
u'\u0627\u0644\u062c\u0645\u0647\u0648\u0631\u064a\u0629 \u0627\u0644\u062f\u0648\u0645\u064a\u0646\u064a\u0643\u064a\u0629', 'DM': u'\u062f\u0648\u0645\u064a\u0646\u064a\u0643\u0627', 'DJ': u'\u062c\u064a\u0628\u0648\u062a\u064a', 'DK': u'\u0627\u0644\u062f\u0627\u0646\u0645\u0631\u0643', 'DE': u'\u0627\u0644\u0645\u0627\u0646\u064a\u0627', 'YE': u'\u0627\u0644\u064a\u0645\u0646', 'DZ': u'\u0627\u0644\u062c\u0632\u0627\u0626\u0631', 'US': u'\u0627\u0644\u0627\u0648\u0644\u0627\u064a\u0627\u062a \u0627\u0644\u0645\u062a\u062d\u062f\u0629 \u0627\u0644\u0627\u0645\u0631\u064a\u0643\u064a\u0629', 'UY': u'\u0627\u0631\u0648\u063a\u0648\u0627\u064a', 'LB': u'\u0644\u0628\u0646\u0627\u0646', 'LC': u'\u0633\u0627\u0646\u062a \u0644\u0648\u0633\u064a\u0627', 'LA': u'\u0644\u0627\u0648\u0633', 'TV': u'\u062a\u0648\u0641\u0627\u0644\u0648', 'TW': u'\u062a\u0627\u064a\u0648\u0627\u0646', 'TT': u'\u062a\u0631\u064a\u0646\u064a\u062f\u0627\u062f \u0648\u062a\u0648\u0628\u0627\u063a\u0648', 'TR': u'\u062a\u0631\u0643\u064a\u0627', 'LK': u'\u0633\u0631\u064a \u0644\u0627\u0646\u0643\u0627', 'LI': u'\u0644\u064a\u062e\u062a\u0646\u0634\u062a\u0627\u064a\u0646', 'LV': u'\u0644\u0627\u062a\u0641\u064a\u0627', 'TO': u'\u062a\u0648\u0646\u063a\u0627', 'LT': u'\u0644\u064a\u062a\u0648\u0627\u0646\u064a\u0627', 'LU': u'\u0644\u0648\u0643\u0633\u0648\u0645\u0628\u0631\u063a', 'LR': u'\u0644\u064a\u0628\u064a\u0631\u064a\u0627', 'LS': u'\u0644\u064a\u0633\u0648\u062a\u0648', 'TH': u'\u062a\u0627\u064a\u0644\u0646\u062f', 'TG': u'\u062a\u0648\u063a\u0648', 'TD': u'\u062a\u0634\u0627\u062f', 'LY': u'\u0644\u064a\u0628\u064a\u0627', 'VA': u'\u0627\u0644\u0641\u0627\u062a\u064a\u0643\u0627\u0646', 'VC': u'\u0633\u0627\u0646\u062a \u0641\u0646\u0633\u0646\u062a \u0648\u062c\u0632\u0631 \u063a\u0631\u064a\u0646\u0627\u062f\u064a\u0646', 'AE': u'\u0627\u0644\u0627\u0645\u0627\u0631\u0627\u062a \u0627\u0644\u0639\u0631\u0628\u064a\u0629 \u0627\u0644\u0645\u062a\u062d\u062f\u0629', 'AD': 
u'\u0627\u0646\u062f\u0648\u0631\u0627', 'AG': u'\u0627\u0646\u062a\u064a\u063a\u0648\u0627 \u0648\u0628\u0631\u0628\u0648\u062f\u0627', 'AF': u'\u0627\u0641\u063a\u0627\u0646\u0633\u062a\u0627\u0646', 'AI': u'\u0627\u0644\u0628\u0627\u0646\u064a\u0627', 'IS': u'\u0627\u064a\u0633\u0644\u0646\u062f\u0627', 'IR': u'\u0627\u064a\u0631\u0627\u0646', 'AM': u'\u0627\u0631\u0645\u064a\u0646\u064a\u0627', 'IT': u'\u0627\u064a\u0637\u0627\u0644\u064a\u0627', 'AO': u'\u0627\u0646\u063a\u0648\u0644\u0627', 'AR': u'\u0627\u0644\u0627\u0631\u062c\u0646\u062a\u064a\u0646', 'AU': u'\u0627\u0633\u062a\u0631\u0627\u0644\u064a\u0627', 'AT': u'\u0627\u0644\u0646\u0645\u0633\u0627', 'IN': u'\u0627\u0644\u0647\u0646\u062f', 'TZ': u'\u062a\u0627\u0646\u0632\u0627\u0646\u064a\u0627', 'AZ': u'\u0622\u0630\u0631\u0628\u064a\u062c\u0627\u0646', 'IE': u'\u0627\u064a\u0631\u0644\u0646\u062f\u0627', 'ID': u'\u0627\u0646\u062f\u0648\u0646\u064a\u0633\u064a\u0627', 'UA': u'\u0627\u0648\u0643\u0631\u0627\u0646\u064a\u0627', 'QA': u'\u0642\u0637\u0631', 'MZ': u'\u0645\u0648\u0632\u0645\u0628\u064a\u0642'}
months=[u'\u064a\u0646\u0627\u064a\u0631', u'\u0641\u0628\u0631\u0627\u064a\u0631', u'\u0645\u0627\u0631\u0633', u'\u0623\u0628\u0631\u064a\u0644', u'\u0645\u0627\u064a\u0648', u'\u064a\u0648\u0646\u064a\u0648', u'\u064a\u0648\u0644\u064a\u0648', u'\u0623\u063a\u0633\u0637\u0633', u'\u0633\u0628\u062a\u0645\u0628\u0631', u'\u0623\u0643\u062a\u0648\u0628\u0631', u'\u0646\u0648\u0641\u0645\u0628\u0631', u'\u062f\u064a\u0633\u0645\u0628\u0631']
abbrMonths=[u'\u064a\u0646\u0627\u064a\u0631', u'\u0641\u0628\u0631\u0627\u064a\u0631', u'\u0645\u0627\u0631\u0633', u'\u0623\u0628\u0631\u064a\u0644', u'\u0645\u0627\u064a\u0648', u'\u064a\u0648\u0646\u064a\u0648', u'\u064a\u0648\u0644\u064a\u0648', u'\u0623\u063a\u0633\u0637\u0633', u'\u0633\u0628\u062a\u0645\u0628\u0631', u'\u0623\u0643\u062a\u0648\u0628\u0631', u'\u0646\u0648\u0641\u0645\u0628\u0631', u'\u062f\u064a\u0633\u0645\u0628\u0631']
days=[u'\u0627\u0644\u0627\u062b\u0646\u064a\u0646', u'\u0627\u0644\u062b\u0644\u0627\u062b\u0627\u0621', u'\u0627\u0644\u0623\u0631\u0628\u0639\u0627\u0621', u'\u0627\u0644\u062e\u0645\u064a\u0633', u'\u0627\u0644\u062c\u0645\u0639\u0629', u'\u0627\u0644\u0633\u0628\u062a', u'\u0627\u0644\u0623\u062d\u062f']
abbrDays=[u'\u0627\u0644\u0627\u062b\u0646\u064a\u0646', u'\u0627\u0644\u062b\u0644\u0627\u062b\u0627\u0621', u'\u0627\u0644\u0623\u0631\u0628\u0639\u0627\u0621', u'\u0627\u0644\u062e\u0645\u064a\u0633', u'\u0627\u0644\u062c\u0645\u0639\u0629', u'\u0627\u0644\u0633\u0628\u062a', u'\u0627\u0644\u0623\u062d\u062f']
dateFormats={'medium': '%d/%m/%Y', 'full': '%%(dayname)s, %d %%(monthname)s, %Y', 'long': '%d %%(monthname)s, %Y', 'short': '%d/%m/%Y'}
numericSymbols={'group': u'\u066c', 'nativeZeroDigit': u'\u0660', 'exponential': 'E', 'perMille': u'\u2030', 'nan': u'\ufffd', 'decimal': u'\u066b', 'percentSign': u'\u066a', 'list': ';', 'patternDigit': '#', 'plusSign': '+', 'infinity': u'\u221e', 'minusSign': '-'} | PypiClean |
/NVDA-addonTemplate-0.5.2.zip/NVDA-addonTemplate-0.5.2/NVDAAddonTemplate/data/{{cookiecutter.project_slug}}/scons-local-2.5.0/SCons/Script/SConscript.py | from __future__ import division
__revision__ = "src/engine/SCons/Script/SConscript.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import SCons
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Environment
import SCons.Errors
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Platform
import SCons.SConf
import SCons.Script.Main
import SCons.Tool
import SCons.Util
import collections
import os
import os.path
import re
import sys
import traceback
class SConscriptReturn(Exception):
    """Control-flow exception raised by Return() (when stop is true) to
    halt execution of the current SConscript file early; it is caught
    and swallowed by _SConscript() after the exec of the file."""
    pass
# Absolute path of the directory scons was launched from, captured at
# import time (before any chdir done while reading SConscript files).
launch_dir = os.path.abspath(os.curdir)
# Placeholder for the globals exposed to SConscript files; starts as None.
# NOTE(review): presumably populated lazily elsewhere in this module — confirm.
GlobalDict = None
# global exports set by Export():
global_exports = {}
# chdir flag: non-zero => change the OS directory into each SConscript's
# directory while it is being read (passed as change_os_dir to fs.chdir()).
sconscript_chdir = 1
def get_calling_namespaces():
    """Return the locals and globals for the function that called
    into this module in the current call stack.

    Returns a ``(locals, globals)`` tuple of dictionaries taken from the
    first stack frame whose module is not this file.
    """
    # Deliberately raise and catch an exception: the resulting traceback
    # gives us access to the caller's frame without calling a platform-
    # specific frame API.
    try: 1//0
    except ZeroDivisionError:
        # Don't start iterating with the current stack-frame to
        # prevent creating reference cycles (f_back is safe).
        frame = sys.exc_info()[2].tb_frame.f_back
    # Find the first frame that *isn't* from this file. This means
    # that we expect all of the SCons frames that implement an Export()
    # or SConscript() call to be in this file, so that we can identify
    # the first non-Script.SConscript frame as the user's local calling
    # environment, and the locals and globals dictionaries from that
    # frame as the calling namespaces. See the comment below preceding
    # the DefaultEnvironmentCall block for even more explanation.
    while frame.f_globals.get("__name__") == __name__:
        frame = frame.f_back
    return frame.f_locals, frame.f_globals
def compute_exports(exports):
    """Compute a dictionary of exports given one of the parameters
    to the Export() function or the exports argument to SConscript().

    Each element of *exports* is either a dict (merged wholesale into
    the result) or a variable name looked up first in the caller's
    locals, then in the caller's globals.

    Raises SCons.Errors.UserError if a named variable exists in neither
    namespace.
    """
    loc, glob = get_calling_namespaces()
    retval = {}
    try:
        for export in exports:
            if SCons.Util.is_Dict(export):
                retval.update(export)
            else:
                try:
                    # Prefer the caller's locals, fall back to its globals.
                    retval[export] = loc[export]
                except KeyError:
                    retval[export] = glob[export]
    # Fix: use the 2.6+/3.x-compatible "as" form instead of the
    # Python-2-only "except KeyError, x" comma syntax.
    except KeyError as x:
        raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x)
    return retval
class Frame(object):
    """One entry on the SConstruct/SConscript call stack.

    Captures what is needed to run one SConscript file and to resume the
    caller afterwards: the caller's working directory, the variables it
    exported, a fresh globals dictionary for the file being read, and a
    slot for that file's Return() value.
    """
    def __init__(self, fs, exports, sconscript):
        self.retval = None
        self.prev_dir = fs.getcwd()
        self.globals = BuildDefaultGlobals()
        # Resolve the names exported from the calling SConscript.
        self.exports = compute_exports(exports)
        # Normalize the sconscript attribute to a Node ('-' means stdin,
        # which is represented as None).
        if isinstance(sconscript, SCons.Node.Node):
            node = sconscript
        elif sconscript == '-':
            node = None
        else:
            node = fs.File(str(sconscript))
        self.sconscript = node
# the SConstruct/SConscript call stack:
# each element is a Frame, pushed by _SConscript() before reading a file
# and popped when that file finishes; Return() stores its result on the
# top-most entry.
call_stack = []
# For documentation on the methods in this file, see the scons man-page
def Return(*vars, **kw):
    """Implement the Return() SConscript API.

    Each positional argument is a whitespace-separated string of
    variable names; their values are collected from the globals of the
    currently-executing SConscript (the top Frame) and stored as that
    frame's return value (a single value, or a tuple for several).

    Keyword ``stop`` (default True): when true, raise SConscriptReturn
    to abort execution of the SConscript file immediately.

    Raises SCons.Errors.UserError for a name not present in the
    SConscript's globals.
    """
    retval = []
    try:
        fvars = SCons.Util.flatten(vars)
        for var in fvars:
            for v in var.split():
                retval.append(call_stack[-1].globals[v])
    # Fix: use the 2.6+/3.x-compatible "as" form instead of the
    # Python-2-only "except KeyError, x" comma syntax.
    except KeyError as x:
        raise SCons.Errors.UserError("Return of non-existent variable '%s'"%x)
    if len(retval) == 1:
        call_stack[-1].retval = retval[0]
    else:
        call_stack[-1].retval = tuple(retval)
    stop = kw.get('stop', True)
    if stop:
        raise SConscriptReturn
# Sentinel planted in the globals of each exec'd SConscript frame
# (see _SConscript); SConscript_exception() and annotate() scan frame
# locals for it to find where user SConscript code begins.
stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
def _SConscript(fs, *files, **kw):
top = fs.Top
sd = fs.SConstruct_dir.rdir()
exports = kw.get('exports', [])
# evaluate each SConscript file
results = []
for fn in files:
call_stack.append(Frame(fs, exports, fn))
old_sys_path = sys.path
try:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1
if fn == "-":
exec sys.stdin in call_stack[-1].globals
else:
if isinstance(fn, SCons.Node.Node):
f = fn
else:
f = fs.File(str(fn))
_file_ = None
# Change directory to the top of the source
# tree to make sure the os's cwd and the cwd of
# fs match so we can open the SConscript.
fs.chdir(top, change_os_dir=1)
if f.rexists():
actual = f.rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.srcnode().rexists():
actual = f.srcnode().rfile()
_file_ = open(actual.get_abspath(), "r")
elif f.has_src_builder():
# The SConscript file apparently exists in a source
# code management system. Build it, but then clear
# the builder so that it doesn't get built *again*
# during the actual build phase.
f.build()
f.built()
f.builder_set(None)
if f.exists():
_file_ = open(f.get_abspath(), "r")
if _file_:
# Chdir to the SConscript directory. Use a path
# name relative to the SConstruct file so that if
# we're using the -f option, we're essentially
# creating a parallel SConscript directory structure
# in our local directory tree.
#
# XXX This is broken for multiple-repository cases
# where the SConstruct and SConscript files might be
# in different Repositories. For now, cross that
# bridge when someone comes to it.
try:
src_dir = kw['src_dir']
except KeyError:
ldir = fs.Dir(f.dir.get_path(sd))
else:
ldir = fs.Dir(src_dir)
if not ldir.is_under(f.dir):
# They specified a source directory, but
# it's above the SConscript directory.
# Do the sensible thing and just use the
# SConcript directory.
ldir = fs.Dir(f.dir.get_path(sd))
try:
fs.chdir(ldir, change_os_dir=sconscript_chdir)
except OSError:
# There was no local directory, so we should be
# able to chdir to the Repository directory.
# Note that we do this directly, not through
# fs.chdir(), because we still need to
# interpret the stuff within the SConscript file
# relative to where we are logically.
fs.chdir(ldir, change_os_dir=0)
os.chdir(actual.dir.get_abspath())
# Append the SConscript directory to the beginning
# of sys.path so Python modules in the SConscript
# directory can be easily imported.
sys.path = [ f.dir.get_abspath() ] + sys.path
# This is the magic line that actually reads up
# and executes the stuff in the SConscript file.
# The locals for this frame contain the special
# bottom-of-the-stack marker so that any
# exceptions that occur when processing this
# SConscript can base the printed frames at this
# level and not show SCons internals as well.
call_stack[-1].globals.update({stack_bottom:1})
old_file = call_stack[-1].globals.get('__file__')
try:
del call_stack[-1].globals['__file__']
except KeyError:
pass
try:
try:
exec _file_ in call_stack[-1].globals
except SConscriptReturn:
pass
finally:
if old_file is not None:
call_stack[-1].globals.update({__file__:old_file})
else:
SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning,
"Ignoring missing SConscript '%s'" % f.get_internal_path())
finally:
SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
sys.path = old_sys_path
frame = call_stack.pop()
try:
fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir)
except OSError:
# There was no local directory, so chdir to the
# Repository directory. Like above, we do this
# directly.
fs.chdir(frame.prev_dir, change_os_dir=0)
rdir = frame.prev_dir.rdir()
rdir._create() # Make sure there's a directory there.
try:
os.chdir(rdir.get_abspath())
except OSError, e:
# We still couldn't chdir there, so raise the error,
# but only if actions are being executed.
#
# If the -n option was used, the directory would *not*
# have been created and we should just carry on and
# let things muddle through. This isn't guaranteed
# to work if the SConscript files are reading things
# from disk (for example), but it should work well
# enough for most configurations.
if SCons.Action.execute_actions:
raise e
results.append(frame.retval)
# if we only have one script, don't return a tuple
if len(results) == 1:
return results[0]
else:
return tuple(results)
def SConscript_exception(file=sys.stderr):
    """Print an exception stack trace just for the SConscript file(s).
    This will show users who have Python errors where the problem is,
    without cluttering the output with all of the internal calls leading
    up to where we exec the SConscript.

    file: writable stream to print the trace to (defaults to stderr).
    Must be called while an exception is being handled, since it reads
    sys.exc_info()."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    tb = exc_tb
    # Walk down the traceback to the frame that carries the stack_bottom
    # marker, i.e. the frame in which the SConscript itself was exec'd;
    # everything above it is SCons-internal machinery.
    while tb and stack_bottom not in tb.tb_frame.f_locals:
        tb = tb.tb_next
    if not tb:
        # We did not find our exec statement, so this was actually a bug
        # in SCons itself. Show the whole stack.
        tb = exc_tb
    stack = traceback.extract_tb(tb)
    try:
        type = exc_type.__name__
    except AttributeError:
        type = str(exc_type)
    if type[:11] == "exceptions.":
        # Python 2 prefixes built-in exception names with "exceptions.";
        # strip it for readability.
        type = type[11:]
    file.write('%s: %s:\n' % (type, exc_value))
    for fname, line, func, text in stack:
        file.write(' File "%s", line %d:\n' % (fname, line))
        file.write(' %s\n' % text)
def annotate(node):
    """Annotate a node with the stack frame describing the
    SConscript file and line number that created it.

    Stores a traceback tuple (filename, line, function, text) on
    node.creator.  Relies on the stack_bottom marker planted in the
    globals of the frame that exec's each SConscript."""
    tb = sys.exc_info()[2]
    # Find the frame in which the SConscript file was exec'd.
    while tb and stack_bottom not in tb.tb_frame.f_locals:
        tb = tb.tb_next
    if not tb:
        # We did not find any exec of an SConscript file: what?!
        raise SCons.Errors.InternalError("could not find SConscript stack frame")
    node.creator = traceback.extract_stack(tb)[0]
# The following line would cause each Node to be annotated using the
# above function. Unfortunately, this is a *huge* performance hit, so
# leave this disabled until we find a more efficient mechanism.
#SCons.Node.Annotate = annotate
class SConsEnvironment(SCons.Environment.Base):
    """An Environment subclass that contains all of the methods that
    are particular to the wrapper SCons interface and which aren't
    (or shouldn't be) part of the build engine itself.
    Note that not all of the methods of this class have corresponding
    global functions, there are some private methods.
    """
    #
    # Private methods of an SConsEnvironment.
    #
    def _exceeds_version(self, major, minor, v_major, v_minor):
        """Return 1 if 'major' and 'minor' are greater than the version
        in 'v_major' and 'v_minor', and 0 otherwise."""
        return (major > v_major or (major == v_major and minor > v_minor))
    def _get_major_minor_revision(self, version_string):
        """Split a version string into major, minor and (optionally)
        revision parts.
        This is complicated by the fact that a version string can be
        something like 3.2b1."""
        version = version_string.split(' ')[0].split('.')
        v_major = int(version[0])
        # re.match('\d+', ...) keeps only the leading digits, so a
        # minor/revision part like "2b1" parses as 2.
        v_minor = int(re.match('\d+', version[1]).group())
        if len(version) >= 3:
            v_revision = int(re.match('\d+', version[2]).group())
        else:
            v_revision = 0
        return v_major, v_minor, v_revision
    def _get_SConscript_filenames(self, ls, kw):
        """
        Convert the parameters passed to SConscript() calls into a list
        of files and export variables. If the parameters are invalid,
        throws SCons.Errors.UserError. Returns a tuple (l, e) where l
        is a list of SConscript filenames and e is a list of exports.
        """
        exports = []
        if len(ls) == 0:
            # SConscript(dirs=..., name=...) form: build <dir>/<name>
            # for every listed directory.
            try:
                dirs = kw["dirs"]
            except KeyError:
                raise SCons.Errors.UserError("Invalid SConscript usage - no parameters")
            if not SCons.Util.is_List(dirs):
                dirs = [ dirs ]
            dirs = list(map(str, dirs))
            name = kw.get('name', 'SConscript')
            files = [os.path.join(n, name) for n in dirs]
        elif len(ls) == 1:
            files = ls[0]
        elif len(ls) == 2:
            # Second positional argument is the (deprecated) exports list.
            files = ls[0]
            exports = self.Split(ls[1])
        else:
            raise SCons.Errors.UserError("Invalid SConscript() usage - too many arguments")
        if not SCons.Util.is_List(files):
            files = [ files ]
        if kw.get('exports'):
            exports.extend(self.Split(kw['exports']))
        # variant_dir (formerly build_dir) redirects the single named
        # SConscript into a separate build tree via fs.VariantDir().
        variant_dir = kw.get('variant_dir') or kw.get('build_dir')
        if variant_dir:
            if len(files) != 1:
                raise SCons.Errors.UserError("Invalid SConscript() usage - can only specify one SConscript with a variant_dir")
            duplicate = kw.get('duplicate', 1)
            src_dir = kw.get('src_dir')
            if not src_dir:
                # Default the source dir to the SConscript's own directory.
                src_dir, fname = os.path.split(str(files[0]))
                files = [os.path.join(str(variant_dir), fname)]
            else:
                if not isinstance(src_dir, SCons.Node.Node):
                    src_dir = self.fs.Dir(src_dir)
                fn = files[0]
                if not isinstance(fn, SCons.Node.Node):
                    fn = self.fs.File(fn)
                if fn.is_under(src_dir):
                    # Get path relative to the source directory.
                    fname = fn.get_path(src_dir)
                    files = [os.path.join(str(variant_dir), fname)]
                else:
                    files = [fn.get_abspath()]
                kw['src_dir'] = variant_dir
            self.fs.VariantDir(variant_dir, src_dir, duplicate)
        return (files, exports)
    #
    # Public methods of an SConsEnvironment. These get
    # entry points in the global namespace so they can be called
    # as global functions.
    #
    def Configure(self, *args, **kw):
        """Environment-level Configure(); only legal while SConscripts
        are being read, not from Builder actions run later."""
        if not SCons.Script.sconscript_reading:
            raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
        # _depth tells SConf how many wrapper frames to skip when
        # reporting the calling file/line.
        kw['_depth'] = kw.get('_depth', 0) + 1
        return SCons.Environment.Base.Configure(self, *args, **kw)
    def Default(self, *targets):
        """Set the default build targets for this environment."""
        SCons.Script._Set_Default_Targets(self, targets)
    def EnsureSConsVersion(self, major, minor, revision=0):
        """Exit abnormally if the SCons version is not late enough."""
        # split string to avoid replacement during build process
        if SCons.__version__ == '__' + 'VERSION__':
            SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
                "EnsureSConsVersion is ignored for development version")
            return
        scons_ver = self._get_major_minor_revision(SCons.__version__)
        if scons_ver < (major, minor, revision):
            if revision:
                scons_ver_string = '%d.%d.%d' % (major, minor, revision)
            else:
                scons_ver_string = '%d.%d' % (major, minor)
            print "SCons %s or greater required, but you have SCons %s" % \
                  (scons_ver_string, SCons.__version__)
            sys.exit(2)
    def EnsurePythonVersion(self, major, minor):
        """Exit abnormally if the Python version is not late enough."""
        if sys.version_info < (major, minor):
            v = sys.version.split()[0]
            print "Python %d.%d or greater required, but you have Python %s" %(major,minor,v)
            sys.exit(2)
    def Exit(self, value=0):
        # Terminate the SCons run immediately with the given exit code.
        sys.exit(value)
    def Export(self, *vars, **kw):
        """Export variables by name (or as keyword args) for Import()
        in other SConscript files."""
        for var in vars:
            global_exports.update(compute_exports(self.Split(var)))
        global_exports.update(kw)
    def GetLaunchDir(self):
        # Directory from which scons was originally invoked.
        global launch_dir
        return launch_dir
    def GetOption(self, name):
        name = self.subst(name)
        return SCons.Script.Main.GetOption(name)
    def Help(self, text, append=False):
        # raw=1: do not strip whitespace during substitution of help text.
        text = self.subst(text, raw=1)
        SCons.Script.HelpFunction(text, append=append)
    def Import(self, *vars):
        """Import variables previously Export()ed, or exported to this
        SConscript via the exports= keyword.  '*' imports everything;
        per-SConscript exports take precedence over global ones."""
        try:
            frame = call_stack[-1]
            globals = frame.globals
            exports = frame.exports
            for var in vars:
                var = self.Split(var)
                for v in var:
                    if v == '*':
                        globals.update(global_exports)
                        globals.update(exports)
                    else:
                        if v in exports:
                            globals[v] = exports[v]
                        else:
                            globals[v] = global_exports[v]
        except KeyError,x:
            raise SCons.Errors.UserError("Import of non-existent variable '%s'"%x)
    def SConscript(self, *ls, **kw):
        """Execute subsidiary SConscript file(s); all string arguments
        are run through construction-variable substitution first."""
        if 'build_dir' in kw:
            msg = """The build_dir keyword has been deprecated; use the variant_dir keyword instead."""
            SCons.Warnings.warn(SCons.Warnings.DeprecatedBuildDirWarning, msg)
        def subst_element(x, subst=self.subst):
            # Substitute either a single string or each element of a list.
            if SCons.Util.is_List(x):
                x = list(map(subst, x))
            else:
                x = subst(x)
            return x
        ls = list(map(subst_element, ls))
        subst_kw = {}
        for key, val in kw.items():
            if SCons.Util.is_String(val):
                val = self.subst(val)
            elif SCons.Util.is_List(val):
                result = []
                for v in val:
                    if SCons.Util.is_String(v):
                        v = self.subst(v)
                    result.append(v)
                val = result
            subst_kw[key] = val
        files, exports = self._get_SConscript_filenames(ls, subst_kw)
        subst_kw['exports'] = exports
        return _SConscript(self.fs, *files, **subst_kw)
    def SConscriptChdir(self, flag):
        # Control whether we chdir into each SConscript's directory.
        global sconscript_chdir
        sconscript_chdir = flag
    def SetOption(self, name, value):
        name = self.subst(name)
        SCons.Script.Main.SetOption(name, value)
#
# Install the wrapper subclass as the Environment class the rest of
# SCons instantiates, so every Environment gets the SConscript methods.
#
SCons.Environment.Environment = SConsEnvironment
def Configure(*args, **kw):
    """Global Configure() entry point (no associated Environment).
    Only legal while SConscript files are being read, not from Builder
    actions executed later in the build."""
    if not SCons.Script.sconscript_reading:
        raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
    # _depth tells SConf how many wrapper frames to skip when reporting
    # the calling SConscript file/line.
    kw['_depth'] = 1
    return SCons.SConf.SConf(*args, **kw)
# It's very important that the DefaultEnvironmentCall() class stay in this
# file, with the get_calling_namespaces() function, the compute_exports()
# function, the Frame class and the SConsEnvironment.Export() method.
# These things make up the calling stack leading up to the actual global
# Export() or SConscript() call that the user issued. We want to allow
# users to export local variables that they define, like so:
#
# def func():
# x = 1
# Export('x')
#
# To support this, the get_calling_namespaces() function assumes that
# the *first* stack frame that's not from this file is the local frame
# for the Export() or SConscript() call.
# Lazily-created singleton proxy around the default Environment; the
# NoSubstitutionProxy prevents construction-variable expansion when
# Environment methods are invoked as global functions.
_DefaultEnvironmentProxy = None
def get_DefaultEnvironmentProxy():
    """Return (creating on first use) the NoSubstitutionProxy wrapper
    around the default construction environment."""
    global _DefaultEnvironmentProxy
    if not _DefaultEnvironmentProxy:
        default_env = SCons.Defaults.DefaultEnvironment()
        _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env)
    return _DefaultEnvironmentProxy
class DefaultEnvironmentCall(object):
    """A class that implements "global function" calls of
    Environment methods by fetching the specified method from the
    DefaultEnvironment's class.  An intermediate proxy class is used
    instead of calling the DefaultEnvironment method directly so that
    the proxy can override the subst() method and thereby prevent
    expansion of construction variables (since from the user's point
    of view this was called as a global function, with no associated
    construction environment)."""
    def __init__(self, method_name, subst=0):
        self.method_name = method_name
        # With substitution enabled we dispatch to the real default
        # environment; otherwise to the NoSubstitutionProxy wrapper.
        if subst:
            self.factory = SCons.Defaults.DefaultEnvironment
        else:
            self.factory = get_DefaultEnvironmentProxy
    def __call__(self, *args, **kw):
        # Look the method up on a freshly-fetched target each call, so
        # late creation of the default environment is handled correctly.
        target = self.factory()
        return getattr(target, self.method_name)(*args, **kw)
def BuildDefaultGlobals():
    """
    Create a dictionary containing all the default globals for
    SConstruct and SConscript files.

    The dictionary is computed once, cached in the module-level
    GlobalDict, and a fresh copy is returned on every call so each
    SConscript gets an independent namespace.
    """
    global GlobalDict
    if GlobalDict is None:
        GlobalDict = {}
        import SCons.Script
        d = SCons.Script.__dict__
        # Exclude submodules of SCons.Script; only plain attributes
        # (functions, classes, variables) become SConscript globals.
        def not_a_module(m, d=d, mtype=type(SCons.Script)):
            return not isinstance(d[m], mtype)
        for m in filter(not_a_module, dir(SCons.Script)):
            GlobalDict[m] = d[m]
    return GlobalDict.copy()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/Highton-2.3.0.tar.gz/Highton-2.3.0/highton/models/note.py | from highton.models import HightonModel
from highton.highton_constants import HightonConstants
from highton import (
fields,
call_mixins,
)
from highton.models.attachment import Attachment
class Note(
    HightonModel,
    call_mixins.DetailCallMixin,
    call_mixins.CreateCallMixin,
    call_mixins.UpdateCallMixin,
    call_mixins.DeleteCallMixin,
    call_mixins.ListCommentCallMixin,
):
    """
    A Highrise "note" record; the mixins supply detail/create/update/
    delete and comment-listing API calls against the NOTES endpoint.

    :ivar id: fields.IntegerField(name=HightonConstants.ID)
    :ivar body: fields.StringField(name=HightonConstants.BODY, required=True)
    :ivar author_id: fields.IntegerField(name=HightonConstants.AUTHOR_ID)
    :ivar subject_id: fields.IntegerField(name=HightonConstants.SUBJECT_ID, required=True)
    :ivar subject_type: fields.StringField(name=HightonConstants.SUBJECT_TYPE, required=True)
    :ivar subject_name: fields.StringField(name=HightonConstants.SUBJECT_NAME)
    :ivar collection_id: fields.IntegerField(name=HightonConstants.COLLECTION_ID)
    :ivar collection_type: fields.StringField(name=HightonConstants.COLLECTION_TYPE)
    :ivar visible_to: fields.StringField(name=HightonConstants.VISIBLE_TO)
    :ivar owner_id: fields.IntegerField(name=HightonConstants.OWNER_ID)
    :ivar group_id: fields.IntegerField(name=HightonConstants.GROUP_ID)
    :ivar updated_at: fields.DatetimeField(name=HightonConstants.UPDATED_AT)
    :ivar created_at: fields.DatetimeField(name=HightonConstants.CREATED_AT)
    :ivar attachments: fields.ListField(name=HightonConstants.ATTACHMENTS, init_class=Attachment)
    """
    TAG_NAME = HightonConstants.NOTE
    ENDPOINT = HightonConstants.NOTES
    def __init__(self, **kwargs):
        """Declare the serializable fields, then let HightonModel
        populate them from **kwargs."""
        self.body = fields.StringField(name=HightonConstants.BODY, required=True)
        self.author_id = fields.IntegerField(name=HightonConstants.AUTHOR_ID)
        self.subject_id = fields.IntegerField(name=HightonConstants.SUBJECT_ID, required=True)
        self.subject_type = fields.StringField(name=HightonConstants.SUBJECT_TYPE, required=True)
        self.subject_name = fields.StringField(name=HightonConstants.SUBJECT_NAME)
        self.collection_id = fields.IntegerField(name=HightonConstants.COLLECTION_ID)
        self.collection_type = fields.StringField(name=HightonConstants.COLLECTION_TYPE)
        self.visible_to = fields.StringField(name=HightonConstants.VISIBLE_TO)
        self.owner_id = fields.IntegerField(name=HightonConstants.OWNER_ID)
        self.group_id = fields.IntegerField(name=HightonConstants.GROUP_ID)
        self.updated_at = fields.DatetimeField(name=HightonConstants.UPDATED_AT)
        self.created_at = fields.DatetimeField(name=HightonConstants.CREATED_AT)
        self.attachments = fields.ListField(name=HightonConstants.ATTACHMENTS, init_class=Attachment)
        super().__init__(**kwargs)
/DeepManufacturing-0.0.7.tar.gz/DeepManufacturing-0.0.7/ManufacturingNet/models/alexnet.py | import datetime
import os
import sys
import time
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
from sklearn.metrics import confusion_matrix
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.optim.lr_scheduler as scheduler
import torch.utils.data as data_utils
import torchvision
from torch.utils import data as data_utils
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms
from torchvision.models import alexnet
def conv2D_output_size(img_size, kernel_size, stride, padding):
    """Compute the (height, width) produced by a 2-D convolution.

    Applies floor((dim + 2*pad - (kernel - 1) - 1) / stride + 1)
    independently to height (index 0) and width (index 1).  All
    arguments are (h, w) pairs; the result is a pair of integers.
    """
    dims = []
    for axis in (0, 1):
        raw = (img_size[axis] + 2 * padding[axis] - (kernel_size[axis] - 1) - 1) / stride[axis] + 1
        dims.append(np.floor(raw).astype(int))
    return (dims[0], dims[1])
def spacing():
    """Print a 25-character '=' divider line (visual separator for prompts)."""
    print("=" * 25)
class Network(nn.Module):
    """Wraps torchvision's AlexNet, adapted to the user's image shape
    and class count; interactively asks whether to load pretrained
    weights."""
    def __init__(self, img_size, num_class):
        # img_size is (height, width, channels); the channel count is
        # split off so the first conv layer can be rebuilt to match.
        super(Network, self).__init__()
        self.num_class = num_class
        self.channel=img_size[-1]
        self.img_size = img_size[:-1]
        self.get_pretrained_model()
    def get_pretrained_model(self):
        """Prompt for pretrained/fresh weights, build the AlexNet and
        store it (in double precision) on self.net."""
        print("Question [2/7]: Model Selection:\n")
        gate = 0
        while gate != 1:
            pretrained_input = input(
                "Do you want the pretrained model (y/n)? ").lower().replace(' ','')
            if pretrained_input == "y":
                self.pretrained = True
                gate = 1
            elif pretrained_input == "n":
                self.pretrained = False
                gate = 1
            else:
                print("Please enter valid input")
        model = alexnet(pretrained=self.pretrained)
        # Replace the stem conv so arbitrary channel counts (e.g. gray
        # scale) are accepted, and the classifier head to match num_class.
        model.features[0] = nn.Conv2d(self.channel, 64, kernel_size=(
            3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        model.classifier[-1] = nn.Linear(4096, self.num_class)
        self.net = model.double()
        spacing()
# The following class will be called by a user. The class calls other necessary classes to build a complete pipeline required for training
class AlexNet():
    """
    Interactive wrapper that collects hyperparameters from the console,
    builds an AlexNet-based classifier, trains/validates it on image
    folders, and saves loss/accuracy plots plus optional model weights.
    Documentation Link:
    """
    def __init__(self, train_data_address, val_data_address, shuffle=True):
        """train_data_address / val_data_address: roots of ImageFolder-style
        directory trees (one subfolder per class)."""
        # Lists used in the functions below
        self.criterion_list = {1: nn.CrossEntropyLoss(), 2: torch.nn.L1Loss(
        ), 3: torch.nn.SmoothL1Loss(), 4: torch.nn.MSELoss()}
        self.train_address = train_data_address
        self.val_address = val_data_address
        self.shuffle = shuffle
        self.get_default_paramters()  # getting default parameters argument
        self.num_classes = self.get_num_classes()  # getting the number of classes
        print('1/8 - Image size')
        self.get_image_size()  # getting the image size (resized or original)
        # building a network architecture
        self.net = (Network(self.img_size, self.num_classes)).net
        print('='*25)
        print('3/7 - Batch size input')
        # getting a batch size for training and validation
        self._get_batchsize_input()
        print('='*25)
        print('4/7- Loss function')
        self._get_loss_function()  # getting a loss function
        print('='*25)
        print('5/7 - Optimizer')
        self._get_optimizer()  # getting an optimizer input
        print('='*25)
        print('6/7 - Scheduler')
        self._get_scheduler()  # getting a scheduler input
        self._set_device()  # setting the device to gpu or cpu
        print('='*25)
        print('7/7 - Number of epochs')
        self._get_epoch()  # getting an input for number oftraining epochs
        self.main()  # run function
    def get_default_paramters(self):
        # Method for getting a binary input for default paramters
        gate = 0
        while gate != 1:
            self.default = input(
                'Do you want default values for all the training parameters (y/n)? ').replace(' ','')
            if self.default == 'y' or self.default == 'Y' or self.default == 'n' or self.default == 'N':
                if self.default.lower() == 'y':
                    self.default_gate = True
                else:
                    self.default_gate = False
                gate = 1
            else:
                print('Enter a valid input')
                print(' ')
        print(' ')
    def check_address(self, address):
        # Return True if the given path is an existing regular file.
        isfile = os.path.isfile(address)
        return isfile
    def get_num_classes(self):
        """Walk the train/val trees, report folder and image counts, and
        return the number of class folders (exits if either set is empty)."""
        train_num_folder = 0
        train_num_files = 0
        for _, dirnames, filenames in os.walk(self.train_address):
            train_num_folder += len(dirnames)
            train_num_files += len(filenames)
        if train_num_files == 0:
            print('Train data: Zero images found.\n System exit initialized')
            sys.exit()
        val_num_folder = 0
        val_num_files = 0
        for _, dirnames, filenames in os.walk(self.val_address):
            val_num_folder += len(dirnames)
            val_num_files += len(filenames)
        if val_num_files == 0:
            print('Validation data: Zero images found.\n System exit initialized')
            sys.exit()
        if train_num_folder != val_num_folder:
            print(
                'Warning: Number of folders in the Validation set and Training set is not the same.')
        print('Number of classes: ', train_num_folder)
        print('Total number of training images: ', train_num_files)
        print('Total number of validation images: ', val_num_files)
        spacing()
        return train_num_folder
    def get_image_size(self):
        """Prompt for the (height, width, channels) every image will be
        resized to; channels must be 1 (grayscale) or 3 (RGB)."""
        gate = 0
        while gate != 1:
            self.img_size = []
            print('All the images must have same size.')
            size_input = (input('Please enter the dimensions to which images need to be resized (heigth, width, channels): \nFor example - 228, 228, 1 (For gray scale conversion)\n If all images have same size, enter the actual image size (heigth, width, channels) :\n ')).replace(' ','')
            size_input = size_input.split(',')
            if len(size_input) == 3:
                for i in range(len(size_input)):
                    if size_input[i].isnumeric() and (1 <= int(size_input[i])):
                        self.img_size.append(int(size_input[i]))
                self.img_size = tuple(self.img_size)
                if len(self.img_size) == 3 and (self.img_size[-1] == 1 or self.img_size[-1] == 3):
                    gate = 1
                else:
                    print(
                        'Please enter a valid input.\n Image size must be positive integers and number of channels can be 1 or 3')
            else:
                print('Please enter a valid input')
        spacing()
    def _get_batchsize_input(self):
        # Method for getting batch size input
        gate = 0
        while gate != 1:
            self.batchsize = (input('Please enter the batch size: ')).replace(' ','')
            if self.batchsize.isnumeric() and int(self.batchsize) > 0:
                self.batchsize = int(self.batchsize)
                gate = 1
            else:
                print('Please enter a valid input')
    def _get_loss_function(self):
        # Method for getting a loss function for training
        # Classification is assumed, so CrossEntropy (index 1) is fixed.
        self.criterion_input = '1'
        self.criterion = self.criterion_list[int(self.criterion_input)]
        print('Loss function: CrossEntropy()')
    def _get_optimizer(self):
        # Method for getting a optimizer input
        gate = 0
        while gate != 1:
            if self.default_gate == True:
                print('Default optimizer selected : Adam')
                self.optimizer_input = '1'
            else:
                self.optimizer_input = (input(
                    'Please enter the optimizer index for the problem \n Optimizer_list - [1: Adam, 2: SGD] \n For default optimizer, please directly press enter without any input: ')).replace(' ','')
            if self.optimizer_input == '':              # handling default case for optimizer
                print('Default optimizer selected : Adam')
                self.optimizer_input = '1'
            if self.optimizer_input.isnumeric() and int(self.optimizer_input) > 0 and int(self.optimizer_input) < 3:
                gate = 1
            else:
                print('Please enter a valid input')
                print(' ')
        print(' ')
        gate = 0
        while gate != 1:
            if self.default_gate == True:
                print('Default value for learning rate selected : 0.001')
                self.user_lr = '0.001'
            else:
                self.user_lr = input(
                    'Please enter a required value float input for learning rate (learning rate > 0) \n For default learning rate, please directly press enter without any input: ').replace(' ','')
            if self.user_lr == '':               # handling default case for learning rate
                print('Default value for learning rate selected : 0.001')
                self.user_lr = '0.001'
            if self.user_lr.replace('.', '').isdigit():
                if float(self.user_lr) > 0:
                    self.lr = float(self.user_lr)
                    gate = 1
            else:
                print('Please enter a valid input')
                print(' ')
        self.optimizer_list = {1: optim.Adam(self.net.parameters(
        ), lr=self.lr), 2: optim.SGD(self.net.parameters(), lr=self.lr)}
        self.optimizer = self.optimizer_list[int(self.optimizer_input)]
        print(' ')
    def _get_scheduler(self):
        # Method for getting scheduler
        gate = 0
        while gate != 1:
            if self.default_gate == True:
                print('By default no scheduler selected')
                self.scheduler_input = '1'
            else:
                self.scheduler_input = input(
                    'Please enter the scheduler index for the problem: Scheduler_list - [1: None, 2:StepLR, 3:MultiStepLR] \n For default option of no scheduler, please directly press enter without any input: ').replace(' ','')
            if self.scheduler_input == '':
                print('By default no scheduler selected')
                self.scheduler_input = '1'
            if self.scheduler_input.isnumeric() and int(self.scheduler_input) > 0 and int(self.scheduler_input) < 4:
                gate = 1
            else:
                print('Please enter a valid input')
                print(' ')
        if self.scheduler_input == '1':
            print(' ')
            self.scheduler = None
        elif self.scheduler_input == '2':
            # StepLR: decay the LR by `gamma` every `step` epochs.
            print(' ')
            gate = 0
            while gate != 1:
                self.step = (
                    input('Please enter a step value int input (step > 0): ')).replace(' ','')
                if self.step.isnumeric() and int(self.step) > 0:
                    self.step = int(self.step)
                    gate = 1
                else:
                    print('Please enter a valid input')
                    print(' ')
            print(' ')
            gate = 0
            while gate != 1:
                self.gamma = (input(
                    'Please enter a Multiplying factor value float input (Multiplying factor > 0): ')).replace(' ','')
                if self.gamma.replace('.', '').isdigit():
                    if float(self.gamma) > 0:
                        self.gamma = float(self.gamma)
                        gate = 1
                else:
                    print('Please enter a valid input')
                    print(' ')
            self.scheduler = scheduler.StepLR(
                self.optimizer, step_size=self.step, gamma=self.gamma)
        elif self.scheduler_input == '3':
            # MultiStepLR: decay the LR by `gamma` at each milestone epoch.
            print(' ')
            gate = 0
            while gate != 1:
                self.milestones_input = (
                    input('Please enter values of milestone epochs int input (Example: 2, 6, 10): ')).replace(' ','')
                self.milestones_input = self.milestones_input.split(',')
                for i in range(len(self.milestones_input)):
                    if self.milestones_input[i].isnumeric() and int(self.milestones_input[i]) > 0:
                        gate = 1
                    else:
                        gate = 0
                        break
                if gate == 0:
                    print('Please enter a valid input')
                    print(' ')
            self.milestones = [int(x)
                               for x in self.milestones_input if int(x) > 0]
            print(' ')
            gate = 0
            while gate != 1:
                self.gamma = (input(
                    'Please enter a Multiplying factor value float input (Multiplying factor > 0): ')).replace(' ','')
                if self.gamma.replace('.', '').isdigit():
                    if float(self.gamma) > 0:
                        self.gamma = float(self.gamma)
                        gate = 1
                else:
                    print('Please enter a valid input')
                    print(' ')
            self.scheduler = scheduler.MultiStepLR(
                self.optimizer, milestones=self.milestones, gamma=self.gamma)
    def _set_device(self):
        # Method for setting device type if GPU is available
        if torch.cuda.is_available():
            self.device = torch.device('cuda')
        else:
            self.device = torch.device('cpu')
    def _get_epoch(self):
        # Method for getting number of epochs for training the model
        gate = 0
        while gate != 1:
            self.numEpochs = (
                input('Please enter the number of epochs to train the model: ')).replace(' ','')
            if self.numEpochs.isnumeric() and int(self.numEpochs) > 0:
                self.numEpochs = int(self.numEpochs)
                gate = 1
            else:
                print('Please enter a valid input')
    def main(self):
        # Method integrating all the functions and training the model
        self.net.to(self.device)
        print('='*25)
        print('Neural network architecture: ')
        print(' ')
        print(self.net)  # printing model architecture
        print('='*25)
        self.get_model_summary()  # printing summaray of the model
        print(' ')
        print('='*25)
        # Grayscale(num_output_channels=...) forces every image to the
        # requested channel count before resizing and tensor conversion.
        image_transform = transforms.Compose([transforms.Grayscale(
            num_output_channels=self.img_size[-1]), transforms.Resize((self.img_size[:-1]), interpolation=2), transforms.ToTensor()])
        self.train_dataset = torchvision.datasets.ImageFolder(
            root=self.train_address, transform=image_transform)  # creating the training dataset
        self.val_dataset = torchvision.datasets.ImageFolder(
            root=self.val_address, transform=image_transform)  # creating the validation dataset
        # creating the training dataset dataloadet
        self.train_loader = torch.utils.data.DataLoader(
            self.train_dataset, batch_size=self.batchsize, shuffle=True)
        # creating the validation dataset dataloader
        self.dev_loader = torch.utils.data.DataLoader(
            self.val_dataset, batch_size=self.batchsize)
        self.train_model()  # training the model
        self.get_loss_graph()  # saving the loss graph
        if self.criterion_input == '1':
            self.get_accuracy_graph()  # saving the accuracy graph
            self.get_confusion_matrix()  # printing confusion matrix
        self._save_model()  # saving model paramters
        print(' Call get_prediction() to make predictions on new data')
        print(' ')
        print('=== End of training ===')
    def _save_model(self):
        # Method for saving the model parameters if user wants to
        gate = 0
        while gate != 1:
            save_model = input(
                'Do you want to save the model weights? (y/n): ').replace(' ','')
            if save_model.lower() == 'y' or save_model.lower() == 'yes':
                path = 'model_parameters.pth'
                torch.save(self.net.state_dict(), path)
                gate = 1
            elif save_model.lower() == 'n' or save_model.lower() == 'no':
                gate = 1
            else:
                print('Please enter a valid input')
        print('='*25)
    def get_model_summary(self):
        # Method for getting the summary of the model
        print('Model Summary:')
        print(' ')
        print('Criterion: ', self.criterion)
        print('Optimizer: ', self.optimizer)
        print('Scheduler: ', self.scheduler)
        print('Batch size: ', self.batchsize)
        print('Initial learning rate: ', self.lr)
        print('Number of training epochs: ', self.numEpochs)
        print('Device: ', self.device)
    def train_model(self):
        # Method for training the model
        self.net.train()
        self.training_loss = []
        self.training_acc = []
        self.dev_loss = []
        self.dev_accuracy = []
        total_predictions = 0.0
        correct_predictions = 0.0
        print('Training the model...')
        for epoch in range(self.numEpochs):
            start_time = time.time()
            self.net.train()
            print('Epoch_Number: ', epoch)
            running_loss = 0.0
            for batch_idx, (data, target) in enumerate(self.train_loader):
                self.optimizer.zero_grad()
                data = data.double().to(self.device)
                target = target.to(self.device)
                outputs = self.net(data)
                # calculating the batch accuracy only if the loss function is Cross entropy
                if self.criterion_input == '1':
                    loss = self.criterion(outputs, target.long())
                    _, predicted = torch.max(outputs.data, 1)
                    total_predictions += target.size(0)
                    correct_predictions += (predicted == target).sum().item()
                else:
                    loss = self.criterion(outputs, target)
                running_loss += loss.item()
                loss.backward()
                self.optimizer.step()
            running_loss /= len(self.train_loader)
            self.training_loss.append(running_loss)
            print('Training Loss: ', running_loss)
            # printing the epoch accuracy only if the loss function is Cross entropy
            if self.criterion_input == '1':
                acc = (correct_predictions/total_predictions)*100.0
                self.training_acc.append(acc)
                print('Training Accuracy: ', acc, '%')
            dev_loss, dev_acc = self.validate_model()
            if self.scheduler_input != '1':
                self.scheduler.step()
                print('Current scheduler status: ', self.optimizer)
            end_time = time.time()
            print('Epoch Time: ', end_time - start_time, 's')
            print('#'*50)
            self.dev_loss.append(dev_loss)
            # saving the epoch validation accuracy only if the loss function is Cross entropy
            if self.criterion_input == '1':
                self.dev_accuracy.append(dev_acc)
    def validate_model(self):
        """Run one pass over the validation loader; returns (loss, accuracy)
        and records per-batch predictions/targets for the confusion matrix."""
        with torch.no_grad():
            self.net.eval()
        running_loss = 0.0
        total_predictions = 0.0
        correct_predictions = 0.0
        acc = 0
        self.actual = []
        self.predict = []
        for batch_idx, (data, target) in enumerate(self.dev_loader):
            data = data.double().to(self.device)
            target = target.to(self.device)
            outputs = self.net(data)
            if self.criterion_input == '1':
                loss = self.criterion(outputs, target.long())
                _, predicted = torch.max(outputs.data, 1)
                total_predictions += target.size(0)
                correct_predictions += (predicted == target).sum().item()
                self.predict.append(predicted.detach().cpu().numpy())
            else:
                loss = self.criterion(outputs, target)
                self.predict.append(outputs.detach().cpu().numpy())
            running_loss += loss.item()
            self.actual.append(target.detach().cpu().numpy())
        running_loss /= len(self.dev_loader)
        print('Validation Loss: ', running_loss)
        # calculating and printing the epoch accuracy only if the loss function is Cross entropy
        if self.criterion_input == '1':
            acc = (correct_predictions/total_predictions)*100.0
            print('Validation Accuracy: ', acc, '%')
        return running_loss, acc
    def get_loss_graph(self):
        # Method for showing and saving the loss graph in the root directory
        plt.figure(figsize=(8, 8))
        plt.plot(self.training_loss, label='Training Loss')
        plt.plot(self.dev_loss, label='Validation Loss')
        plt.legend()
        plt.title('Model Loss')
        plt.xlabel('Epochs')
        plt.ylabel('loss')
        plt.savefig('loss.png')
    def get_accuracy_graph(self):
        # Method for showing and saving the accuracy graph in the root directory
        plt.figure(figsize=(8, 8))
        plt.plot(self.training_acc, label='Training Accuracy')
        plt.plot(self.dev_accuracy, label='Validation Accuracy')
        plt.legend()
        plt.title('Model accuracy')
        plt.xlabel('Epochs')
        plt.ylabel('acc')
        plt.savefig('accuracy.png')
    def get_confusion_matrix(self):
        # Method for getting the confusion matrix for classification problem
        print('Confusion Matix: ')
        np_predict = np.zeros((0,))
        np_actual = np.zeros((0,))
        for i in range(len(self.predict)):
            np_predict = np.concatenate((np_predict,
                                         np.asarray(self.predict[i]).reshape(-1)),
                                        axis=0)
            np_actual = np.concatenate((np_actual,
                                        self.actual[i].reshape(-1)),
                                       axis=0)
        result = confusion_matrix(np_predict, np_actual)
        print(result)
    def get_prediction(self, x_input):
        """
        Pass in an input numpy array for making prediction.
        For passing multiple inputs, make sure to keep number of examples to be the first dimension of the input.
        For example, 10 data points need to be checked and each point has (3, 50, 50) resized or original input size, the shape of the array must be (10, 3, 50, 50).
        For more information, please see documentation.
        """
        # Method to use at the time of inference
        if len(x_input.shape) == 3:            # handling the case of single

            x_input = (x_input).reshape(
                1, x_input.shape[0], x_input.shape[1], x_input.shape[2])
        x_input = torch.from_numpy(x_input).to(self.device)
        net_output = self.net.forward(x_input)
        if self.criterion_input == '1':              # handling the case of classification problem

            _, net_output = torch.max(net_output.data, 1)
        return net_output
    def get_mapping(self):
        # Return the ImageFolder class-name -> index mapping used in training.
        mapped_labels = self.train_dataset.class_to_idx
        return mapped_labels
/DeepClaw-1.0.2-py3-none-any.whl/deepclaw/driver/arms/UR10eController.py | import time
import sys
import os
ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT)
from driver.arms.ArmController import ArmController
from driver.arms.URConnector import URConnector
from utils.Math import *
class UR10eController(ArmController):
    """Controller for a Universal Robots UR10e arm.

    Motion commands are sent as URScript strings (movej/movel) over a
    TCP socket via URConnector.  Joint angles are given in degrees at
    this interface and converted to radians for the robot; Cartesian
    poses use meters plus roll/pitch/yaw converted to a rotation vector.
    """
    def __init__(self, configuration_file):
        """configuration_file: parsed config dict providing HOME_POSE,
        HOME_JOINTS and SOCKET_CONFIGURATION (robot_ip, port_number)."""
        super(UR10eController, self).__init__()
        self.cfg = configuration_file
        self.__home_pose = self.cfg['HOME_POSE']
        self.__home_joints = self.cfg['HOME_JOINTS']
        self.__connector = URConnector(self.cfg['SOCKET_CONFIGURATION']['robot_ip'],
                                       self.cfg['SOCKET_CONFIGURATION']['port_number'])
    def go_home(self):
        """Move the arm to the configured home joint angles (degrees)."""
        joint = [self.__home_joints[0], self.__home_joints[1], self.__home_joints[2],
                 self.__home_joints[3], self.__home_joints[4], self.__home_joints[5]]
        self.move_j(joint)
    def move_j(self, joints_angle, velocity=0.5, accelerate=0.6,
               solution_space='Joint'):
        """Move to the given joint angles.

        joints_angle: six joint angles in degrees.
        solution_space: 'Joint' uses movej (joint-space interpolation),
            'Space' uses movel (linear tool path).
        Blocks polling the robot state and returns True once the target
        joints are reached, False on timeout (see verify_state).
        """
        # NOTE(review): 3.14159 is truncated pi; math.pi would be more
        # precise, but the constant is kept to preserve existing behavior.
        joints_angle = [joint * 3.14159 / 180.0 for joint in joints_angle]
        move_command = ""
        if solution_space == 'Joint':
            move_command = (f"movej([{joints_angle[0]},{joints_angle[1]},{joints_angle[2]},"
                            f"{joints_angle[3]},{joints_angle[4]},{joints_angle[5]}],"
                            f"a={accelerate},v={velocity})\n")
        elif solution_space == 'Space':
            move_command = (f"movel([{joints_angle[0]},{joints_angle[1]},{joints_angle[2]},"
                            f"{joints_angle[3]},{joints_angle[4]},{joints_angle[5]}],"
                            f"a={accelerate},v={velocity})\n")
        self.__connector.start()
        self.__connector.send(move_command)
        self.__connector.close()
        return self.verify_state('q_actual', joints_angle)
    def move_p(self, position, velocity=0.5, accelerate=0.6,
               solution_space='Joint'):
        """Move the tool to a Cartesian pose.

        position: [x, y, z, roll, pitch, yaw] (meters / RPY); the RPY part
        is converted to a URScript rotation vector.  Fire-and-forget: unlike
        move_j, this does not poll for completion.
        """
        x, y, z = position[0], position[1], position[2]
        rx, ry, rz = rpy2rotation(position[3], position[4], position[5])
        move_command = ""
        if solution_space == 'Joint':
            move_command = f"movej(p[{x},{y},{z},{rx},{ry},{rz}],a={accelerate},v={velocity})\n"
        elif solution_space == 'Space':
            move_command = f"movel(p[{x},{y},{z},{rx},{ry},{rz}],a={accelerate},v={velocity})\n"
        self.__connector.start()
        self.__connector.send(move_command)
        self.__connector.close()
    def get_state(self):
        """Read and return the robot's current state message (dict-like,
        keyed by state-variable name such as 'q_actual')."""
        self.__connector.start()
        msg = self.__connector.ur_get_state('UR10e')
        self.__connector.close()
        return msg
    def verify_state(self, variable_name, target_value, error=0.01, time_out=100):
        """Poll the robot until `variable_name` converges to `target_value`.

        error: per-element absolute tolerance; time_out: maximum number of
        1-second polls.  Returns True on convergence, False on timeout.

        Bug fix: `delta` used to persist across polls, so it held the
        maximum deviation ever seen; one large initial error made
        convergence undetectable and the loop always ran to time_out.
        It is now the largest deviation of the CURRENT poll only.
        """
        flag, count, interval = True, 0, 1
        while flag and count < time_out:
            current_value = self.get_state()[variable_name]
            delta = 0  # largest per-element deviation for this poll
            for c_value, t_value in zip(current_value, target_value):
                if abs(c_value-t_value) >= delta:
                    delta = abs(c_value-t_value)
            if delta < error:
                flag = False
            else:
                time.sleep(interval)
                count = count+1
        if flag:
            print('Time out!')
        return not flag
/125softNLP-0.0.1-py3-none-any.whl/pysoftNLP/ner/kashgari/embeddings/word_embedding.py |
# author: BrikerMan
# contact: eliyar917@gmail.com
# blog: https://eliyar.biz
# file: w2v_embedding.py
# time: 2019-05-20 17:32
import logging
from typing import Union, Optional, Dict, Any, List, Tuple
import numpy as np
from gensim.models import KeyedVectors
from tensorflow import keras
from kashgari.embeddings.base_embedding import Embedding
from kashgari.processors.base_processor import BaseProcessor
L = keras.layers
class WordEmbedding(Embedding):
    """Embedding layer backed by a pre-trained gensim word2vec model.

    The word2vec vectors are loaded into a frozen Keras ``Embedding``
    layer; the first four rows are reserved for the processor's special
    tokens (pad/unk/bos/eos).
    """
    def info(self):
        # Extend the base info dict with everything needed to re-create
        # this embedding when restoring a saved model.
        info = super(WordEmbedding, self).info()
        info['config'] = {
            'w2v_path': self.w2v_path,
            'w2v_kwargs': self.w2v_kwargs,
            'sequence_length': self.sequence_length
        }
        return info
    def __init__(self,
                 w2v_path: str,
                 task: str = None,
                 w2v_kwargs: Dict[str, Any] = None,
                 sequence_length: Union[Tuple[int, ...], str, int] = 'auto',
                 processor: Optional[BaseProcessor] = None,
                 from_saved_model: bool = False):
        """
        Args:
            task: task name forwarded to the base ``Embedding``.
            w2v_path: word2vec file path
            w2v_kwargs: params pass to the ``load_word2vec_format()`` function of ``gensim.models.KeyedVectors`` -
                https://radimrehurek.com/gensim/models/keyedvectors.html#module-gensim.models.keyedvectors
            sequence_length: ``'auto'``, ``'variable'`` or integer. When using ``'auto'``, use the 95% of corpus length
                as sequence length. When using ``'variable'``, model input shape will set to None, which can handle
                various length of input, it will use the length of max sequence in every batch for sequence length.
                If using an integer, let's say ``50``, the input output sequence length will set to 50.
            processor: optional pre-configured processor shared with the model.
            from_saved_model: when True, skip loading the w2v file; the
                token table is restored elsewhere.
        """
        if w2v_kwargs is None:
            w2v_kwargs = {}
        self.w2v_path = w2v_path
        self.w2v_kwargs = w2v_kwargs
        self.w2v_model_loaded = False
        # NOTE: the base __init__ must run before the w2v file is read,
        # because _build_token2idx_from_w2v() writes into self.processor.
        super(WordEmbedding, self).__init__(task=task,
                                            sequence_length=sequence_length,
                                            embedding_size=0,
                                            processor=processor,
                                            from_saved_model=from_saved_model)
        if not from_saved_model:
            self._build_token2idx_from_w2v()
            # With 'auto' the sequence length is only known after the
            # corpus is analyzed, so the model is built later.
            if self.sequence_length != 'auto':
                self._build_model()
    def _build_token2idx_from_w2v(self):
        """Load the word2vec file and derive the token table and weight matrix."""
        w2v = KeyedVectors.load_word2vec_format(self.w2v_path, **self.w2v_kwargs)
        # Reserve indices 0-3 for special tokens; real vocabulary indices
        # therefore start at 4, matching vector_matrix[4:] below.
        token2idx = {
            self.processor.token_pad: 0,
            self.processor.token_unk: 1,
            self.processor.token_bos: 2,
            self.processor.token_eos: 3
        }
        for token in w2v.index2word:
            token2idx[token] = len(token2idx)
        vector_matrix = np.zeros((len(token2idx), w2v.vector_size))
        # <UNK> (row 1) gets a random vector; pad/bos/eos rows stay zero.
        vector_matrix[1] = np.random.rand(w2v.vector_size)
        vector_matrix[4:] = w2v.vectors
        self.embedding_size = w2v.vector_size
        self.w2v_vector_matrix = vector_matrix
        self.w2v_token2idx = token2idx
        self.w2v_top_words = w2v.index2entity[:50]
        self.w2v_model_loaded = True
        # Share the token tables with the processor so text -> index
        # conversion agrees with the embedding matrix.
        self.processor.token2idx = self.w2v_token2idx
        self.processor.idx2token = dict([(value, key) for key, value in self.w2v_token2idx.items()])
        logging.debug('------------------------------------------------')
        logging.debug('Loaded gensim word2vec model')
        logging.debug('model : {}'.format(self.w2v_path))
        logging.debug('word count : {}'.format(len(self.w2v_vector_matrix)))
        logging.debug('Top 50 word : {}'.format(self.w2v_top_words))
        logging.debug('------------------------------------------------')
    def _build_model(self, **kwargs):
        """Build the frozen Keras embed_model once vocabulary and length are known."""
        if self.token_count == 0:
            logging.debug('need to build after build_word2idx')
        else:
            input_tensor = L.Input(shape=(self.sequence_length,),
                                   name=f'input')
            # trainable=False: the pre-trained vectors are kept frozen.
            layer_embedding = L.Embedding(self.token_count,
                                          self.embedding_size,
                                          weights=[self.w2v_vector_matrix],
                                          trainable=False,
                                          name=f'layer_embedding')
            embedded_tensor = layer_embedding(input_tensor)
            self.embed_model = keras.Model(input_tensor, embedded_tensor)
    def analyze_corpus(self,
                       x: Union[Tuple[List[List[str]], ...], List[List[str]]],
                       y: Union[List[List[Any]], List[Any]]):
        """
        Prepare embedding layer and pre-processor for labeling task

        Args:
            x: tokenized input sentences.
            y: labels aligned with ``x``.

        Returns:
            None; side effect is a ready embed_model (via the base class).
        """
        if not self.w2v_model_loaded:
            self._build_token2idx_from_w2v()
        super(WordEmbedding, self).analyze_corpus(x, y)
if __name__ == "__main__":
    # Smoke entry point only; this module is normally imported as a library.
    print('hello world')
/Flappy-0.3.7.tar.gz/Flappy-0.3.7/flappy/geom/rectangle.py | from flappy.geom import Point, Matrix
class Rectangle(object):
    """Axis-aligned rectangle defined by its top-left corner (x, y) plus
    width and height. Mirrors the flash.geom.Rectangle API.

    Notes:
        - property setters no longer carry dead ``return`` statements
          (attribute assignment always discards them);
        - ``transform`` had its vertical-bounds initialization fixed (see
          the method docstring).
    """
    def __init__(self, x=0.0, y=0.0, width=0.0, height=0.0):
        self.x = x
        self.y = y
        self.width = width
        self.height = height

    def clone(self):
        """Return an independent copy of this rectangle."""
        return Rectangle(self.x, self.y, self.width, self.height)

    def contains(self, x, y):
        """True if (x, y) lies inside; right/bottom edges are exclusive."""
        return (self.x <= x < self.right) and (self.y <= y < self.bottom)

    def containsPoint(self, p):
        """True if point ``p`` (with .x/.y) lies inside."""
        return self.contains(p.x, p.y)

    def containsRect(self, rect):
        """True if ``rect`` lies entirely inside this rectangle."""
        if self.contains(rect.x, rect.y):
            if self.containsPoint(rect.bottomRight):
                return True
        return False

    def __eq__(self, rect):
        return self.x == rect.x and self.y == rect.y and \
               self.width == rect.width and self.height == rect.height

    def extendBounds(self, r):
        """Grow this rectangle in place so that it also covers ``r``."""
        dx = self.x - r.x
        if dx > 0.0:
            self.x -= dx
            self.width += dx
        dy = self.y - r.y
        if dy > 0:
            self.y -= dy
            self.height += dy
        if r.right > self.right:
            self.right = r.right
        if r.bottom > self.bottom:
            self.bottom = r.bottom

    def inflate(self, dx, dy):
        """Grow by dx/dy on every side, keeping the center fixed."""
        self.x -= dx
        self.width += dx * 2.0
        self.y -= dy
        self.height += dy * 2.0

    def inflatePoint(self, p):
        """inflate() with amounts taken from point ``p``."""
        self.inflate(p.x, p.y)

    def intersection(self, rect):
        """Return the overlapping region, or an empty Rectangle if none."""
        x0 = rect.x if self.x < rect.x else self.x
        x1 = rect.right if self.right > rect.right else self.right
        if x1 <= x0:
            return Rectangle()
        y0 = rect.y if self.y < rect.y else self.y
        y1 = rect.bottom if self.bottom > rect.bottom else self.bottom
        if y1 <= y0:
            return Rectangle()
        return Rectangle(x0, y0, x1 - x0, y1 - y0)

    def intersects(self, rect):
        """True if the rectangles overlap on a region of non-zero area."""
        x0 = rect.x if self.x < rect.x else self.x
        x1 = rect.right if self.right > rect.right else self.right
        if x1 <= x0:
            return False
        y0 = rect.y if self.y < rect.y else self.y
        y1 = rect.bottom if self.bottom > rect.bottom else self.bottom
        return y1 > y0

    def isEmpty(self):
        # NOTE(review): unlike Flash, negative sizes are NOT treated as
        # empty here; kept as-is for backward compatibility.
        return self.width == 0.0 and self.height == 0.0

    def offset(self, dx, dy):
        """Translate this rectangle by (dx, dy)."""
        self.x += dx
        self.y += dy

    def offsetPoint(self, p):
        """Translate this rectangle by the point's coordinates."""
        self.x += p.x
        self.y += p.y

    # Backward-compatible alias: the API originally shipped with this typo.
    offstePoint = offsetPoint

    def setEmpty(self):
        """Reset position and size to zero."""
        self.x = self.y = self.width = self.height = 0

    def transform(self, m):
        """Return the axis-aligned bounding box of this rectangle after
        applying the affine matrix ``m`` (attributes a, b, c, d, tx, ty).

        Bug fix: ``ty1`` was initialised from ``tx0`` instead of ``ty0``,
        which corrupted the vertical bounds whenever the first corner
        carried the extreme y coordinate.
        """
        tx0 = m.a * self.x + m.c * self.y
        tx1 = tx0
        ty0 = m.b * self.x + m.d * self.y
        ty1 = ty0
        # Remaining three corners, tracked against the running min/max.
        tx = m.a * (self.x + self.width) + m.c * self.y
        ty = m.b * (self.x + self.width) + m.d * self.y
        if tx < tx0: tx0 = tx
        if ty < ty0: ty0 = ty
        if tx > tx1: tx1 = tx
        if ty > ty1: ty1 = ty
        tx = m.a * (self.x + self.width) + m.c * (self.y + self.height)
        ty = m.b * (self.x + self.width) + m.d * (self.y + self.height)
        if tx < tx0: tx0 = tx
        if ty < ty0: ty0 = ty
        if tx > tx1: tx1 = tx
        if ty > ty1: ty1 = ty
        tx = m.a * self.x + m.c * (self.y + self.height)
        ty = m.b * self.x + m.d * (self.y + self.height)
        if tx < tx0: tx0 = tx
        if ty < ty0: ty0 = ty
        if tx > tx1: tx1 = tx
        if ty > ty1: ty1 = ty
        return Rectangle(tx0 + m.tx, ty0 + m.ty, tx1 - tx0, ty1 - ty0)

    def union(self, rect):
        """Return the smallest rectangle covering both rectangles."""
        x0 = rect.x if self.x > rect.x else self.x
        x1 = rect.right if self.right < rect.right else self.right
        y0 = rect.y if self.y > rect.y else self.y
        y1 = rect.bottom if self.bottom < rect.bottom else self.bottom
        return Rectangle(x0, y0, x1 - x0, y1 - y0)

    def __getitem__(self, key):
        # Sequence protocol: (x, y, width, height).
        its = (self.x, self.y, self.width, self.height)
        if key >= len(its):
            raise IndexError('Rectangle index out of range')
        return its[key]

    def __len__(self):
        return 4

    def __str__(self):
        return 'Rectangle (x: %.2f, y: %.2f, width: %.2f, height: %.2f)' % \
            (self.x, self.y, self.width, self.height)

    @property
    def bottom(self):
        return self.y + self.height

    @bottom.setter
    def bottom(self, b):
        # Moves the bottom edge; the top edge (y) stays fixed.
        self.height = b - self.y

    @property
    def bottomRight(self):
        return Point(self.x + self.width, self.y + self.height)

    @bottomRight.setter
    def bottomRight(self, p):
        self.width = p.x - self.x
        self.height = p.y - self.y

    @property
    def left(self):
        return self.x

    @left.setter
    def left(self, l):
        # Moves the left edge; the right edge stays fixed.
        self.width -= l - self.x
        self.x = l

    @property
    def right(self):
        return self.x + self.width

    @right.setter
    def right(self, r):
        self.width = r - self.x

    @property
    def size(self):
        return Point(self.width, self.height)

    @size.setter
    def size(self, p):
        self.width = p.x
        self.height = p.y

    @property
    def top(self):
        return self.y

    @top.setter
    def top(self, t):
        # Moves the top edge; the bottom edge stays fixed.
        self.height -= t - self.y
        self.y = t

    @property
    def topLeft(self):
        return Point(self.x, self.y)

    @topLeft.setter
    def topLeft(self, p):
        self.x = p.x
        self.y = p.y
/LitleSdkPython3-9.3.1b0.tar.gz/LitleSdkPython3-9.3.1b0/litleSdkPythonTestv2/certification/TestCert3.py |
import os, sys
lib_path = os.path.abspath('../all')
sys.path.append(lib_path)
from SetupTest import *
import unittest
class certTest3(unittest.TestCase):
    """Litle certification tests 32-36: authorization, capture and
    auth-reversal scenarios against the certification environment.

    The repetitive request construction of the original file is factored
    into private builder helpers; every test method keeps its name,
    request data and assertions unchanged.
    """

    @staticmethod
    def _contact(name, address_line1, city, state, zip_code,
                 address_line2=None):
        """Build a billing contact; country is always US in these certs."""
        contact = litleXmlFields.contact()
        contact.name = name
        contact.addressLine1 = address_line1
        if address_line2 is not None:
            contact.addressLine2 = address_line2
        contact.city = city
        contact.state = state
        contact.zip = zip_code
        contact.country = 'US'
        return contact

    @staticmethod
    def _card(number, exp_date, card_type, validation_num=None):
        """Build a cardType; cardValidationNum is optional (tests 35/36)."""
        card = litleXmlFields.cardType()
        card.number = number
        card.expDate = exp_date
        card.type = card_type
        if validation_num is not None:
            card.cardValidationNum = validation_num
        return card

    @staticmethod
    def _authorization(order_id, amount, card, bill_to=None, fraud=None):
        """Build an ecommerce authorization request."""
        authorization = litleXmlFields.authorization()
        authorization.orderId = order_id
        authorization.amount = amount
        authorization.orderSource = 'ecommerce'
        if bill_to is not None:
            authorization.billToAddress = bill_to
        authorization.card = card
        if fraud is not None:
            authorization.cardholderAuthentication = fraud
        return authorization

    @staticmethod
    def _reverse(litle_xml, txn_id, amount=None):
        """Send an authReversal for txn_id; amount only for partial tests."""
        reversal = litleXmlFields.authReversal()
        reversal.litleTxnId = txn_id
        if amount is not None:
            reversal.amount = amount
        return litle_xml.sendRequest(reversal)

    def test32(self):
        authorization = self._authorization(
            '32', 10010,
            self._card("4457010000000009", "0112", 'VI', "349"),
            self._contact("John Smith", "1 Main St.", "Burlington", "MA",
                          "01803-3747"))
        litleXml = litleOnlineRequest(config)
        response = litleXml.sendRequest(authorization)
        self.assertEqual("000", response.response)
        self.assertEqual("Approved", response.message)
        # The certification environment pads this auth code with a space.
        self.assertEqual("11111 ", response.authCode)
        self.assertEqual("01", response.fraudResult.avsResult)
        self.assertEqual("M", response.fraudResult.cardValidationResult)
        # Partial capture, then the reversal must be refused as depleted.
        capture = litleXmlFields.capture()
        capture.litleTxnId = response.litleTxnId
        capture.amount = 5005
        captureResponse = litleXml.sendRequest(capture)
        self.assertEqual("000", captureResponse.response)
        self.assertEqual("Approved", captureResponse.message)
        reversalResponse = self._reverse(litleXml, response.litleTxnId)
        self.assertEqual("111", reversalResponse.response)
        self.assertEqual("Authorization amount has already been depleted",
                         reversalResponse.message)

    def test33(self):
        fraud = litleXmlFields.fraudCheckType()
        fraud.authenticationValue = "BwABBJQ1AgAAAAAgJDUCAAAAAAA="
        authorization = self._authorization(
            '33', 20020,
            self._card("5112010000000003", "0212", 'MC', "261"),
            self._contact("Mike J. Hammer", "2 Main St.", "Riverside", "RI",
                          "02915", address_line2="Apt. 222"),
            fraud=fraud)
        litleXml = litleOnlineRequest(config)
        response = litleXml.sendRequest(authorization)
        self.assertEqual("000", response.response)
        self.assertEqual("Approved", response.message)
        self.assertEqual("22222", response.authCode)
        self.assertEqual("10", response.fraudResult.avsResult)
        self.assertEqual("M", response.fraudResult.cardValidationResult)
        reversalResponse = self._reverse(litleXml, response.litleTxnId)
        self.assertEqual("000", reversalResponse.response)
        self.assertEqual("Approved", reversalResponse.message)

    def test34(self):
        authorization = self._authorization(
            '34', 30030,
            self._card("6011010000000003", "0312", 'DI', "758"),
            self._contact("Eileen Jones", "3 Main St.", "Bloomfield", "CT",
                          "06002"))
        litleXml = litleOnlineRequest(config)
        response = litleXml.sendRequest(authorization)
        self.assertEqual("000", response.response)
        self.assertEqual("Approved", response.message)
        self.assertEqual("33333", response.authCode)
        self.assertEqual("10", response.fraudResult.avsResult)
        self.assertEqual("M", response.fraudResult.cardValidationResult)
        reversalResponse = self._reverse(litleXml, response.litleTxnId)
        self.assertEqual("000", reversalResponse.response)
        self.assertEqual("Approved", reversalResponse.message)

    def test35(self):
        # AMEX card without a validation number.
        authorization = self._authorization(
            '35', 40040,
            self._card("375001000000005", "0412", 'AX'),
            self._contact("Bob Black", "4 Main St.", "Laurel", "MD", "20708"))
        litleXml = litleOnlineRequest(config)
        response = litleXml.sendRequest(authorization)
        self.assertEqual("000", response.response)
        self.assertEqual("Approved", response.message)
        self.assertEqual("44444", response.authCode)
        self.assertEqual("12", response.fraudResult.avsResult)
        capture = litleXmlFields.capture()
        capture.litleTxnId = response.litleTxnId
        capture.amount = 20020
        captureResponse = litleXml.sendRequest(capture)
        self.assertEqual("000", captureResponse.response)
        self.assertEqual("Approved", captureResponse.message)
        reversalResponse = self._reverse(litleXml, response.litleTxnId)
        self.assertEqual("000", reversalResponse.response)
        self.assertEqual("Approved", reversalResponse.message)

    def test36(self):
        # No billing address; reversal amount mismatch must be rejected.
        authorization = self._authorization(
            '36', 20500,
            self._card("375000026600004", "0512", 'AX'))
        litleXml = litleOnlineRequest(config)
        response = litleXml.sendRequest(authorization)
        self.assertEqual("000", response.response)
        self.assertEqual("Approved", response.message)
        reversalResponse = self._reverse(litleXml, response.litleTxnId,
                                         amount=10000)
        self.assertEqual("336", reversalResponse.response)
        self.assertEqual("Reversal Amount does not match Authorization amount",
                         reversalResponse.message)
def suite():
    """Build the TestSuite holding all certTest3 cases.

    Fix: the original created an empty ``unittest.TestSuite()`` and
    immediately overwrote it; the dead assignment is removed.
    """
    return unittest.TestLoader().loadTestsFromTestCase(certTest3)

if __name__ == '__main__':
    unittest.main()
/CosmoTech-SupplyChain-5.1.0.tar.gz/CosmoTech-SupplyChain-5.1.0/Supplychain/Generic/cosmo_api_parameters.py | import os
import json
from types import SimpleNamespace
from typing import Union
from pathlib import Path
from pprint import pformat
from Supplychain.Generic.timer import Timer
# prefix components:
space = '    '
branch = '│   '
# pointers:
tee = '├── '
last = '└── '

def tree(dir_path: Path, prefix: str = ''):
    """Recursive generator yielding an ASCII tree of ``dir_path``,
    one display line at a time, each prefixed by the drawing characters
    accumulated from the enclosing levels."""
    entries = list(dir_path.iterdir())
    for index, path in enumerate(entries):
        is_last = index == len(entries) - 1
        pointer = last if is_last else tee
        yield prefix + pointer + path.name
        if path.is_dir():
            # Recurse, extending the prefix under the pointer just drawn.
            yield from tree(path, prefix=prefix + (space if is_last else branch))
class CosmoAPIParameters(Timer):
    """Reader for Cosmo Tech API run parameters.

    Parses ``parameters.json`` from the parameter folder (records are
    exposed as SimpleNamespace objects) and resolves dataset parameters
    to the folders the platform downloaded them into.
    """
    def __update(self):
        # (Re)load parameters.json and snapshot both folder listings.
        # object_hook turns every JSON object into a SimpleNamespace so
        # fields are reachable as attributes (p.parameterId, p.value, ...).
        with open(os.path.join(self.folder_path, "parameters.json")) as f:
            self.parameters_file = json.load(f,
                                             object_hook=lambda d: SimpleNamespace(**d))
        self.parameters_folders = [dir_name for dir_name in os.listdir(self.folder_path)]
        self.datasets_folders = [dir_name for dir_name in os.listdir(self.dataset_folder_path)]
    def get_all_parameters(self):
        """Yield (parameterId, value) for every parameter in the file."""
        for p in self.parameters_file:
            yield p.parameterId, p.value
    def get_all_datasets_parameters(self):
        """Yield (parameterId, local dataset path) for dataset parameters.

        Only parameters whose varType is the %DATASETID% marker qualify.
        """
        for p in self.parameters_file:
            if p.varType == "%DATASETID%":
                yield p.parameterId, self.get_dataset_path(p.parameterId)
    def get_dataset_path(self,
                         dataset_name: str) -> str:
        """Resolve a dataset parameter to its on-disk folder.

        Raises:
            ValueError: if the parameter is missing or its folder was not
                downloaded into either the parameter or dataset folder.
        """
        path = None
        try:
            path = self.get_named_parameter(dataset_name).value
        except ValueError:
            raise ValueError(f"Dataset {dataset_name} is not defined")
        if dataset_name not in self.parameters_folders and dataset_name not in self.datasets_folders:
            raise ValueError(f"Dataset ID {path} ({dataset_name}) was not downloaded")
        # The parameter folder takes precedence over the dataset folder.
        if dataset_name in self.parameters_folders:
            return os.path.join(self.folder_path, dataset_name)
        return os.path.join(self.dataset_folder_path, dataset_name)
    def get_named_parameter(self,
                            parameter_name: str,
                            default_value: Union[dict, None] = None) -> SimpleNamespace:
        """Return the parameter record by id, or a namespace built from
        ``default_value``; raises ValueError when neither exists."""
        for param in self.parameters_file:
            if parameter_name == param.parameterId:
                return param
        if default_value is not None:
            return SimpleNamespace(**default_value)
        raise ValueError(f"Parameter {parameter_name} is not defined.")
    def update_parameter(self, parameter: dict):
        """Overwrite an existing parameter's value (varType must match)
        or append ``parameter`` as a new record."""
        new_param = SimpleNamespace(**parameter)
        for param in self.parameters_file:
            if new_param.parameterId == param.parameterId:
                assert new_param.varType == param.varType
                param.value = new_param.value
                return
        self.parameters_file.append(new_param)
    def update_parameters(self, parameters: list):
        """Apply update_parameter() to every dict in ``parameters``."""
        for parameter in parameters:
            self.update_parameter(parameter)
    def __init__(self,
                 parameter_folder: str,
                 dataset_folder: str):
        # Timer base provides display_message()/prefix used below.
        Timer.__init__(self, "[API Parameters]")
        self.folder_path = parameter_folder
        self.dataset_folder_path = dataset_folder
        self.parameters_file = None
        self.parameters_folders = None
        self.__update()
    def display_infos(self):
        """Log folder trees and parameter values for debugging."""
        self.display_message("Folders content")
        self.display_message(self.folder_path)
        for line in tree(Path(self.folder_path)):
            self.display_message(line)
        self.display_message(self.dataset_folder_path)
        for line in tree(Path(self.dataset_folder_path)):
            self.display_message(line)
        self.display_message("Parameters value")
        for _l in pformat(dict(self.get_all_parameters())).split('\n'):
            print(self.prefix, _l)
        self.display_message("Datasets parameters")
        for _l in pformat(dict(self.get_all_datasets_parameters())).split('\n'):
            print(self.prefix, _l)
/Mopidy-Slack-0.1.0.tar.gz/Mopidy-Slack-0.1.0/mopidy_slack/__init__.py | from __future__ import unicode_literals
import logging
import os
from .song_notification import SongNotification
from .command import help, request, next, keep, start
from .connector import SlackConnector
import tornado.web
from mopidy import config, ext
import json
import time
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class EventsHandler(tornado.web.RequestHandler):
    """Tornado handler for the Slack Events API callback endpoint."""
    def initialize(self, core, slack_connector, listeners):
        # Dependencies are injected through the URL spec in Extension.factory().
        self.core = core
        self.slack_connector = slack_connector
        self.listeners = listeners
    def post(self):
        """Dispatch a Slack payload: URL verification handshake or event."""
        data = json.loads(self.request.body.decode('utf-8'))
        callType = data['type']
        if callType == "url_verification":
            # Slack endpoint registration handshake: echo the challenge.
            self.verify_url(data)
        if callType == "event_callback":
            event = data["event"]
            # Only plain message events are handled; others get an empty 200.
            if event["type"] == "message":
                self.apply_message(event)
    def apply_message(self, event):
        """Run every listener whose command prefixes the message text,
        post each reply to the originating channel, then acknowledge."""
        text = event["text"]
        logger.debug("[SlackHandler] got message: {} from {} on chan {}".format(text, event["user"], event["channel"]))
        for listener in self.listeners:
            if text.startswith(listener.command()):
                message_back = listener.action(text, event["user"], event["channel"])
                self.slack_connector.send_message(message_back, event["channel"])
        # Acknowledge once, after all matching listeners ran.
        self.set_header("Content-type","application/json")
        self.write({ 'status' : 'ok' })
    def verify_url(self, data):
        """Answer Slack's url_verification handshake with the challenge."""
        challenge = data['challenge']
        self.set_header("Content-type","application/json")
        self.write({ 'challenge' : challenge })
class Extension(ext.Extension):
    """Mopidy extension entry point: registers the HTTP app and wires the
    Slack command listeners to the Mopidy core."""
    dist_name = 'Mopidy-Slack'
    ext_name = 'slack'
    version = __version__
    def get_default_config(self):
        """Load the bundled ext.conf as this extension's default config."""
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)
    def get_config_schema(self):
        """Extend the base schema with the Slack bot token setting."""
        schema = super(Extension, self).get_config_schema()
        schema['bot_token'] = config.String()
        return schema
    def factory(self, config, core):
        """Build the Tornado URL spec for the /events endpoint and start
        the song-notification actor sharing state with the listeners."""
        next_counter = next.NextCounter()
        channel_holder = ChannelHolder()
        self.song_notification = SongNotification.start(SlackConnector(config, False), next_counter, channel_holder)
        self.listeners = []
        self.listeners.append(start.StartListener(core, channel_holder))
        self.listeners.append(request.RequestListener(core))
        self.listeners.append(next.NextListener(core, next_counter))
        self.listeners.append(keep.KeepListener(core, next_counter))
        # list listeners usages
        self.listeners.append(help.HelpListener(self.listeners))
        return [
            ('/events', EventsHandler, {"core": core, "slack_connector": SlackConnector(config, True), "listeners": self.listeners})
        ]
    def setup(self, registry):
        """Register the HTTP app factory with Mopidy's extension registry."""
        registry.add('http:app', {
            'name': self.ext_name,
            'factory': self.factory
        })
class ChannelHolder:
    """Mutable holder sharing the active Slack channel id between the
    command listeners and the notification actor."""
    def __init__(self):
        # Empty string means no channel has been selected yet.
        self.channel = ""
    def set_channel(self, channel):
        """Remember ``channel`` as the active channel id."""
        self.channel = channel
    def get_channel(self):
        """Return the active channel id ('' when unset)."""
        return self.channel
/BlueWhale3-3.31.3.tar.gz/BlueWhale3-3.31.3/Orange/widgets/data/owcreateinstance.py | from typing import Optional, Callable, List, Union, Dict
from collections import namedtuple
from functools import singledispatch
import numpy as np
from AnyQt.QtCore import Qt, QSortFilterProxyModel, QSize, QDateTime, \
QModelIndex, Signal, QPoint, QRect, QEvent
from AnyQt.QtGui import QStandardItemModel, QStandardItem, QIcon, QPainter, \
QColor
from AnyQt.QtWidgets import QLineEdit, QTableView, QSlider, \
QComboBox, QStyledItemDelegate, QWidget, QDateTimeEdit, QHBoxLayout, \
QDoubleSpinBox, QSizePolicy, QStyleOptionViewItem, QLabel, QMenu, QAction
from orangewidget.gui import Slider
from Orange.data import DiscreteVariable, ContinuousVariable, \
TimeVariable, Table, StringVariable, Variable, Domain
from Orange.widgets import gui
from Orange.widgets.utils.itemmodels import TableModel
from Orange.widgets.settings import Setting
from Orange.widgets.utils.widgetpreview import WidgetPreview
from Orange.widgets.widget import OWWidget, Input, Output, Msg
from Orange.i18n_config import *
def __(key):
    """Look up the localized string for ``key`` in this widget's i18n namespace."""
    return i18n.t(f"widget.data.data.owcreateinstance.{key}")
# Custom Qt item-data roles used to pass the variable, its observed
# values, and the currently edited value between the model and the
# delegate/editor widgets.
VariableRole = next(gui.OrangeUserRole)
ValuesRole = next(gui.OrangeUserRole)
ValueRole = next(gui.OrangeUserRole)
class VariableEditor(QWidget):
    """Abstract base widget for editing a single variable's value.

    Subclasses add their concrete input widget(s) to the horizontal
    layout and emit ``valueChanged`` on every user edit; ``callback``
    is connected to that signal.
    """
    valueChanged = Signal(float)
    def __init__(self, parent: QWidget, callback: Callable):
        super().__init__(parent)
        layout = QHBoxLayout()
        layout.setContentsMargins(6, 0, 6, 0)
        layout.setAlignment(Qt.AlignLeft)
        self.setLayout(layout)
        self.valueChanged.connect(callback)
    @property
    def value(self) -> Union[int, float, str]:
        # Sentinel; concrete editors override both accessor and setter.
        return NotImplemented
    @value.setter
    def value(self, value: Union[float, str]):
        raise NotImplementedError
    def sizeHint(self):
        # Fixed 40 px row height; width follows the default hint.
        return QSize(super().sizeHint().width(), 40)
class DiscreteVariableEditor(VariableEditor):
    """Editor for discrete variables: a combo box over the variable's values."""
    valueChanged = Signal(int)
    def __init__(self, parent: QWidget, items: List[str], callback: Callable):
        super().__init__(parent, callback)
        self._combo = QComboBox(
            parent,
            maximumWidth=180,
            sizePolicy=QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
        )
        self._combo.addItems(items)
        self._combo.currentIndexChanged.connect(self.valueChanged)
        self.layout().addWidget(self._combo)
    @property
    def value(self) -> int:
        # The value IS the index of the selected item.
        return self._combo.currentIndex()
    @value.setter
    def value(self, value: float):
        # The model stores discrete values as floats; they must be integral.
        assert value == int(value)
        self._combo.setCurrentIndex(int(value))
class ContinuousVariableEditor(VariableEditor):
    """Editor for continuous variables: a spin box and a slider kept in
    sync, flanked by labels showing the observed min/max.

    The slider works on integers, so values are scaled by
    ``10 ** n_decimals``; ``n_decimals`` is capped so the scaled range
    fits into MAX_FLOAT (2**31 - 1).
    """
    MAX_FLOAT = 2147483647
    def __init__(self, parent: QWidget, variable: ContinuousVariable,
                 min_value: float, max_value: float, callback: Callable):
        super().__init__(parent, callback)
        if np.isnan(min_value) or np.isnan(max_value):
            raise ValueError("Min/Max cannot be NaN.")
        n_decimals = variable.number_of_decimals
        abs_max = max(abs(min_value), max_value)
        if abs_max * 10 ** n_decimals > self.MAX_FLOAT:
            # Reduce precision so the integer slider range stays in bounds.
            n_decimals = int(np.log10(self.MAX_FLOAT / abs_max))
        self._value: float = min_value
        self._n_decimals: int = n_decimals
        self._min_value: float = self.__round_value(min_value)
        self._max_value: float = self.__round_value(max_value)
        sp_spin = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
        sp_spin.setHorizontalStretch(1)
        sp_slider = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
        sp_slider.setHorizontalStretch(5)
        sp_edit = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
        sp_edit.setHorizontalStretch(1)
        class DoubleSpinBox(QDoubleSpinBox):
            # Slightly taller than the default hint so the text is not clipped.
            def sizeHint(self) -> QSize:
                size: QSize = super().sizeHint()
                return QSize(size.width(), size.height() + 2)
        self._spin = DoubleSpinBox(
            parent,
            value=self._min_value,
            minimum=-np.inf,
            maximum=np.inf,
            singleStep=10 ** (-self._n_decimals),
            decimals=self._n_decimals,
            minimumWidth=70,
            sizePolicy=sp_spin,
        )
        self._slider = Slider(
            parent,
            minimum=self.__map_to_slider(self._min_value),
            maximum=self.__map_to_slider(self._max_value),
            singleStep=1,
            orientation=Qt.Horizontal,
            sizePolicy=sp_slider,
        )
        self._label_min = QLabel(
            parent,
            text=variable.repr_val(min_value),
            alignment=Qt.AlignRight,
            minimumWidth=60,
            sizePolicy=sp_edit,
        )
        self._label_max = QLabel(
            parent,
            text=variable.repr_val(max_value),
            alignment=Qt.AlignLeft,
            minimumWidth=60,
            sizePolicy=sp_edit,
        )
        self._slider.valueChanged.connect(self._apply_slider_value)
        self._spin.valueChanged.connect(self._apply_spin_value)
        self.layout().addWidget(self._spin)
        self.layout().addWidget(self._label_min)
        self.layout().addWidget(self._slider)
        self.layout().addWidget(self._label_max)
        self.setFocusProxy(self._spin)
        def deselect():
            self._spin.lineEdit().deselect()
            try:
                self._spin.lineEdit().selectionChanged.disconnect(deselect)
            except TypeError:
                pass
        # Invoking self.setFocusProxy(self._spin), causes the
        # self._spin.lineEdit()s to have selected texts (focus is set to
        # provide keyboard functionality, i.e.: pressing ESC after changing
        # spinbox value). Since the spin text is selected only after the
        # delegate draws it, it cannot be deselected during initialization.
        # Therefore connect the deselect() function to
        # self._spin.lineEdit().selectionChanged only for editor creation.
        self._spin.lineEdit().selectionChanged.connect(deselect)
        self._slider.installEventFilter(self)
        self._spin.installEventFilter(self)
    @property
    def value(self) -> float:
        return self.__round_value(self._value)
    @value.setter
    def value(self, value: float):
        if self._value is None or self.__round_value(value) != self.value:
            self._value = value
            self.valueChanged.emit(self.value)
            self._spin.setValue(self.value)
            # prevent emitting self.valueChanged again, due to slider change
            slider_value = self.__map_to_slider(self.value)
            self._value = self.__map_from_slider(slider_value)
            self._slider.setValue(slider_value)
            self._value = value
    def _apply_slider_value(self):
        self.value = self.__map_from_slider(self._slider.value())
    def _apply_spin_value(self):
        self.value = self._spin.value()
    def __round_value(self, value):
        return round(value, self._n_decimals)
    def __map_to_slider(self, value: float) -> int:
        # Clamp into range, then scale to the slider's integer domain.
        value = min(self._max_value, max(self._min_value, value))
        return round(value * 10 ** self._n_decimals)
    def __map_from_slider(self, value: int) -> float:
        return value * 10 ** (-self._n_decimals)
    def eventFilter(self, obj: Union[QSlider, QDoubleSpinBox], event: QEvent) \
            -> bool:
        # Swallow wheel events so scrolling the view doesn't edit values.
        if event.type() == QEvent.Wheel:
            return True
        return super().eventFilter(obj, event)
class StringVariableEditor(VariableEditor):
    """Editor for string variables: a plain line edit."""
    valueChanged = Signal()
    def __init__(self, parent: QWidget, callback: Callable):
        super().__init__(parent, callback)
        self._edit = QLineEdit(
            parent,
            sizePolicy=QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
        )
        self._edit.textChanged.connect(self.valueChanged)
        self.layout().addWidget(self._edit)
        self.setFocusProxy(self._edit)
    @property
    def value(self) -> str:
        return self._edit.text()
    @value.setter
    def value(self, value: str):
        self._edit.setText(value)
class TimeVariableEditor(VariableEditor):
    """Editor for time variables: a QDateTimeEdit whose display format is
    derived from whether the variable carries a date, a time, or both.

    Values are kept as the variable's float encoding; conversion goes
    through the variable's repr_val()/to_val() round trip.
    """
    DATE_FORMAT = "yyyy-MM-dd"
    TIME_FORMAT = "hh:mm:ss"
    def __init__(self, parent: QWidget, variable: TimeVariable,
                 callback: Callable):
        super().__init__(parent, callback)
        self._value: float = 0
        self._variable: TimeVariable = variable
        if variable.have_date and not variable.have_time:
            self._format = TimeVariableEditor.DATE_FORMAT
        elif not variable.have_date and variable.have_time:
            self._format = TimeVariableEditor.TIME_FORMAT
        else:
            self._format = f"{TimeVariableEditor.DATE_FORMAT} " \
                           f"{TimeVariableEditor.TIME_FORMAT}"
        class DateTimeEdit(QDateTimeEdit):
            # Slightly taller than the default hint so the text is not clipped.
            def sizeHint(self) -> QSize:
                size: QSize = super().sizeHint()
                return QSize(size.width(), size.height() + 2)
        self._edit = DateTimeEdit(
            parent,
            dateTime=self.__map_to_datetime(self._value),
            displayFormat=self._format,
            sizePolicy=QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Fixed)
        )
        self._edit.dateTimeChanged.connect(self._apply_edit_value)
        self.layout().addWidget(self._edit)
        self.setFocusProxy(self._edit)
        self._edit.installEventFilter(self)
    @property
    def value(self) -> float:
        return self._value
    @value.setter
    def value(self, value: float):
        if value != self.value:
            self._value = value
            self.valueChanged.emit(self.value)
            self._edit.setDateTime(self.__map_to_datetime(self.value))
    def _apply_edit_value(self):
        self.value = self.__map_from_datetime(self._edit.dateTime())
    def __map_from_datetime(self, date_time: QDateTime) -> float:
        return self._variable.to_val(date_time.toString(self._format))
    def __map_to_datetime(self, value: float) -> QDateTime:
        return QDateTime.fromString(self._variable.repr_val(value),
                                    self._format)
    def eventFilter(self, obj: QDateTimeEdit, event: QEvent) -> bool:
        # Swallow wheel events so scrolling the view doesn't edit values.
        if event.type() == QEvent.Wheel:
            return True
        return super().eventFilter(obj, event)
class VariableDelegate(QStyledItemDelegate):
    """Item delegate that shows a persistent VariableEditor per row and
    moves values between editors and the model via ValueRole."""
    def paint(self, painter: QPainter, option: QStyleOptionViewItem,
              index: QModelIndex):
        # Editors stay permanently open; painting an index opens its editor.
        self.parent().view.openPersistentEditor(index)
        super().paint(painter, option, index)
    def createEditor(self, parent: QWidget, _: QStyleOptionViewItem,
                     index: QModelIndex) -> VariableEditor:
        variable = index.data(VariableRole)
        values = index.data(ValuesRole)
        return _create_editor(variable, values, parent, self._commit_data)
    def _commit_data(self):
        # Invoked by the editor's valueChanged callback; pushes the edit
        # back into the model through Qt's commitData machinery.
        editor = self.sender()
        assert isinstance(editor, VariableEditor)
        self.commitData.emit(editor)
    # pylint: disable=no-self-use
    def setEditorData(self, editor: VariableEditor, index: QModelIndex):
        editor.value = index.model().data(index, ValueRole)
    # pylint: disable=no-self-use
    def setModelData(self, editor: VariableEditor,
                     model: QSortFilterProxyModel, index: QModelIndex):
        model.setData(index, editor.value, ValueRole)
    # pylint: disable=no-self-use
    def updateEditorGeometry(self, editor: VariableEditor,
                             option: QStyleOptionViewItem, _: QModelIndex):
        rect: QRect = option.rect
        if isinstance(editor, ContinuousVariableEditor):
            # Widen the cell if the compound spin+slider editor needs more room.
            width = editor.sizeHint().width()
            if width > rect.width():
                rect.setWidth(width)
        editor.setGeometry(rect)
    # pylint: disable=no-self-use
    def sizeHint(self, _: QStyleOptionViewItem, index: QModelIndex) -> QSize:
        # Build a throwaway editor just to measure it.
        return _create_editor(index.data(role=VariableRole), np.array([0]),
                              None, lambda: 1).sizeHint()
@singledispatch
def _create_editor(*_) -> VariableEditor:
    """Create a value editor widget; dispatches on the concrete type of the
    first argument (the variable)."""
    raise NotImplementedError


# Discrete variables: editor built over the variable's list of values.
@_create_editor.register(DiscreteVariable)
def _(variable: DiscreteVariable, _: np.ndarray,
      parent: QWidget, callback: Callable) -> DiscreteVariableEditor:
    return DiscreteVariableEditor(parent, variable.values, callback)


# Continuous variables: editor bounded by the column's min/max (NaN-safe).
@_create_editor.register(ContinuousVariable)
def _(variable: ContinuousVariable, values: np.ndarray,
      parent: QWidget, callback: Callable) -> ContinuousVariableEditor:
    return ContinuousVariableEditor(parent, variable, np.nanmin(values),
                                    np.nanmax(values), callback)


# String variables: plain text editor, values column is not needed.
@_create_editor.register(StringVariable)
def _(_: StringVariable, __: np.ndarray, parent: QWidget,
      callback: Callable) -> StringVariableEditor:
    return StringVariableEditor(parent, callback)


# Time variables: date-time editor configured from the variable's format.
@_create_editor.register(TimeVariable)
def _(variable: TimeVariable, _: np.ndarray,
      parent: QWidget, callback: Callable) -> TimeVariableEditor:
    return TimeVariableEditor(parent, variable, callback)
def majority(values: np.ndarray) -> int:
    """Return the most frequent value in ``values``, ignoring NaNs.

    Values are cast to non-negative integers; ties resolve to the
    smallest value (``argmax`` returns the first maximum).
    """
    observed = values[~np.isnan(values)].astype(int)
    return np.bincount(observed).argmax()
def disc_random(values: np.ndarray) -> int:
    """Return a random integer between the NaN-safe min and max of
    ``values``, both bounds inclusive."""
    low = np.nanmin(values)
    high = np.nanmax(values)
    # randint's upper bound is exclusive, hence the + 1.
    return np.random.randint(low=low, high=high + 1)
def cont_random(values: np.ndarray) -> float:
    """Return a random float drawn uniformly between the NaN-safe min and
    max of ``values``."""
    low = np.nanmin(values)
    high = np.nanmax(values)
    return np.random.uniform(low=low, high=high)
class VariableItemModel(QStandardItemModel):
    """Model with one row per domain variable: a read-only name item and an
    editable control item carrying the variable, its data column and the
    value chosen for the new instance."""

    # Emitted when a primitive variable's column holds only NaNs and the
    # variable is therefore skipped.
    dataHasNanColumn = Signal()

    def set_data(self, data: Table, saved_values=None):
        """Populate the model from ``data``.

        ``saved_values`` maps variable names to previously chosen values
        that are restored where still valid.
        """
        # Fix: avoid the shared mutable default argument ({}); None keeps
        # the call sites backward-compatible.
        saved_values = saved_values or {}
        domain = data.domain
        variables = [(TableModel.Attribute, a) for a in domain.attributes] + \
            [(TableModel.ClassVar, c) for c in domain.class_vars] + \
            [(TableModel.Meta, m) for m in domain.metas]
        for place, variable in variables:
            if variable.is_primitive():
                values = data.get_column_view(variable)[0].astype(float)
                if all(np.isnan(values)):
                    # Nothing to compute a default from; report and skip.
                    self.dataHasNanColumn.emit()
                    continue
            else:
                values = np.array([])
            color = TableModel.ColorForRole.get(place)
            self._add_row(variable, values, color,
                          saved_values.get(variable.name))

    def _add_row(self, variable: Variable, values: np.ndarray, color: QColor,
                 saved_value: Optional[Union[int, float, str]]):
        """Append one (name item, control item) row for ``variable``."""
        var_item = QStandardItem()
        var_item.setData(variable.name, Qt.DisplayRole)
        var_item.setToolTip(variable.name)
        var_item.setIcon(self._variable_icon(variable))
        var_item.setEditable(False)
        if color:
            var_item.setBackground(color)

        control_item = QStandardItem()
        control_item.setData(variable, VariableRole)
        control_item.setData(values, ValuesRole)
        if color:
            control_item.setBackground(color)

        value = self._default_for_variable(variable, values)
        # Restore the saved value unless it indexes a discrete value that
        # no longer exists in this variable.
        if saved_value is not None and not \
                (variable.is_discrete and saved_value >= len(variable.values)):
            value = saved_value
        control_item.setData(value, ValueRole)

        self.appendRow([var_item, control_item])

    @staticmethod
    def _default_for_variable(variable: Variable, values: np.ndarray) \
            -> Union[float, int, str]:
        """Default value: rounded median (continuous), majority class
        (discrete) or the empty string (string variables)."""
        if variable.is_continuous:
            return round(np.nanmedian(values), variable.number_of_decimals)
        elif variable.is_discrete:
            return majority(values)
        elif variable.is_string:
            return ""
        else:
            raise NotImplementedError

    @staticmethod
    def _variable_icon(variable: Variable) -> QIcon:
        """Icon for the variable's type.

        NOTE(review): is_time is tested before is_continuous, presumably
        because time variables also report as continuous -- confirm before
        reordering.
        """
        if variable.is_discrete:
            return gui.attributeIconDict[1]
        elif variable.is_time:
            return gui.attributeIconDict[4]
        elif variable.is_continuous:
            return gui.attributeIconDict[2]
        elif variable.is_string:
            return gui.attributeIconDict[3]
        else:
            return gui.attributeIconDict[-1]
class OWCreateInstance(OWWidget):
    """Widget that interactively constructs a single data instance by
    letting the user choose a value for every variable of the input
    data's domain."""
    name = __("name")
    description = __("desc")
    icon = "icons/CreateInstance.svg"
    category = "Data"
    keywords = ["simulator"]
    priority = 4000

    class Inputs:
        data = Input("Data", Table, label=i18n.t("widget.data.data.common.data"))
        reference = Input("Reference", Table, label=i18n.t("widget.data.data.common.reference"))

    class Outputs:
        data = Output("Data", Table, label=i18n.t("widget.data.data.common.data"))

    class Information(OWWidget.Information):
        nans_removed = Msg(__("msg_nans_removed"))

    want_main_area = False
    # Each action is a (tag, label) pair: the tag selects the initializer
    # in _initialize_values, the label is the translated caption.
    ACTIONS = [["median", __("btn.median")], ["mean", __("btn.mean")], ["random", __("btn.random")],
               ["input", __("btn.input")]]
    HEADER = [["name", __("label.variable")],
              ["variable", __("label.value")]]
    Header = namedtuple(
        "header", [tag for tag, _ in HEADER]
    )(*range(len(HEADER)))

    # Chosen values, persisted per-workflow (schema only).
    values: Dict[str, Union[float, str]] = Setting({}, schema_only=True)
    append_to_data = Setting(True)
    auto_commit = Setting(True)

    def __init__(self):
        super().__init__()
        self.data: Optional[Table] = None
        self.reference: Optional[Table] = None

        self.filter_edit = QLineEdit(textChanged=self.__filter_edit_changed,
                                     placeholderText=__("placeholder_filter"))
        self.view = QTableView(sortingEnabled=True,
                               contextMenuPolicy=Qt.CustomContextMenu,
                               selectionMode=QTableView.NoSelection)
        self.view.customContextMenuRequested.connect(self.__menu_requested)
        self.view.setItemDelegateForColumn(
            self.Header.variable, VariableDelegate(self)
        )
        self.view.verticalHeader().hide()
        self.view.horizontalHeader().setStretchLastSection(True)
        self.view.horizontalHeader().setMaximumSectionSize(350)

        self.model = VariableItemModel(self)
        self.model.setHorizontalHeaderLabels([x for _, x in self.HEADER])
        self.model.dataChanged.connect(self.__table_data_changed)
        self.model.dataHasNanColumn.connect(self.Information.nans_removed)
        self.proxy_model = QSortFilterProxyModel()
        self.proxy_model.setFilterKeyColumn(-1)
        self.proxy_model.setFilterCaseSensitivity(Qt.CaseInsensitive)
        self.proxy_model.setSourceModel(self.model)
        self.view.setModel(self.proxy_model)

        vbox = gui.vBox(self.controlArea, box=True)
        vbox.layout().addWidget(self.filter_edit)
        vbox.layout().addWidget(self.view)

        box = gui.hBox(vbox, objectName="buttonBox")
        gui.rubber(box)
        for fun, label in self.ACTIONS:
            gui.button(
                box, self, label,
                lambda *args, fun=fun: self._initialize_values(fun),
                autoDefault=False
            )
        gui.rubber(box)

        gui.checkBox(self.buttonsArea, self, "append_to_data",
                     __("checkbox_append"),
                     callback=self.commit.deferred)
        gui.rubber(self.buttonsArea)
        gui.auto_apply(self.buttonsArea, self, "auto_commit")
        self.settingsAboutToBePacked.connect(self.pack_settings)

    def __filter_edit_changed(self):
        # Filter rows by the (trimmed) text typed into the search box.
        self.proxy_model.setFilterFixedString(self.filter_edit.text().strip())

    def __table_data_changed(self):
        self.commit.deferred()

    def __menu_requested(self, point: QPoint):
        # Context menu offers the same initializers as the buttons, but
        # applied only to the clicked row (mapped back to the source model).
        index = self.view.indexAt(point)
        model: QSortFilterProxyModel = index.model()
        source_index = model.mapToSource(index)
        menu = QMenu(self)
        for action in self._create_actions(source_index):
            menu.addAction(action)
        menu.popup(self.view.viewport().mapToGlobal(point))

    def _create_actions(self, index: QModelIndex) -> List[QAction]:
        """One QAction per initializer, bound to the given source index.

        Bug fix: ACTIONS entries are (tag, label) pairs, so the previous
        code's ``name.capitalize()`` raised AttributeError on a list and
        passed the whole pair (unhashable) as ``fun``.
        """
        actions = []
        for fun, label in self.ACTIONS:
            action = QAction(label, self)
            action.triggered.connect(
                lambda *args, fun=fun: self._initialize_values(fun, [index])
            )
            actions.append(action)
        return actions

    def _initialize_values(self, fun: str, indices: List[QModelIndex] = None):
        """Set values for all rows (or just ``indices``) with initializer
        ``fun``: one of "median", "mean", "random", "input"."""
        cont_fun = {"median": np.nanmedian,
                    "mean": np.nanmean,
                    "random": cont_random,
                    "input": np.nanmean}.get(fun, NotImplemented)
        disc_fun = {"median": majority,
                    "mean": majority,
                    "random": disc_random,
                    "input": majority}.get(fun, NotImplemented)

        if not self.data or fun == "input" and not self.reference:
            return

        # Silence per-cell notifications; a single deferred commit follows.
        self.model.dataChanged.disconnect(self.__table_data_changed)
        rows = range(self.proxy_model.rowCount()) if indices is None else \
            [index.row() for index in indices]
        for row in rows:
            index = self.model.index(row, self.Header.variable)
            variable = self.model.data(index, VariableRole)

            if fun == "input":
                # "input" initializes from the reference table's column.
                if variable not in self.reference.domain:
                    continue
                values = self.reference.get_column_view(variable)[0]
                if variable.is_primitive():
                    values = values.astype(float)
                    if all(np.isnan(values)):
                        continue
            else:
                values = self.model.data(index, ValuesRole)

            if variable.is_continuous:
                value = cont_fun(values)
                value = round(value, variable.number_of_decimals)
            elif variable.is_discrete:
                value = disc_fun(values)
            elif variable.is_string:
                value = ""
            else:
                raise NotImplementedError

            self.model.setData(index, value, ValueRole)
        self.model.dataChanged.connect(self.__table_data_changed)
        self.commit.deferred()

    @Inputs.data
    def set_data(self, data: Table):
        """Handle new input data: rebuild the model and send output."""
        self.data = data
        self._set_model_data()
        self.commit.now()

    def _set_model_data(self):
        self.Information.nans_removed.clear()
        self.model.removeRows(0, self.model.rowCount())
        if not self.data:
            return
        self.model.set_data(self.data, self.values)
        self.values = {}
        # Temporarily drop the stretch so columns size to their contents.
        self.view.horizontalHeader().setStretchLastSection(False)
        self.view.resizeColumnsToContents()
        self.view.resizeRowsToContents()
        self.view.horizontalHeader().setStretchLastSection(True)

    @Inputs.reference
    def set_reference(self, data: Table):
        """Store the reference table used by the "input" initializer."""
        self.reference = data

    @gui.deferred
    def commit(self):
        """Create the instance table and send it to the output, optionally
        appended to the input data."""
        output_data = None
        if self.data:
            output_data = self._create_data_from_values()
            if self.append_to_data:
                output_data = self._append_to_data(output_data)
        self.Outputs.data.send(output_data)

    def _create_data_from_values(self) -> Table:
        """Build a one-row table over the input domain from chosen values."""
        data = Table.from_domain(self.data.domain, 1)
        with data.unlocked():
            data.name = "created"
            data.X[:] = np.nan
            data.Y[:] = np.nan
            for i, m in enumerate(self.data.domain.metas):
                data.metas[:, i] = "" if m.is_string else np.nan

            values = self._get_values()
            for var_name, value in values.items():
                data[:, var_name] = value
        return data

    def _append_to_data(self, data: Table) -> Table:
        """Concatenate the created row to the input data, adding a
        "Source ID" meta that marks original (0) vs. created (1) rows."""
        assert self.data
        assert len(data) == 1

        var = DiscreteVariable("Source ID", values=(self.data.name, data.name))
        data = Table.concatenate([self.data, data], axis=0)
        domain = Domain(data.domain.attributes, data.domain.class_vars,
                        data.domain.metas + (var,))
        data = data.transform(domain)
        with data.unlocked(data.metas):
            data.metas[: len(self.data), -1] = 0
            data.metas[len(self.data):, -1] = 1
        return data

    def _get_values(self) -> Dict[str, Union[str, float]]:
        """Map variable name -> currently chosen value, from the model."""
        values = {}
        for row in range(self.model.rowCount()):
            index = self.model.index(row, self.Header.variable)
            values[self.model.data(index, VariableRole).name] = \
                self.model.data(index, ValueRole)
        return values

    def send_report(self):
        if not self.data:
            return
        self.report_domain("Input", self.data.domain)
        # NOTE(review): the output domain is reported identically to the
        # input domain, although the actual output may carry an extra
        # "Source ID" meta when appending -- confirm this is intended.
        self.report_domain("Output", self.data.domain)
        items = []
        values: Dict = self._get_values()
        for var in self.data.domain.variables + self.data.domain.metas:
            val = values.get(var.name, np.nan)
            if var.is_primitive():
                val = var.repr_val(val)
            items.append([f"{var.name}:", val])
        self.report_table("Values", items)

    @staticmethod
    def sizeHint():
        return QSize(600, 500)

    def pack_settings(self):
        # Persist current values into the schema-only setting before save.
        self.values: Dict[str, Union[str, float]] = self._get_values()
if __name__ == "__main__":  # pragma: no cover
    # Fix: stray "| PypiClean" dataset residue fused onto the last line made
    # this block a syntax error; removed.
    table = Table("housing")
    WidgetPreview(OWCreateInstance).run(set_data=table,
                                        set_reference=table[:1])
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/prism/components/prism-core.min.js | var _self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{},Prism=function(){var e=/\blang(?:uage)?-(\w+)\b/i,t=0,n=_self.Prism={manual:_self.Prism&&_self.Prism.manual,disableWorkerMessageHandler:_self.Prism&&_self.Prism.disableWorkerMessageHandler,util:{encode:function(e){return e instanceof r?new r(e.type,n.util.encode(e.content),e.alias):"Array"===n.util.type(e)?e.map(n.util.encode):e.replace(/&/g,"&").replace(/</g,"<").replace(/\u00a0/g," ")},type:function(e){return Object.prototype.toString.call(e).match(/\[object (\w+)\]/)[1]},objId:function(e){return e.__id||Object.defineProperty(e,"__id",{value:++t}),e.__id},clone:function(e){var t=n.util.type(e);switch(t){case"Object":var r={};for(var a in e)e.hasOwnProperty(a)&&(r[a]=n.util.clone(e[a]));return r;case"Array":return e.map(function(e){return n.util.clone(e)})}return e}},languages:{extend:function(e,t){var r=n.util.clone(n.languages[e]);for(var a in t)r[a]=t[a];return r},insertBefore:function(e,t,r,a){a=a||n.languages;var l=a[e];if(2==arguments.length){r=arguments[1];for(var i in r)r.hasOwnProperty(i)&&(l[i]=r[i]);return l}var o={};for(var s in l)if(l.hasOwnProperty(s)){if(s==t)for(var i in r)r.hasOwnProperty(i)&&(o[i]=r[i]);o[s]=l[s]}return n.languages.DFS(n.languages,function(t,n){n===a[e]&&t!=e&&(this[t]=o)}),a[e]=o},DFS:function(e,t,r,a){a=a||{};for(var l in e)e.hasOwnProperty(l)&&(t.call(e,l,e[l],r||l),"Object"!==n.util.type(e[l])||a[n.util.objId(e[l])]?"Array"!==n.util.type(e[l])||a[n.util.objId(e[l])]||(a[n.util.objId(e[l])]=!0,n.languages.DFS(e[l],t,l,a)):(a[n.util.objId(e[l])]=!0,n.languages.DFS(e[l],t,null,a)))}},plugins:{},highlightAll:function(e,t){n.highlightAllUnder(document,e,t)},highlightAllUnder:function(e,t,r){var a={callback:r,selector:'code[class*="language-"], [class*="language-"] code, code[class*="lang-"], 
[class*="lang-"] code'};n.hooks.run("before-highlightall",a);for(var l,i=a.elements||e.querySelectorAll(a.selector),o=0;l=i[o++];)n.highlightElement(l,t===!0,a.callback)},highlightElement:function(t,r,a){for(var l,i,o=t;o&&!e.test(o.className);)o=o.parentNode;o&&(l=(o.className.match(e)||[,""])[1].toLowerCase(),i=n.languages[l]),t.className=t.className.replace(e,"").replace(/\s+/g," ")+" language-"+l,t.parentNode&&(o=t.parentNode,/pre/i.test(o.nodeName)&&(o.className=o.className.replace(e,"").replace(/\s+/g," ")+" language-"+l));var s=t.textContent,g={element:t,language:l,grammar:i,code:s};if(n.hooks.run("before-sanity-check",g),!g.code||!g.grammar)return g.code&&(n.hooks.run("before-highlight",g),g.element.textContent=g.code,n.hooks.run("after-highlight",g)),n.hooks.run("complete",g),void 0;if(n.hooks.run("before-highlight",g),r&&_self.Worker){var u=new Worker(n.filename);u.onmessage=function(e){g.highlightedCode=e.data,n.hooks.run("before-insert",g),g.element.innerHTML=g.highlightedCode,a&&a.call(g.element),n.hooks.run("after-highlight",g),n.hooks.run("complete",g)},u.postMessage(JSON.stringify({language:g.language,code:g.code,immediateClose:!0}))}else g.highlightedCode=n.highlight(g.code,g.grammar,g.language),n.hooks.run("before-insert",g),g.element.innerHTML=g.highlightedCode,a&&a.call(t),n.hooks.run("after-highlight",g),n.hooks.run("complete",g)},highlight:function(e,t,a){var l=n.tokenize(e,t);return r.stringify(n.util.encode(l),a)},matchGrammar:function(e,t,r,a,l,i,o){var s=n.Token;for(var g in r)if(r.hasOwnProperty(g)&&r[g]){if(g==o)return;var u=r[g];u="Array"===n.util.type(u)?u:[u];for(var c=0;c<u.length;++c){var h=u[c],f=h.inside,d=!!h.lookbehind,m=!!h.greedy,p=0,y=h.alias;if(m&&!h.pattern.global){var v=h.pattern.toString().match(/[imuy]*$/)[0];h.pattern=RegExp(h.pattern.source,v+"g")}h=h.pattern||h;for(var b=a,k=l;b<t.length;k+=t[b].length,++b){var w=t[b];if(t.length>e.length)return;if(!(w instanceof s)){h.lastIndex=0;var 
_=h.exec(w),P=1;if(!_&&m&&b!=t.length-1){if(h.lastIndex=k,_=h.exec(e),!_)break;for(var A=_.index+(d?_[1].length:0),j=_.index+_[0].length,x=b,O=k,N=t.length;N>x&&(j>O||!t[x].type&&!t[x-1].greedy);++x)O+=t[x].length,A>=O&&(++b,k=O);if(t[b]instanceof s||t[x-1].greedy)continue;P=x-b,w=e.slice(k,O),_.index-=k}if(_){d&&(p=_[1].length);var A=_.index+p,_=_[0].slice(p),j=A+_.length,S=w.slice(0,A),C=w.slice(j),M=[b,P];S&&(++b,k+=S.length,M.push(S));var E=new s(g,f?n.tokenize(_,f):_,y,_,m);if(M.push(E),C&&M.push(C),Array.prototype.splice.apply(t,M),1!=P&&n.matchGrammar(e,t,r,b,k,!0,g),i)break}else if(i)break}}}}},tokenize:function(e,t){var r=[e],a=t.rest;if(a){for(var l in a)t[l]=a[l];delete t.rest}return n.matchGrammar(e,r,t,0,0,!1),r},hooks:{all:{},add:function(e,t){var r=n.hooks.all;r[e]=r[e]||[],r[e].push(t)},run:function(e,t){var r=n.hooks.all[e];if(r&&r.length)for(var a,l=0;a=r[l++];)a(t)}}},r=n.Token=function(e,t,n,r,a){this.type=e,this.content=t,this.alias=n,this.length=0|(r||"").length,this.greedy=!!a};if(r.stringify=function(e,t,a){if("string"==typeof e)return e;if("Array"===n.util.type(e))return e.map(function(n){return r.stringify(n,t,e)}).join("");var l={type:e.type,content:r.stringify(e.content,t,a),tag:"span",classes:["token",e.type],attributes:{},language:t,parent:a};if(e.alias){var i="Array"===n.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(l.classes,i)}n.hooks.run("wrap",l);var o=Object.keys(l.attributes).map(function(e){return e+'="'+(l.attributes[e]||"").replace(/"/g,""")+'"'}).join(" ");return"<"+l.tag+' class="'+l.classes.join(" ")+'"'+(o?" 
"+o:"")+">"+l.content+"</"+l.tag+">"},!_self.document)return _self.addEventListener?(n.disableWorkerMessageHandler||_self.addEventListener("message",function(e){var t=JSON.parse(e.data),r=t.language,a=t.code,l=t.immediateClose;_self.postMessage(n.highlight(a,n.languages[r],r)),l&&_self.close()},!1),_self.Prism):_self.Prism;var a=document.currentScript||[].slice.call(document.getElementsByTagName("script")).pop();return a&&(n.filename=a.src,n.manual||a.hasAttribute("data-manual")||("loading"!==document.readyState?window.requestAnimationFrame?window.requestAnimationFrame(n.highlightAll):window.setTimeout(n.highlightAll,16):document.addEventListener("DOMContentLoaded",n.highlightAll))),_self.Prism}();"undefined"!=typeof module&&module.exports&&(module.exports=Prism),"undefined"!=typeof global&&(global.Prism=Prism); | PypiClean |
/DjangoFileStorageHandler-1.0.tar.gz/DjangoFileStorageHandler-1.0/README.md | # DjangoFileStorageHandler
You can route your stored files to MinIO, S3, or local storage through settings.
About
-----
This project is useful if you want to store and serve your files on different types of storage servers. You only need to change a switch in settings.py and the storage backend will change in one go.
Installation
------------
To install DjangoFileStorageHandler, simply use pip:
.. code-block:: bash
$ pip install DjangoFileStorageHandler
Documentation
-------------
You need to add few variables in your settings.py .
FILE_STORAGE_TO: This can be s3, local or Minio as per your need.
.. code-block:: python
FILE_STORAGE_TO = 's3' / 'local' / 'minio'
BUCKET_NAME: Enter the bucket name
.. code-block:: python
BUCKET_NAME = '*******'
SUPPORTED_FORMAT_LIST
.. code-block:: python
SUPPORTED_FORMAT_LIST = ['.pdf', '.png', '.bmp', '.jpeg', '.jpg', '.doc', '.txt', '.docx', '.PDF', '.PNG', '.BMP',
'.JPEG', '.JPG', '.DOC', '.TXT', '.DOCX']
In case of AWS S3 you need to add below keys:
AWS_ACCESS_KEY: Access key of AWS
.. code-block:: python
AWS_ACCESS_KEY = '************'
AWS_SECRET_KEY: SECRET Access key of AWS
.. code-block:: python
AWS_SECRET_KEY = '************'
AWS_REGION_NAME: Region name AWS
.. code-block:: python
AWS_REGION_NAME = '*******'
In case of Minio:
MINIO_ACCESS_KEY: Add Minio Access key.
.. code-block:: python
MINIO_ACCESS_KEY = '*******'
MINIO_SECURE: Add MINIO SECURE key
.. code-block:: python
MINIO_SECURE = '*******'
Example
--------
>>>sh = storage_handler(path='/docs/', filename='uploadfilename', file_data=open('example.pdf', 'r'))
>>>sh.upload_file() # upload file to Server.
{'success': 1, 'message': 'successful'}
>>>sh.get_file_byte() # gives file in byte format
{'success': 1, 'file':b'****************************....'}
>>>sh.get_file_base64() # gives file in base64 format
{'success': 1, 'message': 'successful', 'file': base64****************, 'file_extension':'***'}
>>>sh.download_file() # Gives file in HTTPResponse.
>>>sh.delete() #will delete you file
{'success': 1, 'message': 'successful'}
Found a Bug? Something Unsupported?
---------------
Please write to me at shuklashashank@outlook.com.
| PypiClean |
/MetaGram-2.0.2.tar.gz/MetaGram-2.0.2/pyrogram/methods/messages/forward_messages.py |
from datetime import datetime
from typing import Union, List, Iterable
import pyrogram
from pyrogram import raw, utils
from pyrogram import types
class ForwardMessages:
    async def forward_messages(
        self: "pyrogram.Client",
        chat_id: Union[int, str],
        from_chat_id: Union[int, str],
        message_ids: Union[int, Iterable[int]],
        disable_notification: bool = None,
        schedule_date: datetime = None,
        protect_content: bool = None
    ) -> Union["types.Message", List["types.Message"]]:
        """Forward messages of any kind.

        .. include:: /_includes/usable-by/users-bots.rst

        Parameters:
            chat_id (``int`` | ``str``):
                Unique identifier (int) or username (str) of the target chat.
                For your personal cloud (Saved Messages) you can simply use "me" or "self".
                For a contact that exists in your Telegram address book you can use his phone number (str).

            from_chat_id (``int`` | ``str``):
                Unique identifier (int) or username (str) of the source chat where the original message was sent.
                For your personal cloud (Saved Messages) you can simply use "me" or "self".
                For a contact that exists in your Telegram address book you can use his phone number (str).

            message_ids (``int`` | Iterable of ``int``):
                An iterable of message identifiers in the chat specified in *from_chat_id* or a single message id.

            disable_notification (``bool``, *optional*):
                Sends the message silently.
                Users will receive a notification with no sound.

            schedule_date (:py:obj:`~datetime.datetime`, *optional*):
                Date when the message will be automatically sent.

            protect_content (``bool``, *optional*):
                Protects the contents of the sent message from forwarding and saving.

        Returns:
            :obj:`~pyrogram.types.Message` | List of :obj:`~pyrogram.types.Message`: In case *message_ids* was not
            a list, a single message is returned, otherwise a list of messages is returned.

        Example:
            .. code-block:: python

                # Forward a single message
                await app.forward_messages(to_chat, from_chat, 123)

                # Forward multiple messages at once
                await app.forward_messages(to_chat, from_chat, [1, 2, 3])
        """
        # Fix: stray "| PypiClean" dataset residue fused onto the final
        # return statement made the module a syntax error; removed.
        is_iterable = not isinstance(message_ids, int)
        message_ids = list(message_ids) if is_iterable else [message_ids]

        r = await self.invoke(
            raw.functions.messages.ForwardMessages(
                to_peer=await self.resolve_peer(chat_id),
                from_peer=await self.resolve_peer(from_chat_id),
                id=message_ids,
                silent=disable_notification or None,
                random_id=[self.rnd_id() for _ in message_ids],
                schedule_date=utils.datetime_to_timestamp(schedule_date),
                noforwards=protect_content
            )
        )

        forwarded_messages = []
        # Index users/chats by id so message parsing can resolve peers.
        users = {i.id: i for i in r.users}
        chats = {i.id: i for i in r.chats}

        for i in r.updates:
            if isinstance(i, (raw.types.UpdateNewMessage,
                              raw.types.UpdateNewChannelMessage,
                              raw.types.UpdateNewScheduledMessage)):
                forwarded_messages.append(
                    await types.Message._parse(
                        self, i.message,
                        users, chats
                    )
                )

        # Mirror the input shape: list in, list out; scalar in, scalar out.
        return types.List(forwarded_messages) if is_iterable else forwarded_messages[0]
/NREL_shift-0.1.0a0-py3-none-any.whl/shift/utils.py |
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" This module contains utility functions used through out the package. """
from typing import List, Union, Sequence
import numpy as np
import networkx as nx
import geopy.distance
from cerberus import Validator
import shapefile
import shapely.geometry
import shapely
import pandas as pd
from networkx.algorithms import approximation as ax
from shift.exceptions import ValidationError
from shift.graph import RoadNetworkFromPolygon
def df_validator(schema: dict, df: pd.DataFrame) -> bool:
    """Validate every row of a pandas dataframe against a cerberus schema.

    All fields are required (``require_all``). Refer to the cerberus
    documentation for the schema format.

    Args:
        schema (dict): Cerberus schema the rows must satisfy
        df (pd.DataFrame): Dataframe whose rows are validated

    Raises:
        ValidationError: If any row fails validation

    Returns:
        bool: True when every row passes.
    """
    validator = Validator()
    validator.schema = schema
    validator.require_all = True
    # The condition runs validate() first, so validator.errors in the
    # message always refers to the failing record.
    failures = [
        f"Item {idx}: {validator.errors}, Record: {record}"
        for idx, record in enumerate(df.to_dict(orient="records"))
        if not validator.validate(record)
    ]
    if failures:
        raise ValidationError(failures)
    return True
def get_point_from_curve(curve: List[List[float]], x: float) -> float:
    """Evaluate a piecewise linear curve at ``x``.

    Interpolates between the two bracketing points; outside the curve's
    x-range, linearly extrapolates from the first or last segment.

    Args:
        curve (List[List[float]]): List of (x, y) pairs
        x (float): x coordinate to evaluate at

    Returns:
        float: interpolated/extrapolated y coordinate
    """
    xs = np.array([pt[0] for pt in curve])
    ys = np.array([pt[1] for pt in curve])
    # Number of curve points at or below x selects the segment.
    count = np.count_nonzero(xs <= x)
    if count >= len(xs):
        lo, hi = len(xs) - 2, len(xs) - 1   # extrapolate past the end
    elif count == 0:
        lo, hi = 0, 1                       # extrapolate before the start
    else:
        lo, hi = count - 1, count           # interpolate within the segment
    return (ys[hi] - ys[lo]) * (x - xs[lo]) / (xs[hi] - xs[lo]) + ys[lo]
def get_distance(
    point1: List[float], point2: List[float], latlon=False
) -> float:
    """Distance between two geo-points in meters (ellipsoidal earth model,
    via geopy).

    Args:
        point1 (List[float]): coordinates of point 1
        point2 (List[float]): coordinates of point 2
        latlon (bool): if True, inputs are (latitude, longitude);
            otherwise (longitude, latitude)

    Returns:
        float: distance in meters
    """
    # geopy expects (lat, lon) pairs, so swap unless the caller already
    # provided them in that order.
    if latlon:
        pair1, pair2 = point1, point2
    else:
        pair1 = (point1[1], point1[0])
        pair2 = (point2[1], point2[0])
    return geopy.distance.distance(pair1, pair2).km * 1000
def get_nearest_points_in_the_network(
    graph: nx.Graph, points: List[List[float]]
) -> dict:
    """For each point, find the nearest graph node by geo-distance.

    Args:
        graph (nx.Graph): Networkx graph whose nodes carry a "pos"
            (longitude, latitude) attribute
        points (List[List[float]]): Points to match against the graph

    Todo:
        * Fix the issue if returned nodes are same for two points.

    Returns:
        dict: nearest node name -> {"centre", "longitude", "latitude"}
    """
    positions = {
        node: attrs["pos"]
        for node, attrs in dict(graph.nodes(data=True)).items()
    }
    nearest_points = {}
    for point in points:
        # min() keeps the first minimum, matching a strict "<" scan.
        nearest_node = min(
            positions, key=lambda node: get_distance(point, positions[node])
        )
        nearest_points[nearest_node] = {
            "centre": point,
            "longitude": positions[nearest_node][0],
            "latitude": positions[nearest_node][1],
        }
    return nearest_points
def slice_up_network_edges(graph: nx.Graph, slice_in_meter: float) -> nx.Graph:
    """Build a new graph whose edges are subdivided so that no segment is
    longer than ``slice_in_meter``.

    Args:
        graph (nx.Graph): Networkx graph; nodes carry either a "pos"
            attribute or "x"/"y" attributes
        slice_in_meter (float): Maximum edge segment length in meters

    Returns:
        nx.Graph: Graph with intermediate nodes inserted along each edge
    """
    sliced = nx.Graph()
    coords = {
        node: attrs["pos"] if "pos" in attrs else [attrs["x"], attrs["y"]]
        for node, attrs in graph.nodes.data()
    }
    for u, v in graph.edges():
        x1, y1 = coords[u][0], coords[u][1]
        x2, y2 = coords[v][0], coords[v][1]
        length = get_distance((x1, y1), (x2, y2))
        # Fractions along the edge: 0, step/len, 2*step/len, ..., plus the
        # far endpoint at 1.
        fractions = [
            d / length for d in np.arange(0, length, slice_in_meter)
        ] + [1]

        names = []
        for frac in fractions:
            px = x1 + (x2 - x1) * frac
            py = y1 + (y2 - y1) * frac
            name = f"{px}_{py}_node"
            sliced.add_node(name, pos=(px, py), type="node", data={})
            names.append(name)
        for a, b in zip(names, names[1:]):
            sliced.add_edge(a, b, type="edge")
    return sliced
def get_forbidden_polygons(shp_file: str) -> List[shapely.geometry.Polygon]:
    """Read all Polygon features from a shapefile.

    Args:
        shp_file (str): Path to the .shp file

    Returns:
        List[shapely.geometry.Polygon]: Shapely polygons (outer rings only)
    """
    reader = shapefile.Reader(shp_file)
    polygons = []
    for feature in reader.shapeRecords():
        geo = feature.shape.__geo_interface__
        # Non-polygon features (points, lines, multipolygons) are skipped.
        if geo["type"] != "Polygon":
            continue
        polygons.append(shapely.geometry.Polygon(geo["coordinates"][0]))
    return polygons
def get_slices(start: float, end: float, num_steps: int) -> List[float]:
    """Return ``num_steps + 1`` evenly spaced values from ``start`` to
    ``end``, both endpoints included."""
    span = end - start
    slices = []
    for i in range(num_steps + 1):
        slices.append(start + i * span / num_steps)
    return slices
def create_rectangular_mesh_network(
    lower_left: tuple,
    upper_right: tuple,
    vertical_space_meter: float = 32,
    horizontal_space_meter: float = 32,
    forbidden_areas: Union[str, None] = None,
    node_append_str: Union[str, None] = None,
) -> Sequence[tuple[nx.Graph, dict]]:
    """Creates a rectangular mesh network from a given set of points.

    The mesh is a lattice of nodes covering the bounding box between
    ``lower_left`` and ``upper_right``.  If a road network can be fetched
    for the area, mesh nodes too close to the roads are replaced by the
    (sliced) road network itself; nodes inside forbidden polygons are then
    removed, and finally only the largest connected component is kept.

    Args:
        lower_left (tuple): (longitude, latitude) representing lower left point
        upper_right (tuple): (longitude, latitude) representing
            upper right point
        vertical_space_meter (float): Vertical spacing in meter
        horizontal_space_meter (float): Horizontal spacing in meter
        forbidden_areas (Union[str, None]): Shp file representing
            forbidden polygons
        node_append_str (Union[str, None]): String to be appended
            at the end of node name
    Returns:
        Sequence[tuple[nx.Graph, dict]]: Graph and mapping between
            nodes and coordinates
    """
    # Assuming tuples first element is longitude and second element is latitude
    # 50m is a common distance between low tension pole
    # First initialize the network
    graph = nx.Graph()
    # Find coordinates for the four corners of the bounding box
    north_west = (lower_left[0], upper_right[1])
    north_east = upper_right
    south_west = lower_left
    south_east = (upper_right[0], lower_left[1])
    # Report the physical size of the box (in meters)
    horizontal_distance = get_distance(south_west, south_east)
    vertical_distance = get_distance(south_west, north_west)
    print(
        f"Vertical distance {vertical_distance}m,"
        + f" Horizontal distance {horizontal_distance}m"
    )
    # Compute number of sections required in horizontal
    # (west-east) and vertical (north-south) direction; at least one
    # section in each direction even for tiny boxes
    horizontal_sections = max(
        int(horizontal_distance / horizontal_space_meter), 1
    )
    vertical_sections = max(int(vertical_distance / vertical_space_meter), 1)
    print(
        f"Vertical sections: {vertical_sections},"
        + f"horizontal sections: {horizontal_sections}"
    )
    # Create the lattice nodes.  vertical_edges / horizontal_edges collect
    # node names column-by-column and row-by-row so consecutive entries can
    # later be joined with edges.  Node names encode lon/lat, so the second
    # pass below regenerates the same names without re-adding nodes.
    vertical_edges, horizontal_edges = [], []
    for lon in get_slices(lower_left[0], upper_right[0], horizontal_sections):
        vertical_node_list = []
        for lat in get_slices(lower_left[1], upper_right[1], vertical_sections):
            node_name = f"{lon}_{lat}_{node_append_str}_node"
            graph.add_node(node_name, pos=(lon, lat))
            vertical_node_list.append(node_name)
        vertical_edges.append(vertical_node_list)
    for lat in get_slices(lower_left[1], upper_right[1], vertical_sections):
        horizontal_node_list = []
        for lon in get_slices(
            lower_left[0], upper_right[0], horizontal_sections
        ):
            node_name = f"{lon}_{lat}_{node_append_str}_node"
            horizontal_node_list.append(node_name)
        horizontal_edges.append(horizontal_node_list)
    # Join consecutive nodes in every column and every row
    for vertical_points in vertical_edges:
        for i in range(len(vertical_points) - 1):
            graph.add_edge(vertical_points[i], vertical_points[i + 1])
    for horizontal_points in horizontal_edges:
        for i in range(len(horizontal_points) - 1):
            graph.add_edge(horizontal_points[i], horizontal_points[i + 1])
    # Snapshot of node -> (lon, lat) for the full lattice
    points = {
        key: val["pos"] for key, val in dict(graph.nodes(data=True)).items()
    }
    # Try to overlay the road network for this area; if fetching/building it
    # fails (no roads, empty graph, ...) we fall through and keep the plain
    # lattice.
    try:
        road_ = RoadNetworkFromPolygon(
            [north_west, north_east, south_east, south_west, north_west]
        )
        road_.get_network(node_append_str)
        # Remove mesh nodes that are near the road network
        d_threshold = min(horizontal_space_meter, vertical_space_meter)
        # First slice the road edges so no edge is longer than d_threshold
        sliced_road = slice_up_network_edges(road_.updated_network, d_threshold)
        # NOTE(review): assumes node_append_str is a string here (string
        # concatenation); passing None would raise — TODO confirm callers.
        sliced_road = nx.relabel_nodes(
            sliced_road, {n: n + node_append_str for n in sliced_road.nodes()}
        )
        # Loop through sliced road nodes and drop lattice nodes closer than
        # d_threshold.  ``points`` is a stale snapshot, so already-removed
        # nodes may be attempted again; the NetworkXError is caught and
        # only printed.
        sliced_road_nodes = {
            key: val["pos"]
            for key, val in dict(sliced_road.nodes(data=True)).items()
        }
        for _, road_node_coords in sliced_road_nodes.items():
            for node, node_coords in points.items():
                if get_distance(node_coords, road_node_coords) < d_threshold:
                    try:
                        graph.remove_node(node)
                    # print(f"{node} node removed")
                    except nx.NetworkXError as e:
                        print(e)
                        pass
        # Now let's connect the sliced road network to the truncated mesh
        # network.  First step is to find the nearest node for each of the
        # sliced road nodes in the truncated mesh network.
        # Refresh the node -> coordinates snapshot after the removals above
        points = {
            key: val["pos"] for key, val in dict(graph.nodes(data=True)).items()
        }
        nearest_nodes_meshed_network = {}
        for node, coords in sliced_road_nodes.items():
            # Linear scan for the nearest remaining mesh node.
            # NOTE(review): if the mesh ended up empty, min_distance stays
            # None and the comparison below would raise TypeError — TODO
            # confirm this cannot happen in practice.
            min_distance, nearest_node = None, None
            for mesh_node, mesh_node_coords in points.items():
                distance = get_distance(coords, mesh_node_coords)
                if min_distance is None:
                    min_distance = distance
                    nearest_node = mesh_node
                else:
                    if distance < min_distance:
                        min_distance = distance
                        nearest_node = mesh_node
            # Only bridge road nodes that are reasonably close to the mesh
            if min_distance < (1.5 * d_threshold):
                nearest_nodes_meshed_network[node] = nearest_node
        # Second step is to add the sliced road nodes/edges to the
        # truncated mesh network
        for node, coords in sliced_road_nodes.items():
            graph.add_node(node, pos=coords)
        for edge in sliced_road.edges():
            graph.add_edge(edge[0], edge[1])
        # Add bridging edges to connect the road to the mesh network
        for node1, node2 in nearest_nodes_meshed_network.items():
            graph.add_edge(node1, node2)
        # Refresh the node -> coordinates snapshot again
        points = {
            key: val["pos"] for key, val in dict(graph.nodes(data=True)).items()
        }
    except (nx.NetworkXPointlessConcept, ValueError) as e:
        print(e)
    # Remove nodes falling inside forbidden areas (e.g. lakes and rivers)
    if forbidden_areas is not None:
        # get all forbidden polygons
        forbidden_polygons = get_forbidden_polygons(forbidden_areas)
        # Polygon of the area of interest (closed ring)
        customer_polygon = shapely.geometry.Polygon(
            [north_west, north_east, south_east, south_west, north_west]
        )
        # Keep only forbidden polygons overlapping the area of interest
        forbidden_polygon_subset = []
        for polygon in forbidden_polygons:
            if polygon.intersects(customer_polygon):
                forbidden_polygon_subset.append(polygon)
        if forbidden_polygon_subset:
            for polygon in forbidden_polygon_subset:
                for node, coords in points.items():
                    node_point = shapely.geometry.Point(coords)
                    if node_point.within(polygon):
                        # Node may already be gone (stale snapshot); the
                        # duplicate removal is silently ignored.
                        try:
                            graph.remove_node(node)
                        except nx.NetworkXError as e:
                            # print(e)
                            pass
    # Keep only the largest connected component.  NOTE(review): subgraph()
    # returns a frozen view, so callers presumably do not mutate the
    # returned graph — confirm before adding mutation downstream.
    if not nx.is_connected(graph):
        largest_component = max(nx.connected_components(graph), key=len)
        graph = graph.subgraph(largest_component)
    # Final node -> coordinates mapping for the returned graph
    points = {
        key: val["pos"] for key, val in dict(graph.nodes(data=True)).items()
    }
    return graph, points
def mesh_pruning(
    mesh_graph: nx.Graph, customers: List[List[float]]
) -> Sequence[tuple[nx.Graph, dict]]:
    """Prunes the mesh graph by keeping the nodes specified.

    For each customer the nearest mesh node is located, and a Steiner tree
    spanning those terminal nodes is extracted from the mesh.

    Args:
        mesh_graph (nx.Graph): Graph to be pruned
        customers: List[List[float]]: List of points to be used for pruning

    Returns:
        Sequence[tuple[nx.Graph, dict]]: Pruned network and
            mapping between customer and node
    """
    # Coordinates of every node in the mesh, keyed by node name.
    node_positions = {
        name: attrs["pos"]
        for name, attrs in dict(mesh_graph.nodes(data=True)).items()
    }
    terminal_nodes = []
    customer_to_node_mapper = {}
    for customer in customers:
        # Linear scan for the mesh node closest to this customer; the first
        # node achieving the minimum distance wins.
        best_node = None
        best_distance = float("inf")
        for candidate, candidate_pos in node_positions.items():
            candidate_distance = get_distance(customer, candidate_pos)
            if candidate_distance < best_distance:
                best_distance = candidate_distance
                best_node = candidate
        if best_node not in terminal_nodes:
            terminal_nodes.append(best_node)
        customer_to_node_mapper[
            f"{customer[0]}_{customer[1]}_customer"
        ] = best_node
    # A Steiner tree keeps the terminals connected at (near-)minimal cost.
    graph_mst = ax.steinertree.steiner_tree(mesh_graph, terminal_nodes)
    return graph_mst, customer_to_node_mapper
def triangulate_using_mesh(
    customers: List[List[float]],
    forbidden_areas: Union[str, None] = None,
    node_append_str: Union[str, None] = None,
) -> Sequence[tuple[nx.Graph, dict, dict]]:
    """Creates a minimum spanning graph connecting
    customers by avoiding forbidden region.

    A rectangular mesh covering the customers' bounding box is generated
    first, then pruned down to a Steiner tree spanning the mesh nodes
    nearest to each customer.

    Args:
        customers (List[List[float]]): List of (lon, lat) points to be used
            to create graph
        forbidden_areas (Union[str, None]): Path to .shp file
        node_append_str (Union[str, None]): String to be appended
            to node name

    Returns:
        Sequence[tuple[nx.Graph, dict, dict]]: Minimum spanning tree,
            mapping between point and coordinates
            and customer to node mapping.
    """
    # Bounding box around all customer points.
    longitudes = [point[0] for point in customers]
    latitudes = [point[1] for point in customers]
    lower_left = (min(longitudes), min(latitudes))
    upper_right = (max(longitudes), max(latitudes))
    graph, points = create_rectangular_mesh_network(
        lower_left,
        upper_right,
        forbidden_areas=forbidden_areas,
        node_append_str=node_append_str,
    )
    graph_mst, customer_to_node_mapper = mesh_pruning(graph, customers)
    # graph, points = add_customer_nodes_and_edges(graph_mst,
    # customer_to_node_mapper)
    return graph_mst, points, customer_to_node_mapper
def set_node_edge_type(network: nx.Graph) -> nx.Graph:
    """Sets the type to node and edge.

    Every node receives ``type="node"`` and a ``data`` payload of
    ``{"type": "node"}``; every edge receives ``type="edge"``.  The graph
    is modified in place and also returned for convenience.

    Args:
        network (nx.Graph): Networkx graph instance

    Returns:
        nx.Graph: Updated graph
    """
    nx.set_node_attributes(network, "node", name="type")
    # NOTE: a plain dict passed to set_node_attributes is interpreted as a
    # node-keyed mapping, so the previous ``{"type": "node"}`` argument only
    # touched a node literally named "type".  Build an explicit per-node
    # mapping so every node receives the payload.
    nx.set_node_attributes(
        network,
        {node: {"type": "node"} for node in network.nodes()},
        name="data",
    )
    nx.set_edge_attributes(network, "edge", name="type")
    return network
/Djblets-3.3.tar.gz/Djblets-3.3/djblets/mail/utils.py |
from email.utils import escapesre, parseaddr, specialsre
from django.conf import settings
def build_email_address(email, full_name=None):
    """Build an e-mail address for a To/CC/BCC field from a user's information.

    Args:
        email (unicode):
            The e-mail address.

        full_name (unicode, optional):
            The optional full name associated with the e-mail address.

    Returns:
        unicode:
        A formatted e-mail address intended for a To/CC/BCC field.
    """
    if not full_name:
        return email

    # Backslash-escape quotes and backslashes in the display name.
    display_name = escapesre.sub(r'\\\g<0>', full_name)

    # Names containing special characters must be wrapped in quotes.
    if specialsre.search(full_name):
        display_name = '"%s"' % display_name

    return '%s <%s>' % (display_name, email)
def build_email_address_for_user(user):
    """Build an e-mail address for a To/CC/BCC field from a User.

    Args:
        user (django.contrib.auth.models.User):
            The user.

    Returns:
        unicode:
        A formatted e-mail address intended for a To/CC/BCC field.
    """
    full_name = user.get_full_name()

    return build_email_address(email=user.email, full_name=full_name)
def build_email_address_via_service(email, full_name=None, service_name=None,
                                    sender_email=None):
    """Build an e-mail address for sending on behalf of a user via a service.

    This will construct a formatted e-mail address that can be safely used
    in a :mailheader:`From` field without risking being quarantined/rejected
    by DMARC rules.

    The address will be in the form of "Full Name via Service Name
    <sender@domain.tld>".

    Args:
        email (unicode):
            The unformatted e-mail address of the user.

        full_name (unicode, optional):
            The full name of the user. If not provided, the username in the
            e-mail address will be used.

        service_name (unicode, optional):
            The name of the service sending the e-mail. If not provided,
            ``settings.EMAIL_DEFAULT_SENDER_SERVICE_NAME`` will be used.

        sender_email (unicode, optional):
            The unformatted e-mail address for the sending service. If not
            provided, the e-mail address in
            :django:setting:`DEFAULT_FROM_EMAIL` will be used.

    Returns:
        unicode:
        A formatted e-mail address safe to use in a :mailheader:`From` field.
    """
    if not service_name:
        # No explicit service name. Prefer the configured default, and as
        # a last resort fall back to the domain part of the user's e-mail
        # address (assuming it parsed; if it didn't, there are bigger
        # problems we're not going to deal with here).
        default_name = getattr(
            settings, 'EMAIL_DEFAULT_SENDER_SERVICE_NAME', None)
        service_name = default_name or email.split('@')[-1]

    if not sender_email:
        # Extract the bare address from the configured From value.
        sender_email = parseaddr(settings.DEFAULT_FROM_EMAIL)[1]

    if not full_name:
        # No full name available; use the local part of the user's address.
        full_name = email.split('@')[0]

    return build_email_address(
        email=sender_email,
        full_name='%s via %s' % (full_name, service_name))
/NlpToolkit-Classification-1.0.16.tar.gz/NlpToolkit-Classification-1.0.16/Classification/Model/GaussianModel.py | from abc import abstractmethod
from io import TextIOWrapper
from Math.DiscreteDistribution import DiscreteDistribution
from Math.Vector import Vector
from Classification.Instance.CompositeInstance import CompositeInstance
from Classification.Instance.Instance import Instance
from Classification.Model.ValidatedModel import ValidatedModel
class GaussianModel(ValidatedModel):
    """Abstract base class for Gaussian-style classifiers that score each
    candidate class with a metric and predict the class maximizing it."""

    # Prior probability distribution over class labels, populated by
    # loadPriorDistribution().
    prior_distribution: DiscreteDistribution

    @abstractmethod
    def calculateMetric(self,
                        instance: Instance,
                        Ci: str) -> float:
        """Compute the discriminant metric of ``instance`` for class ``Ci``.

        Subclasses implement the actual (e.g. Gaussian) scoring rule.
        """
        pass

    def loadPriorDistribution(self, inputFile: TextIOWrapper) -> int:
        """Load the class prior distribution from an open model file.

        The first line holds the number of distinct classes; each following
        line is "<label> <count>", and the label is added <count> times.

        PARAMETERS
        ----------
        inputFile : TextIOWrapper
            Open text file positioned at the prior-distribution section.

        RETURNS
        -------
        int
            The number of distinct class labels read.
        """
        size = int(inputFile.readline().strip())
        self.prior_distribution = DiscreteDistribution()
        for i in range(size):
            line = inputFile.readline().strip()
            items = line.split(" ")
            for j in range(int(items[1])):
                self.prior_distribution.addItem(items[0])
        return size

    def loadVectors(self,
                    inputFile: TextIOWrapper,
                    size: int) -> dict:
        """Read ``size`` labeled vectors from an open model file.

        Each line is "<label> <dimension> <v_0> <v_1> ...".

        PARAMETERS
        ----------
        inputFile : TextIOWrapper
            Open text file positioned at the vectors section.
        size : int
            Number of vectors (lines) to read.

        RETURNS
        -------
        dict
            Mapping from label to its Vector.
        """
        hash_map = dict()
        for i in range(size):
            line = inputFile.readline().strip()
            items = line.split(" ")
            vector = Vector(int(items[1]), 0)
            for j in range(2, len(items)):
                vector.setValue(j - 2, float(items[j]))
            hash_map[items[0]] = vector
        return hash_map

    def predict(self, instance: Instance) -> str:
        """
        The predict method takes an Instance as an input. First it gets the
        size of the prior distribution and loops this size times. Then it
        gets the possible class labels and calculates the metric value. At
        the end, it returns the class which has the maximum value of metric.

        PARAMETERS
        ----------
        instance : Instance
            Instance to predict.

        RETURNS
        -------
        str
            The class which has the maximum value of metric.
        """
        # Use -inf as the initial best score. The previous sentinel of
        # -10000000 silently ignored any class whose metric (e.g. a very
        # negative log-likelihood) fell below it, misranking such classes.
        max_metric = float("-inf")
        if isinstance(instance, CompositeInstance):
            # Restrict the search to the instance's candidate labels.
            predicted_class = instance.getPossibleClassLabels()[0]
            size = len(instance.getPossibleClassLabels())
        else:
            # Fall back to the most frequent class if nothing scores higher.
            predicted_class = self.prior_distribution.getMaxItem()
            size = len(self.prior_distribution)
        for i in range(size):
            if isinstance(instance, CompositeInstance):
                Ci = instance.getPossibleClassLabels()[i]
            else:
                Ci = self.prior_distribution.getItem(i)
            # Only score classes actually seen in training.
            if self.prior_distribution.containsItem(Ci):
                metric = self.calculateMetric(instance, Ci)
                if metric > max_metric:
                    max_metric = metric
                    predicted_class = Ci
        return predicted_class
/Cibyl-1.0.0.0rc1.tar.gz/Cibyl-1.0.0.0rc1/cibyl/outputs/cli/ci/system/impls/base/colored.py | import logging
from overrides import overrides
from cibyl.cli.query import QueryType
from cibyl.models.ci.base.system import System
from cibyl.models.product.feature import Feature
from cibyl.outputs.cli.ci.system.common.features import is_features_query
from cibyl.outputs.cli.ci.system.printer import CISystemPrinter
from cibyl.outputs.cli.ci.system.utils.sorting.builds import SortBuildsByUUID
from cibyl.outputs.cli.ci.system.utils.sorting.jobs import SortJobsByName
from cibyl.outputs.cli.printer import ColoredPrinter
from cibyl.utils.colors import ColorPalette, DefaultPalette
from cibyl.utils.sorting import BubbleSortAlgorithm, SortingAlgorithm
from cibyl.utils.strings import IndentedTextBuilder
LOG = logging.getLogger(__name__)
class ColoredBaseSystemPrinter(ColoredPrinter, CISystemPrinter):
    """Default printer for all system models. This one is decorated with
    colors for easier read.
    """

    def __init__(self,
                 query: QueryType = QueryType.NONE,
                 verbosity: int = 0,
                 palette: ColorPalette = DefaultPalette(),
                 job_sorter: SortingAlgorithm
                 = BubbleSortAlgorithm(SortJobsByName()),
                 build_sorter: SortingAlgorithm
                 = BubbleSortAlgorithm(SortBuildsByUUID())):
        """Constructor. See parent for more information.

        :param job_sorter: Determines the order on which jobs are printed.
        :param build_sorter: Determines the order on which builds are printed.
        """
        super().__init__(query, verbosity, palette)

        self._job_sorter = job_sorter
        self._build_sorter = build_sorter

    @overrides
    def print_system(self, system: System) -> str:
        """Render a system header and, for feature queries, its features.

        :param system: The system model to print.
        :return: Textual representation of the system.
        """
        printer = IndentedTextBuilder()

        # Use the public ``palette`` accessor for consistency with
        # print_feature (this previously read ``self._palette`` directly).
        printer.add(self.palette.blue('System: '), 0)
        printer[-1].append(system.name.value)

        if self.verbosity > 0:
            printer[-1].append(f' (type: {system.system_type.value})')

        if is_features_query(self.query):
            for feature in system.features.values():
                printer.add(self.print_feature(feature), 1)

        return printer.build()

    def print_feature(self, feature: Feature) -> str:
        """Render a single feature line.

        :param feature: The feature model to print.
        :return: Textual representation of the feature.
        """
        printer = IndentedTextBuilder()

        name = feature.name.value
        present = feature.present.value

        printer.add(self.palette.blue(f'{name} feature: '), 0)
        printer[-1].append(present)

        return printer.build()
// MathJax 2.7.9 "TeX/AMSmath" extension (minified, vendored bundle).
// Adds the AMS environments (align, gather, multline, alignat, split, ...),
// \tag/\label/\ref/\eqref equation numbering, \DeclareMathOperator,
// \operatorname, \genfrac/\cfrac/\binom and the extensible x-arrows to the
// TeX input jax, wiring equation-number bookkeeping into the pre/post
// filter hooks.
// NOTE(review): machine-minified third-party code -- do not hand-edit; make
// changes against the unpacked upstream source (extensions/TeX/AMSmath.js).
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/extensions/TeX/AMSmath.js | MathJax.Extension["TeX/AMSmath"]={version:"2.7.9",number:0,startNumber:0,IDs:{},eqIDs:{},labels:{},eqlabels:{},refs:[]};MathJax.Hub.Register.StartupHook("TeX Jax Ready",function(){var b=MathJax.ElementJax.mml,h=MathJax.InputJax.TeX,g=MathJax.Extension["TeX/AMSmath"];var d=h.Definitions,f=h.Stack.Item,a=h.config.equationNumbers;var c=function(k){var n=[];for(var l=0,j=k.length;l<j;l++){n[l]=h.Parse.prototype.Em(k[l])}return n.join(" ")};var e=(document.getElementsByTagName("base").length===0)?"":String(document.location).replace(/#.*$/,"");d.Add({mathchar0mo:{iiiint:["2A0C",{texClass:b.TEXCLASS.OP}]},macros:{mathring:["Accent","2DA"],nobreakspace:"Tilde",negmedspace:["Spacer",b.LENGTH.NEGATIVEMEDIUMMATHSPACE],negthickspace:["Spacer",b.LENGTH.NEGATIVETHICKMATHSPACE],idotsint:["MultiIntegral","\\int\\cdots\\int"],dddot:["Accent","20DB"],ddddot:["Accent","20DC"],sideset:["Macro","\\mathop{\\mathop{\\rlap{\\phantom{#3}}}\\nolimits#1\\!\\mathop{#3}\\nolimits#2}",3],boxed:["Macro","\\fbox{$\\displaystyle{#1}$}",1],tag:"HandleTag",notag:"HandleNoTag",label:"HandleLabel",ref:"HandleRef",eqref:["HandleRef",true],substack:["Macro","\\begin{subarray}{c}#1\\end{subarray}",1],injlim:["NamedOp","inj lim"],projlim:["NamedOp","proj 
lim"],varliminf:["Macro","\\mathop{\\underline{\\mmlToken{mi}{lim}}}"],varlimsup:["Macro","\\mathop{\\overline{\\mmlToken{mi}{lim}}}"],varinjlim:["Macro","\\mathop{\\underrightarrow{\\mmlToken{mi}{lim}}}"],varprojlim:["Macro","\\mathop{\\underleftarrow{\\mmlToken{mi}{lim}}}"],DeclareMathOperator:"HandleDeclareOp",operatorname:"HandleOperatorName",SkipLimits:"SkipLimits",genfrac:"Genfrac",frac:["Genfrac","","","",""],tfrac:["Genfrac","","","",1],dfrac:["Genfrac","","","",0],binom:["Genfrac","(",")","0",""],tbinom:["Genfrac","(",")","0",1],dbinom:["Genfrac","(",")","0",0],cfrac:"CFrac",shoveleft:["HandleShove",b.ALIGN.LEFT],shoveright:["HandleShove",b.ALIGN.RIGHT],xrightarrow:["xArrow",8594,5,6],xleftarrow:["xArrow",8592,7,3]},environment:{align:["AMSarray",null,true,true,"rlrlrlrlrlrl",c([0,2,0,2,0,2,0,2,0,2,0])],"align*":["AMSarray",null,false,true,"rlrlrlrlrlrl",c([0,2,0,2,0,2,0,2,0,2,0])],multline:["Multline",null,true],"multline*":["Multline",null,false],split:["AMSarray",null,false,false,"rl",c([0])],gather:["AMSarray",null,true,true,"c"],"gather*":["AMSarray",null,false,true,"c"],alignat:["AlignAt",null,true,true],"alignat*":["AlignAt",null,false,true],alignedat:["AlignAt",null,false,false],aligned:["AlignedAMSArray",null,null,null,"rlrlrlrlrlrl",c([0,2,0,2,0,2,0,2,0,2,0]),".5em","D"],gathered:["AlignedAMSArray",null,null,null,"c",null,".5em","D"],subarray:["Array",null,null,null,null,c([0]),"0.1em","S",1],smallmatrix:["Array",null,null,null,"c",c([1/3]),".2em","S",1],equation:["EquationBegin","Equation",true],"equation*":["EquationBegin","EquationStar",false],eqnarray:["AMSarray",null,true,true,"rcl","0 "+b.LENGTH.THICKMATHSPACE,".5em"],"eqnarray*":["AMSarray",null,false,true,"rcl","0 
"+b.LENGTH.THICKMATHSPACE,".5em"]},delimiter:{"\\lvert":["007C",{texClass:b.TEXCLASS.OPEN}],"\\rvert":["007C",{texClass:b.TEXCLASS.CLOSE}],"\\lVert":["2016",{texClass:b.TEXCLASS.OPEN}],"\\rVert":["2016",{texClass:b.TEXCLASS.CLOSE}]}},null,true);h.Parse.Augment({HandleTag:function(k){var m=this.GetStar();var j=this.trimSpaces(this.GetArgument(k)),i=j;if(!m){j=a.formatTag(j)}var l=this.stack.global;l.tagID=i;if(l.notags){h.Error(["CommandNotAllowedInEnv","%1 not allowed in %2 environment",k,l.notags])}if(l.tag){h.Error(["MultipleCommand","Multiple %1",k])}l.tag=b.mtd.apply(b,this.InternalMath(j)).With({id:a.formatID(i)})},HandleNoTag:function(i){if(this.stack.global.tag){delete this.stack.global.tag}this.stack.global.notag=true},HandleLabel:function(j){var k=this.stack.global,i=this.GetArgument(j);if(i===""){return}if(!g.refUpdate){if(k.label){h.Error(["MultipleCommand","Multiple %1",j])}k.label=i;if(g.labels[i]||g.eqlabels[i]){h.Error(["MultipleLabel","Label '%1' multiply defined",i])}g.eqlabels[i]={tag:"???",id:""}}},HandleRef:function(k,m){var j=this.GetArgument(k);var l=g.labels[j]||g.eqlabels[j];if(!l){l={tag:"???",id:""};g.badref=!g.refUpdate}var i=l.tag;if(m){i=a.formatTag(i)}this.Push(b.mrow.apply(b,this.InternalMath(i)).With({href:a.formatURL(l.id,e),"class":"MathJax_ref"}))},HandleDeclareOp:function(j){var i=(this.GetStar()?"":"\\nolimits\\SkipLimits");var k=this.trimSpaces(this.GetArgument(j));if(k.charAt(0)=="\\"){k=k.substr(1)}var l=this.GetArgument(j);if(!l.match(/\\text/)){l=l.replace(/\*/g,"\\text{*}").replace(/-/g,"\\text{-}")}this.setDef(k,["Macro","\\mathop{\\rm "+l+"}"+i])},HandleOperatorName:function(j){var i=(this.GetStar()?"":"\\nolimits\\SkipLimits");var k=this.trimSpaces(this.GetArgument(j));if(!k.match(/\\text/)){k=k.replace(/\*/g,"\\text{*}").replace(/-/g,"\\text{-}")}this.string="\\mathop{\\rm "+k+"}"+i+" "+this.string.slice(this.i);this.i=0},SkipLimits:function(j){var 
l=this.GetNext(),k=this.i;if(l==="\\"&&++this.i&&this.GetCS()!=="limits"){this.i=k}},HandleShove:function(j,i){var k=this.stack.Top();if(k.type!=="multline"){h.Error(["CommandInMultline","%1 can only appear within the multline environment",j])}if(k.data.length){h.Error(["CommandAtTheBeginingOfLine","%1 must come at the beginning of the line",j])}k.data.shove=i},CFrac:function(l){var i=this.trimSpaces(this.GetBrackets(l,"")),k=this.GetArgument(l),m=this.GetArgument(l);var j=b.mfrac(h.Parse("\\strut\\textstyle{"+k+"}",this.stack.env).mml(),h.Parse("\\strut\\textstyle{"+m+"}",this.stack.env).mml());i=({l:b.ALIGN.LEFT,r:b.ALIGN.RIGHT,"":""})[i];if(i==null){h.Error(["IllegalAlign","Illegal alignment specified in %1",l])}if(i){j.numalign=j.denomalign=i}this.Push(j)},Genfrac:function(j,l,q,n,i){if(l==null){l=this.GetDelimiterArg(j)}if(q==null){q=this.GetDelimiterArg(j)}if(n==null){n=this.GetArgument(j)}if(i==null){i=this.trimSpaces(this.GetArgument(j))}var m=this.ParseArg(j);var p=this.ParseArg(j);var k=b.mfrac(m,p);if(n!==""){k.linethickness=n}if(l||q){k=h.fixedFence(l,k.With({texWithDelims:true}),q)}if(i!==""){var o=(["D","T","S","SS"])[i];if(o==null){h.Error(["BadMathStyleFor","Bad math style for %1",j])}k=b.mstyle(k);if(o==="D"){k.displaystyle=true;k.scriptlevel=0}else{k.displaystyle=false;k.scriptlevel=i-1}}this.Push(k)},Multline:function(j,i){this.Push(j);this.checkEqnEnv();return f.multline(i,this.stack).With({arraydef:{displaystyle:true,rowspacing:".5em",width:h.config.MultLineWidth,columnwidth:"100%",side:h.config.TagSide,minlabelspacing:h.config.TagIndent}})},AMSarray:function(k,j,i,m,l){this.Push(k);if(i){this.checkEqnEnv()}m=m.replace(/[^clr]/g,"").split("").join(" ");m=m.replace(/l/g,"left").replace(/r/g,"right").replace(/c/g,"center");return 
f.AMSarray(k.name,j,i,this.stack).With({arraydef:{displaystyle:true,rowspacing:".5em",columnalign:m,columnspacing:(l||"1em"),rowspacing:"3pt",side:h.config.TagSide,minlabelspacing:h.config.TagIndent}})},AlignedAMSArray:function(i){var j=this.GetBrackets("\\begin{"+i.name+"}");return this.setArrayAlign(this.AMSarray.apply(this,arguments),j)},AlignAt:function(l,j,i){var q,k,p="",o=[];if(!i){k=this.GetBrackets("\\begin{"+l.name+"}")}q=this.GetArgument("\\begin{"+l.name+"}");if(q.match(/[^0-9]/)){h.Error(["PositiveIntegerArg","Argument to %1 must me a positive integer","\\begin{"+l.name+"}"])}while(q>0){p+="rl";o.push("0em 0em");q--}o=o.join(" ");if(i){return this.AMSarray(l,j,i,p,o)}var m=this.AMSarray(l,j,i,p,o);return this.setArrayAlign(m,k)},EquationBegin:function(i,j){this.checkEqnEnv();this.stack.global.forcetag=(j&&a.autoNumber!=="none");return i},EquationStar:function(i,j){this.stack.global.tagged=true;return j},checkEqnEnv:function(){if(this.stack.global.eqnenv){h.Error(["ErroneousNestingEq","Erroneous nesting of equation structures"])}this.stack.global.eqnenv=true},MultiIntegral:function(j,m){var l=this.GetNext();if(l==="\\"){var k=this.i;l=this.GetArgument(j);this.i=k;if(l==="\\limits"){if(j==="\\idotsint"){m="\\!\\!\\mathop{\\,\\,"+m+"}"}else{m="\\!\\!\\!\\mathop{\\,\\,\\,"+m+"}"}}}this.string=m+" "+this.string.slice(this.i);this.i=0},xArrow:function(k,o,n,i){var m={width:"+"+(n+i)+"mu",lspace:n+"mu"};var p=this.GetBrackets(k),q=this.ParseArg(k);var s=b.mo(b.chars(String.fromCharCode(o))).With({stretchy:true,texClass:b.TEXCLASS.REL});var j=b.munderover(s);j.SetData(j.over,b.mpadded(q).With(m).With({voffset:".15em"}));if(p){p=h.Parse(p,this.stack.env).mml();j.SetData(j.under,b.mpadded(p).With(m).With({voffset:"-.24em"}))}this.Push(j.With({subsupOK:true}))},GetDelimiterArg:function(i){var j=this.trimSpaces(this.GetArgument(i));if(j==""){return null}if(j in d.delimiter){return j}h.Error(["MissingOrUnrecognizedDelim","Missing or unrecognized delimiter for 
%1",i])},GetStar:function(){var i=(this.GetNext()==="*");if(i){this.i++}return i}});f.Augment({autoTag:function(){var j=this.global;if(!j.notag){g.number++;j.tagID=a.formatNumber(g.number.toString());var i=h.Parse("\\text{"+a.formatTag(j.tagID)+"}",{}).mml();j.tag=b.mtd(i).With({id:a.formatID(j.tagID)})}},getTag:function(){var m=this.global,k=m.tag;m.tagged=true;if(m.label){if(a.useLabelIds){k.id=a.formatID(m.label)}g.eqlabels[m.label]={tag:m.tagID,id:k.id}}if(document.getElementById(k.id)||g.IDs[k.id]||g.eqIDs[k.id]){var l=0,j;do{l++;j=k.id+"_"+l}while(document.getElementById(j)||g.IDs[j]||g.eqIDs[j]);k.id=j;if(m.label){g.eqlabels[m.label].id=j}}g.eqIDs[k.id]=1;this.clearTag();return k},clearTag:function(){var i=this.global;delete i.tag;delete i.tagID;delete i.label},fixInitialMO:function(l){for(var k=0,j=l.length;k<j;k++){if(l[k]&&(l[k].type!=="mspace"&&(l[k].type!=="texatom"||(l[k].data[0]&&l[k].data[0].data.length)))){if(l[k].isEmbellished()||(l[k].type==="texatom"&&l[k].texClass===b.TEXCLASS.REL)){l.unshift(b.mi())}break}}}});f.multline=f.array.Subclass({type:"multline",Init:function(j,i){this.SUPER(arguments).Init.apply(this);this.numbered=(j&&a.autoNumber!=="none");this.save={notag:i.global.notag};i.global.tagged=!j&&!i.global.forcetag},EndEntry:function(){if(this.table.length){this.fixInitialMO(this.data)}var i=b.mtd.apply(b,this.data);if(this.data.shove){i.columnalign=this.data.shove}this.row.push(i);this.data=[]},EndRow:function(){if(this.row.length!=1){h.Error(["MultlineRowsOneCol","The rows within the %1 environment must have exactly one column","multline"])}this.table.push(this.row);this.row=[]},EndTable:function(){this.SUPER(arguments).EndTable.call(this);if(this.table.length){var 
k=this.table.length-1,n,l=-1;if(!this.table[0][0].columnalign){this.table[0][0].columnalign=b.ALIGN.LEFT}if(!this.table[k][0].columnalign){this.table[k][0].columnalign=b.ALIGN.RIGHT}if(!this.global.tag&&this.numbered){this.autoTag()}if(this.global.tag&&!this.global.notags){l=(this.arraydef.side==="left"?0:this.table.length-1);this.table[l]=[this.getTag()].concat(this.table[l])}for(n=0,k=this.table.length;n<k;n++){var j=(n===l?b.mlabeledtr:b.mtr);this.table[n]=j.apply(b,this.table[n])}}this.global.notag=this.save.notag}});f.AMSarray=f.array.Subclass({type:"AMSarray",Init:function(l,k,j,i){this.SUPER(arguments).Init.apply(this);this.numbered=(k&&a.autoNumber!=="none");this.save={notags:i.global.notags,notag:i.global.notag};i.global.notags=(j?null:l);i.global.tagged=!k&&j&&!i.global.forcetag},EndEntry:function(){if(this.row.length%2===1){this.fixInitialMO(this.data)}this.row.push(b.mtd.apply(b,this.data));this.data=[]},EndRow:function(){var i=b.mtr;if(!this.global.tag&&this.numbered){this.autoTag()}if(!this.global.notags){if(this.global.tag){this.row=[this.getTag()].concat(this.row);i=b.mlabeledtr}else{this.clearTag()}}if(this.numbered){delete this.global.notag}this.table.push(i.apply(b,this.row));this.row=[]},EndTable:function(){this.SUPER(arguments).EndTable.call(this);this.global.notags=this.save.notags;this.global.notag=this.save.notag}});f.start.Augment({oldCheckItem:f.start.prototype.checkItem,checkItem:function(k){if(k.type==="stop"){var i=this.mmlData(),j=this.global;if(g.display&&!j.tag&&!j.tagged&&!j.isInner&&(a.autoNumber==="all"||j.forcetag)){this.autoTag()}if(j.tag){var m=[this.getTag(),b.mtd(i)];var l={side:h.config.TagSide,minlabelspacing:h.config.TagIndent,displaystyle:"inherit"};i=b.mtable(b.mlabeledtr.apply(b,m)).With(l)}return f.mml(i)}return 
this.oldCheckItem.call(this,k)}});h.prefilterHooks.Add(function(i){g.display=i.display;g.number=g.startNumber;g.eqlabels={};g.eqIDs={};g.badref=false;if(g.refUpdate){g.number=i.script.MathJax.startNumber}});h.postfilterHooks.Add(function(i){i.script.MathJax.startNumber=g.startNumber;g.startNumber=g.number;MathJax.Hub.Insert(g.IDs,g.eqIDs);MathJax.Hub.Insert(g.labels,g.eqlabels);if(g.badref&&!i.math.texError){g.refs.push(i.script)}},100);MathJax.Hub.Register.MessageHook("Begin Math Input",function(){g.refs=[];g.refUpdate=false});MathJax.Hub.Register.MessageHook("End Math Input",function(l){if(g.refs.length){g.refUpdate=true;for(var k=0,j=g.refs.length;k<j;k++){g.refs[k].MathJax.state=MathJax.ElementJax.STATE.UPDATE}return MathJax.Hub.processInput({scripts:g.refs,start:new Date().getTime(),i:0,j:0,jax:{},jaxIDs:[]})}return null});h.resetEquationNumbers=function(j,i){g.startNumber=(j||0);if(!i){g.labels={};g.IDs={}}};MathJax.Hub.Startup.signal.Post("TeX AMSmath Ready")});MathJax.Ajax.loadComplete("[MathJax]/extensions/TeX/AMSmath.js"); | PypiClean
/NAT-PMP-1.3.2.tar.gz/NAT-PMP-1.3.2/README.rst | .. image:: https://img.shields.io/pypi/v/NAT-PMP.svg
:target: `PyPI link`_
.. image:: https://img.shields.io/pypi/pyversions/NAT-PMP.svg
:target: `PyPI link`_
.. _PyPI link: https://pypi.org/project/NAT-PMP
.. image:: https://github.com/jaraco/NAT-PMP/workflows/tests/badge.svg
:target: https://github.com/jaraco/NAT-PMP/actions?query=workflow%3A%22tests%22
:alt: tests
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/psf/black
:alt: Code style: Black
.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
Provides functions to interact with NAT-PMP gateways implementing version 0
of the NAT-PMP draft specification.
Forked from `py-natpmp <https://github.com/yimingliu/py-natpmp>`_ by
Yiming Liu. See `this blog <http://blog.yimingliu.com/2008/01/07/nat-pmp-client-library-for-python>`_
for more background.
Introduction
============
py-natpmp is a NAT-PMP (Network Address Translation Port Mapping Protocol) library and testing client in Python. The client allows you to set up dynamic port mappings on NAT-PMP compatible routers. Thus this is a means for dynamic NAT traversal with routers that talk NAT-PMP. In practical terms, this is basically limited to the newer Apple AirPort base stations and the AirPort Express, which have support for this protocol.
In any case, this library puts a thin layer of Python abstraction over the NAT-PMP protocol, version 0, as specified by the NAT-PMP draft standard.
Library
=======
The library provides a set of high-level and low-level functions to interact via the NAT-PMP protocol. The functions map_port and get_public_address provide the two high-level functions offered by NAT-PMP. Responses are stored as Python objects.
Client
======
To use the client, grab it and the above library. Make sure you have the library in the same directory as the client script or otherwise on your Python instance’s sys.path. Invoke the client on the command-line (Terminal.app) as ``python -m natpmp [-u] [-l lifetime] [-g gateway_addr] public_port private_port``.
For example:
``python -m natpmp -u -l 1800 60009 60009``
Create a mapping for the public UDP port 60009 to the private UDP port 60009 for 1,800 seconds (30 minutes)
``python -m natpmp 60010 60010``
Create a mapping for the public TCP port 60010 to the private TCP port 60010
``python -m natpmp -g 10.0.1.1 60011 60022``
Explicitly instruct the gateway router 10.0.1.1 to create the TCP mapping from public port 60011 to private port 60022
Remember to turn off your firewall for those ports that you map.
Caveats
=======
This is an incomplete implementation of the specification. When the router reboots, all dynamic mappings are lost. The specification provides for notification packets to be sent by the router to each client when this happens. There is no support in this library and client to monitor for such notifications, nor does it implement a daemon process to do so. The specification recommends queuing requests – that is, all NAT-PMP interactions should happen serially. This simple library does not queue requests – if you abuse it with multithreading, it will send those requests in parallel and possibly overwhelm the router.
The library will attempt to auto-detect your NAT gateway. This is done via a popen to netstat on BSDs/Darwin and ip on Linux. This is likely to fail miserably, depending on how standard the output is. In the library, a keyword argument is provided to override the default and specify your own gateway address. In the client, use the -g switch to manually specify your gateway.
| PypiClean |
// TinyMCE UI translation pack: Norwegian Bokmaal (nb_NO).
// Maps English source strings (keys) to their nb_NO translations (values);
// TinyMCE falls back to displaying the key itself for any string missing
// from this table.  The keys and values are runtime data -- never change a
// key, and edit values only to correct a translation.
/Mezzanine-6.0.0.tar.gz/Mezzanine-6.0.0/mezzanine/core/static/mezzanine/tinymce/langs/nb_NO.js | tinymce.addI18n('nb_NO',{
"Cut": "Klipp ut",
"Heading 5": "Overskrift 5",
"Header 2": "Overskrift 2",
"Your browser doesn't support direct access to the clipboard. Please use the Ctrl+X\/C\/V keyboard shortcuts instead.": "Nettleseren din st\u00f8tter ikke direkte tilgang til utklippsboken. Bruk istedet tastatur-snarveiene Ctrl+X\/C\/V, eller Cmd+X\/C\/V p\u00e5 Mac.",
"Heading 4": "Overskrift 4",
"Div": "Delblokk <div>",
"Heading 2": "Overskrift 2",
"Paste": "Lim inn",
"Close": "Lukk",
"Font Family": "Skriftsnitt",
"Pre": "Definert <pre>",
"Align right": "H\u00f8yrejustert",
"New document": "Nytt dokument",
"Blockquote": "Sitatblokk <blockquote>",
"Numbered list": "Nummerliste",
"Heading 1": "Overskrift 1",
"Headings": "Overskrifter",
"Increase indent": "\u00d8k innrykk",
"Formats": "Stiler",
"Headers": "Overskrifter",
"Select all": "Marker alt",
"Header 3": "Overskrift 3",
"Blocks": "Blokker",
"Undo": "Angre",
"Strikethrough": "Gjennomstreket",
"Bullet list": "Punktliste",
"Header 1": "Overskrift 1",
"Superscript": "Hevet skrift",
"Clear formatting": "Fjern formateringer",
"Font Sizes": "St\u00f8rrelse",
"Subscript": "Senket skrift",
"Header 6": "Overskrift 6",
"Redo": "Utf\u00f8r likevel",
"Paragraph": "Avsnitt <p>",
"Ok": "OK",
"Bold": "Halvfet",
"Code": "Kode <code>",
"Italic": "Kursiv",
"Align center": "Midtstilt",
"Header 5": "Overskrift 5",
"Heading 6": "Overskrift 6",
"Heading 3": "Overskrift 3",
"Decrease indent": "Reduser innrykk",
"Header 4": "Overskrift 4",
"Paste is now in plain text mode. Contents will now be pasted as plain text until you toggle this option off.": "Lim inn er n\u00e5 i ren-tekst modus. Kopiert innhold vil bli limt inn som ren tekst inntil du sl\u00e5r av dette valget.",
"Underline": "Understreket",
"Cancel": "Avbryt",
"Justify": "Juster alle linjer",
"Inline": "Innkapslet <span>",
"Copy": "Kopier",
"Align left": "Venstrejustert",
"Visual aids": "Visuelle hjelpemidler",
"Lower Greek": "Greske minuskler",
"Square": "Fylt firkant",
"Default": "Normal",
"Lower Alpha": "Minuskler",
"Circle": "\u00c5pen sirkel",
"Disc": "Fylt sirkel",
"Upper Alpha": "Versaler",
"Upper Roman": "Romerske versaler",
"Lower Roman": "Romerske minuskler",
"Name": "Navn",
"Anchor": "Anker",
"You have unsaved changes are you sure you want to navigate away?": "Du har ikke arkivert endringene. Vil du fortsette uten \u00e5 arkivere?",
"Restore last draft": "Gjenopprett siste utkast",
"Special character": "Spesialtegn",
"Source code": "Kildekode",
"Color": "Farge",
"Right to left": "H\u00f8yre til venstre",
"Left to right": "Venstre til h\u00f8yre",
"Emoticons": "Hum\u00f8rfjes",
"Robots": "Roboter",
"Document properties": "Dokumentegenskaper",
"Title": "Tittel",
"Keywords": "N\u00f8kkelord",
"Encoding": "Tegnkoding",
"Description": "Beskrivelse",
"Author": "Forfatter",
"Fullscreen": "Fullskjerm",
"Horizontal line": "Horisontal linje",
"Horizontal space": "Horisontal marg",
"B": "B",
"Insert\/edit image": "Sett inn\/endre bilde",
"General": "Generelt",
"Advanced": "Avansert",
"G": "G",
"R": "R",
"Source": "Bildelenke",
"Border": "Ramme",
"Constrain proportions": "Behold proporsjoner",
"Vertical space": "Vertikal marg",
"Image description": "Bildebeskrivelse",
"Style": "Stil",
"Dimensions": "Dimensjoner",
"Insert image": "Sett inn bilde",
"Insert date\/time": "Sett inn dato\/tid",
"Remove link": "Fjern lenke",
"Url": "Url",
"Text to display": "Tekst som skal vises",
"Anchors": "Anker",
"Insert link": "Sett inn lenke",
"New window": "Nytt vindu",
"None": "Ingen",
"The URL you entered seems to be an external link. Do you want to add the required http:\/\/ prefix?": "Oppgitt URL ser ut til \u00e5 v\u00e6re en e-postadresse. \u00d8nsker du \u00e5 sette inn p\u00e5krevd mailto:-prefiks foran e-postadressen?",
"Target": "M\u00e5l",
"The URL you entered seems to be an email address. Do you want to add the required mailto: prefix?": "Oppgitte URL ser ut til \u00e5 v\u00e6re en epost-adresse. \u00d8nsker du \u00e5 sette inn p\u00e5krevet mailto: prefiks forran epost-adressen?",
"Insert\/edit link": "Sett inn\/endre lenke",
"Insert\/edit video": "Sett inn\/rediger video",
"Poster": "Plakatbilde",
"Alternative source": "Alternativ kilde",
"Paste your embed code below:": "Lim inn inkluderings-koden nedenfor",
"Insert video": "Sett inn video",
"Embed": "Inkluder",
"Nonbreaking space": "Hardt mellomrom",
"Page break": "Sideskifte",
"Paste as text": "Lim inn som tekst",
"Preview": "Forh\u00e5ndsvisning",
"Print": "Skriv ut",
"Save": "Arkiver",
"Could not find the specified string.": "Kunne ikke finne den spesifiserte teksten",
"Replace": "Erstatt",
"Next": "Neste",
"Whole words": "Hele ord",
"Find and replace": "Finn og erstatt",
"Replace with": "Erstatt med",
"Find": "Finn",
"Replace all": "Erstatt alle",
"Match case": "Match store og sm\u00e5 bokstaver",
"Prev": "Forrige",
"Spellcheck": "Stavekontroll",
"Finish": "Avslutt",
"Ignore all": "Ignorer alle",
"Ignore": "Ignorer",
"Add to Dictionary": "Legg til i ordliste",
"Insert row before": "Sett inn rad f\u00f8r",
"Rows": "Rader",
"Height": "H\u00f8yde",
"Paste row after": "Lim inn rad etter",
"Alignment": "Justering",
"Border color": "Rammefarge",
"Column group": "Kolonnegruppe",
"Row": "Rad",
"Insert column before": "Sett inn kolonne f\u00f8r",
"Split cell": "Splitt celle",
"Cell padding": "Cellemarg",
"Cell spacing": "Celleavstand",
"Row type": "Rad-type",
"Insert table": "Sett inn tabell",
"Body": "Br\u00f8dtekst",
"Caption": "Tittel",
"Footer": "Bunntekst",
"Delete row": "Slett rad",
"Paste row before": "Lim inn rad f\u00f8r",
"Scope": "Omfang",
"Delete table": "Slett tabell",
"H Align": "H Justering",
"Top": "Topp",
"Header cell": "Topptekst-celle",
"Column": "Kolonne",
"Row group": "Radgruppe",
"Cell": "Celle",
"Middle": "Midten",
"Cell type": "Celletype",
"Copy row": "Kopier rad",
"Row properties": "Rad egenskaper",
"Table properties": "Tabell egenskaper",
"Bottom": "Bunn",
"V Align": "V Justering",
"Header": "Topptekst",
"Right": "H\u00f8yre",
"Insert column after": "Sett inn kolonne etter",
"Cols": "Kolonner",
"Insert row after": "Sett in rad etter",
"Width": "Bredde",
"Cell properties": "Celle egenskaper",
"Left": "Venstre",
"Cut row": "Klipp ut rad",
"Delete column": "Slett kolonne",
"Center": "Midtstilt",
"Merge cells": "Sl\u00e5 sammen celler",
"Insert template": "Sett inn mal",
"Templates": "Maler",
"Background color": "Bakgrunnsfarge",
"Custom...": "Tilpass...",
"Custom color": "Tilpasset farge",
"No color": "Ingen farge",
"Text color": "Tekstfarge",
"Show blocks": "Vis blokker",
"Show invisible characters": "Vis skjulte tegn",
"Words: {0}": "Antall ord: {0}",
"Insert": "Sett inn",
"File": "Arkiv",
"Edit": "Rediger",
"Rich Text Area. Press ALT-F9 for menu. Press ALT-F10 for toolbar. Press ALT-0 for help": "Tekstredigering. Tast ALT-F9 for meny. Tast ALT-F10 for verkt\u00f8ys-rader. Tast ALT-0 for hjelp.",
"Tools": "Verkt\u00f8y",
"View": "Vis",
"Table": "Tabell",
"Format": "Format"
}); | PypiClean
/Muntjac-1.1.2.tar.gz/Muntjac-1.1.2/muntjac/public/VAADIN/widgetsets/org.muntiacus.MuntjacWidgetSet/mode/rust/rust.js | CodeMirror.defineMode("rust", function() {
// Indent widths: standard statement indent and the smaller "alt" arm indent.
var indentUnit = 4, altIndentUnit = 2;
// Value-context keywords mapped to the token categories the parser acts on
// (e.g. "if-style" keywords are followed by a condition and a body).
var valKeywords = {
"if": "if-style", "while": "if-style", "else": "else-style",
"do": "else-style", "ret": "else-style", "fail": "else-style",
"break": "atom", "cont": "atom", "const": "let", "resource": "fn",
"let": "let", "fn": "fn", "for": "for", "alt": "alt", "obj": "fn",
"lambda": "fn", "type": "type", "tag": "tag", "mod": "mod",
"as": "op", "true": "atom", "false": "atom", "assert": "op", "check": "op",
"claim": "op", "native": "ignore", "unsafe": "ignore", "import": "else-style",
"export": "else-style", "copy": "op", "log": "op", "log_err": "op",
"use": "op", "bind": "op"
};
// Type-context keyword table: all primitive type names tokenize as atoms.
// Built once, eagerly, via an immediately-invoked function.
var typeKeywords = function() {
var keywords = {"fn": "fn", "block": "fn", "obj": "obj"};
var atoms = "bool uint int i8 i16 i32 i64 u8 u16 u32 u64 float f32 f64 str char".split(" ");
for (var i = 0, e = atoms.length; i < e; ++i) keywords[atoms[i]] = "atom";
return keywords;
}();
// Characters that may start (and continue) a multi-character operator.
var operatorChar = /[+\-*&%=<>!?|\.@]/;
// Tokenizer
// Used as scratch variable to communicate multiple values without
// consing up tons of objects.
var tcat, content;
// Record the token category for the parser and return the CSS style for
// CodeMirror, so a tokenizer clause can set both in a single expression.
function r(tc, style) {
tcat = tc;
return style;
}
// Core tokenizer: consumes one token from the stream and returns its CSS
// style.  Side effects: sets the scratch var `tcat` (token category for the
// parser) and, for word-like tokens, `content` (the token's text).
function tokenBase(stream, state) {
  var ch = stream.next();
  // String literal: hand off to the dedicated string tokenizer (it stays
  // active across line breaks).
  if (ch == '"') {
    state.tokenize = tokenString;
    return state.tokenize(stream, state);
  }
  // Character literal; an escape skips ahead to the closing quote.
  if (ch == "'") {
    tcat = "atom";
    if (stream.eat("\\")) {
      if (stream.skipTo("'")) { stream.next(); return "string"; }
      else { return "error"; }
    } else {
      stream.next();
      return stream.eat("'") ? "string" : "error";
    }
  }
  // Comments: "//" eats to end of line, "/*" enters the nesting-aware
  // block-comment tokenizer at depth 1.
  if (ch == "/") {
    if (stream.eat("/")) { stream.skipToEnd(); return "comment"; }
    if (stream.eat("*")) {
      state.tokenize = tokenComment(1);
      return state.tokenize(stream, state);
    }
  }
  // "#[" opens an attribute; "#name" is a macro / syntax extension.
  if (ch == "#") {
    if (stream.eat("[")) { tcat = "open-attr"; return null; }
    stream.eatWhile(/\w/);
    return r("macro", "meta");
  }
  // "::<" -- explicit type-parameter application.
  if (ch == ":" && stream.match(":<")) {
    return r("op", null);
  }
  // Numeric literal: hex/binary prefix, or decimal with optional fraction
  // and exponent, followed by an optional float or sized-integer suffix.
  if (ch.match(/\d/) || (ch == "." && stream.eat(/\d/))) {
    var flp = false;
    if (!stream.match(/^x[\da-f]+/i) && !stream.match(/^b[01]+/)) {
      stream.eatWhile(/\d/);
      if (stream.eat(".")) { flp = true; stream.eatWhile(/\d/); }
      if (stream.match(/^e[+\-]?\d+/i)) { flp = true; }
    }
    if (flp) stream.match(/^f(?:32|64)/);
    else stream.match(/^[ui](?:8|16|32|64)/);
    return r("atom", "number");
  }
  // Single-character punctuation uses the character itself as its category.
  if (ch.match(/[()\[\]{}:;,]/)) return r(ch, null);
  if (ch == "-" && stream.eat(">")) return r("->", null);
  // A run of operator characters forms one operator token.
  if (ch.match(operatorChar)) {
    stream.eatWhile(operatorChar);
    return r("op", null);
  }
  // Word: a path prefix (when followed by "::"), a keyword from the active
  // context-dependent keyword table, or a plain name.
  stream.eatWhile(/\w/);
  content = stream.current();
  if (stream.match(/^::\w/)) {
    stream.backUp(1);
    return r("prefix", "variable-2");
  }
  if (state.keywords.propertyIsEnumerable(content))
    return r(state.keywords[content], content.match(/true|false/) ? "atom" : "keyword");
  return r("name", "variable");
}
// Tokenizer for the interior of a double-quoted string literal.  Remains the
// active tokenizer across lines until an unescaped closing quote hands
// control back to tokenBase.
function tokenString(stream, state) {
  var escaped = false;
  for (var ch = stream.next(); ch; ch = stream.next()) {
    if (!escaped && ch == '"') {
      state.tokenize = tokenBase;
      return r("atom", "string");
    }
    escaped = ch == "\\" && !escaped;
  }
  // Hack to not confuse the parser when a string is split in
  // pieces.
  return r("op", "string");
}
// Returns a tokenizer for the interior of a (possibly nested) block comment;
// `depth` counts the currently open "/*" pairs, since Rust block comments
// nest.
function tokenComment(depth) {
  return function(stream, state) {
    var lastCh = null, ch;
    while (ch = stream.next()) {
      // "*/" closes one level; at depth 1 control returns to tokenBase.
      if (ch == "/" && lastCh == "*") {
        if (depth == 1) {
          state.tokenize = tokenBase;
          break;
        } else {
          state.tokenize = tokenComment(depth - 1);
          return state.tokenize(stream, state);
        }
      }
      // "/*" opens another nested level.
      if (ch == "*" && lastCh == "/") {
        state.tokenize = tokenComment(depth + 1);
        return state.tokenize(stream, state);
      }
      lastCh = ch;
    }
    return "comment";
  };
}
// Parser
// Shared parsing context, rebound on every token so the combinator functions
// below can reach the current state/stream without threading parameters.
var cx = {state: null, stream: null, marked: null, cc: null};
// Push combinators onto the continuation stack; the current token is NOT
// consumed (it will be offered to the first pushed combinator).
function pass() {
  for (var i = arguments.length - 1; i >= 0; i--) cx.cc.push(arguments[i]);
}
// Like pass(), but returning true signals that the current token was
// consumed by this combinator.
function cont() {
  pass.apply(null, arguments);
  return true;
}
// Build a thunk that opens a new lexical scope of the given type (used for
// indentation).  The .lex flag tells the driver to run it immediately
// rather than feed it a token.
function pushlex(type, info) {
  var result = function() {
    var state = cx.state;
    state.lexical = {indented: state.indented, column: cx.stream.column(),
                     type: type, prev: state.lexical, info: info};
  };
  result.lex = true;
  return result;
}
// Pop the current lexical scope; closing a ")" scope restores the
// indentation recorded when that scope was opened.
function poplex() {
  var state = cx.state;
  if (state.lexical.prev) {
    if (state.lexical.type == ")")
      state.indented = state.lexical.indented;
    state.lexical = state.lexical.prev;
  }
}
// Switch the active keyword table to type / value context respectively.
function typecx() { cx.state.keywords = typeKeywords; }
function valcx() { cx.state.keywords = valKeywords; }
poplex.lex = typecx.lex = valcx.lex = true;
// Parse a comma-separated sequence of `comb`, terminated by the `end` token.
function commasep(comb, end) {
  function more(type) {
    if (type == ",") return cont(comb, more);
    if (type == end) return cont();
    return cont(more);
  }
  return function(type) {
    if (type == end) return cont();
    return pass(comb, more);
  };
}
// Statement-level parser: dispatches on the token category to the item
// parsers (let/fn/type/tag/mod), attribute lists, or a bare expression
// statement.
function block(type) {
  if (type == "}") return cont();
  if (type == "let") return cont(pushlex("stat", "let"), letdef1, poplex, block);
  if (type == "fn") return cont(pushlex("stat"), fndef, poplex, block);
  if (type == "type") return cont(pushlex("stat"), tydef, endstatement, poplex, block);
  if (type == "tag") return cont(pushlex("stat"), tagdef, poplex, block);
  if (type == "mod") return cont(pushlex("stat"), mod, poplex, block);
  if (type == "open-attr") return cont(pushlex("]"), commasep(expression, "]"), poplex);
  if (type == "ignore" || type.match(/[\]\);,]/)) return cont(block);
  return pass(pushlex("stat"), expression, poplex, endstatement, block);
}
// Consume an optional trailing semicolon after a statement.
function endstatement(type) {
  if (type == ";") return cont();
  return pass();
}
// Expression parser; `maybeop` (below) handles the operator/suffix tail.
function expression(type) {
  if (type == "atom" || type == "name") return cont(maybeop);
  if (type == "{") return cont(pushlex("}"), exprbrace, poplex);
  if (type.match(/[\[\(]/)) return matchBrackets(type, expression);
  if (type.match(/[\]\)\};,]/)) return pass();
  if (type == "if-style") return cont(expression, expression);
  if (type == "else-style" || type == "op") return cont(expression);
  if (type == "for") return cont(pattern, maybetype, inop, expression, expression);
  if (type == "alt") return cont(expression, altbody);
  if (type == "fn") return cont(fndef);
  if (type == "macro") return cont(macro);
  return cont();
}
// Optional expression tail: field access ("."), "::<" type application,
// binary operators, and call/index brackets.
function maybeop(type) {
  if (content == ".") return cont(maybeprop);
  if (content == "::<"){return cont(typarams, maybeop);}
  if (type == "op" || content == ":") return cont(expression);
  if (type == "(" || type == "[") return matchBrackets(type, expression);
  return pass();
}
// Field name after "."; anything else is re-parsed as an expression.
function maybeprop(type) {
  if (content.match(/^\w+$/)) {cx.marked = "variable"; return cont(maybeop);}
  return pass(expression);
}
// Disambiguate "{": a closure header (|args| or ||), a record literal
// (identifier followed by ":", but not "::"), or a plain statement block.
function exprbrace(type) {
  if (type == "op") {
    if (content == "|") return cont(blockvars, poplex, pushlex("}", "block"), block);
    if (content == "||") return cont(poplex, pushlex("}", "block"), block);
  }
  if (content == "mutable" || (content.match(/^\w+$/) && cx.stream.peek() == ":"
                               && !cx.stream.match("::", false)))
    return pass(record_of(expression));
  return pass(block);
}
// Returns a parser for record-literal fields, using `comb` to parse each
// field's value after the ":".
function record_of(comb) {
  function ro(type) {
    if (content == "mutable" || content == "with") {cx.marked = "keyword"; return cont(ro);}
    if (content.match(/^\w*$/)) {cx.marked = "variable"; return cont(ro);}
    if (type == ":") return cont(comb, ro);
    if (type == "}") return cont();
    return cont(ro);
  }
  return ro;
}
// Closure parameter list: mark each name as a definition until the closing
// "|" operator is reached.
function blockvars(type) {
  if (type == "op" && content == "|") return cont();
  if (type == "name") cx.marked = "def";
  return cont(blockvars);
}
// Left-hand side of a "let": comma-separated patterns with optional type
// annotations, optionally followed by "=" initializers (letdef2).
function letdef1(type) {
  if (type.match(/[\]\)\};]/)) return cont();
  if (content == "=") return cont(expression, letdef2);
  if (type == ",") return cont(letdef1);
  return pass(pattern, maybetype, letdef1);
}
// After the "=" of a let: parse initializer expressions until a terminator,
// then return to letdef1 for further comma-separated bindings.
function letdef2(type) {
  return type.match(/[\]\)\};,]/) ? pass(letdef1) : pass(expression, letdef2);
}
function maybetype(type) {
if (type == ":") return cont(typecx, rtype, valcx);
return pass();
}
function inop(type) {
if (type == "name" && content == "in") {cx.marked = "keyword"; return cont();}
return pass();
}
// `fn` definition: name, optional generics, argument list, optional
// return type, then the body block.
function fndef(type) {
  if (type == "name") {cx.marked = "def"; return cont(fndef);}
  if (content == "<") return cont(typarams, fndef);
  if (type == "{") return pass(expression);
  if (type == "(") return cont(pushlex(")"), commasep(argdef, ")"), poplex, fndef);
  if (type == "->") return cont(typecx, rtype, valcx, fndef);
  return cont(fndef);
}
// `type` alias definition: name, optional generics, `=` and the type.
function tydef(type) {
  if (type == "name") {cx.marked = "def"; return cont(tydef);}
  if (content == "<") return cont(typarams, tydef);
  if (content == "=") return cont(typecx, rtype, valcx);
  return cont(tydef);
}
// `tag`/enum definition: either `= type;` or a `{ ... }` variant block.
function tagdef(type) {
  if (type == "name") {cx.marked = "def"; return cont(tagdef);}
  if (content == "<") return cont(typarams, tagdef);
  if (content == "=") return cont(typecx, rtype, valcx, endstatement);
  if (type == "{") return cont(pushlex("}"), typecx, tagblock, valcx, poplex);
  return cont(tagdef);
}
// Variants inside a tag block; payloads are comma-separated types.
function tagblock(type) {
  if (type == "}") return cont();
  if (type == "(") return cont(pushlex(")"), commasep(rtype, ")"), poplex, tagblock);
  if (content.match(/^\w+$/)) cx.marked = "def";
  return cont(tagblock);
}
// `mod` definition: a name followed by a brace-delimited block.
function mod(type) {
  if (type == "name") {cx.marked = "def"; return cont(mod);}
  if (type == "{") return cont(pushlex("}"), block, poplex);
  return pass();
}
// Comma-separated type parameters, terminated by `>`.
function typarams(type) {
  if (content == ">") return cont();
  if (content == ",") return cont(typarams);
  return pass(rtype, typarams);
}
// A single function argument: name with optional type annotation.
function argdef(type) {
  if (type == "name") {cx.marked = "def"; return cont(argdef);}
  if (type == ":") return cont(typecx, rtype, valcx);
  return pass();
}
// Parse a type: named types (possibly parameterised), `mutable`
// qualifiers, atoms, `fn` types, record types, and bracketed forms.
function rtype(type) {
  if (type == "name") {cx.marked = "variable-3"; return cont(rtypemaybeparam); }
  if (content == "mutable") {cx.marked = "keyword"; return cont(rtype);}
  if (type == "atom") return cont(rtypemaybeparam);
  if (type == "op" || type == "obj") return cont(rtype);
  if (type == "fn") return cont(fntype);
  if (type == "{") return cont(pushlex("{"), record_of(rtype), poplex);
  return matchBrackets(type, rtype);
}
// Optional `<...>` type-parameter list after a named type.
function rtypemaybeparam(type) {
  if (content == "<") return cont(typarams);
  return pass();
}
// Function type: argument type list and optional `->` return type.
function fntype(type) {
  if (type == "(") return cont(pushlex("("), commasep(rtype, ")"), poplex, fntype);
  if (type == "->") return cont(rtype);
  return pass();
}
// A match/binding pattern; bound names are definitions.
function pattern(type) {
  if (type == "name") {cx.marked = "def"; return cont(patternmaybeop);}
  if (type == "atom") return cont(patternmaybeop);
  if (type == "op") return cont(pattern);
  if (type.match(/[\]\)\};,]/)) return pass();
  return matchBrackets(type, pattern);
}
// Trailing parts of a pattern: path access (`.`) or a `to` range.
function patternmaybeop(type) {
  if (type == "op" && content == ".") return cont();
  if (content == "to") {cx.marked = "keyword"; return cont(pattern);}
  else return pass();
}
// Body of an `alt` expression: a brace block of arms.
function altbody(type) {
  if (type == "{") return cont(pushlex("}", "alt"), altblock1, poplex);
  return pass();
}
// One arm of an `alt`: pattern(s), optional `when` guard, then body.
function altblock1(type) {
  if (type == "}") return cont();
  if (type == "|") return cont(altblock1);
  if (content == "when") {cx.marked = "keyword"; return cont(expression, altblock2);}
  if (type.match(/[\]\);,]/)) return cont(altblock1);
  return pass(pattern, altblock2);
}
// Arm body: a block, after which the next arm is parsed.
function altblock2(type) {
  if (type == "{") return cont(pushlex("}", "alt"), block, poplex, altblock1);
  else return pass(altblock1);
}
// Macro invocation: any bracketed argument list.
function macro(type) {
  if (type.match(/[\[\(\{]/)) return matchBrackets(type, expression);
  return pass();
}
// Dispatch on an opening bracket and parse a comma-separated list of
// `comb` up to the matching close bracket.
function matchBrackets(type, comb) {
  if (type == "[") return cont(pushlex("]"), commasep(comb, "]"), poplex);
  if (type == "(") return cont(pushlex(")"), commasep(comb, ")"), poplex);
  if (type == "{") return cont(pushlex("}"), commasep(comb, "}"), poplex);
  return cont();
}
// Drive the combinator stack for the current token: pop combinators
// until one consumes the token category, then unwind any pending
// lexical-scope markers. Returns the (possibly refined) token style.
function parse(state, stream, style) {
  var cc = state.cc;
  // Communicate our context to the combinators.
  // (Less wasteful than consing up a hundred closures on every call.)
  cx.state = state;
  cx.stream = stream;
  // Separate statements instead of the comma operator, which obscured
  // that `cx.cc = cc` was an assignment in its own right.
  cx.marked = null;
  cx.cc = cc;
  while (true) {
    var combinator = cc.length ? cc.pop() : block;
    if (combinator(tcat)) {
      // Pop any lexical markers (pushlex/poplex thunks) left behind.
      while(cc.length && cc[cc.length - 1].lex)
        cc.pop()();
      return cx.marked || style;
    }
  }
}
// The CodeMirror mode object.
return {
  startState: function() {
    return {
      tokenize: tokenBase,
      cc: [],                 // combinator continuation stack
      lexical: {indented: -indentUnit, column: 0, type: "top", align: false},
      keywords: valKeywords,
      indented: 0
    };
  },
  token: function(stream, state) {
    if (stream.sol()) {
      // Alignment is only decided once the first real token is seen.
      if (!state.lexical.hasOwnProperty("align"))
        state.lexical.align = false;
      state.indented = stream.indentation();
    }
    if (stream.eatSpace()) return null;
    tcat = content = null;
    var style = state.tokenize(stream, state);
    if (style == "comment") return style;
    if (!state.lexical.hasOwnProperty("align"))
      state.lexical.align = true;
    if (tcat == "prefix") return style;
    if (!content) content = stream.current();
    // Let the combinator parser refine the raw token style.
    return parse(state, stream, style);
  },
  indent: function(state, textAfter) {
    if (state.tokenize != tokenBase) return 0;
    var firstChar = textAfter && textAfter.charAt(0), lexical = state.lexical,
        type = lexical.type, closing = firstChar == type;
    if (type == "stat") return lexical.indented + indentUnit;
    if (lexical.align) return lexical.column + (closing ? 0 : 1);
    return lexical.indented + (closing ? 0 : (lexical.info == "alt" ? altIndentUnit : indentUnit));
  },
  electricChars: "{}"
};
});
CodeMirror.defineMIME("text/x-rustsrc", "rust"); | PypiClean |
/GTW-1.2.6.tar.gz/GTW-1.2.6/__test__/rst_harness.py |
from __future__ import absolute_import, division, print_function, unicode_literals
from _GTW.__test__.Test_Command import *
from _TFL.Regexp import Multi_Re_Replacer, Re_Replacer, re
from posixpath import join as pp_join
import multiprocessing
import requests
import subprocess
import sys
import time
# HTTP headers whose values vary per run/server; excluded from test output.
skip_headers = set (["connection", "set-cookie", "x-frame-options"])

# Replace concrete date/datetime values in formatted JSON dumps by
# placeholders so that doctest output is stable across runs.
date_cleaner = Multi_Re_Replacer \
    ( Re_Replacer
        ( r"'date' : '\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}'"
        , r"'date' : <datetime>"
        )
    , Re_Replacer
        ( r"'(default_value|date)' : '\d{4}-\d{2}-\d{2}'"
        , r"'date' : <date instance>"
        )
    )

# Normalise Python-2 `unicode` vs Python-3 `str` in type dumps.
p_type_cleaner = Re_Replacer \
    ( r"'p_type' : 'unicode'"
    , r"'p_type' : 'str'"
    )

# Combined cleaner applied to JSON output before comparison.
json_cleaner = Multi_Re_Replacer (date_cleaner, p_type_cleaner)
def req_json (r) :
    """Return the decoded JSON body of response `r`, or None.

       Handles both old `requests` versions where `r.json` is a property
       and newer ones where it is a method; returns None for empty or
       undecodable bodies.
    """
    if r is None or not r.content :
        return None
    result = r.json
    if TFL.callable (result) :
        try :
            result = result ()
        except Exception :
            result = None
    return result
# end def req_json
def _run_server (Scaffold, args = []) :
    """Run `Scaffold` with the standard server arguments plus `args`."""
    cmd = ["run_server"] + server_args + args
    print (cmd)
    return Scaffold (cmd)
# end def run_server
def run_server \
        ( test_module_name
        , db_url        = "hps://"
        , db_name       = None
        , scaffold_name = "Scaffold"
        ) :
    """Start the test server defined by `test_module_name` in a subprocess.

       Waits (up to ~30s) for port 9999 to accept connections, kills the
       subprocess if it never comes up, registers `terminate` as a reset
       callback on the caller's Scaffold, and returns the Popen object.
    """
    import socket
    import tempfile
    import _TFL.Url
    import _TFL.Caller
    time.sleep (1)
    url = TFL.Url (db_url)
    if not db_name :
        db_name = "test.%s" % (url.scheme, )
    # Build a `python -c` command line that imports the test module and
    # runs its scaffold with the server arguments.
    cmd = \
        [ sys.executable, "-c"
        , "; ".join
            ( ( "import %s" % (test_module_name, )
              , "%s.%s" % (test_module_name, scaffold_name)
                + "( "
                + repr
                    ( ["run_server"]
                    + server_args
                    + ["-db_url", db_url, "-db_name", db_name]
                    )
                + ")"
              )
            )
        ]
    tf = tempfile.NamedTemporaryFile (delete = False)
    print \
        ( "Using", tf.name, "as stdout/stderr for server process"
        , file=sys.stderr
        )
    p = subprocess.Popen (cmd, stderr = tf, stdout = tf)
    s = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout (2.0)
    i = 0
    # Python 3 deletes the `except ... as exc` binding when the except
    # suite exits, so keep the last error in a separate variable that
    # survives the loop (the old code raised NameError here on failure).
    err = None
    while True :
        try :
            s.connect (("localhost", 9999))
        except socket.error as exc :
            err = exc
            if i < 30 :
                i += 1
                time.sleep (1)
            else :
                break
        else :
            err = None
            break
    s.close ()
    if err is not None :
        print ("Socket connect gave exception:", err)
        p.kill ()
    caller_scope = TFL.Caller.Scope (1)
    caller_scope.Scaffold.reset_callbacks.append (p.terminate)
    return p
# end def run_server
def _normal (k, v) :
k = k.lower ()
if k in ("date", "last-modified") :
v = "<datetime instance>"
elif k in ("etag",) :
v = "ETag value"
elif k in ("rat",) :
k = "RAT"
v = "<REST authorization token>"
elif k == "content-length" and int (v) != 0 :
v = "<length>"
elif k == "server" :
v = "<server>"
return k, v
# end def _normal
def show (r, ** kw) :
    """Print a formatted summary (status, url, body) of response `r`.

       Keyword-only options popped from `kw`: `normalize_json` replaces
       volatile JSON values via `_normal`; `cleaner` is a callable applied
       to the formatted output. Returns `r` for chaining.
    """
    normalize_json = kw.pop ("normalize_json", False)
    cleaner        = kw.pop ("cleaner", False)
    json           = req_json (r)
    if json is not None :
        if normalize_json :
            json = dict (_normal (k, v) for k, v in pyk.iteritems (json))
        kw ["json"] = json
    elif r.content :
        text = pyk.decoded (r.content).replace ("\r", "")
        kw ["content"] = text.strip ().split ("\n")
    output = formatted (dict (status = r.status_code, url = r.url, ** kw))
    print (cleaner (output) if cleaner else output)
    return r
# end def show
def showf (r, ** kw) :
    """Like `show`, but also display the normalised response headers
       (volatile headers in `skip_headers` are omitted).
    """
    headers = {}
    for k, v in pyk.iteritems (r.headers) :
        if k.lower () not in skip_headers :
            nk, nv = _normal (k, v)
            headers [nk] = nv
    return show (r, headers = headers, ** kw)
# end def showf
def traverse (url, level = 0, seen = None, ** kw) :
    """Recursively walk the REST resource tree rooted at `url`.

       Prints each path together with its `Allow` header, but each
       distinct `Allow` value only once (tracked in `seen`); then recurses
       into any `entries` of the JSON body.
    """
    seen = set () if seen is None else seen
    rg   = requests.get     (url, ** kw)
    ro   = requests.options (url, ** kw)
    path = requests.utils.urlparse (url).path or "/"
    if not ro.ok :
        print (path, ":", ro.status_code, pyk.decoded (ro.content))
    else :
        allow = ro.headers ["allow"]
        if allow not in seen :
            print (path, ":", allow)
            seen.add (allow)
    if rg.ok :
        json = req_json (rg)
        if json :
            for e in json.get ("entries", ()) :
                traverse ("http://localhost:9999" + str (e), level + 1, seen, ** kw)
# end def traverse
class Requester (TFL.Meta.Object) :
    """Wrapper for `requests`.

       Attribute access (``R.get``, ``R.post``, ...) returns a callable
       that sends the corresponding HTTP request with the path joined to
       `prefix` and a JSON content-type header by default.
    """

    class W (TFL.Meta.Object) :
        # One-shot wrapper around a single `requests` function that
        # prefixes all request paths with the server address.

        def __init__ (self, name, prefix) :
            self.method = getattr (requests, name)
            self.prefix = prefix
        # end def __init__

        def __call__ (self, path, * args, ** kw) :
            # Default to JSON requests; callers may pass their own headers.
            kw.setdefault ("headers", { "Content-Type": "application/json" })
            url = pp_join (self.prefix, path.lstrip ("/"))
            return self.method (url, * args, ** kw)
        # end def __call__

    # end class W

    def __init__ (self, prefix) :
        self.prefix = prefix
    # end def __init__

    def __getattr__ (self, name) :
        if name.startswith ("__") and name.endswith ("__") :
            ### Placate inspect.unwrap of Python 3.5,
            ### which accesses `__wrapped__` and eventually throws `ValueError`
            return getattr (self.__super, name)
        return self.W (name, self.prefix)
    # end def __getattr__

# end class Requester
# Requester bound to the local test server address.
R = Requester ("http://localhost:9999")

# Command-line arguments common to all test-server invocations.
server_args = \
    [ "-UTP=RST"
    , "-auto_reload=no"
    , "-debug=no"
    , "-load_I18N=no"
    , "-log_level=0"
    , "-port=9999"
    ]
def _main (Scaffold) :
    """Run the test server for the first backend named in the
       ``GTW_test_backends`` environment variable (default ``HPS``).
    """
    backend = sos.environ.get \
        ("GTW_test_backends", "HPS").split (":") [0].strip ()
    db_url  = Scaffold.Backend_Parameters.get (backend, "hps://").strip ("'")
    db_name = Scaffold.Backend_Default_Path.get (backend) \
        or "test.%s" % (backend.lower (), )
    _run_server \
        (Scaffold, ["-db_url", db_url, "-db_name", db_name, "-debug", "no"])
# end def _main
### __END__ GTW.__test__.rst_harness | PypiClean |
/lib/Repeat.py | from __future__ import absolute_import, division, print_function
from .Code import WarningMsg
from .Patterns import Pattern, Cycle, asStream
from .Utils import modi
from .TimeVar import var, Pvar
import inspect
class MethodList:
    """ Class for holding information about the order of which methods have been
        called on Player attributes. `root` is the original Pattern.
    """
    def __init__(self, root):
        self.root = root
        # Ordered list of (method_name, args, kwargs) records.
        self.list_of_methods = []

    def get_root_pattern(self):
        """ Return the original (un-modified) Pattern. """
        return self.root

    def set_root_pattern(self, new):
        """ Replace the original Pattern that methods are applied to. """
        self.root = new

    def add_method(self, method_name, args, kwargs):
        """ Append a (method_name, args, kwargs) record to the list. """
        self.list_of_methods.append((method_name, args, kwargs))

    def update(self, method, args, kwargs):
        """ Updates the args and kwargs for a repeated method.

            Raises ValueError (with the method name) if `method` has not
            been added to this list.
        """
        for i, info in enumerate(self.list_of_methods):
            name, _, _ = info
            if name == method:
                self.list_of_methods[i] = (method, args, kwargs)
                return
        raise ValueError("Method '{}' not found".format(method))

    def remove(self, method):
        """ Removes a method (should be a string) from the list of methods.

            Raises ValueError (with the method name) if `method` is absent.
        """
        for i, info in enumerate(self.list_of_methods):
            name, args, kwargs = info
            if name == method:
                self.list_of_methods.pop(i)
                return
        raise ValueError("Method '{}' not found".format(method))

    def __repr__(self):
        return repr(self.list_of_methods)

    def __contains__(self, method):
        """ Returns true if the method is in the list of methods """
        for name, args, kwargs in self.list_of_methods:
            if name == method:
                return True
        return False

    def __iter__(self):
        for value in self.list_of_methods:
            yield value
class Repeatable(object):
    """ Mix-in giving Player objects the `every` / `after` / `never`
        scheduling interface, plus bookkeeping of Pattern methods applied
        to their attributes (stored as `MethodList`s in
        `previous_patterns`).
    """

    after_update_methods = []
    method_synonyms      = {}

    def __init__(self):
        # cmd-string -> MethodCall for active `every` repetitions
        self.repeat_events = {}
        # attr-name -> MethodList of applied Pattern methods
        self.previous_patterns = {} # not a good name - TODO change

    def update_pattern_root(self, attr):
        """ Update the base attribute pattern that methods are applied to """
        if attr not in self.previous_patterns:
            self.previous_patterns[attr] = MethodList(self.attr[attr])
        else:
            self.previous_patterns[attr].set_root_pattern( self.attr[attr] )
        self.update_pattern_methods(attr)
        return

    def update_pattern_methods(self, attr):
        """ Update the 'current' version of a pattern based on its root and methods stored """
        if attr not in self.previous_patterns:
            self.previous_patterns[attr] = MethodList(self.attr[attr])
        result = self.previous_patterns[attr].get_root_pattern()
        # For each method in the list, call on the pattern
        for method, args, kwargs in self.previous_patterns[attr]:
            call_pattern_method = getattr(Pattern, method)
            result = call_pattern_method(result, *args, **kwargs)
        self.attr[attr] = result
        return

    def get_attr_and_method_name(self, cmd):
        """ Returns the attribute and method name from a string in the form
            `"attr.method"` would return `"attr"` and `"method"`. If attr is not
            present, it returns `"degree"` in place.
        """
        if cmd in self.method_synonyms:
            attr = [ self.method_synonyms[cmd] ]
        else:
            attr = cmd.split(".")
        # We can also schedule attribute methods
        if len(attr) == 1:
            attr_name   = "degree"
            method_name = attr[0]
        elif len(attr) == 2:
            attr_name   = attr[0]
            method_name = attr[1]
        # NOTE(review): for len(attr) > 2 both names stay unbound and a
        # NameError is raised below -- confirm callers never pass "a.b.c".
        return attr_name, method_name

    def is_pattern_method(self, method_name, attr="degree"):
        """ Returns True if the method is a valid method of `Pattern` """
        if attr == "degree" and hasattr(self, method_name):
            return False
        elif hasattr(Pattern, method_name):
            return True
        else:
            return False

    def is_player_method(self, method_name, attr="degree"):
        """ Returns True if the method is a valid method of `Player` """
        return hasattr(self, method_name) and attr == "degree"

    def get_method_by_name(self, cmd):
        """ Returns the attribute name and method based on `cmd` which is a string.
            Should be in form `"attr.method"`.
        """
        attr_name, method_name = self.get_attr_and_method_name(cmd)
        # Just get the player method if a valid player method
        if self.is_player_method(method_name, attr_name):
            method = getattr(self, method_name)
        # If its a Pattern method, create a "new" function that acts as a method
        elif self.is_pattern_method(method_name, attr_name):
            def method(*args, **kwargs):
                # If there are no "old" patterns held in memory, use the pattern method and store
                if attr_name not in self.previous_patterns:
                    self.previous_patterns[attr_name] = MethodList(self.attr[attr_name]) # store the root
                # If this has already been called, "undo it"
                if method_name in self.previous_patterns[attr_name]:
                    self.previous_patterns[attr_name].remove(method_name)
                # If not, add it to the list
                else:
                    self.previous_patterns[attr_name].add_method(method_name, args, kwargs)
                # Update the attribute
                self.update_pattern_methods(attr_name)
                return
            method.__name__ = cmd # for debugging purposes
        else:
            # Raise TypeError if not a method
            err = "{} is not a valid method for type {}".format(cmd, self.__class__)
            raise(TypeError(err))
        assert callable(method)
        return attr_name, method

    def after(self, n, cmd, *args, **kwargs):
        """ Schedule self.cmd(args, kwargs) in 'n' beats time
            ```
            # Stop the player looping after 16 beats
            p1 >> pads().after(16, "stop")
            ```
        """
        quantise = kwargs.get("quantise", True)
        try:
            event = lambda: getattr(self, cmd)(*args, **kwargs)
            # Schedule relative to "now" or to the next bar boundary.
            if not quantise:
                time = self.metro.now() + n
            else:
                time = self.metro.next_bar() + n
            self.metro.schedule( event, time )
        except:
            # Best effort: silently ignore bad commands / scheduling errors.
            pass
        return self

    def every(self, occurence, cmd, *args, **kwargs):
        """ Every n beats, call a method (defined as a string) on the
            object and use the args and kwargs. To call the method
            every n-th beat of a timeframe, use the `cycle` keyword argument
            to specify that timeframe.
            ::
                # Call the shuffle method every 4 beats
                p1.every(4, 'shuffle')
                # Call the stutter method on the 5th beat of every 8 beat cycle
                p1.every(5, 'stutter', 4, cycle=8)
                # If the method is not valid but *is* a valid Pattern method, that is called and reverted
                p1.every(4, 'palindrome')
        """
        try:
            attr, method = self.get_method_by_name(cmd)
        except AttributeError:
            WarningMsg("{} is not a valid method for type {}".format(cmd, self.__class__))
            return self
        # Collect the cycle length
        cycle = None
        ident = None
        if "cycle" in kwargs:
            cycle = kwargs["cycle"]
            del kwargs["cycle"]
        if "ident" in kwargs:
            ident = kwargs["ident"]
            del kwargs["ident"]
        # Convert `Cycles` to `var`-- should they be Pvar?
        attr_name, method_name = self.get_attr_and_method_name(cmd)
        # Just get the player method if a valid player method
        if self.is_player_method(method_name, attr_name):
            cycle_dur = occurence
        else:
            # Pattern methods toggle on/off, so a full cycle is twice as long.
            cycle_dur = occurence * 2
        args, kwargs = self.convert_cycles(args, kwargs, cycle_dur)
        # If the method call already exists, just update it (should be in a function)
        if ident is not None:
            cmd = "{}-{}".format(cmd, ident)
        if cmd in self.repeat_events:
            # Work out whether the method needs calling or not
            call = self.repeat_events[cmd]
            # Update the time details
            call.update(occurence, cycle, args, kwargs)
            attr, method_name = self.get_attr_and_method_name(cmd)
            if self.is_pattern_method(method_name, attr):
                # if n is even, it means the method is active # TODO -- put this in a class mate
                n, acc = call.count()
                if n % 2 == 1:
                    if method_name in self.previous_patterns[attr]:
                        self.previous_patterns[attr].remove(method_name)
                else:
                    if method_name in self.previous_patterns[attr]:
                        self.previous_patterns[attr].update(method_name, args, kwargs)
                # Update the attribute
                self.update_pattern_methods(attr)
            if not call.isScheduled():
                call.schedule()
        else:
            self.repeat_events[cmd] = MethodCall(self, method, occurence, cycle, args, kwargs)
            self.repeat_events[cmd].schedule()
        return self

    def stop_calling_all(self):
        """ Stops all repeated methods. """
        for method in list(self.repeat_events.keys()):
            self.never(method)
        return self

    def never(self, cmd, ident=None):
        """ Stops calling cmd on repeat """
        attr, method = self.get_attr_and_method_name(cmd)
        if ident is not None:
            cmd = "{}-{}".format(cmd, ident)
        try:
            # If it a pattern method, undo it
            if method in self.previous_patterns[attr]:
                self.previous_patterns[attr].remove(method)
                self.update_pattern_methods(attr)
            self.repeat_events[cmd].stop()
            del self.repeat_events[cmd]
        except KeyError:
            err = "Player method '{}' not active".format(cmd)
            raise KeyError(err)
        return self

    @staticmethod
    def convert_cycles(args, kwargs, occurence):
        """ Converts any values that are instances of `Cycle` to a `var` with the
            same duration as the frequency of the every call (occurrence) """
        args = [(var(value, occurence) if isinstance(value, Cycle) else value) for value in args]
        kwargs = {key: (var(value, occurence) if isinstance(value, Cycle) else value) for key, value in kwargs.items()}
        return args, kwargs
class MethodCall:
    """ Class to represent an object's method call that,
        when called, schedules itself in the future """

    def __init__(self, parent, method, n, cycle=None, args=(), kwargs=None):
        self.parent = parent
        self.method = method
        self.update(n, cycle, args, kwargs)
        self.after_update = False
        self.stopping = False

    def update(self, n, cycle=None, args=(), kwargs=None):
        """ Updates the values of the MethodCall. Re-adjusts the index if cycle has been changed """
        if cycle is not None:
            self.when = asStream(cycle)
            self.cycle = asStream(n)
        else:
            self.when = asStream(n)
            self.cycle = None
        self.args = args
        # Copy the kwargs: injecting `_beat_` below must not mutate the
        # caller's dict (the old `kwargs={}` default meant the shared
        # default dict itself could be mutated).
        self.kwargs = dict(kwargs) if kwargs else {}
        # Check if a method has the _beat_ keyword argument
        if "_beat_" in self._method_arg_names():
            self.kwargs["_beat_"] = None
        self.i, self.next = self.count()
        self.offset = float(modi(self.cycle, self.i)) if self.cycle is not None else 0
        return self

    def _method_arg_names(self):
        """ Return the argument names of `self.method`.

            `inspect.getargspec` was deprecated and finally removed in
            Python 3.11, so prefer `getfullargspec` when it exists.
        """
        try:
            getspec = inspect.getfullargspec
        except AttributeError:  # Python 2 fallback
            getspec = inspect.getargspec
        return getspec(self.method).args

    def count(self):
        """ Counts the number of times this method would have been called between clock start and now """
        n = 0; acc = 0; dur = 0
        now = float(self.parent.metro.now())
        # Get durations
        durations = self.when # if self.cycle is None else asStream(self.cycle)
        total_dur = float(sum(durations))
        # How much time left to fit remainder in
        try:
            acc = now - (now % total_dur)
        except ZeroDivisionError:
            acc = 0
        # n is the index to return for calculating self.when[n]
        # acc is when to start
        n = int(len(durations) * (acc / total_dur))
        if acc != now:
            # Step through durations until we pass "now".
            while True:
                dur = float(durations[n])
                acc += dur
                n += 1
                if acc >= now:
                    break
        return n, acc

    def __repr__(self):
        return "<Future {}() call of '{}'>".format(self.method.__name__, self.parent)

    def __call__(self, *args, **kwargs):
        """ Proxy for parent object __call__, calls the enclosed method and schedules it in the future. """
        assert self.method is not None
        # Return without scheduling if stopping
        if self.stopping:
            return
        # Give the method a reference to when OSC messages should send
        self.assign_beat()
        # Call the method
        self.call_method()
        # Update the next time to schedule
        self.next += float(self.when[self.i])
        if self.cycle is not None:
            self.offset = float(modi(self.cycle, self.i))
        # Re-schedule the method call
        self.schedule()
        # Increase the index to get the next duration
        self.i += 1
        return

    def assign_beat(self):
        """ Inject the beat of the next occurrence into `_beat_` if the
            wrapped method accepts it. """
        if "_beat_" in self.kwargs:
            self.kwargs["_beat_"] = self.get_next()
        return

    def call_method(self):
        """ Calls the method. Prints to the console with error info. """
        try:
            self.method.__call__(*self.args, **self.kwargs)
        except Exception as e:
            print("{} in '{}': {}".format(e.__class__.__name__, self.method.__name__, e))
        return

    def get_next(self):
        """ Returns the beat that the next occurrence of this method call is due """
        return self.next + self.offset

    def schedule(self):
        """ Schedules the method to be called in the clock """
        self.parent.metro.schedule(self, self.get_next())

    def isScheduled(self):
        """ Returns True if this is in the Tempo Clock """
        return self in self.parent.metro

    def stop(self):
        """ Prevent any future re-scheduling of this call. """
        self.stopping = True
/DLTA-AI-1.1.tar.gz/DLTA-AI-1.1/DLTA_AI_app/trackers/strongsort/sort/iou_matching.py | from __future__ import absolute_import
import numpy as np
from . import linear_assignment
def iou(bbox, candidates):
    """Compute intersection over union.

    Parameters
    ----------
    bbox : ndarray
        A bounding box in format `(top left x, top left y, width, height)`.
    candidates : ndarray
        A matrix of candidate bounding boxes (one per row) in the same format
        as `bbox`.

    Returns
    -------
    ndarray
        The intersection over union in [0, 1] between the `bbox` and each
        candidate. A higher score means a larger fraction of the `bbox` is
        occluded by the candidate.

    """
    # Corner coordinates of the query box and of every candidate.
    box_tl, box_br = bbox[:2], bbox[:2] + bbox[2:]
    cand_tl = candidates[:, :2]
    cand_br = candidates[:, :2] + candidates[:, 2:]

    # Overlap rectangle per candidate; broadcasting replaces the original
    # per-coordinate np.c_ construction. Clamp at zero for disjoint boxes.
    overlap_tl = np.maximum(box_tl, cand_tl)
    overlap_br = np.minimum(box_br, cand_br)
    overlap_wh = np.maximum(0., overlap_br - overlap_tl)

    area_intersection = overlap_wh.prod(axis=1)
    area_bbox = bbox[2:].prod()
    area_candidates = candidates[:, 2:].prod(axis=1)
    return area_intersection / (area_bbox + area_candidates - area_intersection)
def iou_cost(tracks, detections, track_indices=None,
             detection_indices=None):
    """An intersection over union distance metric.

    Parameters
    ----------
    tracks : List[deep_sort.track.Track]
        A list of tracks.
    detections : List[deep_sort.detection.Detection]
        A list of detections.
    track_indices : Optional[List[int]]
        A list of indices to tracks that should be matched. Defaults to
        all `tracks`.
    detection_indices : Optional[List[int]]
        A list of indices to detections that should be matched. Defaults
        to all `detections`.

    Returns
    -------
    ndarray
        Returns a cost matrix of shape
        len(track_indices), len(detection_indices) where entry (i, j) is
        `1 - iou(tracks[track_indices[i]], detections[detection_indices[j]])`.

    """
    if track_indices is None:
        track_indices = np.arange(len(tracks))
    if detection_indices is None:
        detection_indices = np.arange(len(detections))

    # The candidate boxes do not depend on the track row; build the array
    # once instead of re-materialising it on every loop iteration.
    candidates = np.asarray(
        [detections[i].tlwh for i in detection_indices])

    cost_matrix = np.zeros((len(track_indices), len(detection_indices)))
    for row, track_idx in enumerate(track_indices):
        if tracks[track_idx].time_since_update > 1:
            # Stale tracks get an effectively infinite matching cost.
            cost_matrix[row, :] = linear_assignment.INFTY_COST
            continue

        bbox = tracks[track_idx].to_tlwh()
        cost_matrix[row, :] = 1. - iou(bbox, candidates)
    return cost_matrix
/OMSTD-hh-001-0.1.2.tar.gz/OMSTD-hh-001-0.1.2/omstd_hh_001/lib/data.py | __author__ = 'cr0hn - cr0hn<-at->cr0hn.com (@ggdaniel)'
# --------------------------------------------------------------------------
class Parameters:
    """Program parameters"""

    # ----------------------------------------------------------------------
    def __init__(self, **kwargs):
        """
        :param ports_range: ports range as string: '1-2000' (or a single port '80')
        :type ports_range: str

        :param targets: list of strings with targets
        :type targets: list(str)

        :param random_port_scan: select ports to scan in random order
        :type random_port_scan: bool

        :param verbosity: verbosity level
        :type verbosity: int

        :param only_open: only manage opened ports
        :type only_open: bool

        :param print_function: function used to display debug info. Default is 'print'.
        :type print_function: function

        :param proxy: URL with proxy info
        :type proxy: str

        :raises: TypeError, ValueError
        """
        self.ports_range      = kwargs.get("ports_range", "0-1024")
        self.targets          = kwargs.get("targets", None)
        self.verbosity        = int(kwargs.get("verbosity", 0))
        self.random_port_scan = kwargs.get("random_port_scan", False)
        self.print_function   = kwargs.get("print_function", print)
        self.only_open        = kwargs.get("only_open", False)
        self.proxy            = kwargs.get("proxy", None)
        self.proxy_user       = kwargs.get("proxy_user", None)
        self.proxy_pass       = kwargs.get("proxy_pass", None)

        # --- type validation --------------------------------------------
        if not isinstance(self.ports_range, str):
            raise TypeError("Expected str, got '%s' instead" % type(self.ports_range))
        if not isinstance(self.targets, list):
            raise TypeError("Expected list, got '%s' instead" % type(self.targets))
        for target in self.targets:
            if not isinstance(target, str):
                raise TypeError("Expected str, got '%s' instead" % type(target))

        # Remove duplicate targets
        self.targets = list(set(self.targets))

        # --- expand the port range string into an explicit list ---------
        bounds = self.ports_range.strip().split("-")
        if len(bounds) == 1:
            start = int(bounds[0])
            end = start + 1
        elif len(bounds) == 2:
            start = int(bounds[0])
            end = int(bounds[1])
        else:
            raise ValueError("Port range must be defined as start-end: 1-4025")
        self.ports_range = list(range(start, end))

        # --- normalise the proxy URL ------------------------------------
        if self.proxy is not None:
            from urllib.parse import urlparse
            scheme = "http" if self.proxy.startswith(("http://", "https://")) else ""
            self.proxy = urlparse(self.proxy, scheme=scheme)
# --------------------------------------------------------------------------
class Results:
    """Program results"""

    # ----------------------------------------------------------------------
    def __init__(self, **kwargs):
        """
        :param ports: Port status as format: {PORT_NUMBER: STATUS}
        :type ports: dict(int: str)

        :param scan_time: Time taken by the scan, in milliseconds
        :type scan_time: float
        """
        self.ports = kwargs.get("ports", None)
        # Keep two decimal places for display purposes.
        raw_time = kwargs.get("scan_time", 0)
        self.scan_time = '{number:.2f}'.format(number=raw_time)
        self.__open_ports = None  # lazy cache for the `open_ports` property

    # ----------------------------------------------------------------------
    @property
    def open_ports(self):
        """
        :return: Return only open ports
        :rtype: list(int)
        """
        if self.__open_ports is None:
            self.__open_ports = [
                port
                for port, status in self.ports.items()
                if status.lower() == "open"
            ]
        return self.__open_ports
__all__ = ["Results", "Parameters"] | PypiClean |
/Hector_Observations_Pipeline-1.4-py3-none-any.whl/hop/distortion_correction/HectorTranslationSoftware/Packages/sds/README.txt | # "@(#) $Id: ACMM:sds/README.txt,v 3.94 09-Dec-2020 17:15:24+11 ks $"
The DRAMA Self Defining data System (Sds). Can also be used
indpendent of DRAMA - see the Standalone directory.
This software has made use of the ESO cmm system (known internally
at AAO as "acmm").
This module is a DRAMA module - it must have a dmakefile and not
a Makefile. This will be checked when you archive the module.
This module, sds, was created from an old SCCS archive.
Version 3.0 is a snapshot taken when it was put into
ACMM on Fri Jan 17 12:21:29 2003
| PypiClean |
/GangaCK-1.0.0.tar.gz/GangaCK-1.0.0/scripts/xmlmerge.py | import re
import sys
import os
import subprocess
import argparse
import ConfigParser
import logging
from collections import defaultdict
from glob import glob
import xml.etree.cElementTree as ET
## TODO: leave out the outputfile, determine ad-hoc
OUTPUTDIR = 'output'
OUTPUT = 'output/summary.xml' # Name of output file. Use for search in subjob and creation of merged one.
#===============================================================================
class MyFormatter(logging.Formatter):
    """
    Formatter that switches to an alarming layout for WARNING/ERROR records.

    https://stackoverflow.com/questions/1343227/can-pythons-logging-format-be-modified-depending-on-the-message-log-level
    """

    err_fmt = "%(levelname)s!: %(msg)s"

    def _swap_fmt(self, fmt):
        # On Python 3, Formatter.format() goes through `self._style`, so
        # updating `self._fmt` alone has no effect (the original bug: the
        # WARNING/ERROR format was silently ignored). Keep both in sync
        # so this works on Python 2 and 3.
        self._fmt = fmt
        style = getattr(self, "_style", None)
        if style is not None:
            style._fmt = fmt

    def format(self, record):
        format_orig = self._fmt

        if record.levelno in (logging.WARNING, logging.ERROR):
            self._swap_fmt(MyFormatter.err_fmt)

        # Call the original formatter class to do the grunt work
        result = logging.Formatter.format(self, record)

        # Restore the original format configured by the user
        self._swap_fmt(format_orig)

        return result
## Prepare global logger (root logger, INFO level)
logger = logging.getLogger()
logger.setLevel(logging.INFO)

## Prepare handlers: log everything both to `merge.log` (truncated on
## each run) and to the console, using the level-aware formatter.
fmt = MyFormatter()
fh = logging.FileHandler('merge.log', 'w')
fh.setLevel(logging.INFO)
fh.setFormatter(fmt)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(fmt)

## add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
#===============================================================================
def get_massstorage_output():
    """Return the mass-storage upload path from ~/.gangarc, or None if unset."""
    ## Read the gangarc file, report the masstorage output location if found
    conf = ConfigParser.ConfigParser()
    conf.read(os.path.expandvars("$HOME/.gangarc"))
    ## Return None if it's not set
    if not conf.has_option('Output', 'massstoragefile'):
        return None
    # NOTE(review): `eval` of a config value -- assumed to be a trusted,
    # user-owned file; do not point this at untrusted input.
    return eval(conf.get('Output', 'massstoragefile'))['uploadOptions']['path']
def init():
    """
    This will do initial check. Namely, we need 'summary' module from LHCb framework.
    Exit if there's no summary library.
    Also create empty dir, for logging.

    TODO: Get the latest
    """
    # Hard-coded LHCb XMLSummaryBase installation; exported and put on
    # sys.path so the `summary` module below can be imported.
    path = '/sw/lib/lhcb/LHCB/LHCB_v40r0/Kernel/XMLSummaryBase'
    os.environ['XMLSUMMARYBASEROOT'] = path
    sys.path.append(os.environ['XMLSUMMARYBASEROOT']+'/python/XMLSummaryBase')
    try:
        # Imported into module scope: `perform_merge_xml` uses it later.
        global summary
        import summary
    except ImportError:
        logger.error("Module `summary` from LHCB/LHCB_v36r4/Kernel/XMLSummaryBase not found.")
        sys.exit(-1)
    ## make dir if not exists
    if not os.path.exists(OUTPUTDIR):
        os.makedirs(OUTPUTDIR)
#===============================================================================
def perform_location_check():
    """
    Exit if the location doesn't seem right...
    """
    pattern = r'/gangadir/workspace/\S+/LocalXML/\d+$'
    # NOTE(review): `logger.exception` outside an except block logs no
    # traceback -- kept for behavioural parity.
    if re.search(pattern, os.getcwd()) is None:
        logger.exception('Bad location, abort: '+ os.getcwd())
        sys.exit(-1)
def perform_existence_precheck():
    """There should be at least 1 summary.xml file, per subjob dir."""
    for sjpath in glob('*/output'):
        candidates = glob(sjpath + '/summary.xml')
        if not candidates:
            logger.warning('No *.xml file found at %s' % sjpath)
#-------------------------------------------------------------------------------
def perform_check_success():
    """Check for tag <success>True/False<success> for individual xml."""
    logger.info('\n### Check <success> in each *.xml')
    all_success = True
    for fpath in glob('*/' + OUTPUT):
        tree = ET.parse(fpath)
        # The tag text is the literal string 'True' / 'False'.
        if not eval(tree.find('success').text):
            all_success = False
            logger.warning('Found success=False: ' + fpath)
    if all_success:
        logger.info('All *.xml found success=True')
#--------------------------------------------------------------
def valid_for_merge(fname):
    """
    Return True when `fname` looks like an xml summary worth merging.
    Catalogue dumps, generator logs, job descriptions and non-xml
    files are all rejected.
    """
    blacklist = ('Generatorlogger.xml', 'jobDescription.xml')
    if not fname.endswith('.xml'):
        return False
    if fname in blacklist:
        return False
    return 'catalog' not in fname.lower()
def perform_merge_xml(args):
    """Merge the per-subjob xml summaries into single files under OUTPUTDIR.

    `args` is kept for interface compatibility with the other perform_*
    steps (currently unused here).
    """
    logger.info('\n### Perform merging of xml.')
    ## Heuristic search for xml files
    queue = defaultdict(list)
    for dname, _, files in os.walk('.'):
        ## Skip the merged-output dir itself and any input dirs (relative paths)
        if dname != os.path.join('.', OUTPUTDIR) and 'input' not in dname:
            for fname in files:
                if valid_for_merge(fname):
                    queue[fname].append(os.path.join(dname, fname))
    ## Loop through queue, remove & merge
    logger.info('List of xml merging:')
    for fname, l in queue.items():  # .items() works on both py2 & py3
        logger.info('{:20} ({} files)'.format(fname, len(l)))
        fout = os.path.join(OUTPUTDIR, fname)
        ## Remove existing one
        if os.path.exists(fout):
            logger.info("Removing existed merge: " + fout)
            os.remove(fout)
        ## Call merge
        summary.Merge(l).write(fout)
    logger.info("Merge completed!")
#-------------------------------------------------------------------------------
def perform_resort():
    """Sort the counter section to be more useful. Do alphabetically for now."""
    logger.info('Sorting: '+OUTPUT)
    tree = ET.parse(OUTPUT)
    container = tree.find("counters")
    data = [(elem.get("name"), elem) for elem in data_source(container)]
    ## Sort on the name only: a plain tuple sort would compare the Element
    ## objects as tie-breaker, which raises TypeError on py3.
    data.sort(key=lambda pair: pair[0])
    # insert the last item from each tuple
    container[:] = [item[-1] for item in data]
    tree.write(OUTPUT)
    logger.info('Sort completed!')

def data_source(container):
    """Tiny indirection so the iteration source is explicit; yields the
    child elements of the <counters> node."""
    for elem in container:
        yield elem
#-------------------------------------------------------------------------------
def perform_merge_ppl():
    """
    Merge any per-subjob __postprocesslocations__ files into one,
    so that the subjobs can be archived.
    """
    logger.info('\n### Perform merging of ppl')
    target = 'output/__postprocesslocations__'
    src = '*/' + target
    ## Nothing to do when no subjob has a ppl file.
    if not glob(src):
        logger.info('No ppl to be merged. Continue.')
        return
    ## Drop any previously merged file first.
    if os.path.exists(target):
        logger.info("Removing existed merge: " + target)
        os.remove(target)
    ## Concatenation is easiest via the shell.
    subprocess.check_output('ls {} | xargs cat | tee {}'.format(src, target), shell=True)
    logger.info('Merged ppl completed: %s' % target)
#-------------------------------------------------------------------------------
def perform_jobstatus_check():
    """
    If there are __jobstatus__, do following check.
    > TODO: Assert number check. There should be one of this for each subdir
    - Assert EXITCODE.
    """
    for fpath in glob('*/output/__jobstatus__'):
        with open(fpath) as fin:
            matches = re.findall(r'EXITCODE: (\d+)', fin.read())
        ## Missing code and non-zero code are both reported, differently.
        if not matches:
            logger.warning('JobStatus: missing EXITCODE: ' + fpath)
        elif int(matches[0]) != 0:
            logger.warning('JobStatus: failure EXITCODE: ' + fpath)
#-------------------------------------------------------------------------------
def is_slurm():
    """Heuristic: a SLURM run leaves a `__jobstatus__` file in subjob 0."""
    ## May be inaccurate
    probe = os.path.join(os.getcwd(), '0/output/__jobstatus__')
    return os.path.isfile(probe)
## Substrings in a subjob's stdout/stderr tail that indicate a failed run
## (used by perform_slurm_stdout_postcheck).
SLURM_FAIL_KEYWORDS = (
    'Application Manager Terminated with error',
    'Traceback (most recent call last)',
    '0x0000000000000000',
)
def perform_slurm_stdout_postcheck():
    # Scan the tail of every subjob's stdout/stderr for known failure
    # signatures (SLURM_FAIL_KEYWORDS) and for a missing GANGA end-marker.
    def tail(f, n, offset=None):
        """Reads a n lines from f with an offset of offset lines. The return
        value is a tuple in the form ``(lines, has_more)`` where `has_more` is
        an indicator that is `True` if there are more lines in the file.
        """
        # NOTE(review): the relative seek on a text-mode handle and the float
        # produced by `avg_line_length *= 1.3` work on py2 only -- confirm
        # before running under py3.
        avg_line_length = 74
        to_read = n + (offset or 0)
        while 1:
            try:
                f.seek(-(avg_line_length * to_read), 2)
            except IOError:
                # woops. apparently file is smaller than what we want
                # to step back, go to the beginning instead
                f.seek(0)
            pos = f.tell()
            lines = f.read().splitlines()
            if len(lines) >= to_read or pos == 0:
                return lines[-to_read:offset and -offset or None], \
                       len(lines) > to_read or pos > 0
            # Estimate was too small: grow it and retry.
            avg_line_length *= 1.3
    def readtaildat(path):
        # tail only to have small dat
        with open(path) as fin:
            return tail(fin, 50)[0]
    def check_single(path):
        # Warn when stderr does not end with the GANGA end-marker, and on any
        # tail line containing a known failure keyword.
        # NOTE(review): `dat[-1]` raises IndexError on an empty file -- confirm.
        dat = readtaildat(path)
        sjid = path.split('/')[-3]
        if 'stderr' in path and '--- GANGA APPLICATION ERROR END ---' not in dat[-1]:
            logger.warning('{:>4} {}'.format(sjid, dat[-1]))
        for line in dat:
            if any( badword in line for badword in SLURM_FAIL_KEYWORDS ):
                logger.warning('{:>4} {}'.format(sjid, line))
    logger.info('\n### Running SLURM post-check')
    ## check on stdout
    gpath = os.path.join(os.getcwd(), '*/output/std*')
    for path in glob(gpath):
        check_single(path)
    logger.info('SLURM post-check done!')
#--------------------------------------------------------------
def perform_memcheck():
    """Try to determine the largest mem used. For future cluster allocation."""
    logger.info('\n### Memory usage')
    ## V2: read the peak from the merged xml (usage/stat node), in KB -> MB.
    node = ET.parse(OUTPUT).find("usage").find('stat')
    usage = float(node.text) / 1024
    logger.info('Maximum mem usage: %.2f MB' % usage)
#--------------------------------------------------------------
def extract_lfn(uri):
    """
    Normalise a PFN/LFN-style URI down to its bare '/lhcb/...' LFN path.

    >>> extract_lfn('PFN:root://lhcb-sdpd13.t1.grid.kiae.ru.:1094/t1.grid.kiae.ru/data/lhcb/lhcbdisk/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000006_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000006_2.AllStreams.dst'
    >>> extract_lfn('LFN:root://clhcbdlf.ads.rl.ac.uk//castor/ads.rl.ac.uk/prod/lhcb/LHCb/Collision12/EW.DST/00041836/0000/00041836_00000303_1.ew.dst?svcClass=lhcbDst')
    '/lhcb/LHCb/Collision12/EW.DST/00041836/0000/00041836_00000303_1.ew.dst'
    >>> extract_lfn('PFN:file:///storage/gpfs_lhcb/lhcb/disk/LHCb/Collision12/EW.DST/00041836/0000/00041836_00000303_1.ew.dst')
    '/lhcb/LHCb/Collision12/EW.DST/00041836/0000/00041836_00000303_1.ew.dst'
    >>> extract_lfn('PFN:root://se16.lcg.cscs.ch:1094/pnfs/lcg.cscs.ch/lhcb/lhcb/LHCb/Collision12/EW.DST/00041836/0001/00041836_00018845_1.ew.dst')
    '/lhcb/LHCb/Collision12/EW.DST/00041836/0001/00041836_00018845_1.ew.dst'
    >>> extract_lfn('PFN:root://dcdoor05.pic.es:1094/pnfs/pic.es/data/lhcb/LHCb/Collision12/EW.DST/00041836/0003/00041836_00039307_1.ew.dst')
    '/lhcb/LHCb/Collision12/EW.DST/00041836/0003/00041836_00039307_1.ew.dst'
    >>> extract_lfn('PFN:file:///storage/gpfs_lhcb/lhcb/disk/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000036_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000036_2.AllStreams.dst'
    >>> extract_lfn('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst'
    >>> extract_lfn('PFN:root://lhcb-sdpd16.t1.grid.kiae.ru.:1094/t1.grid.kiae.ru/data/lhcb/lhcbdisk/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst'
    >>> extract_lfn('PFN:root://heplnx232.pp.rl.ac.uk:1094/pnfs/pp.rl.ac.uk/data/lhcb/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst'
    >>> extract_lfn('PFN:root://bohr3226.tier2.hep.manchester.ac.uk:1094//dpm/tier2.hep.manchester.ac.uk/home/lhcb/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst')
    '/lhcb/MC/2012/ALLSTREAMS.DST/00046841/0000/00046841_00000003_2.AllStreams.dst'
    >>> extract_lfn('root://f01-080-125-e.gridka.de:1094/pnfs/gridka.de/lhcb/LHCb/Collision12/EW.DST/00020198/0000/00020198_00000752_1.ew.dst')
    '/lhcb/LHCb/Collision12/EW.DST/00020198/0000/00020198_00000752_1.ew.dst'
    """
    ## Site-specific prefixes; the first match wins, so longer/more specific
    ## patterns come first.
    common_endprefix = [ '/data/lhcb/lhcbdisk/lhcb/', '/prod/lhcb/', '/gpfs_lhcb/lhcb/disk/', '/data/lhcb/lhcb/', '/data/lhcb/', '/home/lhcb/lhcb/', '/lhcb/lhcb/', '/lhcb/' ]
    disposable_nodes = [ 'LFN:', 'PFN:' ]
    #
    ## Undo XML entity escaping that may survive from the summary files.
    ## (The previous chain replaced each string with itself -- a no-op.)
    uri = uri.replace("<", "<").replace(">", ">").replace("&", "&")
    uri = uri.split('?')[0] # Remove query tail
    for node in disposable_nodes:
        uri = uri.replace(node, '')
    for node in common_endprefix:
        if node in uri:
            # Everything after the site prefix is the LFN proper.
            uri = '/lhcb/' + uri.split(node)[-1]
            break
    return uri
def perform_data_validation(args):
    """Cross-check the merged summary.xml: every input file should have
    status 'full' (or 'part'/'mult' unless --partial-is-bad is given).
    Reports event counts and the subjobs touching any bad LFN.
    """
    logger.info("\n### Verifying faulty data in merged output/summary.xml (status!=full)")
    root = ET.parse(OUTPUT).getroot()
    lfn_nevt = dict()                  # LFN -> event count text, or <=0 marker when bad
    lfn_rawnames = defaultdict(list)   # LFN -> every raw PFN string seen for it
    ## Check input nodes, collect bad input
    for node in root.find('input').findall('file'):
        rawname = node.get('name')
        lfn = extract_lfn(rawname)
        nevt = node.text
        status = node.get('status')
        valid = status == 'full' or (not args.partial_is_bad and status in ('part','mult'))
        ## In case of invalid, try to retrieve nevt previous file of same LFN
        ## If there's previous file, this will negate -1.
        if not valid:
            nevt = lfn_nevt.get(lfn, -1) ## In case of
        lfn_nevt[lfn] = nevt
        lfn_rawnames[lfn].append(rawname)
    ## 2nd-pass. Report bad evt
    list_subjob = list()
    count_input_evt = 0
    count_output_evt = 0
    count_bad_files = 0
    summary_children = list(glob('*/output/summary.xml'))
    # NOTE(review): good entries keep nevt as a *string*, so `nevt <= 0` below
    # relies on py2's str-vs-int ordering (always False for strings) -- py2 only.
    for lfn,nevt in lfn_nevt.iteritems():
        if nevt <= 0:
            ## This is bad, run through collected rawnames
            count_bad_files += 1
            res_report = defaultdict(list) # For printout
            logger.warning("Found bad: " + lfn)
            for name in lfn_rawnames[lfn]:
                # grep the raw PFN in every subjob summary to find which
                # subjobs processed this file.
                stdout = subprocess.check_output(['grep', '-r', name] + summary_children)
                sjids = re.findall(r'(\d+)/output/summary\.xml', stdout)
                list_subjob.extend(sjids)
                ## Print result, group by file
                for line in stdout.strip().split('\n'):
                    if line:
                        fname,text = line.split(':\t')
                        res_report[fname].append(text.strip())
            for fname,l in res_report.iteritems():
                logger.warning(fname)
                logger.warning('\n'.join(l))
            print # line break
        else: # good evt
            count_input_evt += int(nevt)
    ## Check output nodes
    for node in root.find('output').findall('file'):
        count_output_evt += int(node.text)
    # Report
    logger.info('Count input evt: {:,}'.format(count_input_evt))
    logger.info('Count output evt: {:,}'.format(count_output_evt))
    if count_bad_files==0:
        logger.info("No bad file found. Congrats!")
        return
    ## Print the command to fix this
    list_subjob = list({ int(sjid) for sjid in list_subjob })
    logger.info("Defected inputdata found....")
    logger.info(sorted(list_subjob))
#--------------------------------------------------------------
def perform_dush():
    """Report the directory size (local workspace plus massstorage, if any)."""
    jid = os.getcwd().split('/')[-1]
    paths = [ os.getcwd() ] # Default
    ## SKIP due to panfs DOWN on 2015-08-24
    mass = get_massstorage_output()
    if mass is not None:
        candidate = os.path.join(mass, jid)
        if os.path.exists(candidate):
            paths.append(candidate)
    logger.info('\n### Directory size')
    for path in paths:
        size = subprocess.check_output(['du', '-sh', path]).split()[0]
        logger.info('{:5} | {:40}'.format( size, path ))
#===============================================================================
def call_eos(*args):
    """
    Wrapping subprocess.call for eos command from shell.
    Catching the segfault when there's no token.
    https://stackoverflow.com/questions/22250893
    Return stdout at success, raise `CalledProcessError` otherwise.
    Note: Check with pure 'eos' command, since it yields segfault error code.
    Other subcommand silently crash.
    Usage:
        >> call_eos('ls')
    """
    from subprocess import Popen, PIPE, CalledProcessError, check_output
    ## Probe with a bare `eos` first: it is the one that reports the
    ## missing-token segfault via its return code.
    probe = Popen(['eos'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    probe.communicate('\n')
    if probe.returncode != 0: # segfault,
        raise CalledProcessError( probe.returncode, 'eos' )
    ## Token is fine: run the real subcommand and strip eos's odd escape code.
    out = check_output(['eos'] + list(args))
    return out.replace('\x1b[?1034h', '')
def perform_hadd():
    """
    Request to call `hadd` on resultant root files, with additional
    lookup on massStorage location (panasas for LPHE) and on EOS.
    Interactive: asks before merging and before overwriting existing output.
    """
    logger.info('\n### `hadd` queue')
    logger.info("Seeking root files...")
    ## Prep search location
    paths = [ os.getcwd() ] # By default, the current dir.
    jid = paths[0].split('/')[-1]  # job id == name of the current dir
    ## In addition, the massstorage, if any.
    ## SKIP due to panfs DOWN on 2015-08-24
    path2 = get_massstorage_output()
    if path2 is not None:
        path2 = os.path.join(path2, jid)
        if os.path.exists(path2):
            paths.append( path2 )
    ## Begins the lookup, indexed by filename
    indexed_list = defaultdict(list)
    fulloutdir = os.path.join(os.getcwd(), OUTPUTDIR)
    for searchpath in paths:
        for dname, _, files in os.walk(searchpath):
            if dname != fulloutdir:  # don't pick up already-merged output
                for fname in files:
                    if fname.endswith('.root'):
                        indexed_list[fname].append(os.path.join(dname, fname))
    ## Add also EOS, which is not possible to walk like above.
    ## Need $EOS_MGM_URL and $EOS_HOME
    ## NOTE(review): with `and`, EOS lookup is attempted when either var is
    ## set, and the early return also skips hadd on local files when both
    ## are missing -- confirm whether `or` + falling through was intended.
    if 'EOS_MGM_URL' not in os.environ and 'EOS_HOME' not in os.environ:
        return
    url = os.environ['EOS_MGM_URL']
    path = os.path.expandvars('$EOS_HOME/ganga/{}'.format(jid)) # Assume default destination on EOS
    res = call_eos( 'find', path )
    ## (removed a dead duplicate `files = res.split('\n')` assignment here)
    files = res.replace('\x1b[?1034h', '').split('\n') # eos binary produce strange escape code
    for fpath in files:
        if fpath.endswith('.root'):
            fname = os.path.split(fpath)[1]
            indexed_list[fname].append( url+'/'+fpath )
    ## Exit if no queue needed
    if not indexed_list:
        print('No *.root files found. Ignore hadd.')
        return
    ## Request to user with queue info
    for key, val in indexed_list.items():  # .items() works on both py2 & py3
        cmd = ['du', '-shc'] + val
        ## EOS files: size via `eos ls -l` instead of local `du`
        if all( 'root://' in arg for arg in val ):
            usage = 0.0
            url = os.environ['EOS_MGM_URL']
            for arg in val:
                fpath = arg.replace(url, '')
                res = call_eos( 'ls', '-l', fpath )
                usage += int(res.split()[4]) # Bytes
            # Convert to MB
            usage = '%.2f MB (eos)' % (float(usage)/1024/1024)
        else:
            res = subprocess.check_output(cmd)
            usage = res.split()[-2]
        logger.info("{:20} ( {} files, {} )".format(key, len(val), usage))
    if raw_input("> Perform hadd? Y/[n]: ") != 'Y':
        return
    ## Perform upon request
    for fname, queue in indexed_list.items():
        target = 'output/%s' % fname
        ## Check existing file, ask for permission to delete
        if os.path.exists(target):
            if raw_input("Output existed, remove it? Y/[n]: ") == 'Y':
                subprocess.call(['rm', target])
            else:
                logger.info("Retain existing file, continue.")
        ## Do actual merge
        cmd = [ 'hadd', target ] + sorted(queue) # '-f9',
        subprocess.call(cmd)
#-------------------------------------------------------------------------------
def perform_delete_subjobs_root():
    """
    Ask user to delete *.root files ONLY of the subjobs.
    ask before performing archive (where no data is delete).
    """
    ## Count candidates first; nothing to ask when there are none.
    targets = glob('*/input/*.root') + glob('*/output/*.root')
    if not targets:
        return
    if raw_input('\n> Delete *.root files (%i) of subjobs? Y/[n]: ' % len(targets)) != 'Y':
        return
    cmd = 'rm -f */output/*.root */input/*.root'
    logger.info('Running: %r' % cmd)
    subprocess.call(cmd, shell=True)
    logger.info('Delete successfully!')
#-------------------------------------------------------------------------------
## Glob patterns (relative to the job dir) of large data products that may
## be uploaded to EOS: merged output plus each subjob's output.
searchpath = [
    'output/*.root',
    'output/*.dst',
    '*/output/*.dst',
    'output/*.sim',
    '*/output/*.sim',
    'output/*.digi',
    '*/output/*.digi',
]
def perform_eoscp():
    """
    Ask to upload the large data to EOS. Can be either from main or subjobs.
    """
    ## Collect candidates from all known search patterns.
    queue = []
    for pattern in searchpath:
        queue.extend(glob(pattern))
    if not queue:
        return  # nothing to upload
    ## Ask for permission
    logger.info('\n### Upload large files to EOS')
    logger.info('Found files: %i' % len(queue))
    if raw_input("Perform upload to EOS? Y/[n]: ") != 'Y':
        return
    assert 'EOS_MGM_URL' in os.environ, 'Needs $EOS_MGM_URL defined.'
    assert 'EOS_HOME' in os.environ, 'Needs $EOS_HOME defined.'
    jid = os.getcwd().split('/')[-1]
    for src in queue:
        ## Strip the 'output/' component so files land flat under the job dir.
        target = os.path.join('$EOS_MGM_URL/$EOS_HOME/ganga', jid, src)
        target = os.path.expandvars(target.replace('output/', ''))
        stdout = subprocess.check_output(('eoscp', '-s', src, target))
        logger.info(stdout.strip())
    logger.info('All files uploaded to EOS successfully')
    if raw_input("Delete those files locally? Y/[n]: ") != 'Y':
        return
    subprocess.call(['rm'] + queue)
#-------------------------------------------------------------------------------
def perform_archive():
    """
    (Experimental) Archive the subjobs into single data.tar.xz, and delete them.
    """
    if raw_input("\n> Perform archive? Y/[n]: ") != 'Y':
        return
    ## Check is already existed, do nothing otherwise (not delete!).
    target = 'data.tar.xz'
    if os.path.exists(target):
        logger.warning('Found existing archive (%s). Abort for safety'%target)
        return
    ## Archive everything except the merged output dir and the archive itself.
    cmd = "tar -Jcf {0} --exclude='./output' --exclude={0} .".format(target)
    subprocess.call(cmd, shell=True)
    ## Remove the now-archived numeric subjob dirs plus debug/input scratch.
    cmd = "find . -maxdepth 1 -type d -name '[0-9]*' | xargs rm -rf ; rm -rf debug/ input/"
    subprocess.call(cmd, shell=True)
    logger.info('Archiving finished successfully: %r'%target)
#===============================================================================
def main(args):
    """Run the full post-processing pipeline. The order matters: the merge
    steps must run before the checks (resort/memcheck/validation) that read
    the merged file."""
    init()
    perform_location_check()
    perform_dush()
    perform_existence_precheck()
    perform_check_success()
    perform_merge_xml(args)
    perform_resort()
    perform_merge_ppl()
    perform_jobstatus_check()
    # SLURM-specific stdout/stderr scan only when the heuristic matches.
    if is_slurm():
        perform_slurm_stdout_postcheck()
    perform_memcheck()
    if not args.skip_validate:
        perform_data_validation(args)
    # Interactive steps last: merge root files, clean up, upload, archive.
    perform_hadd()
    perform_delete_subjobs_root()
    perform_eoscp()
    perform_archive()
    print # endline
#===============================================================================
if __name__ == '__main__':
    ## Read args
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--partial-is-bad', action="store_true", default=False, help="If True, consider 'partial' data as false too.")
    parser.add_argument('-s', '--skip-validate',
        action="store_true",
        default=False,
        help="If not skip, the script will loop check whether all data input has been correctly processed or not.")
    parser.add_argument('--hadd', action='store_true', default=False,
        help="If True, run directly the `hadd` module.")
    parser.add_argument('--archive', action='store_true', default=False,
        help="If True, run directly the `archive` module")
    # parser.add_argument('-n', '--files', type=int, help='Max number of files to merge', default=-1)
    args = parser.parse_args()
    ## Run only hadd module.
    ## NOTE(review): the --hadd / --archive shortcuts skip init() and
    ## perform_location_check() -- confirm they are safe from any cwd.
    if args.hadd:
        perform_hadd()
        sys.exit()
    if args.archive:
        perform_archive()
        sys.exit()
    main(args)
/DFTtoolbox-1.6.3.tar.gz/DFTtoolbox-1.6.3/README.md | # DFTtoolbox
DFTtoolbox is a Python module that aims to help computational condensed
matter physicist or material scientists who use density functional
theory (DFT) codes , such as quantum espresso, abinit, elk, etc., as a
tool to study material properties quickly build their calculations and
analyze the calculated results.
Most DFT codes come with a massive number of variables. Users are required
to be familiar with all of them, not only in the building stage but
also in the postprocessing stage. Therefore, the philosophy of DFTtoolbox is
"the less user input the better!" Users only need to feed a few
variables to DFTtoolbox to build a DFT input file or to analyze the
calculated results. No painful learning curves for DFT codes
anymore!
# Prerequisite
DFTtoolbox is a Python module which helps DFT code users quickly build an
input file in seconds and analyze and visualize the output data immediately.
Currently, DFTtoolbox can support three popular DFT codes:
- Quantum Espresso (tested in v6.1) http://www.quantum-espresso.org/
- Abinit (tested in v8.4.2) http://www.abinit.org/
- Elk (tested in v4.3.6) http://elk.sourceforge.net/
DFTtoolbox is based on numpy and matplotlib. Before installing DFTtoolbox,
please make sure you have these packages installed. Also, DFTtoolbox can
only read the crystal structure and band high-symmetry
path in xcrysden format (.xsf and .kpf). If you're not familiar with
xcrysden, check their website (http://www.xcrysden.org/). It is very
easy to learn. Then you are all set!
# How does DFTtoolbox build a DFT task ?
Before building a input file using DFTtoolbox, the user has to prepare
the structure file and the k-path file in xcrysden format (.xsf and .kpf.)
Then DFTtoolbox can:
- initialize a ground state calculation
The user only need to feed very few parameters, DFTtoolbox will automatically
generate the input files to perform four basic ground state calculation:
-- self-consistent field calculation
-- band structure calculation
-- projected band structure calculation (i.e. fatband)
-- partial density of state calculation
- initialize a structural relaxation calculation
DFTtoolbox can automatically generate an input file for structural relaxation
by requesting "zero" input! DFTtoolbox will pick the values that fit for most
calculations for you.
Note:
DFTtoolbox is designed based on the philosophy "the fewer user inputs
the better", so you don't have to learn another complicated code to deal with
the already-very-complicated DFT codes. The input files are therefore only for general
purposes and will not fit all kinds of ground state or structural relaxation
calculations. However, even if the generated input files don't completely satisfy
your needs, they are always a good starting point for you to tweak your own input files.
# How does DFTtoolbox help analyze the results?
One of the painful parts for DFT users is that the output files are usually very complicated
and lack appropriate tools to visualize the results beautifully and
easily. DFTtoolbox can automatically read the output data of band calculations,
projected band calculations and PDOS calculations, save them in numpy array format
(.npz files), and plot the results with publishable quality. A convenient feature
of DFTtoolbox is that it can combine arbitrary states to plot projected band
structures and PDOS, making it a useful tool for band character analysis.
In addition, if you want to use your own tools to plot the results,
the output .npz file can also let you load the data in numpy standard format
immediately, so you can postprocess your data in Python framework easily.
# Is there a quick tutorial?
Yes, check (/examples/). There is a simple example FeO. I believe
anyone can learn how to use it in a few minutes. You will find it only needs
a few necessary input variables to initialize or postprocess a DFT calculation.
Also, in /template/ there are template files for you to use.
# Installation
- from the tarball:
Just download the tarball. Unzip it and put it anywhere you want. Go to the
folder, type:
python setup.py install
- from PyPI:
pip install DFTtoolbox
# Figures Demo
Here are examples of the figure generated by DFTtoolbox. These figure were
obtained from ferromagnetic FeO. This example has been included in the module.
- Fig.1 band structure (green and blue are different spins)

- Fig.2 band structure projected on Fe d-orbital

- Fig.3 PDOS (data-1: Fe d-orbital, data-2: O p-orbital)
 | PypiClean |
/Altair%20Smartworks%20SDK-0.0.1.tar.gz/Altair Smartworks SDK-0.0.1/openapi_client/model/event_high_cpu_response.py | import re # noqa: F401
import sys # noqa: F401
from openapi_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Deferred imports: resolves the generated model modules at first use to
    # break circular-import cycles, then exposes them at module scope.
    from openapi_client.model.event_high_cpu_data import EventHighCPUData
    from openapi_client.model.property_cpu_response_links import PropertyCPUResponseLinks
    globals()['EventHighCPUData'] = EventHighCPUData
    globals()['PropertyCPUResponseLinks'] = PropertyCPUResponseLinks
class EventHighCPUResponse(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-constrained or validated fields in this schema.
    allowed_values = {
    }
    validations = {
    }
    # None => extra keys are rejected (unless discard_unknown_keys is set).
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'title': (str,), # noqa: E501
            'description': (str,), # noqa: E501
            'data': (EventHighCPUData,), # noqa: E501
            'links': ([PropertyCPUResponseLinks],), # noqa: E501
        }
    @cached_property
    def discriminator():
        return None
    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        'title': 'title', # noqa: E501
        'description': 'description', # noqa: E501
        'data': 'data', # noqa: E501
        'links': 'links', # noqa: E501
    }
    _composed_schemas = {}
    # Internal bookkeeping attributes; never serialized as model properties.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs): # noqa: E501
        """EventHighCPUResponse - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            title (str): [optional] # noqa: E501
            description (str): [optional] # noqa: E501
            data (EventHighCPUData): [optional] # noqa: E501
            links ([PropertyCPUResponseLinks]): [optional] # noqa: E501
        """
        # Pop the framework-control kwargs first; everything left is model data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Assign model properties; unknown keys may be silently discarded
        # when the configuration asks for it.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
/Flask-uWSGI-WebSocket-0.6.1.tar.gz/Flask-uWSGI-WebSocket-0.6.1/flask_uwsgi_websocket/_gevent.py | import uuid
from gevent import spawn, wait
from gevent.event import Event
from gevent.monkey import patch_all
from gevent.queue import Queue, Empty
from gevent.select import select
from werkzeug.exceptions import HTTPException
from .websocket import WebSocket, WebSocketMiddleware
from ._uwsgi import uwsgi
class GeventWebSocketClient(object):
    """Per-connection state shared between the uwsgi loop and the handler.

    Outgoing messages are queued on ``send_queue`` (with ``send_event``
    flagged); incoming frames are consumed from ``recv_queue``.
    """
    def __init__(self, environ, fd, send_event, send_queue, recv_event,
                 recv_queue, timeout=5):
        self.id = str(uuid.uuid1())
        self.connected = True
        self.environ = environ
        self.fd = fd
        self.timeout = timeout
        self.send_event = send_event
        self.send_queue = send_queue
        self.recv_event = recv_event
        self.recv_queue = recv_queue

    def send(self, msg, binary=True):
        """Queue `msg` for delivery; `binary` kept for API symmetry."""
        if not binary:
            self.send_queue.put(msg)
            self.send_event.set()
            return None
        return self.send_binary(msg)

    def send_binary(self, msg):
        self.send_queue.put(msg)
        self.send_event.set()

    def receive(self):
        return self.recv()

    def recv(self):
        """Block until the next incoming message is available."""
        return self.recv_queue.get()

    def close(self):
        """Mark the connection finished; the uwsgi loop tears it down."""
        self.connected = False
class GeventWebSocketMiddleware(WebSocketMiddleware):
    # WSGI middleware that upgrades matching requests to WebSockets and
    # shuttles frames between uwsgi and a gevent handler greenlet.
    client = GeventWebSocketClient
    def __call__(self, environ, start_response):
        # Route against the registered websocket URL map; non-matching
        # requests fall through to the wrapped WSGI app.
        urls = self.websocket.url_map.bind_to_environ(environ)
        try:
            endpoint, args = urls.match()
            handler = self.websocket.view_functions[endpoint]
        except HTTPException:
            handler = None
        if not handler or 'HTTP_SEC_WEBSOCKET_KEY' not in environ:
            return self.wsgi_app(environ, start_response)
        # do handshake
        uwsgi.websocket_handshake(environ['HTTP_SEC_WEBSOCKET_KEY'],
                                  environ.get('HTTP_ORIGIN', ''))
        # setup events
        send_event = Event()
        send_queue = Queue()
        recv_event = Event()
        recv_queue = Queue()
        # create websocket client
        client = self.client(environ, uwsgi.connection_fd(), send_event,
                             send_queue, recv_event, recv_queue,
                             self.websocket.timeout)
        # spawn handler
        handler = spawn(handler, client, **args)
        # spawn recv listener
        def listener(client):
            # wait max `client.timeout` seconds to allow ping to be sent
            select([client.fd], [], [], client.timeout)
            recv_event.set()
        listening = spawn(listener, client)
        # Event loop: multiplex handler completion, outgoing and incoming
        # traffic until the client disconnects or the handler returns.
        while True:
            if not client.connected:
                # Wake any blocked recv() with a sentinel, then shut down.
                recv_queue.put(None)
                listening.kill()
                handler.join(client.timeout)
                return ''
            # wait for event to draw our attention
            wait([handler, send_event, recv_event], None, 1)
            # handle send events
            if send_event.is_set():
                try:
                    # Drain the queue; Empty marks the normal end of the burst.
                    while True:
                        uwsgi.websocket_send(send_queue.get_nowait())
                except Empty:
                    send_event.clear()
                except IOError:
                    client.connected = False
            # handle receive events
            elif recv_event.is_set():
                recv_event.clear()
                try:
                    message = True
                    # More than one message may have arrived, so keep reading
                    # until an empty message is read. Note that select()
                    # won't register after we've read a byte until all the
                    # bytes are read, make certain to read all the data.
                    # Experimentally, not putting the final empty message
                    # into the queue caused websocket timeouts; theoretically
                    # this code can skip writing the empty message but clients
                    # should be able to ignore it anyway.
                    while message:
                        message = uwsgi.websocket_recv_nb()
                        recv_queue.put(message)
                    # Re-arm the fd listener for the next batch.
                    listening = spawn(listener, client)
                except IOError:
                    client.connected = False
            # handler done, we're outta here
            elif handler.ready():
                listening.kill()
                return ''
class GeventWebSocket(WebSocket):
    """WebSocket flavour whose handlers run inside gevent greenlets."""
    middleware = GeventWebSocketMiddleware
    def init_app(self, app):
        # Monkey-patch the stdlib before the app starts; aggressive patching
        # can be disabled via the UWSGI_WEBSOCKET_AGGRESSIVE_PATCH config key.
        aggressive = app.config.get('UWSGI_WEBSOCKET_AGGRESSIVE_PATCH', True)
        patch_all(aggressive=aggressive)
        super(GeventWebSocket, self).init_app(app)
/GooeyDev-1.0.8b2.tar.gz/GooeyDev-1.0.8b2/gooey/gui/formatters.py | import os
import itertools
from gooey.gui.util.quoting import quote
def checkbox(metadata, value):
    """Emit the flag's command string when ticked, otherwise nothing."""
    if not value:
        return None
    return metadata['commands'][0]
def radioGroup(metadata, value):
    # TODO
    # FIXME(review): this body references `self`, which does not exist in a
    # module-level function -- reaching the lookup raises NameError (NOT
    # caught by the ValueError handler below). Looks like unported method
    # code; needs a metadata/value-based implementation before it can work.
    try:
        return self.commands[self._value.index(True)][0]
    except ValueError:
        return None
def multiFileChooser(metadata, value):
    """Quote each selected path; prefix the option string when one exists."""
    chosen = [quote(p) for p in value.split(os.pathsep) if p]
    paths = ' '.join(chosen)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None
def textArea(metadata, value):
    """Escape control characters, quote, and prefix the option string if any."""
    if not value:
        return ''
    escaped = quote(value.encode('unicode_escape'))
    if metadata['commands']:
        return '{} {}'.format(metadata['commands'][0], escaped)
    return escaped
def commandField(metadata, value):
    """Pass the field's raw value through, prefixed by its flag when set.

    Returns None for empty input.
    """
    if not value:
        return None
    if metadata['commands']:
        return u'{} {}'.format(metadata['commands'][0], value)
    return value
def counter(metatdata, value):
    """Repeat the option's flag ``value`` times.

    e.g. a flag of ``-v`` and a value of 3 produces ``-v -v -v``.
    Returns None when ``value`` is not a non-negative integer.
    (Parameter name keeps the historical spelling for caller
    compatibility.)
    """
    if not str(value).isdigit():
        return None
    flag = str(metatdata['commands'][0]).strip()
    return ' '.join([flag] * int(value))
def dropdown(metadata, value):
    """Quote the chosen dropdown entry, prefixed by its flag if any.

    The placeholder text 'Select Option' counts as no selection and
    yields None; an otherwise empty value yields ''.
    """
    if value == 'Select Option':
        return None
    if not value:
        return ''
    quoted = quote(value)
    if metadata['commands']:
        return u'{} {}'.format(metadata['commands'][0], quoted)
    return quoted
def listbox(meta, value):
    """Quote and space-join the selected items, prefixed by the flag
    when one is defined.  Empty selection yields ''."""
    if not value:
        return ''
    joined = ' '.join(quote(item) for item in value)
    if meta['commands']:
        return u'{} {}'.format(meta['commands'][0], joined)
    return joined
def general(metadata, value):
    """Format a generic argument.

    Single values are quoted unless ``nargs`` is set (multi-value
    arguments are passed through verbatim); a leading flag from
    ``metadata['commands']`` is prepended when present.  Empty input
    yields None.
    """
    if not value:
        return None
    formatted = value if metadata.get('nargs') else quote(value)
    if metadata.get('commands'):
        return u'{0} {1}'.format(metadata['commands'][0], formatted)
    return formatted
/Glances-3.4.0.3.tar.gz/Glances-3.4.0.3/glances/exports/glances_csv.py | import os.path
import csv
import sys
import time
from glances.compat import PY3, iterkeys, itervalues
from glances.logger import logger
from glances.exports.glances_export import GlancesExport
class Export(GlancesExport):
    """This class manages the CSV export module.

    One CSV row is appended per update, prefixed by a timestamp column.
    The header row is built on the first update only and is either
    written (new file) or compared against the header already present in
    the file (append mode).
    """

    def __init__(self, config=None, args=None):
        """Init the CSV export IF.

        Opens ``args.export_csv_file`` in write mode for a new file (or
        when overwrite is requested), otherwise in append mode after
        reading the existing header for the later compatibility check.
        Exits the process with status 2 on I/O failure.
        """
        super(Export, self).__init__(config=config, args=args)

        # CSV file name
        self.csv_filename = args.export_csv_file

        # Set the CSV output file
        # (see https://github.com/nicolargo/glances/issues/1525)
        if not os.path.isfile(self.csv_filename) or args.export_csv_overwrite:
            # File did not exist, create it
            file_mode = 'w'
            self.old_header = None
        else:
            # A CSV file already exit, append new data
            file_mode = 'a'
            # Header will be checked later
            # Get the existing one
            try:
                self.csv_file = open_csv_file(self.csv_filename, 'r')
                reader = csv.reader(self.csv_file)
            except IOError as e:
                logger.critical("Cannot open existing CSV file: {}".format(e))
                sys.exit(2)
            # First row of the existing file (None when the file is empty).
            self.old_header = next(reader, None)
            self.csv_file.close()

        try:
            self.csv_file = open_csv_file(self.csv_filename, file_mode)
            self.writer = csv.writer(self.csv_file)
        except IOError as e:
            logger.critical("Cannot create the CSV file: {}".format(e))
            sys.exit(2)

        logger.info("Stats exported to CSV file: {}".format(self.csv_filename))

        self.export_enable = True

        # True until the first update has run (drives header handling).
        self.first_line = True

    def exit(self):
        """Close the CSV file."""
        logger.debug("Finalise export interface %s" % self.export_name)
        self.csv_file.close()

    def update(self, stats):
        """Update stats in the CSV output file.

        If the pre-existing header does not match the freshly built one,
        an error is logged and no data rows are ever written.
        """
        # Get the stats
        all_stats = stats.getAllExportsAsDict(plugin_list=self.plugins_to_export(stats))

        # Init data with timestamp (issue#708)
        if self.first_line:
            csv_header = ['timestamp']
        csv_data = [time.strftime('%Y-%m-%d %H:%M:%S')]

        # Loop over plugins to export
        for plugin in self.plugins_to_export(stats):
            if isinstance(all_stats[plugin], list):
                # List-style plugin: one block of columns per item,
                # sorted by the item's key for a stable column order.
                for stat in sorted(all_stats[plugin], key=lambda x: x['key']):
                    # First line: header
                    if self.first_line:
                        csv_header += ['{}_{}_{}'.format(plugin, self.get_item_key(stat), item) for item in stat]
                    # Others lines: stats
                    csv_data += itervalues(stat)
            elif isinstance(all_stats[plugin], dict):
                # First line: header
                if self.first_line:
                    fieldnames = iterkeys(all_stats[plugin])
                    csv_header += ('{}_{}'.format(plugin, fieldname) for fieldname in fieldnames)
                # Others lines: stats
                csv_data += itervalues(all_stats[plugin])

        # Export to CSV
        # Manage header
        if self.first_line:
            if self.old_header is None:
                # New file, write the header on top on the CSV file
                self.writer.writerow(csv_header)
            # File already exist, check if header are compatible
            if self.old_header != csv_header and self.old_header is not None:
                # Header are different, log an error and do not write data
                logger.error("Cannot append data to existing CSV file. Headers are different.")
                logger.debug("Old header: {}".format(self.old_header))
                logger.debug("New header: {}".format(csv_header))
            else:
                # Header are equals, ready to write data
                self.old_header = None
            # Only do this once
            self.first_line = False
        # Manage data
        if self.old_header is None:
            self.writer.writerow(csv_data)
            self.csv_file.flush()
def open_csv_file(file_name, file_mode):
    """Open *file_name* for CSV use, papering over Python 2/3 differences.

    Python 3's csv module requires ``newline=''`` on the file object,
    while Python 2's expects binary mode instead.
    """
    if PY3:
        return open(file_name, file_mode, newline='')
    return open(file_name, file_mode + 'b')
/GeoNode-3.2.0-py3-none-any.whl/geonode/static/lib/js/bootstrap-multiselect.js | (function (root, factory) {
'use strict';
// check to see if 'knockout' AMD module is specified if using requirejs
if (typeof define === 'function' && define.amd &&
typeof require === 'function' && typeof require.specified === 'function' && require.specified('knockout')) {
// AMD. Register as an anonymous module.
define(['jquery', 'knockout'], factory);
} else {
// Browser globals
factory(root.jQuery, root.ko);
}
})(this, function ($, ko) {
'use strict';
// Register a Knockout "multiselect" binding handler when Knockout is
// present, keeping the widget in sync with observable options, value,
// selection and enabled state.
if (typeof ko !== 'undefined' && ko.bindingHandlers && !ko.bindingHandlers.multiselect) {
    ko.bindingHandlers.multiselect = {
        after: ['options', 'value', 'selectedOptions', 'enable', 'disable'],

        init: function(element, valueAccessor, allBindings) {
            var $element = $(element);
            var config = ko.toJS(valueAccessor());

            $element.multiselect(config);

            // Rebuild the dropdown whenever the bound options observable changes.
            if (allBindings.has('options')) {
                var options = allBindings.get('options');
                if (ko.isObservable(options)) {
                    ko.computed({
                        read: function() {
                            options();
                            // Defer so KO has finished re-rendering the <option>s.
                            setTimeout(function() {
                                var ms = $element.data('multiselect');
                                if (ms){
                                    ms.updateOriginalOptions();//Not sure how beneficial this is.
                                }
                                $element.multiselect('rebuild');
                            }, 1);
                        },
                        disposeWhenNodeIsRemoved: element
                    });
                }
            }

            //value and selectedOptions are two-way, so these will be triggered even by our own actions.
            //It needs some way to tell if they are triggered because of us or because of outside change.
            //It doesn't loop but it's a waste of processing.
            if (allBindings.has('value')) {
                var value = allBindings.get('value');
                if (ko.isObservable(value)) {
                    ko.computed({
                        read: function() {
                            value();
                            setTimeout(function() {
                                $element.multiselect('refresh');
                            }, 1);
                        },
                        disposeWhenNodeIsRemoved: element
                    }).extend({ rateLimit: 100, notifyWhenChangesStop: true });
                }
            }

            //Switched from arrayChange subscription to general subscription using 'refresh'.
            //Not sure performance is any better using 'select' and 'deselect'.
            if (allBindings.has('selectedOptions')) {
                var selectedOptions = allBindings.get('selectedOptions');
                if (ko.isObservable(selectedOptions)) {
                    ko.computed({
                        read: function() {
                            selectedOptions();
                            setTimeout(function() {
                                $element.multiselect('refresh');
                            }, 1);
                        },
                        disposeWhenNodeIsRemoved: element
                    }).extend({ rateLimit: 100, notifyWhenChangesStop: true });
                }
            }

            // Enable/disable asynchronously so pending DOM updates settle first.
            var setEnabled = function (enable) {
                setTimeout(function () {
                    if (enable){
                        $element.multiselect('enable');
                    }
                    else{
                        $element.multiselect('disable');
                    }
                });
            };

            if (allBindings.has('enable')) {
                var enable = allBindings.get('enable');
                if (ko.isObservable(enable)) {
                    ko.computed({
                        read: function () {
                            setEnabled(enable());
                        },
                        disposeWhenNodeIsRemoved: element
                    }).extend({ rateLimit: 100, notifyWhenChangesStop: true });
                } else {
                    setEnabled(enable);
                }
            }

            if (allBindings.has('disable')) {
                var disable = allBindings.get('disable');
                if (ko.isObservable(disable)) {
                    ko.computed({
                        read: function () {
                            setEnabled(!disable());
                        },
                        disposeWhenNodeIsRemoved: element
                    }).extend({ rateLimit: 100, notifyWhenChangesStop: true });
                } else {
                    setEnabled(!disable);
                }
            }

            // Tear the widget down together with its DOM node.
            ko.utils.domNodeDisposal.addDisposeCallback(element, function() {
                $element.multiselect('destroy');
            });
        },

        update: function(element, valueAccessor) {
            var $element = $(element);
            var config = ko.toJS(valueAccessor());

            $element.multiselect('setOptions', config);
            $element.multiselect('rebuild');
        }
    };
}
/**
 * Invokes callback(element, index) for every element of the array, in
 * order.  Minimal ES3-compatible stand-in for Array#forEach.
 *
 * @param {Array} array
 * @param {Function} callback
 */
function forEach(array, callback) {
    var length = array.length;
    for (var i = 0; i < length; i++) {
        callback(array[i], i);
    }
}
/**
 * Constructor to create a new multiselect using the given select.
 *
 * Merges the passed options with the select's data-* attributes, binds
 * all configurable callbacks to this instance, builds the button and
 * dropdown DOM, and wraps the native select element.
 *
 * @param {jQuery} select
 * @param {Object} options
 * @returns {Multiselect}
 */
function Multiselect(select, options) {

    this.$select = $(select);
    // data-* attributes on the select override passed-in options.
    this.options = this.mergeOptions($.extend({}, options, this.$select.data()));

    // Placeholder via data attributes
    if (this.$select.attr('data-placeholder')) {
        this.options.nonSelectedText = this.$select.data('placeholder');
    }

    // Initialization.
    // We have to clone to create a new reference.
    this.originalOptions = this.$select.clone()[0].options;
    this.query = '';
    this.searchTimeout = null;
    this.lastToggledInput = null;

    this.options.multiple = this.$select.attr('multiple') === 'multiple';
    // Bind every user-supplied callback to this instance.
    this.options.onChange = $.proxy(this.options.onChange, this);
    this.options.onSelectAll = $.proxy(this.options.onSelectAll, this);
    this.options.onDeselectAll = $.proxy(this.options.onDeselectAll, this);
    this.options.onDropdownShow = $.proxy(this.options.onDropdownShow, this);
    this.options.onDropdownHide = $.proxy(this.options.onDropdownHide, this);
    this.options.onDropdownShown = $.proxy(this.options.onDropdownShown, this);
    this.options.onDropdownHidden = $.proxy(this.options.onDropdownHidden, this);
    this.options.onInitialized = $.proxy(this.options.onInitialized, this);
    this.options.onFiltering = $.proxy(this.options.onFiltering, this);

    // Build select all if enabled.
    this.buildContainer();
    this.buildButton();
    this.buildDropdown();
    this.buildReset();
    this.buildSelectAll();
    this.buildDropdownOptions();
    this.buildFilter();

    this.updateButtonText();
    this.updateSelectAll(true);

    if (this.options.enableClickableOptGroups && this.options.multiple) {
        this.updateOptGroups();
    }

    // Remember the initial disabled state so destroy() can restore it.
    this.options.wasDisabled = this.$select.prop('disabled');
    if (this.options.disableIfEmpty && $('option', this.$select).length <= 0) {
        this.disable();
    }

    this.$select.wrap('<span class="multiselect-native-select" />').after(this.$container);
    this.options.onInitialized(this.$select, this.$container);
}
Multiselect.prototype = {
// Default configuration; any key may be overridden via the options
// argument or the select's data-* attributes.
defaults: {
    /**
     * Default text function will either print 'None selected' in case no
     * option is selected or a list of the selected options up to a length
     * of 3 selected options.
     *
     * @param {jQuery} options
     * @param {jQuery} select
     * @returns {String}
     */
    buttonText: function(options, select) {
        if (this.disabledText.length > 0 && (select.prop('disabled') || (options.length === 0 && this.disableIfEmpty))) {
            return this.disabledText;
        }
        else if (options.length === 0) {
            return this.nonSelectedText;
        }
        else if (this.allSelectedText && options.length === $('option', $(select)).length && $('option', $(select)).length !== 1 && this.multiple) {
            if (this.selectAllNumber) {
                return this.allSelectedText + ' (' + options.length + ')';
            }
            else {
                return this.allSelectedText;
            }
        }
        else if (this.numberDisplayed !== 0 && options.length > this.numberDisplayed) {
            return options.length + ' ' + this.nSelectedText;
        }
        else {
            // Few enough selections: list them, preferring the label attribute.
            var selected = '';
            var delimiter = this.delimiterText;

            options.each(function() {
                var label = ($(this).attr('label') !== undefined) ? $(this).attr('label') : $(this).text();
                selected += label + delimiter;
            });

            return selected.substr(0, selected.length - this.delimiterText.length);
        }
    },
    /**
     * Updates the title of the button similar to the buttonText function.
     *
     * @param {jQuery} options
     * @param {jQuery} select
     * @returns {@exp;selected@call;substr}
     */
    buttonTitle: function(options) {
        if (options.length === 0) {
            return this.nonSelectedText;
        }
        else {
            var selected = '';
            var delimiter = this.delimiterText;

            options.each(function () {
                var label = ($(this).attr('label') !== undefined) ? $(this).attr('label') : $(this).text();
                selected += label + delimiter;
            });
            return selected.substr(0, selected.length - this.delimiterText.length);
        }
    },
    // Name attribute for the generated checkboxes; false disables it.
    checkboxName: function() {
        return false; // no checkbox name
    },
    /**
     * Create a label.
     *
     * @param {jQuery} element
     * @returns {String}
     */
    optionLabel: function(element){
        return $(element).attr('label') || $(element).text();
    },
    /**
     * Create a class.
     *
     * @param {jQuery} element
     * @returns {String}
     */
    optionClass: function(element) {
        return $(element).attr('class') || '';
    },
    /**
     * Triggered on change of the multiselect.
     *
     * Not triggered when selecting/deselecting options manually.
     *
     * @param {jQuery} option
     * @param {Boolean} checked
     */
    onChange : function() {

    },
    /**
     * Triggered when the dropdown is shown.
     *
     * @param {jQuery} event
     */
    onDropdownShow: function() {

    },
    /**
     * Triggered when the dropdown is hidden.
     *
     * @param {jQuery} event
     */
    onDropdownHide: function() {

    },
    /**
     * Triggered after the dropdown is shown.
     *
     * @param {jQuery} event
     */
    onDropdownShown: function() {

    },
    /**
     * Triggered after the dropdown is hidden.
     *
     * @param {jQuery} event
     */
    onDropdownHidden: function() {

    },
    /**
     * Triggered on select all.
     */
    onSelectAll: function() {

    },
    /**
     * Triggered on deselect all.
     */
    onDeselectAll: function() {

    },
    /**
     * Triggered after initializing.
     *
     * @param {jQuery} $select
     * @param {jQuery} $container
     */
    onInitialized: function() {

    },
    /**
     * Triggered on filtering.
     *
     * @param {jQuery} $filter
     */
    onFiltering: function() {

    },
    enableHTML: false,
    buttonClass: 'btn btn-default',
    inheritClass: false,
    buttonWidth: 'auto',
    buttonContainer: '<div class="btn-group" />',
    dropRight: false,
    dropUp: false,
    selectedClass: 'active',
    // Maximum height of the dropdown menu.
    // If maximum height is exceeded a scrollbar will be displayed.
    maxHeight: false,
    includeSelectAllOption: false,
    includeSelectAllIfMoreThan: 0,
    selectAllText: ' Select all',
    selectAllValue: 'multiselect-all',
    selectAllName: false,
    selectAllNumber: true,
    selectAllJustVisible: true,
    enableFiltering: false,
    enableCaseInsensitiveFiltering: false,
    enableFullValueFiltering: false,
    enableClickableOptGroups: false,
    enableCollapsibleOptGroups: false,
    collapseOptGroupsByDefault: false,
    filterPlaceholder: 'Search',
    // possible options: 'text', 'value', 'both'
    filterBehavior: 'text',
    includeFilterClearBtn: true,
    preventInputChangeEvent: false,
    nonSelectedText: 'None selected',
    nSelectedText: 'selected',
    allSelectedText: 'All selected',
    numberDisplayed: 3,
    disableIfEmpty: false,
    disabledText: '',
    delimiterText: ', ',
    includeResetOption: false,
    includeResetDivider: false,
    resetText: 'Reset',
    // HTML fragments used to build the widget's DOM.
    templates: {
        button: '<button type="button" class="multiselect dropdown-toggle" data-toggle="dropdown"><span class="multiselect-selected-text"></span> <b class="caret"></b></button>',
        ul: '<ul class="multiselect-container dropdown-menu"></ul>',
        filter: '<li class="multiselect-item multiselect-filter"><div class="input-group"><span class="input-group-addon"><i class="glyphicon glyphicon-search"></i></span><input class="form-control multiselect-search" type="text" /></div></li>',
        filterClearBtn: '<span class="input-group-btn"><button class="btn btn-default multiselect-clear-filter" type="button"><i class="glyphicon glyphicon-remove-circle"></i></button></span>',
        li: '<li><a tabindex="0"><label></label></a></li>',
        divider: '<li class="multiselect-item divider"></li>',
        liGroup: '<li class="multiselect-item multiselect-group"><label></label></li>',
        resetButton: '<li class="multiselect-reset text-center"><div class="input-group"><a class="btn btn-default btn-block"></a></div></li>'
    }
},

// Restore the constructor reference clobbered by assigning a fresh
// object literal to Multiselect.prototype.
constructor: Multiselect,
/**
* Builds the container of the multiselect.
*/
buildContainer: function() {
this.$container = $(this.options.buttonContainer);
this.$container.on('show.bs.dropdown', this.options.onDropdownShow);
this.$container.on('hide.bs.dropdown', this.options.onDropdownHide);
this.$container.on('shown.bs.dropdown', this.options.onDropdownShown);
this.$container.on('hidden.bs.dropdown', this.options.onDropdownHidden);
},
/**
 * Builds the button of the multiselect: applies classes, the initial
 * enabled/disabled state, a fixed width if configured, and the select's
 * tab index, then prepends the button to the container.
 */
buildButton: function() {
    this.$button = $(this.options.templates.button).addClass(this.options.buttonClass);
    if (this.$select.attr('class') && this.options.inheritClass) {
        this.$button.addClass(this.$select.attr('class'));
    }
    // Adopt active state.
    if (this.$select.prop('disabled')) {
        this.disable();
    }
    else {
        this.enable();
    }

    // Manually add button width if set.
    if (this.options.buttonWidth && this.options.buttonWidth !== 'auto') {
        // The container gets the configured width; the button fills it.
        this.$button.css({
            'width' : '100%', //this.options.buttonWidth,
            'overflow' : 'hidden',
            'text-overflow' : 'ellipsis'
        });
        this.$container.css({
            'width': this.options.buttonWidth
        });
    }

    // Keep the tab index from the select.
    var tabindex = this.$select.attr('tabindex');
    if (tabindex) {
        this.$button.attr('tabindex', tabindex);
    }

    this.$container.prepend(this.$button);
},
/**
 * Builds the ul representing the dropdown menu, applying right
 * alignment, max-height scrolling, and drop-up positioning as
 * configured.
 */
buildDropdown: function() {

    // Build ul.
    this.$ul = $(this.options.templates.ul);

    if (this.options.dropRight) {
        this.$ul.addClass('pull-right');
    }

    // Set max height of dropdown menu to activate auto scrollbar.
    if (this.options.maxHeight) {
        // TODO: Add a class for this option to move the css declarations.
        this.$ul.css({
            'max-height': this.options.maxHeight + 'px',
            'overflow-y': 'auto',
            'overflow-x': 'hidden'
        });
    }

    if (this.options.dropUp) {
        // Estimate the rendered height (26px per item, 19px per divider,
        // plus select-all and filter rows) to position the menu above.
        var height = Math.min(this.options.maxHeight, $('option[data-role!="divider"]', this.$select).length*26 + $('option[data-role="divider"]', this.$select).length*19 + (this.options.includeSelectAllOption ? 26 : 0) + (this.options.enableFiltering || this.options.enableCaseInsensitiveFiltering ? 44 : 0));
        var moveCalc = height + 34;

        this.$ul.css({
            'max-height': height + 'px',
            'overflow-y': 'auto',
            'overflow-x': 'hidden',
            'margin-top': '-' + moveCalc + 'px'
        });
    }

    this.$container.append(this.$ul);
},
/**
 * Build the dropdown options and binds all necessary events.
 *
 * Uses createDivider and createOptionValue to create the necessary options.
 * Also wires: checkbox/radio change handling (including select-all),
 * shift-click range selection, keyboard navigation, clickable optgroup
 * checkboxes, and collapsible optgroup carets.
 */
buildDropdownOptions: function() {

    this.$select.children().each($.proxy(function(index, element) {

        var $element = $(element);
        // Support optgroups and options without a group simultaneously.
        var tag = $element.prop('tagName')
            .toLowerCase();

        if ($element.prop('value') === this.options.selectAllValue) {
            return;
        }

        if (tag === 'optgroup') {
            this.createOptgroup(element);
        }
        else if (tag === 'option') {

            if ($element.data('role') === 'divider') {
                this.createDivider();
            }
            else {
                this.createOptionValue(element);
            }

        }

        // Other illegal tags will be ignored.
    }, this));

    // Bind the change event on the dropdown elements.
    $(this.$ul).off('change', 'li:not(.multiselect-group) input[type="checkbox"], li:not(.multiselect-group) input[type="radio"]');
    $(this.$ul).on('change', 'li:not(.multiselect-group) input[type="checkbox"], li:not(.multiselect-group) input[type="radio"]', $.proxy(function(event) {
        var $target = $(event.target);

        var checked = $target.prop('checked') || false;
        var isSelectAllOption = $target.val() === this.options.selectAllValue;

        // Apply or unapply the configured selected class.
        if (this.options.selectedClass) {
            if (checked) {
                $target.closest('li')
                    .addClass(this.options.selectedClass);
            }
            else {
                $target.closest('li')
                    .removeClass(this.options.selectedClass);
            }
        }

        // Get the corresponding option.
        var value = $target.val();
        var $option = this.getOptionByValue(value);

        var $optionsNotThis = $('option', this.$select).not($option);
        var $checkboxesNotThis = $('input', this.$container).not($target);

        if (isSelectAllOption) {

            if (checked) {
                this.selectAll(this.options.selectAllJustVisible, true);
            }
            else {
                this.deselectAll(this.options.selectAllJustVisible, true);
            }
        }
        else {
            if (checked) {
                $option.prop('selected', true);

                if (this.options.multiple) {
                    // Simply select additional option.
                    $option.prop('selected', true);
                }
                else {
                    // Unselect all other options and corresponding checkboxes.
                    if (this.options.selectedClass) {
                        $($checkboxesNotThis).closest('li').removeClass(this.options.selectedClass);
                    }

                    $($checkboxesNotThis).prop('checked', false);
                    $optionsNotThis.prop('selected', false);

                    // It's a single selection, so close.
                    this.$button.click();
                }

                if (this.options.selectedClass === 'active') {
                    $optionsNotThis.closest('a').css('outline', '');
                }
            }
            else {
                // Unselect option.
                $option.prop('selected', false);
            }

            // To prevent select all from firing onChange: #575
            this.options.onChange($option, checked);

            // Do not update select all or optgroups on select all change!
            this.updateSelectAll();

            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
        }

        this.$select.change();
        this.updateButtonText();

        if(this.options.preventInputChangeEvent) {
            return false;
        }
    }, this));

    $('li a', this.$ul).on('mousedown', function(e) {
        if (e.shiftKey) {
            // Prevent selecting text by Shift+click
            return false;
        }
    });

    $(this.$ul).on('touchstart click', 'li a', $.proxy(function(event) {
        event.stopPropagation();

        var $target = $(event.target);

        if (event.shiftKey && this.options.multiple) {
            if($target.is('label')){ // Handles checkbox selection manually (see https://github.com/davidstutz/bootstrap-multiselect/issues/431)
                event.preventDefault();
                $target = $target.find('input');
                $target.prop('checked', !$target.prop('checked'));
            }
            var checked = $target.prop('checked') || false;

            if (this.lastToggledInput !== null && this.lastToggledInput !== $target) { // Make sure we actually have a range
                var from = this.$ul.find('li:visible').index($target.parents('li'));
                var to = this.$ul.find('li:visible').index(this.lastToggledInput.parents('li'));

                if (from > to) { // Swap the indices
                    var tmp = to;
                    to = from;
                    from = tmp;
                }

                // Make sure we grab all elements since slice excludes the last index
                ++to;

                // Change the checkboxes and underlying options
                var range = this.$ul.find('li').not('.multiselect-filter-hidden').slice(from, to).find('input');

                range.prop('checked', checked);

                if (this.options.selectedClass) {
                    range.closest('li')
                        .toggleClass(this.options.selectedClass, checked);
                }

                for (var i = 0, j = range.length; i < j; i++) {
                    var $checkbox = $(range[i]);

                    var $option = this.getOptionByValue($checkbox.val());

                    $option.prop('selected', checked);
                }
            }

            // Trigger the select "change" event
            $target.trigger('change');
        }

        // Remembers last clicked option
        if($target.is('input') && !$target.closest('li').is('.multiselect-item')){
            this.lastToggledInput = $target;
        }

        $target.blur();
    }, this));

    // Keyboard support.
    this.$container.off('keydown.multiselect').on('keydown.multiselect', $.proxy(function(event) {
        if ($('input[type="text"]', this.$container).is(':focus')) {
            return;
        }

        if (event.keyCode === 9 && this.$container.hasClass('open')) {
            // Tab closes an open dropdown.
            this.$button.click();
        }
        else {
            var $items = $(this.$container).find('li:not(.divider):not(.disabled) a').filter(':visible');

            if (!$items.length) {
                return;
            }

            var index = $items.index($items.filter(':focus'));

            // Navigation up.
            if (event.keyCode === 38 && index > 0) {
                index--;
            }
            // Navigate down.
            else if (event.keyCode === 40 && index < $items.length - 1) {
                index++;
            }
            else if (!~index) {
                index = 0;
            }

            var $current = $items.eq(index);
            $current.focus();

            // Space or Enter toggles the focused item.
            if (event.keyCode === 32 || event.keyCode === 13) {
                var $checkbox = $current.find('input');

                $checkbox.prop('checked', !$checkbox.prop('checked'));
                $checkbox.change();
            }

            event.stopPropagation();
            event.preventDefault();
        }
    }, this));

    if (this.options.enableClickableOptGroups && this.options.multiple) {
        $('li.multiselect-group input', this.$ul).on('change', $.proxy(function(event) {
            event.stopPropagation();

            var $target = $(event.target);
            var checked = $target.prop('checked') || false;

            var $li = $(event.target).closest('li');
            // All non-hidden, non-disabled items until the next group header.
            var $group = $li.nextUntil('li.multiselect-group')
                .not('.multiselect-filter-hidden')
                .not('.disabled');

            var $inputs = $group.find('input');

            var $options = [];

            if (this.options.selectedClass) {
                if (checked) {
                    $li.addClass(this.options.selectedClass);
                }
                else {
                    $li.removeClass(this.options.selectedClass);
                }
            }

            $.each($inputs, $.proxy(function(index, input) {
                var value = $(input).val();
                var $option = this.getOptionByValue(value);

                if (checked) {
                    $(input).prop('checked', true);
                    $(input).closest('li')
                        .addClass(this.options.selectedClass);

                    $option.prop('selected', true);
                }
                else {
                    $(input).prop('checked', false);
                    $(input).closest('li')
                        .removeClass(this.options.selectedClass);

                    $option.prop('selected', false);
                }

                $options.push(this.getOptionByValue(value));
            }, this));

            // Cannot use select or deselect here because it would call updateOptGroups again.
            this.options.onChange($options, checked);

            this.$select.change();
            this.updateButtonText();
            this.updateSelectAll();
        }, this));
    }

    if (this.options.enableCollapsibleOptGroups && this.options.multiple) {
        $('li.multiselect-group .caret-container', this.$ul).on('click', $.proxy(function(event) {
            var $li = $(event.target).closest('li');
            var $inputs = $li.nextUntil('li.multiselect-group')
                    .not('.multiselect-filter-hidden');

            var visible = true;
            $inputs.each(function() {
                visible = visible && !$(this).hasClass('multiselect-collapsible-hidden');
            });

            if (visible) {
                $inputs.hide()
                    .addClass('multiselect-collapsible-hidden');
            }
            else {
                $inputs.show()
                    .removeClass('multiselect-collapsible-hidden');
            }
        }, this));

        // Inline styling tweaks for the collapsible-group presentation.
        $('li.multiselect-all', this.$ul).css('background', '#f3f3f3').css('border-bottom', '1px solid #eaeaea');
        $('li.multiselect-all > a > label.checkbox', this.$ul).css('padding', '3px 20px 3px 35px');
        $('li.multiselect-group > a > input', this.$ul).css('margin', '4px 0px 5px -20px');
    }
},
/**
 * Create an option using the given select option.
 *
 * Builds the li/label/input triple for one option, copying label,
 * class, title, selected and disabled state from the source element.
 *
 * @param {jQuery} element
 */
createOptionValue: function(element) {
    var $element = $(element);
    if ($element.is(':selected')) {
        $element.prop('selected', true);
    }

    // Support the label attribute on options.
    var label = this.options.optionLabel(element);
    var classes = this.options.optionClass(element);
    var value = $element.val();
    // Radios for single-select, checkboxes for multi-select.
    var inputType = this.options.multiple ? 'checkbox' : 'radio';

    var $li = $(this.options.templates.li);
    var $label = $('label', $li);
    $label.addClass(inputType);
    $label.attr('title', label);
    $li.addClass(classes);

    // Hide all children items when collapseOptGroupsByDefault is true
    if (this.options.collapseOptGroupsByDefault && $(element).parent().prop('tagName').toLowerCase() === 'optgroup') {
        $li.addClass('multiselect-collapsible-hidden');
        $li.hide();
    }

    if (this.options.enableHTML) {
        $label.html(' ' + label);
    }
    else {
        $label.text(' ' + label);
    }

    var $checkbox = $('<input/>').attr('type', inputType);

    var name = this.options.checkboxName($element);
    if (name) {
        $checkbox.attr('name', name);
    }

    $label.prepend($checkbox);

    var selected = $element.prop('selected') || false;
    $checkbox.val(value);

    if (value === this.options.selectAllValue) {
        $li.addClass('multiselect-item multiselect-all');
        $checkbox.parent().parent()
            .addClass('multiselect-all');
    }

    $label.attr('title', $element.attr('title'));

    this.$ul.append($li);

    if ($element.is(':disabled')) {
        // Disabled options are unfocusable and visually dimmed.
        $checkbox.attr('disabled', 'disabled')
            .prop('disabled', true)
            .closest('a')
            .attr('tabindex', '-1')
            .closest('li')
            .addClass('disabled');
    }

    $checkbox.prop('checked', selected);

    if (selected && this.options.selectedClass) {
        $checkbox.closest('li')
            .addClass(this.options.selectedClass);
    }
},
/**
* Creates a divider using the given select option.
*
* @param {jQuery} element
*/
createDivider: function() {
var $divider = $(this.options.templates.divider);
this.$ul.append($divider);
},
/**
 * Creates an optgroup: a group-header li (optionally with a collapse
 * caret and/or a group checkbox) followed by one li per child option.
 *
 * @param {jQuery} group
 */
createOptgroup: function(group) {
    var label = $(group).attr('label');
    var value = $(group).attr('value');
    var $li = $('<li class="multiselect-item multiselect-group"><a href="javascript:void(0);"><label><b></b></label></a></li>');

    var classes = this.options.optionClass(group);
    $li.addClass(classes);

    if (this.options.enableHTML) {
        $('label b', $li).html(' ' + label);
    }
    else {
        $('label b', $li).text(' ' + label);
    }

    if (this.options.enableCollapsibleOptGroups && this.options.multiple) {
        $('a', $li).append('<span class="caret-container"><b class="caret"></b></span>');
    }

    if (this.options.enableClickableOptGroups && this.options.multiple) {
        $('a label', $li).prepend('<input type="checkbox" value="' + value + '"/>');
    }

    if ($(group).is(':disabled')) {
        $li.addClass('disabled');
    }

    this.$ul.append($li);

    // Render every child option under the group header.
    $('option', group).each($.proxy(function($, group) {
        this.createOptionValue(group);
    }, this));
},
/**
 * Build the reset button (and optional divider) at the top of the
 * dropdown when includeResetOption is enabled; clicking it clears the
 * current selection.
 */
buildReset: function() {
    if (this.options.includeResetOption) {

        // Check whether to add a divider after the reset.
        // (Both are prepended, so the divider ends up below the button.)
        if (this.options.includeResetDivider) {
            this.$ul.prepend($(this.options.templates.divider));
        }

        var $resetButton = $(this.options.templates.resetButton);

        if (this.options.enableHTML) {
            $('a', $resetButton).html(this.options.resetText);
        }
        else {
            $('a', $resetButton).text(this.options.resetText);
        }

        $('a', $resetButton).click($.proxy(function(){
            this.clearSelection();
        }, this));

        this.$ul.prepend($resetButton);
    }
},
/**
 * Build the select all.
 *
 * Checks if a select all has already been created; only builds one for
 * multi-selects with more options than includeSelectAllIfMoreThan.
 */
buildSelectAll: function() {
    // Normalise a numeric selectAllValue to a string for comparisons.
    if (typeof this.options.selectAllValue === 'number') {
        this.options.selectAllValue = this.options.selectAllValue.toString();
    }

    var alreadyHasSelectAll = this.hasSelectAll();

    if (!alreadyHasSelectAll && this.options.includeSelectAllOption && this.options.multiple && $('option', this.$select).length > this.options.includeSelectAllIfMoreThan) {

        // Check whether to add a divider after the select all.
        if (this.options.includeSelectAllDivider) {
            this.$ul.prepend($(this.options.templates.divider));
        }

        var $li = $(this.options.templates.li);
        $('label', $li).addClass('checkbox');

        if (this.options.enableHTML) {
            $('label', $li).html(' ' + this.options.selectAllText);
        }
        else {
            $('label', $li).text(' ' + this.options.selectAllText);
        }

        if (this.options.selectAllName) {
            $('label', $li).prepend('<input type="checkbox" name="' + this.options.selectAllName + '" />');
        }
        else {
            $('label', $li).prepend('<input type="checkbox" />');
        }

        var $checkbox = $('input', $li);
        $checkbox.val(this.options.selectAllValue);

        $li.addClass('multiselect-item multiselect-all');
        $checkbox.parent().parent()
            .addClass('multiselect-all');

        this.$ul.prepend($li);

        $checkbox.prop('checked', false);
    }
},
/**
 * Builds the filter: a debounced search box (plus optional clear
 * button) that shows/hides dropdown items by text, value, or both, and
 * keeps group headers visible while any of their items match.
 */
buildFilter: function() {

    // Build filter if filtering OR case insensitive filtering is enabled and the number of options exceeds (or equals) enableFilterLength.
    if (this.options.enableFiltering || this.options.enableCaseInsensitiveFiltering) {
        var enableFilterLength = Math.max(this.options.enableFiltering, this.options.enableCaseInsensitiveFiltering);

        if (this.$select.find('option').length >= enableFilterLength) {

            this.$filter = $(this.options.templates.filter);
            $('input', this.$filter).attr('placeholder', this.options.filterPlaceholder);

            // Adds optional filter clear button
            if(this.options.includeFilterClearBtn) {
                var clearBtn = $(this.options.templates.filterClearBtn);
                clearBtn.on('click', $.proxy(function(){
                    clearTimeout(this.searchTimeout);

                    // Reset the query and reveal every item again.
                    this.query = '';
                    this.$filter.find('.multiselect-search').val('');
                    $('li', this.$ul).show().removeClass('multiselect-filter-hidden');

                    this.updateSelectAll();

                    if (this.options.enableClickableOptGroups && this.options.multiple) {
                        this.updateOptGroups();
                    }

                }, this));
                this.$filter.find('.input-group').append(clearBtn);
            }

            this.$ul.prepend(this.$filter);

            this.$filter.val(this.query).on('click', function(event) {
                event.stopPropagation();
            }).on('input keydown', $.proxy(function(event) {
                // Cancel enter key default behaviour
                if (event.which === 13) {
                    event.preventDefault();
                }

                // This is useful to catch "keydown" events after the browser has updated the control.
                clearTimeout(this.searchTimeout);

                // Debounce the actual filtering by 300ms.
                this.searchTimeout = this.asyncFunction($.proxy(function() {

                    if (this.query !== event.target.value) {
                        this.query = event.target.value;

                        var currentGroup, currentGroupVisible;
                        $.each($('li', this.$ul), $.proxy(function(index, element) {
                            var value = $('input', element).length > 0 ? $('input', element).val() : '';
                            var text = $('label', element).text();

                            var filterCandidate = '';
                            if ((this.options.filterBehavior === 'text')) {
                                filterCandidate = text;
                            }
                            else if ((this.options.filterBehavior === 'value')) {
                                filterCandidate = value;
                            }
                            else if (this.options.filterBehavior === 'both') {
                                filterCandidate = text + '\n' + value;
                            }

                            if (value !== this.options.selectAllValue && text) {

                                // By default lets assume that element is not
                                // interesting for this search.
                                var showElement = false;

                                if (this.options.enableCaseInsensitiveFiltering) {
                                    filterCandidate = filterCandidate.toLowerCase();
                                    this.query = this.query.toLowerCase();
                                }

                                if (this.options.enableFullValueFiltering && this.options.filterBehavior !== 'both') {
                                    var valueToMatch = filterCandidate.trim().substring(0, this.query.length);
                                    if (this.query.indexOf(valueToMatch) > -1) {
                                        showElement = true;
                                    }
                                }
                                else if (filterCandidate.indexOf(this.query) > -1) {
                                    showElement = true;
                                }

                                // Toggle current element (group or group item) according to showElement boolean.
                                if(!showElement){
                                    $(element).css('display', 'none');
                                    $(element).addClass('multiselect-filter-hidden');
                                }
                                if(showElement){
                                    $(element).css('display', 'block');
                                    $(element).removeClass('multiselect-filter-hidden');
                                }

                                // Differentiate groups and group items.
                                if ($(element).hasClass('multiselect-group')) {
                                    // Remember group status.
                                    currentGroup = element;
                                    currentGroupVisible = showElement;
                                }
                                else {
                                    // Show group name when at least one of its items is visible.
                                    if (showElement) {
                                        $(currentGroup).show()
                                            .removeClass('multiselect-filter-hidden');
                                    }

                                    // Show all group items when group name satisfies filter.
                                    if (!showElement && currentGroupVisible) {
                                        $(element).show()
                                            .removeClass('multiselect-filter-hidden');
                                    }
                                }
                            }
                        }, this));
                    }

                    this.updateSelectAll();

                    if (this.options.enableClickableOptGroups && this.options.multiple) {
                        this.updateOptGroups();
                    }

                    this.options.onFiltering(event.target);

                }, this), 300, this);
            }, this));
        }
    }
},
/**
* Unbinds the whole plugin.
*/
destroy: function() {
this.$container.remove();
this.$select.show();
// reset original state
this.$select.prop('disabled', this.options.wasDisabled);
this.$select.data('multiselect', null);
},
        /**
         * Refreshes the widget so that the checkbox state, the selected-class
         * and the disabled state of every dropdown item mirror the current
         * state of the corresponding <option> in the underlying <select>.
         */
        refresh: function () {
            // Index the dropdown inputs by their value for O(1) lookup below.
            var inputs = {};
            $('li input', this.$ul).each(function() {
                inputs[$(this).val()] = $(this);
            });
            $('option', this.$select).each($.proxy(function (index, element) {
                var $elem = $(element);
                var $input = inputs[$(element).val()];
                if ($elem.is(':selected')) {
                    // Option is selected: check the input, highlight its <li>.
                    $input.prop('checked', true);
                    if (this.options.selectedClass) {
                        $input.closest('li')
                            .addClass(this.options.selectedClass);
                    }
                }
                else {
                    $input.prop('checked', false);
                    if (this.options.selectedClass) {
                        $input.closest('li')
                            .removeClass(this.options.selectedClass);
                    }
                }
                // Mirror the disabled state onto both the input and its <li>.
                if ($elem.is(':disabled')) {
                    $input.attr('disabled', 'disabled')
                        .prop('disabled', true)
                        .closest('li')
                        .addClass('disabled');
                }
                else {
                    $input.prop('disabled', false)
                        .closest('li')
                        .removeClass('disabled');
                }
            }, this));
            this.updateButtonText();
            this.updateSelectAll();
            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
        },
        /**
         * Select all options of the given values.
         *
         * NOTE(review): the original doc said the change event fires "if and
         * only if one value is passed", but the code actually invokes
         * onChange once per selected value when triggerOnChange is true —
         * confirm which behaviour callers rely on.
         *
         * @param {Array} selectValues a single value or an array of option values
         * @param {Boolean} triggerOnChange
         */
        select: function(selectValues, triggerOnChange) {
            // Normalize a scalar argument to an array.
            if(!$.isArray(selectValues)) {
                selectValues = [selectValues];
            }
            for (var i = 0; i < selectValues.length; i++) {
                var value = selectValues[i];
                if (value === null || value === undefined) {
                    continue;
                }
                var $option = this.getOptionByValue(value);
                var $checkbox = this.getInputByValue(value);
                // Skip values that do not correspond to an existing option.
                if($option === undefined || $checkbox === undefined) {
                    continue;
                }
                // Radio-style behaviour: a single-select clears previous choices.
                if (!this.options.multiple) {
                    this.deselectAll(false);
                }
                if (this.options.selectedClass) {
                    $checkbox.closest('li')
                        .addClass(this.options.selectedClass);
                }
                // Keep the widget checkbox and the native <option> in sync.
                $checkbox.prop('checked', true);
                $option.prop('selected', true);
                if (triggerOnChange) {
                    this.options.onChange($option, true);
                }
            }
            this.updateButtonText();
            this.updateSelectAll();
            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
        },
/**
* Clears all selected items.
*/
clearSelection: function () {
this.deselectAll(false);
this.updateButtonText();
this.updateSelectAll();
if (this.options.enableClickableOptGroups && this.options.multiple) {
this.updateOptGroups();
}
},
        /**
         * Deselects all options of the given values.
         *
         * NOTE(review): as in select(), onChange is actually invoked once per
         * deselected value when triggerOnChange is true, not only when a
         * single value is passed — confirm which behaviour callers rely on.
         *
         * @param {Array} deselectValues a single value or an array of option values
         * @param {Boolean} triggerOnChange
         */
        deselect: function(deselectValues, triggerOnChange) {
            // Normalize a scalar argument to an array.
            if(!$.isArray(deselectValues)) {
                deselectValues = [deselectValues];
            }
            for (var i = 0; i < deselectValues.length; i++) {
                var value = deselectValues[i];
                if (value === null || value === undefined) {
                    continue;
                }
                var $option = this.getOptionByValue(value);
                var $checkbox = this.getInputByValue(value);
                // Skip values without a corresponding option / input.
                if($option === undefined || $checkbox === undefined) {
                    continue;
                }
                if (this.options.selectedClass) {
                    $checkbox.closest('li')
                        .removeClass(this.options.selectedClass);
                }
                // Keep the widget checkbox and the native <option> in sync.
                $checkbox.prop('checked', false);
                $option.prop('selected', false);
                if (triggerOnChange) {
                    this.options.onChange($option, false);
                }
            }
            this.updateButtonText();
            this.updateSelectAll();
            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
        },
        /**
         * Selects all enabled & visible options.
         *
         * If justVisible is true or not specified, only the options that are
         * currently visible (not filtered out, not collapsed) are selected;
         * otherwise every enabled option is selected.
         *
         * @param {Boolean} justVisible
         * @param {Boolean} triggerOnSelectAll whether to fire the onSelectAll callback
         */
        selectAll: function (justVisible, triggerOnSelectAll) {
            justVisible = typeof justVisible === 'undefined' ? true : justVisible;
            var allLis = $('li:not(.divider):not(.disabled):not(.multiselect-group)', this.$ul);
            // NOTE(review): "multiselect-collapisble-hidden" looks misspelled;
            // it must match the class added elsewhere in this file — confirm
            // before renaming it.
            var visibleLis = $('li:not(.divider):not(.disabled):not(.multiselect-group):not(.multiselect-filter-hidden):not(.multiselect-collapisble-hidden)', this.$ul).filter(':visible');
            if(justVisible) {
                $('input:enabled' , visibleLis).prop('checked', true);
                visibleLis.addClass(this.options.selectedClass);
                // Mirror each checked input onto its native <option>.
                $('input:enabled' , visibleLis).each($.proxy(function(index, element) {
                    var value = $(element).val();
                    var option = this.getOptionByValue(value);
                    $(option).prop('selected', true);
                }, this));
            }
            else {
                $('input:enabled' , allLis).prop('checked', true);
                allLis.addClass(this.options.selectedClass);
                $('input:enabled' , allLis).each($.proxy(function(index, element) {
                    var value = $(element).val();
                    var option = this.getOptionByValue(value);
                    $(option).prop('selected', true);
                }, this));
            }
            // Also tick the dedicated select-all checkbox itself.
            $('li input[value="' + this.options.selectAllValue + '"]', this.$ul).prop('checked', true);
            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
            if (triggerOnSelectAll) {
                this.options.onSelectAll();
            }
        },
        /**
         * Deselects all options.
         *
         * If justVisible is true or not specified, only the options that are
         * currently visible (not filtered out, not collapsed) are deselected;
         * otherwise every enabled option is deselected.
         *
         * @param {Boolean} justVisible
         * @param {Boolean} triggerOnDeselectAll whether to fire the onDeselectAll callback
         */
        deselectAll: function (justVisible, triggerOnDeselectAll) {
            justVisible = typeof justVisible === 'undefined' ? true : justVisible;
            var allLis = $('li:not(.divider):not(.disabled):not(.multiselect-group)', this.$ul);
            // NOTE(review): "multiselect-collapisble-hidden" looks misspelled;
            // it must match the class added elsewhere in this file — confirm
            // before renaming it.
            var visibleLis = $('li:not(.divider):not(.disabled):not(.multiselect-group):not(.multiselect-filter-hidden):not(.multiselect-collapisble-hidden)', this.$ul).filter(':visible');
            if(justVisible) {
                $('input[type="checkbox"]:enabled' , visibleLis).prop('checked', false);
                visibleLis.removeClass(this.options.selectedClass);
                // Mirror each unchecked input onto its native <option>.
                $('input[type="checkbox"]:enabled' , visibleLis).each($.proxy(function(index, element) {
                    var value = $(element).val();
                    var option = this.getOptionByValue(value);
                    $(option).prop('selected', false);
                }, this));
            }
            else {
                $('input[type="checkbox"]:enabled' , allLis).prop('checked', false);
                allLis.removeClass(this.options.selectedClass);
                $('input[type="checkbox"]:enabled' , allLis).each($.proxy(function(index, element) {
                    var value = $(element).val();
                    var option = this.getOptionByValue(value);
                    $(option).prop('selected', false);
                }, this));
            }
            // Also untick the dedicated select-all checkbox itself.
            $('li input[value="' + this.options.selectAllValue + '"]', this.$ul).prop('checked', false);
            if (this.options.enableClickableOptGroups && this.options.multiple) {
                this.updateOptGroups();
            }
            if (triggerOnDeselectAll) {
                this.options.onDeselectAll();
            }
        },
/**
* Rebuild the plugin.
*
* Rebuilds the dropdown, the filter and the select all option.
*/
rebuild: function() {
this.$ul.html('');
// Important to distinguish between radios and checkboxes.
this.options.multiple = this.$select.attr('multiple') === 'multiple';
this.buildSelectAll();
this.buildDropdownOptions();
this.buildFilter();
this.updateButtonText();
this.updateSelectAll(true);
if (this.options.enableClickableOptGroups && this.options.multiple) {
this.updateOptGroups();
}
if (this.options.disableIfEmpty && $('option', this.$select).length <= 0) {
this.disable();
}
else {
this.enable();
}
if (this.options.dropRight) {
this.$ul.addClass('pull-right');
}
},
        /**
         * The provided data will be used to build the dropdown.
         *
         * Accepts an array of plain objects. Entries with a `children` array
         * become <optgroup> elements, everything else a plain <option>.
         * Custom key/value pairs found in the `attributes` object are written
         * as data-* attributes. The native select is emptied first and the
         * widget is rebuilt afterwards.
         */
        dataprovider: function(dataprovider) {
            var groupCounter = 0;
            var $select = this.$select.empty();
            $.each(dataprovider, function (index, option) {
                var $tag;
                if ($.isArray(option.children)) { // create optiongroup tag
                    groupCounter++;
                    $tag = $('<optgroup/>').attr({
                        label: option.label || 'Group ' + groupCounter,
                        disabled: !!option.disabled,
                        value: option.value
                    });
                    // NOTE(review): `forEach` is presumably a file-level helper
                    // that iterates the array passing each element — confirm
                    // its definition before refactoring.
                    forEach(option.children, function(subOption) { // add children option tags
                        var attributes = {
                            value: subOption.value,
                            label: subOption.label || subOption.value,
                            title: subOption.title,
                            selected: !!subOption.selected,
                            disabled: !!subOption.disabled
                        };
                        //Loop through attributes object and add key-value for each attribute
                        for (var key in subOption.attributes) {
                            attributes['data-' + key] = subOption.attributes[key];
                        }
                        //Append original attributes + new data attributes to option
                        $tag.append($('<option/>').attr(attributes));
                    });
                }
                else {
                    var attributes = {
                        'value': option.value,
                        'label': option.label || option.value,
                        'title': option.title,
                        'class': option.class,
                        'selected': !!option.selected,
                        'disabled': !!option.disabled
                    };
                    //Loop through attributes object and add key-value for each attribute
                    for (var key in option.attributes) {
                        attributes['data-' + key] = option.attributes[key];
                    }
                    //Append original attributes + new data attributes to option
                    $tag = $('<option/>').attr(attributes);
                    $tag.text(option.label || option.value);
                }
                $select.append($tag);
            });
            this.rebuild();
        },
/**
* Enable the multiselect.
*/
enable: function() {
this.$select.prop('disabled', false);
this.$button.prop('disabled', false)
.removeClass('disabled');
},
/**
* Disable the multiselect.
*/
disable: function() {
this.$select.prop('disabled', true);
this.$button.prop('disabled', true)
.addClass('disabled');
},
/**
* Set the options.
*
* @param {Array} options
*/
setOptions: function(options) {
this.options = this.mergeOptions(options);
},
/**
* Merges the given options with the default options.
*
* @param {Array} options
* @returns {Array}
*/
mergeOptions: function(options) {
return $.extend(true, {}, this.defaults, this.options, options);
},
/**
* Checks whether a select all checkbox is present.
*
* @returns {Boolean}
*/
hasSelectAll: function() {
return $('li.multiselect-all', this.$ul).length > 0;
},
        /**
         * Update opt groups: a group header is checked/highlighted exactly
         * when every enabled, non-filtered option below it is checked.
         */
        updateOptGroups: function() {
            var $groups = $('li.multiselect-group', this.$ul);
            var selectedClass = this.options.selectedClass;
            $groups.each(function() {
                // Items of this group = siblings up to the next group header,
                // ignoring filtered-out and disabled entries.
                var $options = $(this).nextUntil('li.multiselect-group')
                    .not('.multiselect-filter-hidden')
                    .not('.disabled');
                var checked = true;
                $options.each(function() {
                    var $input = $('input', this);
                    if (!$input.prop('checked')) {
                        checked = false;
                    }
                });
                if (selectedClass) {
                    if (checked) {
                        $(this).addClass(selectedClass);
                    }
                    else {
                        $(this).removeClass(selectedClass);
                    }
                }
                // Reflect the aggregate state on the group's own checkbox.
                $('input', this).prop('checked', checked);
            });
        },
/**
* Updates the select all checkbox based on the currently displayed and selected checkboxes.
*/
updateSelectAll: function() {
if (this.hasSelectAll()) {
var allBoxes = $('li:not(.multiselect-item):not(.multiselect-filter-hidden):not(.multiselect-group):not(.disabled) input:enabled', this.$ul);
var allBoxesLength = allBoxes.length;
var checkedBoxesLength = allBoxes.filter(':checked').length;
var selectAllLi = $('li.multiselect-all', this.$ul);
var selectAllInput = selectAllLi.find('input');
if (checkedBoxesLength > 0 && checkedBoxesLength === allBoxesLength) {
selectAllInput.prop('checked', true);
selectAllLi.addClass(this.options.selectedClass);
}
else {
selectAllInput.prop('checked', false);
selectAllLi.removeClass(this.options.selectedClass);
}
}
},
/**
* Update the button text and its title based on the currently selected options.
*/
updateButtonText: function() {
var options = this.getSelected();
// First update the displayed button text.
if (this.options.enableHTML) {
$('.multiselect .multiselect-selected-text', this.$container).html(this.options.buttonText(options, this.$select));
}
else {
$('.multiselect .multiselect-selected-text', this.$container).text(this.options.buttonText(options, this.$select));
}
// Now update the title attribute of the button.
$('.multiselect', this.$container).attr('title', this.options.buttonTitle(options, this.$select));
},
/**
* Get all selected options.
*
* @returns {jQUery}
*/
getSelected: function() {
return $('option', this.$select).filter(':selected');
},
/**
* Gets a select option by its value.
*
* @param {String} value
* @returns {jQuery}
*/
getOptionByValue: function (value) {
var options = $('option', this.$select);
var valueToCompare = value.toString();
for (var i = 0; i < options.length; i = i + 1) {
var option = options[i];
if (option.value === valueToCompare) {
return $(option);
}
}
},
/**
* Get the input (radio/checkbox) by its value.
*
* @param {String} value
* @returns {jQuery}
*/
getInputByValue: function (value) {
var checkboxes = $('li input:not(.multiselect-search)', this.$ul);
var valueToCompare = value.toString();
for (var i = 0; i < checkboxes.length; i = i + 1) {
var checkbox = checkboxes[i];
if (checkbox.value === valueToCompare) {
return $(checkbox);
}
}
},
/**
* Used for knockout integration.
*/
updateOriginalOptions: function() {
this.originalOptions = this.$select.clone()[0].options;
},
asyncFunction: function(callback, timeout, self) {
var args = Array.prototype.slice.call(arguments, 3);
return setTimeout(function() {
callback.apply(self || window, args);
}, timeout);
},
setAllSelectedText: function(allSelectedText) {
this.options.allSelectedText = allSelectedText;
this.updateButtonText();
}
};
$.fn.multiselect = function(option, parameter, extraOptions) {
return this.each(function() {
var data = $(this).data('multiselect');
var options = typeof option === 'object' && option;
// Initialize the multiselect.
if (!data) {
data = new Multiselect(this, options);
$(this).data('multiselect', data);
}
// Call multiselect method.
if (typeof option === 'string') {
data[option](parameter, extraOptions);
if (option === 'destroy') {
$(this).data('multiselect', false);
}
}
});
};
$.fn.multiselect.Constructor = Multiselect;
$(function() {
$('select[data-role=multiselect]').multiselect();
});
}); | PypiClean |
/fastmbar-1.4.1.tar.gz/fastmbar-1.4.1/docs/build/_static/copybutton.js | const messages = {
  // Tooltip strings per UI locale; keyed by the document's lang code.
  'en': {
    'copy': 'Copy',
    'copy_to_clipboard': 'Copy to clipboard',
    'copy_success': 'Copied!',
    'copy_failure': 'Failed to copy',
  },
  'es' : {
    'copy': 'Copiar',
    'copy_to_clipboard': 'Copiar al portapapeles',
    'copy_success': '¡Copiado!',
    'copy_failure': 'Error al copiar',
  },
  'de' : {
    'copy': 'Kopieren',
    'copy_to_clipboard': 'In die Zwischenablage kopieren',
    'copy_success': 'Kopiert!',
    'copy_failure': 'Fehler beim Kopieren',
  },
  'fr' : {
    'copy': 'Copier',
    'copy_to_clipboard': 'Copier dans le presse-papier',
    'copy_success': 'Copié !',
    'copy_failure': 'Échec de la copie',
  },
  'ru': {
    'copy': 'Скопировать',
    'copy_to_clipboard': 'Скопировать в буфер',
    'copy_success': 'Скопировано!',
    'copy_failure': 'Не удалось скопировать',
  },
  'zh-CN': {
    'copy': '复制',
    'copy_to_clipboard': '复制到剪贴板',
    'copy_success': '复制成功!',
    'copy_failure': '复制失败',
  },
  'it' : {
    'copy': 'Copiare',
    'copy_to_clipboard': 'Copiato negli appunti',
    'copy_success': 'Copiato!',
    'copy_failure': 'Errore durante la copia',
  }
}
// Pick the tooltip locale from the page's lang attribute, falling back to
// English when no translation is available for that language.
let locale = 'en'
if( document.documentElement.lang !== undefined
    && messages[document.documentElement.lang] !== undefined ) {
  locale = document.documentElement.lang
}
// Root URL of the documentation, injected by Sphinx via DOCUMENTATION_OPTIONS.
let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT;
if (doc_url_root == '#') {
  doc_url_root = '';
}
/**
 * SVG files for our copy buttons
 */
let iconCheck = `<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-check" width="44" height="44" viewBox="0 0 24 24" stroke-width="2" stroke="#22863a" fill="none" stroke-linecap="round" stroke-linejoin="round">
  <title>${messages[locale]['copy_success']}</title>
  <path stroke="none" d="M0 0h24v24H0z" fill="none"/>
  <path d="M5 12l5 5l10 -10" />
</svg>`
// If the user specified their own SVG use that, otherwise use the default.
// NOTE(review): the empty literal below is presumably a build-time
// substitution point where a custom icon can be injected — confirm against
// the template that generates this file.
let iconCopy = ``;
if (!iconCopy) {
  iconCopy = `<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-copy" width="44" height="44" viewBox="0 0 24 24" stroke-width="1.5" stroke="#000000" fill="none" stroke-linecap="round" stroke-linejoin="round">
    <title>${messages[locale]['copy_to_clipboard']}</title>
    <path stroke="none" d="M0 0h24v24H0z" fill="none"/>
    <rect x="8" y="8" width="12" height="12" rx="2" />
    <path d="M16 8v-2a2 2 0 0 0 -2 -2h-8a2 2 0 0 0 -2 2v8a2 2 0 0 0 2 2h2" />
  </svg>`
}
/**
 * Runs `cb` once the DOM is ready: immediately if parsing has already
 * finished, via DOMContentLoaded on standards browsers, or through the
 * legacy onreadystatechange fallback otherwise.
 */
const runWhenDOMLoaded = cb => {
  if (document.readyState != 'loading') {
    // DOM already parsed; run synchronously.
    cb()
    return
  }
  if (document.addEventListener) {
    document.addEventListener('DOMContentLoaded', cb)
    return
  }
  // Ancient IE fallback.
  document.attachEvent('onreadystatechange', function() {
    if (document.readyState == 'complete') cb()
  })
}
// Builds the DOM id assigned to the index-th highlighted code cell.
const codeCellId = (index) => 'codecell' + index
// Clears any text selection left behind, since ClipboardJS selects the
// copied text as a side effect of copying.
const clearSelection = () => {
  if (window.getSelection) {
    window.getSelection().removeAllRanges()
    return
  }
  if (document.selection) {
    // Legacy IE selection API.
    document.selection.empty()
  }
}
// Changes tooltip text for a moment, then changes it back
// We want the timeout of our `success` class to be a bit shorter than the
// tooltip and icon change, so that we can hide the icon before changing back.
var timeoutIcon = 2000;
var timeoutSuccessClass = 1500;
// Temporarily swaps the button's data-tooltip text and flags it with the
// `success` class; both revert automatically after the timeouts above.
const temporarilyChangeTooltip = (el, oldText, newText) => {
  el.setAttribute('data-tooltip', newText)
  el.classList.add('success')
  // Remove success a little bit sooner than we change the tooltip
  // So that we can use CSS to hide the copybutton first
  setTimeout(() => el.classList.remove('success'), timeoutSuccessClass)
  setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon)
}
// Swaps the button icon to the checkmark, restoring the copy icon after
// `timeoutIcon` milliseconds.
const temporarilyChangeIcon = (el) => {
  el.innerHTML = iconCheck;
  const restore = () => {
    el.innerHTML = iconCopy
  }
  setTimeout(restore, timeoutIcon)
}
// Main setup: gives every highlighted code cell an id, appends a copy
// button next to it, and wires ClipboardJS to copy a prompt-filtered
// version of the cell's text.
const addCopyButtonToCodeCells = () => {
  // If ClipboardJS hasn't loaded, wait a bit and try again. This
  // happens because we load ClipboardJS asynchronously.
  if (window.ClipboardJS === undefined) {
    setTimeout(addCopyButtonToCodeCells, 250)
    return
  }
  // Add copybuttons to all of our code cells
  const COPYBUTTON_SELECTOR = 'div.highlight pre';
  const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR)
  codeCells.forEach((codeCell, index) => {
    const id = codeCellId(index)
    codeCell.setAttribute('id', id)
    const clipboardButton = id =>
    `<button class="copybtn o-tooltip--left" data-tooltip="${messages[locale]['copy']}" data-clipboard-target="#${id}">
      ${iconCopy}
    </button>`
    codeCell.insertAdjacentHTML('afterend', clipboardButton(id))
  })

// Escapes every regex metacharacter in `string` so it can be embedded in
// a RegExp as a literal.
function escapeRegExp(string) {
    return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string
}

/**
 * Removes excluded text from a Node.
 *
 * @param {Node} target Node to filter.
 * @param {string} exclude CSS selector of nodes to exclude.
 * @returns {DOMString} Text from `target` with text removed.
 */
function filterText(target, exclude) {
    const clone = target.cloneNode(true); // clone as to not modify the live DOM
    if (exclude) {
      // remove excluded nodes
      clone.querySelectorAll(exclude).forEach(node => node.remove());
    }
    return clone.innerText;
}

// Callback when a copy button is clicked. Will be passed the node that was clicked
// should then grab the text and replace pieces of text that shouldn't be used in output
function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") {
    var regexp;
    var match;

    // Do we check for line continuation characters and "HERE-documents"?
    var useLineCont = !!lineContinuationChar
    var useHereDoc = !!hereDocDelim

    // create regexp to capture prompt and remaining line
    if (isRegexp) {
        regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)')
    } else {
        regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)')
    }

    const outputLines = [];
    var promptFound = false;
    var gotLineCont = false;
    var gotHereDoc = false;
    const lineGotPrompt = [];
    for (const line of textContent.split('\n')) {
        match = line.match(regexp)
        if (match || gotLineCont || gotHereDoc) {
            promptFound = regexp.test(line)
            lineGotPrompt.push(promptFound)
            if (removePrompts && promptFound) {
                outputLines.push(match[2])
            } else {
                outputLines.push(line)
            }
            // Track continuation lines / here-documents so that their
            // follow-up lines are kept even without a prompt.
            gotLineCont = line.endsWith(lineContinuationChar) & useLineCont
            if (line.includes(hereDocDelim) & useHereDoc)
                gotHereDoc = !gotHereDoc
        } else if (!onlyCopyPromptLines) {
            outputLines.push(line)
        } else if (copyEmptyLines && line.trim() === '') {
            outputLines.push(line)
        }
    }

    // If no lines with the prompt were found then just use original lines
    if (lineGotPrompt.some(v => v === true)) {
        textContent = outputLines.join('\n');
    }

    // Remove a trailing newline to avoid auto-running when pasting
    if (textContent.endsWith("\n")) {
        textContent = textContent.slice(0, -1)
    }
    return textContent
}

// Resolves the clicked button's target element, strips line numbers and
// runs the prompt filter before handing the text to ClipboardJS.
var copyTargetText = (trigger) => {
  var target = document.querySelector(trigger.attributes['data-clipboard-target'].value);

  // get filtered text
  let exclude = '.linenos';

  let text = filterText(target, exclude);
  return formatCopyText(text, '', false, true, true, true, '', '')
}

  // Initialize with a callback so we can modify the text before copy
  const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText})

  // Update UI with error/success messages
  clipboard.on('success', event => {
    clearSelection()
    temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success'])
    temporarilyChangeIcon(event.trigger)
  })

  clipboard.on('error', event => {
    temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure'])
  })
}
runWhenDOMLoaded(addCopyButtonToCodeCells) | PypiClean |
/Grid2Op-1.9.3-py3-none-any.whl/grid2op/MakeEnv/UpdateEnv.py | import time
import os
import grid2op.MakeEnv.PathUtils
from grid2op.Exceptions import UnknownEnv
from grid2op.MakeEnv.UserUtils import list_available_local_env
from grid2op.MakeEnv.Make import _retrieve_github_content
# GitHub API endpoint listing, per environment, the files that can be updated.
_LIST_REMOTE_URL = (
    "https://api.github.com/repos/bdonnot/grid2op-datasets/contents/updates.json"
)
# GitHub API endpoint giving the reference hash of each environment.
_LIST_REMOTE_ENV_HASH = (
    "https://api.github.com/repos/bdonnot/grid2op-datasets/contents/env_hashes.json"
)
def _write_file(path_local_env, new_config, file_name):
with open(os.path.join(path_local_env, file_name), "w", encoding="utf-8") as f:
f.write(new_config)
def update_env(env_name=None):
    """
    This function allows you to retrieve the latest version of some of the files used to create the
    environment.

    File can be for example "config.py" or "prod_charac.csv" or "difficulty_levels.json".

    Parameters
    ----------
    env_name: ``str``
        The name of the environment you want to update the config file (must be an environment you
        have already downloaded). If ``None`` it will look for updates for all the environments
        locally available.

    Examples
    --------
    Here is an example on how to update your environments:

    .. code-block:: python

        import grid2op
        grid2op.update_env()
        # it will download the files "config.py" or "prod_charac.csv" or "difficulty_levels.json"
        # of your local environment to match the latest version available.

    """
    # Thin public wrapper: remote lookup, hashing and file download all
    # happen in the private helper below.
    _update_files(env_name=env_name)
def _update_file(dict_, env_name, file_name):
    """
    INTERNAL

    .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\

    Update a single file of a single environment by downloading its latest
    version from the grid2op-datasets repository.

    File can be for example "config.py" or "prod_charac.csv" or "difficulty_levels.json".

    ``dict_`` must contain the keys "base_url" and "filename" describing where
    to fetch the new content.
    """
    baseurl, filename = dict_["base_url"], dict_["filename"]
    url_ = baseurl + filename
    # Small pause between downloads to stay below the GitHub API rate limit.
    time.sleep(1)
    new_config = _retrieve_github_content(url_, is_json=False)
    path_local_env = os.path.join(grid2op.MakeEnv.PathUtils.DEFAULT_PATH_DATA, env_name)
    if os.path.exists(os.path.join(path_local_env, ".multimix")):
        # this is a multimix env: propagate the new file to every mix that
        # already contains a file with that name.
        mixes = os.listdir(path_local_env)
        for mix in mixes:
            mix_dir = os.path.join(path_local_env, mix)
            if os.path.exists(os.path.join(mix_dir, file_name)):
                # this is indeed a mix
                _write_file(mix_dir, new_config, file_name=file_name)
    else:
        _write_file(path_local_env, new_config, file_name=file_name)
    print(
        '\t Successfully updated file "{}" for environment "{}"'.format(
            file_name, env_name
        )
    )
def _do_env_need_update(env_name, env_hashes):
    """Return ``True`` when the local copy of ``env_name`` differs from the remote reference hash.

    Environments without a published hash are conservatively reported as
    needing an update (this keeps the historical behaviour).
    """
    if env_name not in env_hashes:
        # No reference hash for this environment: in doubt, update it.
        return True
    remote_digest = env_hashes[env_name]
    env_path = os.path.join(grid2op.get_current_local_dir(), env_name)
    local_digest = _hash_env(env_path).hexdigest()
    return local_digest != remote_digest
def _update_files(env_name=None, answer_json=None, env_hashes=None):
    """
    INTERNAL

    .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\

    Update all the "modified" files of a given environment. If ``None`` is provided as input, all local environments
    will be checked for update.

    Parameters
    ----------
    env_name: ``str``
        Name of the environment you want to update (should be locally available)

    answer_json: ``dict``, optional
        Cached content of the remote "updates.json" file (fetched once and
        reused across recursive calls).

    env_hashes: ``dict``, optional
        Cached content of the remote "env_hashes.json" file.
    """
    avail_envs = list_available_local_env()
    if answer_json is None:
        # optimization to retrieve only once this file
        answer_json = _retrieve_github_content(_LIST_REMOTE_URL)
    if env_hashes is None:
        # optimization to retrieve only once this file
        env_hashes = _retrieve_github_content(_LIST_REMOTE_ENV_HASH)
    if env_name is None:
        # i update all the files for all the environments
        for env_name in avail_envs:
            _update_files(env_name, answer_json=answer_json, env_hashes=env_hashes)
    else:
        # i update the files for only an environment
        if env_name in avail_envs:
            need_update = _do_env_need_update(env_name, env_hashes)
            if env_name in answer_json and need_update:
                # remote lists files to refresh for this environment
                dict_main = answer_json[env_name]
                for k, dict_ in dict_main.items():
                    _update_file(dict_, env_name, file_name=k)
            elif need_update and env_name not in answer_json:
                # hash mismatch but no file list available: ask the user to report
                print(
                    f'Environment: "{env_name}" is not up to date, but we did not found any files to update. '
                    f'IF this environment is officially supported by grid2op (see full list at '
                    f'https://grid2op.readthedocs.io/en/latest/available_envs.html#description-of-some-environments) '
                    f'Please write an issue at :\n\t\t'
                    f'https://github.com/rte-france/Grid2Op/issues/new?assignees=&labels=question&title=Environment%20{env_name}%20is%20not%20up%20to%20date%20but%20I%20cannot%20update%20it.&body=%3c%21%2d%2dDescribe%20shortly%20the%20context%20%2d%2d%3e%0d'
                )
            else:
                # environment is up to date
                print('Environment "{}" is up to date'.format(env_name))
        else:
            raise UnknownEnv(
                'Impossible to locate the environment named "{}". Have you downlaoded it?'
                "".format(env_name)
            )
# TODO make that a method of the environment maybe ?
def _hash_env(
path_local_env,
hash_=None,
blocksize=64, # TODO is this correct ?
):
import hashlib # lazy import
if hash_ is None:
# we use this as it is supposedly faster than md5
# we don't really care about the "secure" part of it (though it's a nice tool to have)
hash_ = hashlib.blake2b()
if os.path.exists(os.path.join(path_local_env, ".multimix")):
# this is a multi mix, so i need to run through all sub env
mixes = sorted(os.listdir(path_local_env))
for mix in mixes:
mix_dir = os.path.join(path_local_env, mix)
if os.path.isdir(mix_dir):
hash_ = _hash_env(mix_dir, hash_=hash_, blocksize=blocksize)
else:
# i am hashing a regular environment
# first i hash the config files
for fn_ in [
"alerts_info.json",
"config.py",
"difficulty_levels.json",
"grid.json",
"grid_layout.json",
"prods_charac.csv",
"storage_units_charac.csv",
# chronix2grid files, if any
"loads_charac.csv",
"params.json",
"params_load.json",
"params_loss.json",
"params_opf.json",
"params_res.json",
"scenario_params.json",
]: # list the file we want to hash (we don't hash everything
full_path_file = os.path.join(path_local_env, fn_)
import re
if os.path.exists(full_path_file):
with open(full_path_file, "r", encoding="utf-8") as f:
text_ = f.read()
text_ = re.sub(
"\s", "", text_
) # this is done to ensure a compatibility between platform
# sometime git replaces the "\r\n" in windows with "\n" on linux / macos and it messes
# up the hash
hash_.update(text_.encode("utf-8"))
# now I hash the chronics
# but as i don't want to read every chronics (for time purposes) i will only hash the names
# of all the chronics
path_chronics = os.path.join(path_local_env, "chronics")
for chron_name in sorted(os.listdir(path_chronics)):
hash_.update(chron_name.encode("utf-8"))
return hash_ | PypiClean |
/DnaWeaver-0.3.8.tar.gz/DnaWeaver-0.3.8/dnaweaver/AssemblyPlanReport/mixins/PlotsMixin/AssemblyBlocksMixin.py | from dna_features_viewer import GraphicRecord
import matplotlib.pyplot as plt
from copy import deepcopy
import matplotlib.patches as mpatches
import matplotlib.font_manager as fm
from Bio import SeqIO
from ...config import SETTINGS
class AssemblyBlocksMixin:
    # Mixin providing the "assembly blocks" visualization of a computed
    # assembly plan. Expects the host class to provide ``self.plan``,
    # ``self.sources`` and ``compute_full_assembly_plan()``.
    def plot_assembly_blocks(
        self,
        parts_offset=0,
        plot_top_assembly=True,
        ax=None,
        edge_widths=None,
        legend=False,
        legend_offset=-0.05,
    ):
        """Return a Matplotlib or Bokeh plot of the assembly tree of blocks.

        Parameters
        ----------
        parts_offset
          Offset applied so that consecutive blocks are not exactly on the
          same level. Can go from 0 (flat line of blocks) to e.g. 1.

        plot_top_assembly
          Whether the top assembly (which is just one big block) should be
          plotted or not.

        ax
          A Matplotlib Axes object. If no ax is provided, a new figure and
          ax are generated.

        edge_widths
          A dict {sourcename : width} indicating the widths of the
          rectangles based on the source name. Rectangles with a very small
          width will be edgeless.

        legend
          Whether the legend is included in the ax.

        legend_offset
          Vertical offset passed through to the matplotlib renderer.
        """
        rectangles = []
        if edge_widths is None:
            edge_widths = {}
        if not hasattr(self, "plan"):
            self.compute_full_assembly_plan()
        # Work on a copy: rec() pops "assembly_plan" from each quote below.
        tree = deepcopy(self.plan)

        def rec(_quote, depth=0, xstart=0, xend=None):
            # Depth-first walk of the assembly plan, collecting one rectangle
            # per (sub)assembly. NOTE(review): _quote appears to be a
            # dict-like quote object (supports .pop and attribute access) —
            # confirm against the quote class definition.
            children = _quote.pop("assembly_plan")
            # print _quote.final_location
            if _quote.final_location is not None:
                left, right = _quote.final_location
            else:
                left, right = xstart, xend
            if children is None:
                children = []
            # The root rectangle is skipped when plot_top_assembly is False;
            # the (not plot_top_assembly) terms shift rows up accordingly.
            if plot_top_assembly or (depth > 0):
                source = self.sources[_quote.source]
                rectangles.append(
                    {
                        "top": -depth + 0.5 + (not plot_top_assembly),
                        "bottom": -depth + (not plot_top_assembly),
                        "left": left,
                        "right": right,
                        "source": str(_quote.source),
                        "lead_time": str(_quote.lead_time),
                        "price": str(_quote.price),
                        "length": len(_quote.sequence),
                        "has_children": children != [],
                        "line_width": 1,
                        "operation_type": source.operation_type,
                        "fa_symbol": source._report_fa_symbol_plain,
                        "color": source._report_color,
                    }
                )
            for child in children:
                rec(
                    child,
                    depth=depth + 1,
                    xstart=xstart + child.segment_start,
                    xend=xstart + child.segment_end,
                )

        rec(tree, xend=len(tree.sequence))
        tops = list(set([r["top"] for r in rectangles]))
        offsets = {top: 0 for top in tops}
        guides = []
        # Alternating background guides visually connect each parent block
        # with the row of its children just below.
        for top in sorted(tops):
            rects = sorted(
                [r for r in rectangles if r["top"] == top], key=lambda r: r["left"]
            )
            for i, rect in enumerate(rects):
                color = ["#e5ecff", "#ffffe5"][i % 2]
                if rect["has_children"]:
                    guides.append(
                        {
                            "top": rect["top"],
                            "bottom": rect["top"] - 1,
                            "left": rect["left"],
                            "right": rect["right"],
                            "color": color,
                        }
                    )
        # Optionally stagger consecutive blocks up/down (alternating offset)
        # so that neighbouring blocks do not sit exactly on the same level.
        for rectangle in sorted(rectangles, key=lambda r: r["left"]):
            if parts_offset:
                offset = offsets[rectangle["top"]]
                offsets[rectangle["top"]] = (offsets[rectangle["top"]] + 1) % 2
                rectangle["top"] += parts_offset * offset
                rectangle["bottom"] += parts_offset * offset
        # When a genbank record is attached, it is rendered above the blocks.
        if hasattr(self, "genbank") and (self.genbank is not None):
            record = SeqIO.read(self.genbank, "genbank")
        else:
            record = None
        return _matplotlib_plot_assembly_blocks(
            rectangles,
            guides,
            tops,
            ax,
            record=record,
            legend=legend,
            legend_offset=legend_offset,
        )
def _matplotlib_plot_assembly_blocks(
    rectangles,
    guides,
    tops,
    ax=None,
    fig_height="auto",
    fig_width=8,
    legend=False,
    textprops=None,
    record=None,
    legend_offset=-0.05,
):
    """Plot the assembly block rectangles using matplotlib.

    Parameters
    ----------
    rectangles : list of dict
        One dict per assembly block, with at least the keys "left",
        "right", "top", "bottom", "color" and "source".
    guides : list of dict
        Background guide rectangles linking parent blocks to children.
    tops : list
        Distinct "top" values of the rectangles, used to size the y-axis.
    ax : matplotlib Axes, optional
        Axes to draw on; a new figure is created when None.
    fig_height, fig_width
        Figure dimensions, only used when ``ax`` is None ("auto" height
        defaults to 8).
    legend : bool
        Whether to attach a legend of block sources below the plot.
    textprops : FontProperties, optional
        Font for the legend text; defaults to the bundled OpenSans font.
    record : Bio.SeqRecord, optional
        When given, the annotated record is drawn above the block plot.
    legend_offset : float
        Vertical anchor offset for the legend bounding box.

    Returns
    -------
    (ax, legend_handles)
    """
    if record is not None:
        # Two stacked axes: the annotated record above, the blocks below.
        # BUG FIX: plt.subplots returns (figure, array_of_axes); the original
        # unpacked it into three names, which raised a ValueError here.
        fig, (record_ax, blocks_ax) = plt.subplots(
            2, 1, figsize=(7, 7), gridspec_kw={"height_ratios": [4, 1]}
        )
        grecord = GraphicRecord.from_biopython_record(
            record, fun_color=lambda a: "#aabbff"
        )
        grecord.plot(record_ax)
        # Recurse once with record=None to draw the blocks on the lower axes.
        res = _matplotlib_plot_assembly_blocks(
            rectangles,
            guides,
            tops,
            ax=blocks_ax,
            fig_height="auto",
            fig_width=8,
            legend=False,
            textprops=None,
            record=None,
        )
        # Keep the record's x-range aligned with the blocks underneath it.
        record_ax.set_xlim(blocks_ax.get_xlim())
        return res
    L = max([r["right"] for r in rectangles])
    if textprops is None:
        textprops = fm.FontProperties(
            fname=SETTINGS["OpenSans-ttf-path"], size=12, family="sans-serif"
        )
    if ax is None:
        if fig_height == "auto":
            fig_height = 8
        # BUG FIX: the original hard-coded figsize=(8, 8), silently ignoring
        # the fig_width / fig_height parameters.
        fig, ax = plt.subplots(1, figsize=(fig_width, fig_height))
    ax.set_xlim((-0.05 * L, 1.05 * L))
    ax.set_ylim((min(tops) - 1, 1.5))
    # Draw the background guides first so block rectangles sit on top.
    for g in guides:
        patch = mpatches.Rectangle(
            (g["left"], g["bottom"]),
            g["right"] - g["left"],
            g["top"] - g["bottom"],
            color=g["color"],
            ec="none",
        )
        ax.add_patch(patch)
    seen_sources = set()
    legend_handles = []
    sorted_rectangles = sorted(rectangles, key=lambda r: (-r["bottom"], r["left"]))
    for g in sorted_rectangles:
        if g["source"] not in seen_sources:
            # One legend entry per distinct source.
            # NOTE: the original also called plt.legend([...]) on every new
            # source; that intermediate legend attached to the *current*
            # pyplot axes (not necessarily ``ax``) and was superseded by the
            # final legend below, so it has been removed.
            legend_patch = mpatches.Patch(
                facecolor=g["color"], label=g["source"], edgecolor="k", linewidth=1.0,
            )
            legend_handles.append(legend_patch)
            seen_sources.add(g["source"])
        width = g["right"] - g["left"]
        # Drop the outline stroke on blocks too narrow to show one cleanly.
        line_width = 1.0 if (1.0 * width / L) > 0.002 else 0
        patch = mpatches.Rectangle(
            (g["left"], g["bottom"]),
            width,
            g["top"] - g["bottom"],
            facecolor=g["color"],
            edgecolor="k",
            linewidth=line_width,
        )
        ax.add_patch(patch)
    ax.axis("off")
    if legend:
        if ax.legend_ is not None:
            ax.legend_.remove()
        # Use a distinct name so the boolean ``legend`` parameter is not
        # shadowed by the Legend object (as it was in the original).
        legend_obj = ax.legend(
            handles=legend_handles,
            frameon=False,
            ncol=2,
            loc=2,
            bbox_to_anchor=(0.0, legend_offset),
        )
        ltext = legend_obj.get_texts()
        plt.setp(ltext, fontproperties=textprops)
    ax.figure.subplots_adjust(hspace=0.0)
    return ax, legend_handles
/LDB_Inventory_Barcode-0.14.1.tar.gz/LDB_Inventory_Barcode-0.14.1/docs/changelog.rst | Changelog
---------
v0.13.1
~~~~~~~
* Fix a crash when using the ``generate`` shortcut function.
v0.13.0
~~~~~~~
* Added support for transparent backgrounds. This is done by setting the ``mode`` option
for a writer to ``RGBA``.
v0.12.0
~~~~~~~
* Removed ``writer_options`` from ``barcode.get``. This parameter was not used.
* Add a ``with_doctype`` flag to ``SVGWriter``. Set this to false to avoid including a
``DOCTYPE`` in the resulting SVG.
* Add support for ``Pillow>=8.0.0``.
v0.11.0
~~~~~~~
* Added basic support for multiline text.
* Dropped lots of older compat-only code and other cleanups.
* Fixed a bug in the API when combining certain barcodes and writers.
* Published documentation again and updated all project references.
* Fix python_barcode.get mix-ups between `options` and `writer_options`.
Previously, some writer/barcode combinations worked fine, while others
failed. Now all work consistently.
* The cli tool has been fixed and should now work as expected again.
v0.10.0
~~~~~~~
* Added support for GS1-128.
v0.9.1
~~~~~~
* Officially support Python 3.7
* Refer to Pillow in the docs, rather than PIL.
v0.9.0
~~~~~~
* Removed buggy ``Barcode.raw`` attribute.
* Various CLI errors ironed out.
* Make the default value for ``writer_options`` consistent across writers.
v0.8.3
~~~~~~
* Fix pushing of releases to GitHub.
v0.8.2
~~~~~~
* Fix crashes when attempting to use the CLI app.
* Properly include version numbers in SVG comments.
v0.8.1
~~~~~~
* Improve README rendering, and point to this fork's location (the outdated
README on PyPI was causing some confusion).
v0.8.0
~~~~~~
* First release under the name ``python-barcode``.
Previous Changelog
------------------
This project is a fork of pyBarcode, which, apparently, is no longer
maintained. v0.8.0 is our first release, and is the latest ``master`` from that
parent project.
v0.8
~~~~
* Code 128 added.
* Data for charsets and bars moved to subpackage barcode.charsets.
* Merged in some improvements.
v0.7
~~~~
* Fixed some issues with fontsize and fontalignment.
* Added Python 3 support. It's not well tested yet, but the tests run without
errors with Python 3.3. Commandline script added.
v0.6
~~~~
* Changed save and write methods to take the options as a dict not as keyword
arguments (fix this in your code). Added option to left align the text under
the barcode. Fixed bug with EAN13 generation.
v0.5.0
~~~~~~
* Added new generate function to do all generation in one step.
* Moved writer from a subpackage to a module (this breaks some existing code).
UPC is now rendered as real UPC, not as EAN13 with the leading "0".
v0.4.3
~~~~~~
* Fixed bug in new write method (related to PIL) and updated docs.
v0.4.2
~~~~~~
* Added write method to support file like objects as target.
v0.4.1
~~~~~~
* Bugfix release. Removed redundancy in input validation.
* EAN8 was broken. It now works as expected.
v0.4
~~~~
* Removed \*\*options from writers __init__ method. These options never had
effect. They were always overwritten by default_options.
* New config option available: text_distance (the distance between barcode and
text).
v0.4b2
~~~~~~
* Basic documentation included. The barcode object now has a new attribute
called `raw` to have the rendered output without saving to disk.
v0.4b1
~~~~~~
* Support for rendering barcodes as images is implemented. PIL is required to
use it.
v0.3
~~~~
* Compression for SVG output now works.
v0.3b1
~~~~~~
* Writer API has changed for simple adding new (own) writers.
* SVG output is now generated with the xml.dom module instead of string formatting
(makes it more robust).
v0.2.1
~~~~~~
* API of render changed. Now render takes keyword arguments instead of a dict.
v0.2
~~~~
* More tests added.
v0.1
~~~~
* First release.
| PypiClean |
/GxAutoTestManager-1.0.1.tar.gz/GxAutoTestManager-1.0.1/AutoTestManager/client/modules/reboot.py |
from multiprocessing import Process, Lock
from multiprocessing.managers import BaseManager
import serial
import time
import re
class RebootManager(BaseManager):
    # Custom multiprocessing manager type; a Reboot class is registered on it
    # (see the __main__ block) so worker processes can share one proxy object.
    pass
def rebootmanager():
    """Spawn a RebootManager server process and return the manager handle."""
    manager = RebootManager()
    manager.start()
    return manager
class Reboot:
    """Manage serial-attached power switches shared across processes.

    One open serial connection and one per-device lock are kept per device
    path; all bookkeeping is guarded by a global lock so several worker
    processes can safely drive different ports of the same switch.
    """
    def __init__(self):
        self.serial_dev = []     # currently unused; kept for compatibility
        self.serial_object = {}  # device path -> open serial.Serial object
        self.lock = {}           # device path -> per-device Lock
        self._lock = Lock()      # guards serial_object / lock bookkeeping
    def _add_device(self, name, device):
        """Open ``device`` if it is not open yet.

        Returns the new serial object on first open, True when the device
        is already open, or None when opening the port failed.
        """
        with self._lock:
            if not self._is_open(name, device):
                print("%s %s own" % (name, self._lock))
                try:
                    obj = serial.Serial(device, 115200, timeout=30)
                    print(obj)
                    self.serial_object[device] = obj
                except Exception:
                    # BUG FIX: the original bare ``except:`` also swallowed
                    # KeyboardInterrupt/SystemExit; catch Exception instead.
                    return None
                self.lock[device] = Lock()
                print("%s %s release" % (name, self._lock))
                return obj
            return True
    def remove_device(self, device):
        """Close ``device`` and drop its bookkeeping entries.

        BUG FIX: the original also executed ``del self._lock`` here, which
        destroyed the instance-wide lock and made every subsequent
        _add_device/remove_device call raise AttributeError.
        """
        with self._lock:
            with self.lock[device]:
                self.serial_object[device].close()
                del self.serial_object[device]
            del self.lock[device]
    def _is_open(self, name, device):
        """Return True when ``device`` already has an open serial object."""
        if device in self.serial_object.keys():
            print("%s %s is open" % (name, device))
            return True
        print("%s %s is not open" % (name, device))
        return False
    def _power_control(self, name, device, port, cmd):
        """Send ``cmd`` to the switch until it answers "success".

        Returns True on success and False after 5 failed attempts (the
        original fell off the loop and returned None on success while
        returning False on failure).
        """
        retry_times = 0
        max_retry_times = 5
        while True:
            print("%s %s" % (name, cmd))
            self.serial_object[device].write(cmd.encode())
            # First readline discards the echoed command line; the second
            # one carries the switch's status reply.
            s = self.serial_object[device].readline().decode('utf-8', errors='replace')
            s = self.serial_object[device].readline().decode('utf-8', errors='replace')
            print("###", s)
            if re.findall(r"success", s):
                return True
            retry_times += 1
            if retry_times == max_retry_times:
                return False
            time.sleep(0.3)  # min delay between retries
    def reboot(self, device, port, name=''):
        """Power-cycle ``port`` of the switch at ``device`` (off then on)."""
        if not self._add_device(name, device):
            return False
        with self.lock[device]:
            cmdon = "setpower on " + str(port) + "\n"
            cmdoff = "setpower off " + str(port) + "\n"
            self._power_control(name, device, port, cmdoff)
            self._power_control(name, device, port, cmdon)
    def power_off(self, device, port, name=''):
        """Switch ``port`` of the switch at ``device`` off."""
        if not self._add_device(name, device):
            return False
        with self.lock[device]:
            cmdoff = "setpower off " + str(port) + "\n"
            self._power_control(name, device, port, cmdoff)
    def power_on(self, device, port, name=''):
        """Switch ``port`` of the switch at ``device`` on."""
        if not self._add_device(name, device):
            return False
        with self.lock[device]:
            cmdon = "setpower on " + str(port) + "\n"
            self._power_control(name, device, port, cmdon)
def task(name, reboot, device, port):
    """Worker loop: endlessly power-cycle ``port`` of the switch ``device``.

    :param name: worker process name, forwarded for log prints
    :param reboot: shared Reboot proxy object
    :param device: serial device path of the power switch
    :param port: switch port number to drive

    BUG FIX: the original called ``reboot.reboot(name, device, port)``,
    passing the process name where the device path was expected; the
    Reboot methods take ``(device, port, name='')``.
    """
    while True:
        time.sleep(3)
        reboot.reboot(device, port, name)
        reboot.power_off(device, port, name)
        reboot.power_on(device, port, name)
if __name__ == '__main__':
    # Demo/stress test: expose a shared Reboot instance through a manager
    # process and drive three switch ports from three worker processes.
    RebootManager.register('Reboot', Reboot)
    manager = rebootmanager()
    reboot_obj = manager.Reboot()
    names = ['sub_process_1', 'sub_process_2', 'sub_process_3']
    ports = ['1','2','3']
    ps = []
    i = 0
    # One worker per port; all workers share the same serial device.
    for name in names:
        ps.append(Process(target=task,args=(name,reboot_obj,'/dev/ttyUSB0', ports[i])))
        i = i + 1
    for p in ps:
        p.start()
    # Workers loop forever, so join() blocks until they are killed externally.
    for p in ps:
        p.join()
/Accern-0.4.0.tar.gz/Accern-0.4.0/accern/historical.py | from accern import default_client, error, util
from accern.default_client import AccernClient
from accern.schema import Schema
from accern.config import get_config
def get_api_base(env):
    """Return the API base URL configured for the given environment.

    :param env: environment name, e.g. "production".
    :raises ValueError: when no endpoint is configured for ``env``.
    """
    try:
        io_endpoints = get_config()["io"]
        return io_endpoints[env]
    except KeyError:
        raise ValueError("Unknown env type: {0}".format(env))
class HistoricalClient(AccernClient):
    """Perform requests to the Accern API web services."""
    def __init__(self, token=None, client=None, env=None):
        """Initialize with params.

        :param token: Accern API token. Required.
        :param client: default http client. Optional.
        :param env: deployment environment name; defaults to "production".
        """
        self.env = "production" if env is None else env
        self.api_base = get_api_base(self.env)
        self.token = token
        self._client = client or default_client.new_http_client()
    @staticmethod
    def interpret_response(rbody, rcode, rheaders):
        """Decode an HTTP response body as JSON and handle error codes.

        :param rbody: raw response body (bytes or str).
        :param rcode: HTTP status code.
        :param rheaders: response headers.
        :raises error.APIError: when the body is not valid JSON.
        """
        try:
            if hasattr(rbody, 'decode'):
                rbody = rbody.decode('utf-8')
            resp = util.json.loads(rbody)
        except Exception:
            raise error.APIError(
                "Invalid response body from API: %s "
                "(HTTP response code was %d)" % (rbody, rcode),
                rbody, rcode, rheaders)
        if not 200 <= rcode < 300:
            # Delegate non-2xx responses to the shared error translator.
            AccernClient.handle_error(rbody, rcode, resp)
        return resp
    def create_job(self, schema):
        """Create a job with schema.

        :param schema: job detail, will be added to payload
        :raises ApiError: when the API returns an error.
        :raises Timeout: if the request timed out.
        :raises TransportError: when something went wrong while trying to
            execute a request.
        """
        schema = Schema.validate_schema(method='historical', schema=schema)
        token = AccernClient.check_token(self.token)
        method = 'POST'
        headers = AccernClient.build_api_headers(token, method)
        # Job creation is always a POST, so the payload is serialized
        # unconditionally (the original guarded this with a dead
        # ``if method == 'POST'`` branch whose else-arm was unreachable).
        post_data = util.json.dumps({'query': schema})
        rbody, rcode, rheaders = self._client.request(method, self.api_base, headers, post_data)
        return self.interpret_response(rbody, rcode, rheaders)
    def get_jobs(self, job_id=None):
        """Get the user's job history.

        :param job_id: when given, fetch only that job; otherwise list all.
        """
        token = AccernClient.check_token(self.token)
        method = 'GET'
        headers = AccernClient.build_api_headers(token, method)
        # Same request/decode path for both the list and single-job cases
        # (the original duplicated the request code in each branch).
        url = self.api_base if job_id is None else '%s/%s' % (self.api_base, job_id)
        rbody, rcode, rheaders = self._client.request(method, url, headers, post_data=None)
        return self.interpret_response(rbody, rcode, rheaders)
/DS-OOP-Review-0.1.2.tar.gz/DS-OOP-Review-0.1.2/football/season.py | from possible_values import *
from game import Game
from random import randint, uniform, sample
def generate_rand_games(n=15):
    '''Generate n random games using value lists in possible_values

    Parameters
    -----------------------------
    n : int
        number of Game instances to create (default 15)

    Returns
    -----------------------------
    list of Game instances with randomized scores
    '''
    # Begin with empty list
    games = []
    # For the specified number of games, create an instance of the Game
    # class.  Idiom fix: iterate with ``for _ in range(...)`` instead of the
    # original ``for i in list(range(...))`` with unused loop variables.
    # TODO - You can also include the location and week number if desired
    for _ in range(n):
        # Get team names by sampling team_names from possible_values
        game = Game(teams=sample(team_names, k=2))
        # Give each team a random number (from 0 to 3) of each:
        # touchdowns and field goals
        for _ in range(randint(0, 4)):
            game.field_goal(game.teams[0])
        for _ in range(randint(0, 4)):
            game.field_goal(game.teams[1])
        for _ in range(randint(0, 4)):
            game.touchdown(game.teams[0])
        for _ in range(randint(0, 4)):
            game.touchdown(game.teams[1])
        games.append(game)
    return games
def season_report(games):
    '''Print out a season report given a list of games

    Parameters
    -----------------------------
    games : list
        a list of Game class instances
    '''
    # Guard: an empty season would otherwise divide by zero below.
    if not games:
        print('\n\n--------Football Season Report--------\n')
        print('No games were played.')
        return
    # Instantiate empty set and lists
    teams = set()
    winning_teams = []
    losing_teams = []
    winning_team_total_points = 0
    losing_team_total_points = 0
    # Loop through the games
    for game in games:
        # Ensure both teams are included in the set of teams
        teams.add(game.teams[0])
        teams.add(game.teams[1])
        # Record the winning and losing teams for each game
        winning_team, losing_team = game.get_winning_team()
        winning_teams.append(winning_team)
        losing_teams.append(losing_team)
        # Keep running point totals for winners and losers
        winning_team_total_points += game.score[winning_team]
        losing_team_total_points += game.score[losing_team]
    # Average points scored by the winning/losing side per game.
    # BUG FIX: the original divided by len(winning_team)/len(losing_team),
    # i.e. the length of the *name string* of the last winner/loser, rather
    # than the number of games played.
    winning_team_average = winning_team_total_points / len(winning_teams)
    losing_team_average = losing_team_total_points / len(losing_teams)
    # Win/loss tally per team as [wins, losses]
    team_records = {team: [0, 0] for team in teams}
    # Add one for each of the wins (position 0) and losses (position 1)
    for team in winning_teams:
        team_records[team][0] += 1
    for team in losing_teams:
        team_records[team][1] += 1
    # Finally print the report
    print('\n\n--------Football Season Report--------\n')
    print('Team Records')
    print('---------------------------------')
    for team, record in team_records.items():
        print(f'{team}: {record[0]} W, {record[1]} L')
    print('---------------------------------')
    print(f'Average Score of Winning Team: {winning_team_average:.1f}')
    print(f'Average Score of Losing Team: {losing_team_average:.1f}\n')
if __name__ == '__main__':
    # Demo entry point: simulate a random season and print its report.
    season_report(generate_rand_games())
/Flask-Api-Awesomesauce-0.1.tar.gz/Flask-Api-Awesomesauce-0.1/flask_api_awesomesauce/api_decorators.py | from functools import wraps
from flask import jsonify, request
from flask.wrappers import BadRequest
import utilities
def json_response(status_code, message, additional_data=None):
    """Build a JSON Flask response with a consistent envelope.

    Args:
        status_code (int): HTTP status code for the response object.
        message (str): Human-readable message included in the body.
        additional_data (dict, optional): Extra key/value pairs merged
            into the response body.

    Returns:
        A Flask Response object carrying the JSON-encoded payload.
    """
    payload = {'statusCode': status_code, 'message': message}
    if additional_data:
        payload.update(additional_data)
    # Rewrite python_snake_case keys to camelCase in place.
    utilities.python_to_json_syntax(payload)
    resp = jsonify(payload)
    resp.status_code = status_code
    return resp
def api_declaration(
        api_version,
        required_payload_elements=None,
        optional_payload_elements=None,
        required_url_parameters=None,
        optional_url_parameters=None):
    """Decorator factory that validates requests against a declared contract.

    Args:
        api_version (str): API version being served by app.
        required_payload_elements (dict): Names and descriptions of
            required JSON payload elements.
        optional_payload_elements (dict): Names and descriptions of
            optional JSON payload elements.
        required_url_parameters (dict): Names and descriptions of
            required URL parameters.
        optional_url_parameters (dict): Names and descriptions of
            optional URL parameters.
    """
    # Collect only the dict-valued declarations (skips api_version and the
    # parameters left as None).
    api_usage_declarations = {
        key: value for key, value in locals().items() if type(value) == dict}
    utilities.python_to_json_syntax(api_usage_declarations)

    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Check for invalid JSON syntax.
            try:
                request_json = request.get_json()
            except BadRequest:
                return json_response(
                    400, 'JSON request payload has a syntax error(s).',
                    {'usage': api_usage_declarations})
            if 'requiredPayloadElements' in api_usage_declarations:
                # BUG FIX: request.get_json() returns None when the request
                # carries no JSON body; the original then crashed with
                # "TypeError: argument of type 'NoneType' is not iterable".
                if request_json is None:
                    return json_response(
                        400,
                        'JSON is missing a required element(s).',
                        {'usage': api_usage_declarations})
                for element in api_usage_declarations[
                        'requiredPayloadElements']:
                    if element not in request_json:
                        return json_response(
                            400,
                            'JSON is missing a required element(s).',
                            {'usage': api_usage_declarations})
            if 'requiredUrlParameters' in api_usage_declarations:
                request_args = request.args
                for element in api_usage_declarations['requiredUrlParameters']:
                    if element not in request_args:
                        return json_response(
                            400,
                            'A required URL parameter(s) is missing.',
                            {'usage': api_usage_declarations})
            return func(*args, **kwargs)
        return wrapper
    return decorate
# def response_template(api_version,
# required_payload_elements=None,
# optional_payload_elements=None,
# required_url_parameters=None,
# optional_url_parameters=None):
# """
# Ensure that a flask.Response object conforms to desired specifications.
#
# What should this function do?
#
# On the front side:
# 1. Verifies that the JSON in the request in valid.
# 2. Verifies that required payload/url elements are provided.
#
# On the backside:
# 1. Only include this information if original functions response code
# is 4xx/5xx to avoid overly verbose responses.
#
# """
# # Extract dictionary objects from locals().items()
# api_usage_declarations = {
# key: value for key, value in locals().items() if type(value) == dict}
#
# json_syntax_api_usage_declarations = dict()
# for key, value in api_usage_declarations.iteritems():
# for python_syntax in re.finditer(r'_[a-z]', key):
# key = key.replace(
# python_syntax.group(), python_syntax.group()[1].upper())
# json_syntax_api_usage_declarations[key] = value
#
# def decorate(func):
# @wraps(func)
# def wrapper(*args, **kwargs):
#
# response_data = dict(
# meta=dict(apiVersion=api_version,
# usage=json_syntax_api_usage_declarations,
# request=dict(
# requestUrl=request.base_url,
# requestMethod=request.method),
# response=dict()))
#
# # Check for Invalid JSON.
# try:
# request.get_json()
# except BadRequest:
# response_data['meta']['response']['message'] = (
# 'JSON contains syntax errors.')
# response_data['meta']['response']['statusCode'] = 400
# response = jsonify(response_data)
# response.status_code = (
# response_data['meta']['response']['statusCode'])
# current_app.logger.error(
# response_data['meta']['response']['message'])
# return response
#
# if 'requiredPayloadElements' in json_syntax_api_usage_declarations:
# request_json = request.get_json()
# for element in json_syntax_api_usage_declarations[
# 'requiredPayloadElements']:
# if not element in request_json:
# response_data['meta']['response']['statusCode'] = 400
# response_data['meta']['response']['message'] = (
# 'Required JSON element(s) are not in payload.')
# response = jsonify(response_data)
# response.status_code = (
# response_data['meta']['response']['statusCode'])
# current_app.logger.error(
# response_data['meta']['response']['message'])
# return response
# elif 'requiredUrlParameters' in json_syntax_api_usage_declarations:
# request_args = request.args
# for element in json_syntax_api_usage_declarations[
# 'requiredUrlParameters']:
# if not element in request_args:
# response_data['meta']['response']['statusCode'] = 400
# response_data['meta']['response']['message'] = (
# 'Missing required URL parameter(s).')
# response = jsonify(response_data)
# response.status_code = (
# response_data['meta']['response']['statusCode'])
# current_app.logger.error(
# response_data['meta']['response']['message'])
# return response
#
# original_response = func(*args, **kwargs)
# original_response_data = json.loads(original_response.data)
#
# if type(original_response) != Response:
#
#
# # Reassign dict location of 'meta' elements from original response.
# response_data['meta']['response'] = dict()
# for meta_element in ['statusCode', 'message']:
# if meta_element in original_response_data:
# response_data['meta']['response'][meta_element] = (
# original_response_data[meta_element])
# original_response_data.pop(meta_element)
#
# # Transfer remaining origin_response elements to new response dict.
# for element in original_response_data:
# response_data[element] = original_response_data[element]
#
# response = jsonify(response_data)
# response.status_code = (
# response_data['meta']['response']['statusCode'])
# return response
#
# return wrapper
#
# return decorate | PypiClean |
/MCdeck-0.6.3-py3-none-any.whl/mcdeck/tts.py | import os.path
from PySide6 import QtCore, QtWidgets, QtGui
from lcgtools.graphics import LcgImage
from mcdeck.util import ErrorDialog
class TTSExportDialog(QtWidgets.QDialog):
    """Dialog for Tabletop Simulator export.
    :param parent: parent widget
    :param settings: the application settings object
    :param cards: list of Card objects to export
    """
    def __init__(self, parent, settings, cards):
        super().__init__(parent)
        self.__settings = settings
        self.__cards = cards
        main_layout = QtWidgets.QVBoxLayout()
        # Path for output images of front and back sides
        output_box = QtWidgets.QGroupBox(self)
        output_box.setTitle('File outputs for Tabletop Simulator deck export')
        output_layout = QtWidgets.QGridLayout()
        lbl = QtWidgets.QLabel
        row = 0
        # Row 0: line edit + file picker for the front-side sheet path
        output_layout.addWidget(lbl('Front side images:'), row, 0)
        self.__front_img_le = QtWidgets.QLineEdit()
        output_layout.addWidget(self.__front_img_le, row, 1, 1, 3)
        front_img_btn = QtWidgets.QPushButton('File...')
        front_img_btn.clicked.connect(self.frontImgBtnClicked)
        output_layout.addWidget(front_img_btn, row, 4, 1, 1)
        row += 1
        # Row 1: line edit + file picker for the back-side sheet path
        output_layout.addWidget(lbl('Back side images:'), row, 0)
        self.__back_img_le = QtWidgets.QLineEdit()
        output_layout.addWidget(self.__back_img_le, row, 1, 1, 3)
        back_img_btn = QtWidgets.QPushButton('File...')
        back_img_btn.clicked.connect(self.backImgBtnClicked)
        output_layout.addWidget(back_img_btn, row, 4, 1, 1)
        output_box.setLayout(output_layout)
        main_layout.addWidget(output_box)
        # Exported per-card width in pixels; validator keeps it in 32..4096
        width_layout = QtWidgets.QHBoxLayout()
        width_layout.addWidget(lbl('Card width (px)'))
        self.__card_width_le = QtWidgets.QLineEdit()
        self.__card_width_le.setText(str(512))
        _val = QtGui.QIntValidator(32, 4096)
        self.__card_width_le.setValidator(_val)
        width_layout.addWidget(self.__card_width_le)
        main_layout.addLayout(width_layout)
        # Pushbuttons
        btns = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Ok |
                                          QtWidgets.QDialogButtonBox.Cancel)
        btns.rejected.connect(self.reject)
        btns.accepted.connect(self.accept)
        main_layout.addWidget(btns)
        self.setLayout(main_layout)
    def accept(self):
        # Validates the dialog inputs, renders the front/back card sheets,
        # saves them to disk, and shows TTS import instructions on success.
        if not self.__cards:
            ErrorDialog(self, 'No cards to export', 'The deck contains no '
                        'cards to be exported.').exec()
            return
        elif len(self.__cards) > 70:
            # 70-card limit on a single sheet (10 columns x 7 rows in TTS)
            ErrorDialog(self, 'Too many cards', 'The maximum number of cards '
                        'that can be exported is 70.').exec()
            return
        # Validate acceptable data
        front_path = self.__front_img_le.text()
        back_path = self.__back_img_le.text()
        if not (front_path and back_path):
            ErrorDialog(self, 'Missing filename', 'Front and/or back image '
                        'path has not been set').exec()
            return
        if front_path == back_path:
            ErrorDialog(self, 'Same filename', 'Front and back images cannot '
                        'have the same path').exec()
            return
        card_width = int(self.__card_width_le.text())
        if not 32 <= card_width <= 4096:
            ErrorDialog(self, 'Card width issue', 'Card width must be '
                        'within 32..4096 pixels').exec()
            return
        if os.path.exists(front_path) or os.path.exists(back_path):
            # Ask before overwriting existing files.
            # NOTE(review): QMessageBox.question expects
            # QMessageBox.StandardButton flags; passing QDialogButtonBox
            # constants here (and comparing the result against them below)
            # looks suspect -- confirm against the Qt bindings in use.
            _q = QtWidgets.QMessageBox.question
            _res = _q(self, 'File(s) already exist', 'Front and/or back '
                      'image already exist. Proceed with overwriting?',
                      QtWidgets.QDialogButtonBox.Ok,
                      QtWidgets.QDialogButtonBox.Cancel)
            if _res == QtWidgets.QDialogButtonBox.Cancel:
                return
        # Calculate relevant dimensions for export
        # At most 10 columns, and the sheet is kept within 4096 px wide
        card_cols = min(4096//card_width, 10)
        card_rows = len(self.__cards)//card_cols
        if len(self.__cards) % card_cols:
            card_rows += 1
        if card_rows == 1:
            card_cols = len(self.__cards)
        # Derive pixel height from the physical card aspect ratio in settings
        _s_c_height = self.__settings.card_height_mm
        _s_c_width = self.__settings.card_width_mm
        card_height = int(card_width*(_s_c_height/_s_c_width))
        card_size = QtCore.QSize(card_width, card_height)
        out_width = card_cols*card_width
        out_height = card_rows*card_height
        # Generate card front image
        pix = QtGui.QPixmap(out_width, out_height)
        p = QtGui.QPainter(pix)
        col, row = 0, 0
        for card in self.__cards:
            xpos, ypos = col*card_width, row*card_height
            c_img = card.front_img
            c_img = c_img.scaled(card_size,
                                 mode=QtCore.Qt.SmoothTransformation)
            p.drawImage(QtCore.QPoint(xpos, ypos), c_img)
            col += 1
            if col >= card_cols:
                row += 1
                col = 0
        # Release the painter before converting the pixmap to an image
        del p
        front_img = pix.toImage()
        # Generate card back images
        pix = QtGui.QPixmap(out_width, out_height)
        p = QtGui.QPainter(pix)
        col, row = 0, 0
        for card in self.__cards:
            xpos, ypos = col*card_width, row*card_height
            c_img = card.back_img
            # Cards without a back image leave their cell blank
            if c_img:
                if card.back_bleed > 0:
                    # Remove the print bleed margin before scaling
                    c_img = LcgImage(c_img).cropBleed(card.back_bleed)
                c_img = c_img.scaled(card_size,
                                     mode=QtCore.Qt.SmoothTransformation)
                p.drawImage(QtCore.QPoint(xpos, ypos), c_img)
            col += 1
            if col >= card_cols:
                row += 1
                col = 0
        del p
        back_img = pix.toImage()
        # Save the images
        try:
            front_img.save(front_path)
        except Exception as e:
            ErrorDialog(self, 'Image save error', 'Could not save file '
                        f'"{front_path}": {e}').exec()
            return
        try:
            back_img.save(back_path)
        except Exception as e:
            ErrorDialog(self, 'Image save error', 'Could not save file '
                        f'"{back_path}": {e}').exec()
            return
        # Display message about success export and how to import in TTS
        title = 'Images successfully exported'
        text = ('<p>The images were successfully exported as the files '
                f'"{front_path}" and "{back_path}".</p>'
                'In order to import these files into a Tabletop Simulator '
                '(TTS) game, perform the following operations in an active '
                'game (instructions confirmed with TTS version 13):</p>'
                '<ul>'
                ' <li>In TTS, choose Objects->Components->Custom->Deck</li>'
                ' <li>Click somewhere on the board, then hit Escape</li>'
                ' <li>In the "Face" field, insert front images file or URL</li>'
                ' <li>Select "Unique Backs"</li>'
                ' <li>In the "Back" field, insert back images file or URL</li>'
                f' <li>In the "Width" field, enter {card_cols}</li>'
                f' <li>In the "Height" field, enter {card_rows}</li>'
                f' <li>In the "Number" field, enter {len(self.__cards)}</li>'
                '</ul>'
                '<p>If the deck is intended to be accessible to other users, '
                'the images must be published to Internet accessible URLs '
                'which can be downloaded by TTS. Make sure to enter those URLs '
                'in the fields "Face" and "Back", rather than selecting '
                'local files.</p>')
        # NOTE(review): QMessageBox has no (parent, title, text) constructor
        # overload in Qt6 -- confirm this builds the intended dialog.
        info = QtWidgets.QMessageBox(self, title, '')
        info.setInformativeText(text)
        info.setStandardButtons(QtWidgets.QMessageBox.Ok)
        info.setDefaultButton(QtWidgets.QMessageBox.Ok)
        info.exec()
        super().accept()
    @QtCore.Slot()
    def frontImgBtnClicked(self, checked):
        # Prompt for the front sheet path, retrying until the user picks a
        # path different from the back sheet or cancels the dialog.
        while True:
            _get = QtWidgets.QFileDialog.getSaveFileName
            title = 'Select output image file for card fronts'
            filter = 'PNG (*.png);;JPEG (*.jpg *.jpeg)'
            _n, _f = _get(self, title, filter=filter)
            if _n:
                other_txt = self.__back_img_le.text()
                if other_txt and _n == other_txt:
                    ErrorDialog(self, 'Invalid file', 'Front and back images '
                                'cannot have the same path').exec()
                else:
                    self.__front_img_le.setText(_n)
                    break
            else:
                # User cancelled the file dialog
                break
    @QtCore.Slot()
    def backImgBtnClicked(self, checked):
        # Prompt for the back sheet path, retrying until the user picks a
        # path different from the front sheet or cancels the dialog.
        while True:
            _get = QtWidgets.QFileDialog.getSaveFileName
            title = 'Select output image file for card backs'
            filter = 'PNG (*.png);;JPEG (*.jpg *.jpeg)'
            _n, _f = _get(self, title, filter=filter)
            if _n:
                other_txt = self.__front_img_le.text()
                if other_txt and _n == other_txt:
                    ErrorDialog(self, 'Invalid file', 'Front and back images '
                                'cannot have the same path').exec()
                else:
                    self.__back_img_le.setText(_n)
                    break
            else:
                # User cancelled the file dialog
                break
/NeuralPlayground-0.0.7.tar.gz/NeuralPlayground-0.0.7/neuralplayground/agents/agent_core.py | import os
import pickle
import numpy as np
import pandas as pd
from deepdiff import DeepDiff
from scipy.stats import levy_stable
class AgentCore(object):
    """Abstract class for all EHC models

    Attributes
    ----------
    agent_name : str
        Name of the specific instantiation of the class
    mod_kwargs: dict
        Dictionary of specific parameters to be used by children classes
    metadata: dict
        Specific data structure which will contain specific description for each model
    obs_history: list
        List of past observations while interacting with the environment in the act method
    global_steps: int
        Record of number of updates done on the weights

    Methods
    -------
    reset(self):
        Erase all memory from the model, initialize all relevant parameters and build from scratch
    act(self, obs, policy_func=None):
        Given an observation, return an action following a specific policy, if policy_func is None, then
        return a random action
    update(self):
        Update model parameters, depends on the specific model
    save_agent(self, save_path: str, raw_object: bool = True):
        Save current state and information in general to re-instantiate the agent
    restore_agent(self, save_path: str):
        Restore saved agent
    """

    def __init__(self, agent_name: str = "default_model", agent_step_size: float = 1.0, **mod_kwargs):
        self.agent_name = agent_name
        self.mod_kwargs = mod_kwargs
        self.agent_step_size = agent_step_size
        self.metadata = {"mod_kwargs": mod_kwargs}
        self.obs_history = []
        self.global_steps = 0

    def reset(self):
        """Erase all memory from the model, initialize all relevant parameters and build from scratch"""
        self.obs_history = []
        self.global_steps = 0

    def act(self, obs, policy_func=None):
        """
        The base model executes a random action from a normal distribution

        Parameters
        ----------
        obs
            Observation from the environment class needed to choose the right action
        policy_func
            Arbitrary function that represents a custom policy that receives an observation and gives an action

        Returns
        -------
        action: np.ndarray of shape (2,), or None for an empty observation,
            or whatever policy_func returns when one is supplied
        """
        if len(obs) == 0:
            action = None
        else:
            action = np.random.normal(scale=self.agent_step_size, size=(2,))
        self.obs_history.append(obs)
        # Cap memory use: keep only the latest observation once the history
        # reaches 1000 entries.
        if len(self.obs_history) >= 1000:
            self.obs_history = [
                obs,
            ]
        if policy_func is not None:
            return policy_func(obs)
        return action

    def update(self):
        """Update model parameters"""
        return None

    def save_agent(self, save_path: str, raw_object: bool = True):
        """Save current state and information in general to re-instantiate the environment

        Parameters
        ----------
        save_path: str
            Path to save the agent
        raw_object: bool
            If True, save the raw object, otherwise save the dictionary of attributes
            If True, you can load the object by using agent = pd.read_pickle(save_path)
            if False, you can load the object by using agent.restore_agent(save_path)
        """
        # BUG FIX: open the file in a context manager so the handle is closed
        # even if pickling fails (the original leaked the open file object).
        payload = self if raw_object else self.__dict__
        with open(save_path, "wb") as save_file:
            pickle.dump(payload, save_file, pickle.HIGHEST_PROTOCOL)

    def restore_agent(self, save_path: str):
        """Restore saved agent state

        Parameters
        ----------
        save_path: str
            Path to retrieve the agent saved using save_agent method (raw_object=False)
        """
        self.__dict__ = pd.read_pickle(save_path)

    def __eq__(self, other):
        # Two agents are equal when their attribute dictionaries are
        # structurally identical (no differences reported by DeepDiff).
        return len(DeepDiff(self.__dict__, other.__dict__)) == 0

    def get_ratemap_matrix(self):
        """Function that returns some representation that will be compared against real experimental data"""
        pass
class RandomAgent(AgentCore):
    """Agent that wanders by drawing each step from a Gaussian distribution."""

    def __init__(self, step_size: float = 1.0):
        """Initialization

        Parameters
        ----------
        step_size: float
            Standard deviation of normal distribution where the step in x, y coordinates is sampled
        """
        super().__init__()
        self.step_size = step_size

    def act(self, obs):
        """Sample a random 2D displacement, ignoring the observation.

        Parameters
        ----------
        obs:
            Whatever observation from the environment class needed to choose the right action

        Returns
        -------
        nd.array (2,)
            position variation to compute next position
        """
        return np.random.normal(scale=self.step_size, size=(2,))
class LevyFlightAgent(RandomAgent):
    """Random walker with heavy-tailed (Lévy-stable) step lengths.

    Based on https://en.wikipedia.org/wiki/L%C3%A9vy_flight
    and https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.levy_stable.html#scipy.stats.levy_stable
    Still experimental, need hyperparameter tuning and perhaps some momentum.
    """

    def __init__(
        self,
        alpha: float = 0.3,
        beta: float = 1,
        loc: float = 1.0,
        scale: float = 0.8,
        step_size: float = 0.3,
        max_action_size: float = 50,
        max_step_size: float = 10,
    ):
        """Initializing levy flight agent

        From original documentation:
        The probability density above is defined in the "standardized" form. To shift and/or scale the distribution
        use the loc and scale parameters. Specifically, levy_stable.pdf(x, alpha, beta, loc, scale) is identically
        equivalent to levy_stable.pdf(y, alpha, beta) / scale with y = (x - loc) / scale. Note that shifting the
        location of a distribution does not make it a "noncentral" distribution; noncentral generalizations of some
        distributions are available in separate classes.

        Parameters
        ----------
        alpha, beta: float
            Levy flight distribution parameters
        loc: float
            bias of the standardized form
        scale: float
            scaling of the standardized form
        step_size: float
            direction scaling
        max_action_size: float
            maximum size of sampled step from levy distribution
        max_step_size: float
            maximum step size; larger sampled actions are split into sub-steps
        """
        super().__init__(step_size=step_size)
        # Frozen Lévy-stable distribution used to sample step lengths.
        self.levy = levy_stable(alpha, beta, loc=loc, scale=scale)
        self.alpha = alpha
        self.beta = beta
        self.max_action_size = max_action_size
        self.max_step_size = max_step_size
        # Pending sub-steps of an oversized action, served LIFO by act().
        self.action_buffer = []

    def _act(self, obs):
        """Auxiliary action method: one raw Lévy-flight displacement.

        Parameters
        ----------
        obs:
            Whatever observation from the environment class needed to choose
            the right action

        Returns
        -------
        d_pos: nd.array (2,)
            position variation to compute next position
        """
        # Pick a random direction (Gaussian sample from the parent class).
        direction = super().act(obs)
        # Normalize the direction vector to length ``step_size``.
        direction = direction / np.sqrt(np.sum(direction**2)) * self.step_size
        # Sample a heavy-tailed step length, clipped to [0, max_action_size].
        r = np.clip(self.levy.rvs(), a_min=0, a_max=self.max_action_size)
        # Step of Lévy-sampled length in the random direction.
        d_pos = r * direction
        return d_pos

    def act(self, obs):
        """Sample levy flight steps. If steps are too large (action_size > max_step_size),
        divide them into several steps in the same direction.

        Parameters
        ----------
        obs:
            Whatever observation from the environment class needed to choose
            the right action

        Returns
        -------
        d_pos: nd.array (2,)
            position variation to compute next position
        """
        # Serve any pending sub-step of a previously split action first.
        if len(self.action_buffer) > 0:
            return self.action_buffer.pop()
        action = self._act(obs)
        action_size = np.sqrt(np.sum(action**2))
        if action_size > self.max_step_size:
            # BUG FIX: split relative to max_step_size (the per-step limit
            # checked above), not max_action_size as the original did, and
            # push equal sub-steps action / n so they sum back to the
            # sampled action (the original pushed unit vectors, which
            # neither summed to the action nor respected step_size).
            n_sub_steps = int(np.ceil(action_size / self.max_step_size))
            sub_step = action / n_sub_steps
            self.action_buffer += [sub_step for _ in range(n_sub_steps)]
            return self.action_buffer.pop()
        return action
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.