code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
package org.pocketcampus.plugin.authentication.server;
import ch.epfl.tequila.client.model.TequilaPrincipal;
import java.util.List;
/**
 * Server-side manager for sessions opened through Tequila authentication.
 */
public interface SessionManager {
/** Creates a session for the authenticated principal; returns the new session id. */
String insert(TequilaPrincipal principal, boolean rememberMe);
/** Returns values for the requested fields of the given session (order presumably matches {@code fields} — confirm with implementation). */
List<String> getFields(String sessionId, List<String> fields);
/** Destroys the sessions associated with the given sciper; returns a count (presumably of destroyed sessions — confirm with implementation). */
Integer destroySessions(String sciper);
}
| ValentinMinder/pocketcampus | plugin/authentication/server/src/main/java/org/pocketcampus/plugin/authentication/server/SessionManager.java | Java | bsd-3-clause | 340 |
package com.winningsmiledental;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.tree.*;
import org.swixml.*;
import org.swixml.SwingEngine;
import java.io.*;
import java.util.*;
import java.net.*;
import org.jdom.*;
import java.sql.*;
/**
 * Action listener for the patient-information screen. Dispatches the
 * screen's action commands (cancel, new, edit, seek) to the application
 * executioner or back to the owning {@code PatientInfoGUI}.
 *
 * <p>Cleanup: removed commented-out dead code referencing a RecordManager
 * that is no longer used.</p>
 */
public class PatientInfoListener extends AbstractListener {

    /**
     * @param gui the owning GUI; expected to be a {@code PatientInfoGUI}
     */
    public PatientInfoListener(GUI gui) {
        super(gui);
    }

    /**
     * Routes the event's action command to the matching screen transition.
     *
     * @param ae the Swing action event whose command selects the branch
     */
    public void actionPerformed(ActionEvent ae) {
        String command = ae.getActionCommand();
        if (command.equals("AC_CANCEL")) {
            getExecutioner().loadMainMenu();
        }
        else if (command.equals("AC_NEW")) {
            getExecutioner().loadPatientRecord();
        }
        else if (command.equals("AC_EDIT")) {
            JTable table = ((PatientInfoGUI) gui).getTable();
            int row = table.getSelectedRow();
            if (row >= 0) { // -1 means no row is selected
                try {
                    // Column 4 of the table holds the record control number.
                    int rcn = ((Integer) table.getValueAt(row, 4)).intValue();
                    System.out.println("rcn: " + rcn);
                    getExecutioner().loadPatientRecord(rcn);
                }
                catch (Exception e) {
                    // Defensive: a malformed table cell must not kill the
                    // event-dispatch thread.
                    e.printStackTrace();
                }
            }
        }
        else if (command.equals("AC_SEEK")) {
            ((PatientInfoGUI) gui).refreshTable();
        }
    }
}
| mjpan/jdentpro | src/com/winningsmiledental/PatientInfoListener.java | Java | bsd-3-clause | 1,458 |
;(function($B){
//eval($B.InjectBuiltins())
var _b_ = $B.builtins;
var object = _b_.object
var JSObject = $B.JSObject
var _window = self;
// cross-browser utility functions
function $getMouseOffset(target, ev){
    // Position of the mouse event relative to element "target".
    ev = ev || _window.event;
    var docPos = $getPosition(target);
    var mousePos = $mouseCoords(ev);
    // NOTE(review): $getPosition returns {left, top, width, height}, not
    // {x, y} - docPos.x / docPos.y are undefined here, so the result is
    // NaN. Confirm whether this helper is still used before fixing.
    return {x:mousePos.x - docPos.x, y:mousePos.y - docPos.y};
}
function $getPosition(e){
    // Returns the document-absolute geometry of element e as
    // {left, top, width, height}. Walks up the offsetParent chain; for
    // elements with no offsetParent chain into the document (eg SVG
    // children) it recurses on the parent element.
    // Fixed: removed the unused "scroll" local variable.
    var left = 0,
        top = 0,
        width = e.width || e.offsetWidth,
        height = e.height || e.offsetHeight
    while (e.offsetParent){
        left += e.offsetLeft
        top += e.offsetTop
        e = e.offsetParent
    }
    // topmost element of the chain still contributes its own offsets
    left += e.offsetLeft || 0
    top += e.offsetTop || 0
    if(e.parentElement){
        // eg SVG element inside an HTML element
        var parent_pos = $getPosition(e.parentElement)
        left += parent_pos.left
        top += parent_pos.top
    }
    return {left: left, top: top, width: width, height: height}
}
function trace(msg){
    // Debug helper: appends msg to the element with id "trace", if any.
    var target = document.getElementById("trace")
    if(target !== null){
        target.innerText += msg
    }
}
function $mouseCoords(ev){
    // Position of a mouse or touch event, as an object exposing integer
    // x / y attributes readable from Python through __getattr__.
    function as_coords(px, py){
        var coords = {
            x: _b_.int.$factory(px),
            y: _b_.int.$factory(py)
        }
        coords.__getattr__ = function(attr){return this[attr]}
        coords.__class__ = "MouseCoords"
        return coords
    }
    // touch events carry their position in the first touch point
    if(ev.type.startsWith("touch")){
        var touch = ev.touches[0]
        return as_coords(touch.screenX, touch.screenY)
    }
    if(!ev){ev = _window.event}
    var posx = 0,
        posy = 0
    if(ev.pageX || ev.pageY){
        posx = ev.pageX
        posy = ev.pageY
    }else if(ev.clientX || ev.clientY){
        // older browsers: client coordinates + document scroll offsets
        posx = ev.clientX + document.body.scrollLeft +
            document.documentElement.scrollLeft
        posy = ev.clientY + document.body.scrollTop +
            document.documentElement.scrollTop
    }
    return as_coords(posx, posy)
}
// Generic attributes shared by all DOM nodes.
var $DOMNodeAttrs = ["nodeName", "nodeValue", "nodeType", "parentNode",
    "childNodes", "firstChild", "lastChild", "previousSibling", "nextSibling",
    "attributes", "ownerDocument"]
$B.$isNode = function(o){
    // True if o is a DOM node. When the Node interface is exposed as an
    // object, use instanceof; otherwise duck-type on nodeType / nodeName.
    // (cf. http://stackoverflow.com/questions/384286/)
    if(typeof Node === "object"){
        return o instanceof Node
    }
    return o && typeof o === "object" &&
           typeof o.nodeType === "number" &&
           typeof o.nodeName === "string"
}
$B.$isNodeList = function(nodes) {
    // True if nodes is an HTMLCollection or a NodeList.
    // (cf. http://stackoverflow.com/questions/7238177/)
    try{
        var tag = Object.prototype.toString.call(nodes),
            pattern = new RegExp("^\\[object (HTMLCollection|NodeList)\\]$")
        if(typeof nodes !== "object"){return false}
        if(pattern.exec(tag) === null){return false}
        if(nodes.length === undefined){return false}
        // an empty collection qualifies; otherwise check the first item
        return nodes.length == 0 ||
            (typeof nodes[0] === "object" && nodes[0].nodeType > 0)
    }catch(err){
        return false
    }
}
// Attributes defined on W3C-compliant event objects.
var $DOMEventAttrs_W3C = ["NONE", "CAPTURING_PHASE", "AT_TARGET",
    "BUBBLING_PHASE", "type", "target", "currentTarget", "eventPhase",
    "bubbles", "cancelable", "timeStamp", "stopPropagation",
    "preventDefault", "initEvent"]
// Attributes defined on legacy Internet Explorer event objects.
var $DOMEventAttrs_IE = ["altKey", "altLeft", "button", "cancelBubble",
    "clientX", "clientY", "contentOverflow", "ctrlKey", "ctrlLeft", "data",
    "dataFld", "dataTransfer", "fromElement", "keyCode", "nextPage",
    "offsetX", "offsetY", "origin", "propertyName", "reason", "recordset",
    "repeat", "screenX", "screenY", "shiftKey", "shiftLeft",
    "source", "srcElement", "srcFilter", "srcUrn", "toElement", "type",
    "url", "wheelDelta", "x", "y"]
$B.$isEvent = function(obj){
    // Heuristic: obj is considered an event if it defines every W3C
    // event attribute, or every IE-specific event attribute.
    var defines = function(name){return obj[name] !== undefined}
    if($DOMEventAttrs_W3C.every(defines)){
        return true
    }
    return $DOMEventAttrs_IE.every(defines)
}
// DOM node types
// DOM node types: numeric nodeType -> symbolic name.
var $NodeTypes = {1: "ELEMENT",
    2: "ATTRIBUTE",
    3: "TEXT",
    4: "CDATA_SECTION",
    5: "ENTITY_REFERENCE",
    6: "ENTITY",
    7: "PROCESSING_INSTRUCTION",
    8: "COMMENT",
    9: "DOCUMENT",
    10: "DOCUMENT_TYPE",
    11: "DOCUMENT_FRAGMENT",
    12: "NOTATION"
}
// Class for DOM attributes: exposes an element's attributes as a
// Python mapping (used for element.attrs).
var Attributes = $B.make_class("Attributes",
    function(elt){
        // instances only hold a reference to the wrapped element
        return{
            __class__: Attributes,
            elt: elt
        }
    }
)
Attributes.__contains__ = function(){
    // Membership test ("key in elt.attrs"). SVG elements use the
    // namespace-aware API, plain elements use hasAttribute.
    // Fixed: $B.args was called with the wrong function name
    // ("__getitem__"), producing misleading argument-error messages.
    var $ = $B.args("__contains__", 2, {self: null, key:null},
        ["self", "key"], arguments, {}, null, null)
    if($.self.elt instanceof SVGElement){
        return $.self.elt.hasAttributeNS(null, $.key)
    }else if(typeof $.self.elt.hasAttribute == "function"){
        return $.self.elt.hasAttribute($.key)
    }
    return false
}
Attributes.__delitem__ = function(){
    // Removes attribute $.key; raises KeyError if it is not present.
    // Fixed: $B.args was called with the wrong function name
    // ("__getitem__"), producing misleading argument-error messages.
    var $ = $B.args("__delitem__", 2, {self: null, key:null},
        ["self", "key"], arguments, {}, null, null)
    if(!Attributes.__contains__($.self, $.key)){
        throw _b_.KeyError.$factory($.key)
    }
    if($.self.elt instanceof SVGElement){
        $.self.elt.removeAttributeNS(null, $.key)
        return _b_.None
    }else if(typeof $.self.elt.hasAttribute == "function"){
        $.self.elt.removeAttribute($.key)
        return _b_.None
    }
}
Attributes.__getitem__ = function(){
    // Returns the value of attribute $.key, or raises KeyError when the
    // attribute is not present on the element.
    var $ = $B.args("__getitem__", 2, {self: null, key:null},
            ["self", "key"], arguments, {}, null, null),
        elt = $.self.elt
    if(elt instanceof SVGElement && elt.hasAttributeNS(null, $.key)){
        return elt.getAttributeNS(null, $.key)
    }
    if(typeof elt.hasAttribute == "function" && elt.hasAttribute($.key)){
        return elt.getAttribute($.key)
    }
    throw _b_.KeyError.$factory($.key)
}
Attributes.__iter__ = function(self){
    // Prepares iteration: snapshots the attribute names and resets the
    // iteration counter; the mapping is its own iterator.
    self.$counter = 0
    var attrs = self.elt.attributes,
        names = []
    for(var i = 0, len = attrs.length; i < len; i++){
        names.push(attrs[i].name)
    }
    self.$items = names
    return self
}
Attributes.__next__ = function(){
    // Yields the next attribute name, or raises StopIteration.
    var $ = $B.args("__next__", 1, {self: null},
            ["self"], arguments, {}, null, null),
        self = $.self
    if(self.$counter >= self.$items.length){
        throw _b_.StopIteration.$factory("")
    }
    return self.$items[self.$counter++]
}
Attributes.__setitem__ = function(){
    // Sets attribute $.key to $.value, using the namespace-aware API
    // for SVG elements.
    var $ = $B.args("__setitem__", 3, {self: null, key:null, value: null},
            ["self", "key", "value"], arguments, {}, null, null),
        elt = $.self.elt
    if(elt instanceof SVGElement &&
            typeof elt.setAttributeNS == "function"){
        elt.setAttributeNS(null, $.key, $.value)
        return _b_.None
    }
    if(typeof elt.setAttribute == "function"){
        elt.setAttribute($.key, $.value)
        return _b_.None
    }
    throw _b_.TypeError.$factory("Can't set attributes on element")
}
Attributes.get = function(){
    // dict-like get(key, default): the attribute value when present,
    // otherwise the default (None if not supplied).
    var $ = $B.args("get", 3, {self: null, key:null, deflt: null},
        ["self", "key", "deflt"], arguments, {deflt:_b_.None}, null, null)
    try{
        return Attributes.__getitem__($.self, $.key)
    }catch(err){
        if(err.__class__ !== _b_.KeyError){
            throw err
        }
        return $.deflt
    }
}
Attributes.keys = function(){
    // Iterating on the mapping already yields attribute names, so
    // keys() simply delegates to __iter__.
    return Attributes.__iter__.apply(null, arguments)
}
Attributes.items = function(){
    // Iterator on (name, value) pairs, like dict.items().
    // Fixed: $B.args was called with the wrong function name
    // ("values"), producing misleading argument-error messages.
    var $ = $B.args("items", 1, {self: null},
            ["self"], arguments, {}, null, null),
        attrs = $.self.elt.attributes,
        values = []
    for(var i = 0; i < attrs.length; i++){
        values.push([attrs[i].name, attrs[i].value])
    }
    return _b_.list.__iter__(values)
}
Attributes.values = function(){
    // Iterator on attribute values, like dict.values().
    var $ = $B.args("values", 1, {self: null},
            ["self"], arguments, {}, null, null),
        attrs = $.self.elt.attributes,
        res = []
    for(var i = 0, len = attrs.length; i < len; i++){
        res.push(attrs[i].value)
    }
    return _b_.list.__iter__(res)
}
$B.set_func_names(Attributes, "<dom>")
// Class for DOM events
var DOMEvent = $B.DOMEvent = {
    __class__: _b_.type, // DOMEvent is a Python class (metaclass type)
    __mro__: [object],
    $infos:{
        __name__: "DOMEvent"
    }
}
DOMEvent.__new__ = function(cls, evt_name){
    // Builds a native Event named evt_name, tagged with the Python
    // class, with fallbacks for browsers lacking preventDefault /
    // stopPropagation.
    var ev = new Event(evt_name)
    ev.__class__ = DOMEvent
    if(ev.preventDefault === undefined){
        ev.preventDefault = function(){
            ev.returnValue = false
        }
    }
    if(ev.stopPropagation === undefined){
        ev.stopPropagation = function(){
            ev.cancelBubble = true
        }
    }
    return ev
}
function dom2svg(svg_elt, coords){
    // Converts screen coordinates (coords.x, coords.y) into the local
    // coordinate system of the SVG element svg_elt.
    var point = svg_elt.createSVGPoint()
    point.x = coords.x
    point.y = coords.y
    var screen_matrix = svg_elt.getScreenCTM()
    return point.matrixTransform(screen_matrix.inverse())
}
DOMEvent.__getattribute__ = function(self, attr){
    // Python attribute access on a DOM event. A few attributes are
    // computed (x, y, data, char, svgX, svgY); anything else is read
    // from the underlying event object and converted to Python.
    switch(attr) {
        case '__repr__':
        case '__str__':
            return function(){return '<DOMEvent object>'}
        case 'x':
            return $mouseCoords(self).x
        case 'y':
            return $mouseCoords(self).y
        case 'data':
            // drag-and-drop events expose dataTransfer as a Clipboard
            if(self.dataTransfer !== undefined){
                return Clipboard.$factory(self.dataTransfer)
            }
            return $B.$JS2Py(self['data'])
        case 'target':
            if(self.target !== undefined){
                return DOMNode.$factory(self.target)
            }
            // NOTE(review): no break here - when self.target is
            // undefined this falls through to the 'char' case;
            // presumably unintended, confirm before changing.
        case 'char':
            return String.fromCharCode(self.which)
        case 'svgX':
            if(self.target instanceof SVGSVGElement){
                return Math.floor(dom2svg(self.target, $mouseCoords(self)).x)
            }
            throw _b_.AttributeError.$factory("event target is not an SVG " +
                "element")
        case 'svgY':
            if(self.target instanceof SVGSVGElement){
                return Math.floor(dom2svg(self.target, $mouseCoords(self)).y)
            }
            throw _b_.AttributeError.$factory("event target is not an SVG " +
                "element")
    }
    var res = self[attr]
    if(res !== undefined){
        if(typeof res == "function"){
            // wrap native methods for Python callers
            var func = function(){
                var args = []
                for(var i = 0; i < arguments.length; i++){
                    args.push($B.pyobj2jsobj(arguments[i]))
                }
                // NOTE(review): "args" is built above but "arguments"
                // is what is actually passed to the native method -
                // looks like a bug; confirm before changing.
                return res.apply(self, arguments)
            }
            func.$infos = {
                __name__: res.name,
                __qualname__: res.name
            }
            return func
        }
        return $B.$JS2Py(res)
    }
    throw _b_.AttributeError.$factory("object DOMEvent has no attribute '" +
        attr + "'")
}
DOMEvent.$factory = function(evt_name){
    // Factory to create instances of DOMEvent, based on an event name
    // (eg "click"); simply delegates to __new__.
    return DOMEvent.__new__(DOMEvent, evt_name)
}
// Function to transform a DOM event into an instance of DOMEvent
var $DOMEvent = $B.$DOMEvent = function(ev){
    // Tags a native event so Python code can use it, with fallbacks
    // for browsers lacking preventDefault / stopPropagation.
    ev.__class__ = DOMEvent
    ev.$no_dict = true
    if(typeof ev.preventDefault == "undefined"){
        ev.preventDefault = function(){
            ev.returnValue = false
        }
    }
    if(typeof ev.stopPropagation == "undefined"){
        ev.stopPropagation = function(){
            ev.cancelBubble = true
        }
    }
    return ev
}
$B.set_func_names(DOMEvent, "browser")
// Python class wrapping a dataTransfer object (drag and drop).
var Clipboard = {
    __class__: _b_.type,
    $infos: {
        __module__: "browser",
        __name__: "Clipboard"
    }
}
// clipboard[name] : data stored for MIME type "name"
Clipboard.__getitem__ = function(self, name){
    return self.data.getData(name)
}
Clipboard.__mro__ = [object]
// clipboard[name] = value : stores data for MIME type "name"
Clipboard.__setitem__ = function(self, name, value){
    self.data.setData(name, value)
}
Clipboard.$factory = function(data){ // drag and drop dataTransfer
    return {
        __class__ : Clipboard,
        __dict__: _b_.dict.$factory(),
        data : data
    }
}
$B.set_func_names(Clipboard, "<dom>")
function $EventsList(elt, evt, arg){
    // Handles a list of callback functions for the event evt of element
    // elt. Method .remove(callback) removes the callback from the list
    // and removes the event listener.
    this.elt = elt
    this.evt = evt
    // Fixed: "isintance(arg, list)" referenced two undefined names;
    // use the builtin isinstance with the Python list type.
    if(_b_.isinstance(arg, _b_.list)){this.callbacks = arg}
    else{this.callbacks = [arg]}
    this.remove = function(callback){
        var found = false
        for(var i = 0; i < this.callbacks.length; i++){
            if(this.callbacks[i] === callback){
                found = true
                // Fixed: was "this.callback.splice" (undefined property)
                this.callbacks.splice(i, 1)
                this.elt.removeEventListener(this.evt, callback, false)
                break
            }
        }
        if(! found){throw _b_.KeyError.$factory("not found")}
    }
}
// Python class wrapping a browser File object read through a FileReader.
var OpenFile = $B.OpenFile = {
    __class__: _b_.type, // metaclass type
    __mro__: [object],
    $infos: {
        __module__: "<pydom>",
        __name__: "OpenFile"
    }
}
OpenFile.$factory = function(file, mode, encoding) {
    // Wraps a browser File in a FileReader; mode "r" reads text with
    // the given encoding, "rb" reads a binary string.
    var res = {
        // Fixed: was "$OpenFileDict", a name not defined anywhere in
        // this file; instances belong to the OpenFile class (otherwise
        // OpenFile.__getattr__ / __setattr__ never apply).
        __class__: OpenFile,
        file: file,
        reader: new FileReader()
    }
    if(mode === "r"){
        res.reader.readAsText(file, encoding)
    }else if(mode === "rb"){
        res.reader.readAsBinaryString(file)
    }
    return res
}
OpenFile.__getattr__ = function(self, attr) {
    // Getters defined on the instance ("get_<attr>") take precedence;
    // otherwise the attribute is read from the underlying FileReader.
    if(self["get_" + attr] !== undefined){return self["get_" + attr]}
    return self.reader[attr]
}
OpenFile.__setattr__ = function(self, attr, value) {
    // "on<event>" attributes bind a Python callback on the FileReader;
    // other attributes use a "set_<attr>" setter when the reader has
    // one, or are assigned directly.
    var obj = self.reader
    if(attr.substr(0,2) == "on"){ // event
        var callback = function(ev) { return value($DOMEvent(ev)) }
        obj.addEventListener(attr.substr(2), callback)
    }else if("set_" + attr in obj){
        return obj["set_" + attr](value)
    }else if(attr in obj){
        obj[attr] = value
    }else{
        // NOTE(review): "setattr" is not defined in this scope
        // (presumably _b_.setattr was intended) - reaching this branch
        // would raise a ReferenceError; confirm.
        setattr(obj, attr, value)
    }
}
$B.set_func_names(OpenFile, "<dom>")
// Minimal Python classes exposed for the browser File / FileReader types.
var dom = {
    File : function(){},
    FileReader : function(){}
}
dom.File.__class__ = _b_.type
dom.File.__str__ = function(){return "<class 'File'>"}
dom.FileReader.__class__ = _b_.type
dom.FileReader.__str__ = function(){return "<class 'FileReader'>"}
// Class for options in a select box. Instances wrap the "options"
// collection of a SELECT element and expose it as a Python sequence.
var Options = {
    __class__: _b_.type,
    // del options[option] : removes the option element
    __delitem__: function(self, arg){
        self.parent.options.remove(arg.elt)
    },
    // options[i] : wraps the i-th option as a DOMNode
    __getitem__: function(self, key){
        return DOMNode.$factory(self.parent.options[key])
    },
    __len__: function(self){
        return self.parent.options.length
    },
    __mro__: [object],
    __setattr__: function(self, attr, value){
        self.parent.options[attr] = value
    },
    __setitem__: function(self, attr, value){
        self.parent.options[attr] = $B.$JS2Py(value)
    },
    __str__: function(self){
        return "<object Options wraps " + self.parent.options + ">"
    },
    // appends an option (a DOMNode) at the end of the collection
    append: function(self, element){
        self.parent.options.add(element.elt)
    },
    // inserts an option before the given index (appends when omitted)
    insert: function(self, index, element){
        if(index === undefined){self.parent.options.add(element.elt)}
        else{self.parent.options.add(element.elt, index)}
    },
    item: function(self, index){
        return self.parent.options.item(index)
    },
    namedItem: function(self, name){
        return self.parent.options.namedItem(name)
    },
    remove: function(self, arg){
        self.parent.options.remove(arg.elt)
    },
    $infos: {
        __module__: "<pydom>",
        __name__: "Options"
    }
}
Options.$factory = function(parent){
    // Wraps the options collection of the SELECT element "parent".
    var res = {
        __class__: Options,
        parent: parent
    }
    return res
}
$B.set_func_names(Options, "<dom>")
// Class for DOM nodes
var DOMNode = {
    __class__ : _b_.type, // Python class (metaclass type)
    __mro__: [object],
    $infos: {
        __module__: "browser",
        __name__: "DOMNode"
    }
}
DOMNode.$factory = function(elt, fromtag){
    // Wraps a DOM element in a DOMNode instance, or in a browser.html
    // tag class when one is registered for the element's tag name.
    if(elt.__class__ === DOMNode){return elt}
    // Javascript primitives pass through unchanged
    if(typeof elt == "number" || typeof elt == "boolean" ||
        typeof elt == "string"){return elt}
    // if none of the above, fromtag determines if the call is made by
    // the tag factory or by any other call to DOMNode
    // if made by tag factory (fromtag will be defined, the value is not
    // important), the regular plain old behavior is retained. Only the
    // return value of a DOMNode is sought
    // In other cases (fromtag is undefined), DOMNode tries to return a "tag"
    // from the browser.html module by looking into "$tags" which is set
    // by the browser.html module itself (external sources could override
    // it) and piggybacks on the tag factory by adding an "elt_wrap"
    // attribute to the class to let it know, that special behavior
    // is needed. i.e: don't create the element, use the one provided
    if(fromtag === undefined) {
        if(DOMNode.tags !== undefined) { // tags is a python dictionary
            var tdict = DOMNode.tags.$string_dict
            if(tdict !== undefined && tdict.hasOwnProperty(elt.tagName)) {
                try{
                    var klass = tdict[elt.tagName][0]
                }catch(err){
                    console.log("tdict", tdict, "tag name", elt.tagName)
                    throw err
                }
                if(klass !== undefined) {
                    // all checks are good
                    klass.$elt_wrap = elt // tell class to wrap element
                    return klass.$factory() // and return what the factory wants
                }
            }
        }
        // all "else" ... default to old behavior of plain DOMNode wrapping
    }
    if(elt["$brython_id"] === undefined || elt.nodeType == 9){
        // add a unique id for comparisons
        elt.$brython_id = "DOM-" + $B.UUID()
    }
    // the instance __dict__ is a Python dict backed by the element itself
    var __dict__ = _b_.dict.$factory()
    __dict__.$jsobj = elt
    return {
        __class__: DOMNode,
        __dict__: __dict__,
        elt: elt
    }
}
DOMNode.__add__ = function(self, other){
    // adding another element to self returns an instance of TagSum
    // Fixed: "pos" was assigned without "var", creating an implicit
    // global variable.
    var res = TagSum.$factory(),
        pos = 1
    res.children = [self]
    if(_b_.isinstance(other, TagSum)){
        res.children = res.children.concat(other.children)
    }else if(_b_.isinstance(other,[_b_.str, _b_.int, _b_.float, _b_.list,
            _b_.dict, _b_.set, _b_.tuple])){
        // Python scalars / containers become text nodes
        res.children[pos++] = DOMNode.$factory(
            document.createTextNode(_b_.str.$factory(other)))
    }else if(_b_.isinstance(other, DOMNode)){
        res.children[pos++] = other
    }else{
        // If other is iterable, add all items
        try{res.children = res.children.concat(_b_.list.$factory(other))}
        catch(err){throw _b_.TypeError.$factory("can't add '" +
            $B.class_name(other) + "' object to DOMNode instance")
        }
    }
    return res
}
// A DOM node is always truthy.
DOMNode.__bool__ = function(self){return true}
DOMNode.__contains__ = function(self, key){
    // For document, if key is a string, "key in document" tells if an
    // element with id "key" is in the document; for node collections,
    // tests membership of the node itself.
    var elt = self.elt
    if(elt.nodeType == 9 && typeof key == "string"){
        return document.getElementById(key) !== null
    }
    if(key.elt !== undefined){
        key = key.elt
    }
    if(elt.length !== undefined && typeof elt.item == "function"){
        for(var i = 0, len = elt.length; i < len; i++){
            if(elt.item(i) === key){
                return true
            }
        }
    }
    return false
}
DOMNode.__del__ = function(self){
    // Removes the node from its parent's children; fails when the node
    // has no parent.
    var parent = self.elt.parentNode
    if(! parent){
        throw _b_.ValueError.$factory("can't delete " +
            _b_.str.$factory(self.elt))
    }
    parent.removeChild(self.elt)
}
DOMNode.__delattr__ = function(self, attr){
    // Deletes a property of the underlying element; AttributeError when
    // the property is not set.
    if(self.elt[attr] !== undefined){
        delete self.elt[attr]
        return _b_.None
    }
    throw _b_.AttributeError.$factory(
        `cannot delete DOMNode attribute '${attr}'`)
}
DOMNode.__delitem__ = function(self, key){
    if(self.elt.nodeType == 9){ // document : remove by id
        var res = self.elt.getElementById(key)
        if(res){res.parentNode.removeChild(res)}
        else{throw _b_.KeyError.$factory(key)}
    }else{ // other node : remove by rank in child nodes
        // NOTE(review): "key" is ignored in this branch - the node
        // itself is removed from its parent instead of the child at
        // rank "key"; confirm against the comment above.
        self.elt.parentNode.removeChild(self.elt)
    }
}
DOMNode.__dir__ = function(self){
    // Sorted list of the element's enumerable attributes; internal
    // attributes (starting with "$") are skipped.
    var names = []
    for(var attr in self.elt){
        if(attr.charAt(0) !== "$"){
            names.push(attr)
        }
    }
    return names.sort()
}
// Two DOMNode instances are equal if they wrap the same DOM element.
DOMNode.__eq__ = function(self, other){
    return self.elt == other.elt
}
DOMNode.__getattribute__ = function(self, attr){
    // Python attribute lookup on a DOM node. Resolution order: special
    // Brython names, computed geometry attributes, element properties
    // (native methods are wrapped for Python), then the generic object
    // lookup.
    if(attr.substr(0, 2) == "$$"){attr = attr.substr(2)}
    switch(attr) {
        case "attrs":
            return Attributes.$factory(self.elt)
        case "class_name":
        case "html":
        case "id":
        case "parent":
        case "query":
        case "text":
            return DOMNode[attr](self)
        case "height":
        case "left":
        case "top":
        case "width":
            // Special case for Canvas
            // http://stackoverflow.com/questions/4938346/canvas-width-and-height-in-html5
            if(self.elt.tagName == "CANVAS" && self.elt[attr]){
                return self.elt[attr]
            }
            if(self.elt instanceof SVGElement){
                return self.elt[attr].baseVal.value
            }
            if(self.elt.style[attr]){
                return parseInt(self.elt.style[attr])
            }else{
                // fall back to the computed style, rounded to nearest int
                var computed = window.getComputedStyle(self.elt)[attr]
                if(computed !== undefined){
                    return Math.floor(parseFloat(computed) + 0.5)
                }
                throw _b_.AttributeError.$factory("style." + attr +
                    " is not set for " + _b_.str.$factory(self))
            }
        case "x":
        case "y":
            // NOTE(review): intentionally falls through to the next
            // cases when the element is an SVGElement, so that x / y of
            // SVG elements resolve as regular properties - confirm.
            if(! (self.elt instanceof SVGElement)){
                var pos = $getPosition(self.elt)
                return attr == "x" ? pos.left : pos.top
            }
        case "clear":
        case "closest":
            return function(){
                return DOMNode[attr](self, arguments[0])
            }
        case "headers":
            if(self.elt.nodeType == 9){
                // HTTP headers of the document, fetched synchronously
                var req = new XMLHttpRequest();
                req.open("GET", document.location, false)
                req.send(null);
                var headers = req.getAllResponseHeaders()
                headers = headers.split("\r\n")
                var res = _b_.dict.$factory()
                for(var i = 0; i < headers.length; i++){
                    var header = headers[i]
                    if(header.strip().length == 0){continue}
                    var pos = header.search(":")
                    res.__setitem__(header.substr(0, pos),
                        header.substr(pos + 1).lstrip())
                }
                return res
            }
            break
        case "$$location":
            attr = "location"
            break
    }
    // Special case for attribute "select" of INPUT or TEXTAREA tags :
    // they have a "select" methods ; element.select() selects the
    // element text content.
    // Return a function that, if called without arguments, uses this
    // method ; otherwise, uses DOMNode.select
    if(attr == "select" && self.elt.nodeType == 1 &&
        ["INPUT", "TEXTAREA"].indexOf(self.elt.tagName.toUpperCase()) > -1){
        return function(selector){
            if(selector === undefined){self.elt.select(); return _b_.None}
            return DOMNode.select(self, selector)
        }
    }
    // Looking for property. If the attribute is in the forbidden
    // arena ... look for the aliased version
    var property = self.elt[attr]
    if(property === undefined && $B.aliased_names[attr]){
        property = self.elt["$$" + attr]
    }
    if(property === undefined){
        return object.__getattribute__(self, attr)
    }
    var res = property
    if(res !== undefined){
        if(res === null){return _b_.None}
        if(typeof res === "function"){
            // If elt[attr] is a function, it is converted in another function
            // that produces a Python error message in case of failure.
            var func = (function(f, elt){
                return function(){
                    var args = [], pos = 0
                    for(var i = 0; i < arguments.length; i++){
                        var arg = arguments[i]
                        if(typeof arg == "function"){
                            // Conversion of function arguments into functions
                            // that handle exceptions. The converted function
                            // is cached, so that for instance in this code :
                            //
                            // element.addEventListener("click", f)
                            // element.removeEventListener("click", f)
                            //
                            // it is the same function "f" that is added and
                            // then removed (cf. issue #1157)
                            if(arg.$cache){
                                var f1 = arg.$cache
                            }else{
                                var f1 = function(dest_fn){
                                    return function(){
                                        try{
                                            return dest_fn.apply(null, arguments)
                                        }catch(err){
                                            $B.handle_error(err)
                                        }
                                    }
                                }(arg)
                                arg.$cache = f1
                            }
                            args[pos++] = f1
                        }
                        else if(_b_.isinstance(arg, JSObject)){
                            args[pos++] = arg.js
                        }else if(_b_.isinstance(arg, DOMNode)){
                            args[pos++] = arg.elt
                        }else if(arg === _b_.None){
                            args[pos++] = null
                        }else if(arg.__class__ == _b_.dict){
                            args[pos++] = _b_.dict.$to_obj(arg)
                        }else{
                            args[pos++] = arg
                        }
                    }
                    var result = f.apply(elt, args)
                    return $B.$JS2Py(result)
                }
            })(res, self.elt)
            func.$infos = {__name__ : attr, __qualname__: attr}
            func.$is_func = true
            return func
        }
        if(attr == 'options'){return Options.$factory(self.elt)}
        if(attr == 'style'){return $B.JSObject.$factory(self.elt[attr])}
        if(Array.isArray(res)){return res} // issue #619
        return $B.$JS2Py(res)
    }
    return object.__getattribute__(self, attr)
}
DOMNode.__getitem__ = function(self, key){
    // Subscription. On the document, a string key is an element id and
    // any other key selects by tag name; on node collections, an
    // integer key indexes items; on elements, a string key reads the
    // named attribute.
    if(self.elt.nodeType == 9){ // Document
        if(typeof key == "string"){
            var res = self.elt.getElementById(key)
            if(res){return DOMNode.$factory(res)}
            throw _b_.KeyError.$factory(key)
        }else{
            try{
                var elts = self.elt.getElementsByTagName(key.$infos.__name__),
                    res = []
                for(var i = 0; i < elts.length; i++){
                    res.push(DOMNode.$factory(elts[i]))
                }
                return res
            }catch(err){
                throw _b_.KeyError.$factory(_b_.str.$factory(key))
            }
        }
    }else{
        if((typeof key == "number" || typeof key == "boolean") &&
            typeof self.elt.item == "function"){
            // negative indices count from the end of the collection
            var key_to_int = _b_.int.$factory(key)
            if(key_to_int < 0){key_to_int += self.elt.length}
            var res = DOMNode.$factory(self.elt.item(key_to_int))
            if(res === undefined){throw _b_.KeyError.$factory(key)}
            return res
        }else if(typeof key == "string" &&
                self.elt.attributes &&
                typeof self.elt.attributes.getNamedItem == "function"){
            var attr = self.elt.attributes.getNamedItem(key)
            if(!!attr){return attr.value}
            throw _b_.KeyError.$factory(key)
        }
    }
}
DOMNode.__hash__ = function(self){
    // Lazily assigns a hash value taken from the global decreasing
    // hash counter, then reuses it on later calls.
    if(self.__hashvalue__ === undefined){
        self.__hashvalue__ = $B.$py_next_hash--
    }
    return self.__hashvalue__
}
DOMNode.__iter__ = function(self){
    // Iteration on a node: items of the collection if the element is a
    // collection, else its child nodes.
    var items
    if(self.elt.length !== undefined && typeof self.elt.item == "function"){
        items = []
        for(var i = 0, len = self.elt.length; i < len; i++){
            items.push(DOMNode.$factory(self.elt.item(i)))
        }
    }else if(self.elt.childNodes !== undefined){
        items = []
        for(var i = 0, len = self.elt.childNodes.length; i < len; i++){
            items.push(DOMNode.$factory(self.elt.childNodes[i]))
        }
    }
    return $B.$iter(items)
}
DOMNode.__le__ = function(self, other){
    // "node <= other" appends other to the node's children (for the
    // document, to document.body): TagSum children are appended one by
    // one, strings / numbers become text nodes, DOMNodes are appended
    // directly, otherwise each item of an iterable is added.
    var elt = self.elt
    if(self.elt.nodeType == 9){elt = self.elt.body}
    if(_b_.isinstance(other, TagSum)){
        for(var i = 0; i < other.children.length; i++){
            elt.appendChild(other.children[i].elt)
        }
    }else if(typeof other == "string" || typeof other == "number"){
        var $txt = document.createTextNode(other.toString())
        elt.appendChild($txt)
    }else if(_b_.isinstance(other, DOMNode)){
        // other is a DOMNode instance
        elt.appendChild(other.elt)
    }else{
        try{
            // If other is an iterable, add the items
            var items = _b_.list.$factory(other)
            items.forEach(function(item){
                DOMNode.__le__(self, item)
            })
        }catch(err){
            throw _b_.TypeError.$factory("can't add '" +
                $B.class_name(other) + "' object to DOMNode instance")
        }
    }
}
// Length of the wrapped collection (undefined for plain elements).
DOMNode.__len__ = function(self){return self.elt.length}
DOMNode.__mul__ = function(self, other){
    // "element * n" returns a TagSum holding n clones of the element.
    if(_b_.isinstance(other, _b_.int) && other.valueOf() > 0){
        var res = TagSum.$factory(),
            pos = res.children.length
        for(var i = 0; i < other.valueOf(); i++){
            res.children[pos++] = DOMNode.clone(self)()
        }
        return res
    }
    // Fixed: missing space before "by" in the error message
    throw _b_.ValueError.$factory("can't multiply " + self.__class__ +
        " by " + other)
}
// Inequality is the negation of __eq__.
DOMNode.__ne__ = function(self, other){return ! DOMNode.__eq__(self, other)}
DOMNode.__next__ = function(self){
    // Iterator protocol on child nodes, driven by self.$counter.
    self.$counter++
    var children = self.elt.childNodes
    if(self.$counter >= children.length){
        throw _b_.StopIteration.$factory("StopIteration")
    }
    return DOMNode.$factory(children[self.$counter])
}
DOMNode.__radd__ = function(self, other){ // add to a string
    // "string + node" : TagSum with a text node placed before self.
    var text_node = DOMNode.$factory(document.createTextNode(other)),
        res = TagSum.$factory()
    res.children = [text_node, self]
    return res
}
DOMNode.__str__ = DOMNode.__repr__ = function(self){
    // Representation built from the element's prototype constructor
    // name (with an IE fallback), else from the node type / node name.
    var proto = Object.getPrototypeOf(self.elt)
    if(proto){
        var name = proto.constructor.name
        if(name === undefined){ // IE
            var proto_str = proto.constructor.toString()
            name = proto_str.substring(8, proto_str.length - 1)
        }
        return "<" + name + " object>"
    }
    var res = "<DOMNode object type '"
    return res + $NodeTypes[self.elt.nodeType] + "' name '" +
        self.elt.nodeName + "'>"
}
DOMNode.__setattr__ = function(self, attr, value){
    // Sets the *property* attr of the underlying element (not its
    // *attribute*)
    if(attr.substr(0,2) == "on"){ // event
        if(!$B.$bool(value)){ // remove all callbacks attached to event
            DOMNode.unbind(self, attr.substr(2))
        }else{
            // value is a function taking an event as argument
            DOMNode.bind(self, attr.substr(2), value)
        }
    }else{
        switch(attr){
            case "left":
            case "top":
            case "width":
            case "height":
                // geometry attributes are set as "<int>px" on style
                if(_b_.isinstance(value, _b_.int) && self.elt.nodeType == 1){
                    self.elt.style[attr] = value + "px"
                    return _b_.None
                }else{
                    throw _b_.ValueError.$factory(attr + " value should be" +
                        " an integer, not " + $B.class_name(value))
                }
                break
        }
        // delegate to a specialised setter when one is defined
        if(DOMNode["set_" + attr] !== undefined) {
            return DOMNode["set_" + attr](self, value)
        }
        // prints a warning, with the Python source line when available
        function warn(msg){
            console.log(msg)
            var frame = $B.last($B.frames_stack)
            if($B.debug > 0){
                var info = frame[1].$line_info.split(",")
                console.log("module", info[1], "line", info[0])
                if($B.$py_src.hasOwnProperty(info[1])){
                    var src = $B.$py_src[info[1]]
                    console.log(src.split("\n")[parseInt(info[0]) - 1])
                }
            }else{
                console.log("module", frame[2])
            }
        }
        // Warns if attr is a descriptor of the element's prototype
        // and it is not writable
        var proto = Object.getPrototypeOf(self.elt),
            nb = 0
        while(!!proto && proto !== Object.prototype && nb++ < 10){
            var descriptors = Object.getOwnPropertyDescriptors(proto)
            if(!!descriptors &&
                    typeof descriptors.hasOwnProperty == "function"){
                if(descriptors.hasOwnProperty(attr)){
                    if(!descriptors[attr].writable &&
                            descriptors[attr].set === undefined){
                        warn("Warning: property '" + attr +
                            "' is not writable. Use element.attrs['" +
                            attr +"'] instead.")
                    }
                    break
                }
            }else{
                break
            }
            proto = Object.getPrototypeOf(proto)
        }
        // Warns if attribute is a property of style
        if(self.elt.style && self.elt.style[attr] !== undefined){
            warn("Warning: '" + attr + "' is a property of element.style")
        }
        // Set the property
        if(value.__class__ === $B.JSObject &&
                value.js instanceof EventTarget){
            // Cf. issue #1393
            value = value.js
        }
        self.elt[attr] = value
        return _b_.None
    }
}
DOMNode.__setitem__ = function(self, key, value){
    // Subscription assignment: a string key sets the element's
    // *attribute* (namespace-aware for SVG elements).
    if(typeof key == "number"){
        // NOTE(review): assigning into childNodes[key] does not modify
        // the DOM tree (childNodes is a read-only collection) - confirm
        // intent before relying on this branch.
        self.elt.childNodes[key] = value
    }else if(typeof key == "string"){
        if(self.elt.attributes){
            if(self.elt instanceof SVGElement){
                self.elt.setAttributeNS(null, key, value)
            }else if(typeof self.elt.setAttribute == "function"){
                self.elt.setAttribute(key, value)
            }
        }
    }
}
DOMNode.abs_left = {
    // Read-only descriptor: document-absolute position of the
    // element's left border, in pixels.
    __get__: function(self){
        return $getPosition(self.elt).left
    },
    __set__: function(){
        // Fixed: missing space in "objectattribute"
        throw _b_.AttributeError.$factory("'DOMNode' object attribute " +
            "'abs_left' is read-only")
    }
}
DOMNode.abs_top = {
    // Read-only descriptor: document-absolute position of the
    // element's top border, in pixels.
    __get__: function(self){
        return $getPosition(self.elt).top
    },
    __set__: function(){
        // Fixed: missing space in "objectattribute"
        throw _b_.AttributeError.$factory("'DOMNode' object attribute " +
            "'abs_top' is read-only")
    }
}
DOMNode.bind = function(self, event){
    // bind functions to the event (event = "click", "mouseover" etc.)
    // "options" may be a boolean (capture flag), a dict of
    // addEventListener options, or None (defaults to capture=false).
    var $ = $B.args("bind", 4,
        {self: null, event: null, func: null, options: null},
        ["self", "event", "func", "options"], arguments,
        {options: _b_.None}, null, null),
        self = $.self,
        event = $.event,
        func = $.func,
        options = $.options
    // wrap func so Python exceptions raised in the callback are
    // reported instead of being swallowed by the browser
    var callback = (function(f){
        return function(ev){
            try{
                return f($DOMEvent(ev))
            }catch(err){
                if(err.__class__ !== undefined){
                    $B.handle_error(err)
                }else{
                    try{$B.$getattr($B.stderr, "write")(err)}
                    catch(err1){console.log(err)}
                }
            }
        }}
    )(func)
    callback.$infos = func.$infos
    callback.$attrs = func.$attrs || {}
    callback.$func = func
    // NOTE(review): if options is none of boolean / dict / None, no
    // listener is added, silently - confirm whether this is intended.
    if(typeof options == "boolean"){
        self.elt.addEventListener(event, callback, options)
    }else if(options.__class__ === _b_.dict){
        self.elt.addEventListener(event, callback, _b_.dict.$to_obj(options))
    }else if(options === _b_.None){
        self.elt.addEventListener(event, callback, false)
    }
    // remember (func, callback) pairs for unbind / clone / events
    self.elt.$events = self.elt.$events || {}
    self.elt.$events[event] = self.elt.$events[event] || []
    self.elt.$events[event].push([func, callback])
    return self
}
DOMNode.children = function(self){
    // Returns the child nodes (including text nodes) wrapped as
    // DOMNode instances; for the document, children of <body>.
    // Fixed: removed a stray debug console.log left in the code.
    var res = [],
        elt = self.elt
    if(elt.nodeType == 9){elt = elt.body}
    elt.childNodes.forEach(function(child){
        res.push(DOMNode.$factory(child))
    })
    return res
}
DOMNode.clear = function(self){
    // Removes all the element's children (for the document, children
    // of <body>).
    var elt = self.elt.nodeType == 9 ? self.elt.body : self.elt,
        child
    while((child = elt.firstChild)){
        elt.removeChild(child)
    }
}
// Returns the element's className, or None when it is not defined.
DOMNode.Class = function(self){
    if(self.elt.className !== undefined){return self.elt.className}
    return _b_.None
}
// Pythonic alias ("class" is a reserved word in Python).
DOMNode.class_name = function(self){return DOMNode.Class(self)}
DOMNode.clone = function(self){
    // Deep-copies the node, then re-binds on the copy the callbacks
    // attached to self.
    var copy = DOMNode.$factory(self.elt.cloneNode(true)),
        events = self.elt.$events || {}
    for(var event in events){
        events[event].forEach(function(evt){
            // evt is a [python function, js callback] pair
            DOMNode.bind(copy, event, evt[0])
        })
    }
    return copy
}
DOMNode.closest = function(self, tagName){
    // Return the nearest ancestor (starting with self) whose tag name
    // matches tagName, case-insensitive. Raise KeyError if not found.
    var wanted = tagName.toLowerCase(),
        node = self.elt
    while(node.tagName.toLowerCase() != wanted){
        node = node.parentNode
        if(node === undefined || node.tagName === undefined){
            throw _b_.KeyError.$factory("no parent of type " + wanted)
        }
    }
    return DOMNode.$factory(node)
}
DOMNode.events = function(self, event){
    // Return the list of JS wrapper callbacks currently bound to event.
    self.elt.$events = self.elt.$events || {}
    var bound = self.elt.$events[event] = self.elt.$events[event] || [],
        result = []
    for(var i = 0; i < bound.length; i++){
        result.push(bound[i][1])
    }
    return result
}
DOMNode.focus = function(self){
    // Return a function that gives focus to the element. The call is
    // deferred with setTimeout (focus() is not supported in IE).
    var elt = self.elt
    return function(){
        setTimeout(function(){elt.focus()}, 10)
    }
}
function make_list(node_list){
    // Convert a DOM NodeList into an array of DOMNode instances.
    var result = []
    for(var pos = 0; pos < node_list.length; pos++){
        result.push(DOMNode.$factory(node_list[pos]))
    }
    return result
}
DOMNode.get = function(self){
    // For document or element: get(key1=value1[, key2=value2...])
    // returns a list of the elements with the specified keys/values.
    // Supported keys: "name", "tag", "classname", "id", "selector".
    var obj = self.elt,
        args = []
    for(var i = 1; i < arguments.length; i++){args.push(arguments[i])}
    var $ns = $B.args("get", 0, {}, [], args, {}, null, "kw"),
        $dict = {},
        items = _b_.list.$factory(_b_.dict.items($ns["kw"]))
    items.forEach(function(item){
        $dict[item[0]] = item[1]
    })
    if($dict["name"] !== undefined){
        if(obj.getElementsByName === undefined){
            throw _b_.TypeError.$factory("DOMNode object doesn't support " +
                "selection by name")
        }
        return make_list(obj.getElementsByName($dict['name']))
    }
    if($dict["tag"] !== undefined){
        if(obj.getElementsByTagName === undefined){
            throw _b_.TypeError.$factory("DOMNode object doesn't support " +
                "selection by tag name")
        }
        return make_list(obj.getElementsByTagName($dict["tag"]))
    }
    if($dict["classname"] !== undefined){
        if(obj.getElementsByClassName === undefined){
            throw _b_.TypeError.$factory("DOMNode object doesn't support " +
                "selection by class name")
        }
        return make_list(obj.getElementsByClassName($dict['classname']))
    }
    if($dict["id"] !== undefined){
        if(obj.getElementById === undefined){
            throw _b_.TypeError.$factory("DOMNode object doesn't support " +
                "selection by id")
        }
        var id_res = document.getElementById($dict['id'])
        if(! id_res){return []}
        return [DOMNode.$factory(id_res)]
    }
    if($dict["selector"] !== undefined){
        if(obj.querySelectorAll === undefined){
            throw _b_.TypeError.$factory("DOMNode object doesn't support " +
                "selection by selector")
        }
        return make_list(obj.querySelectorAll($dict['selector']))
    }
    // No supported selection key was passed: return an empty list.
    // (The original returned the undeclared variable "res" here, which
    // raised a ReferenceError.)
    return []
}
DOMNode.getContext = function(self){ // for CANVAS tag
    // Return a function ctx -> drawing context, wrapped as a JSObject.
    // Raise AttributeError if the element has no getContext method.
    if(!("getContext" in self.elt)){
        throw _b_.AttributeError.$factory("object has no attribute 'getContext'")
    }
    var canvas = self.elt
    return function(ctx){
        return JSObject.$factory(canvas.getContext(ctx))
    }
}
DOMNode.getSelectionRange = function(self){ // for TEXTAREA
    // Forward to the element's native getSelectionRange when present.
    // NOTE(review): the native method is applied with a null receiver
    // and "arguments" (which includes self) is forwarded verbatim —
    // presumably the extra first argument is ignored; confirm.
    if(self.elt["getSelectionRange"] !== undefined){
        return self.elt.getSelectionRange.apply(null, arguments)
    }
}
DOMNode.html = function(self){
    // Return the element's innerHTML; for a document node, its body's
    // HTML. Return None when the node has no innerHTML at all.
    var markup = self.elt.innerHTML
    if(markup !== undefined){
        return markup
    }
    return self.elt.nodeType == 9 ? self.elt.body.innerHTML : _b_.None
}
DOMNode.id = function(self){
    // Return the element's id, or None if it has no id attribute.
    var ident = self.elt.id
    return ident === undefined ? _b_.None : ident
}
DOMNode.index = function(self, selector){
    // Return the position of the element among its parent's child
    // nodes (or, with selector, among the parent's matching nodes);
    // -1 when not found.
    var candidates
    if(selector === undefined){
        candidates = self.elt.parentElement.childNodes
    }else{
        candidates = self.elt.parentElement.querySelectorAll(selector)
    }
    for(var pos = 0; pos < candidates.length; pos++){
        if(candidates[pos] === self.elt){
            return pos
        }
    }
    return -1
}
DOMNode.inside = function(self, other){
    // True if self is other or is contained at any depth inside it.
    var target = other.elt,
        node = self.elt
    while(node){
        if(node === target){
            return true
        }
        node = node.parentElement
    }
    return false
}
DOMNode.options = function(self){ // for SELECT tag
    // Wrap the element in the helper class $OptionsClass, which exposes
    // the <select> options collection to Python.
    return new $OptionsClass(self.elt)
}
DOMNode.parent = function(self){
    // Return the parent element wrapped as a DOMNode, or None when the
    // node has no parent element.
    var par = self.elt.parentElement
    return par ? DOMNode.$factory(par) : _b_.None
}
DOMNode.reset = function(self){ // for FORM
    // Return a callable that resets the form.
    return function(){self.elt.reset()}
}
DOMNode.scrolled_left = {
    // Read-only property: distance between the element's left border
    // and the left edge of the visible (scrolled) viewport.
    __get__: function(self){
        return $getPosition(self.elt).left -
            document.scrollingElement.scrollLeft
    },
    __set__: function(){
        // Fix: error message read "objectattribute" (missing space).
        throw _b_.AttributeError.$factory("'DOMNode' object attribute " +
            "'scrolled_left' is read-only")
    }
}
DOMNode.scrolled_top = {
    // Read-only property: distance between the element's top border
    // and the top edge of the visible (scrolled) viewport.
    __get__: function(self){
        return $getPosition(self.elt).top -
            document.scrollingElement.scrollTop
    },
    __set__: function(){
        // Fix: error message read "objectattribute" (missing space).
        throw _b_.AttributeError.$factory("'DOMNode' object attribute " +
            "'scrolled_top' is read-only")
    }
}
DOMNode.select = function(self, selector){
    // Shortcut for get(selector=...): all matching descendants.
    var node = self.elt
    if(node.querySelectorAll === undefined){
        throw _b_.TypeError.$factory("DOMNode object doesn't support " +
            "selection by selector")
    }
    return make_list(node.querySelectorAll(selector))
}
DOMNode.select_one = function(self, selector){
    // Return the first element matching selector, or None.
    if(self.elt.querySelector === undefined){
        throw _b_.TypeError.$factory("DOMNode object doesn't support " +
            "selection by selector")
    }
    var found = self.elt.querySelector(selector)
    return found === null ? _b_.None : DOMNode.$factory(found)
}
DOMNode.style = function(self){
    // Return the element's style object wrapped for Python access.
    // Normalize attribute "float" for cross-browser compatibility:
    // modern browsers use cssFloat, old IE used styleFloat.
    // Fix: the fallback read self.style.styleFloat — "self" is the
    // Brython wrapper, which has no "style", so the expression threw
    // when cssFloat was falsy; it must read self.elt.style.styleFloat.
    self.elt.style.float = self.elt.style.cssFloat ||
        self.elt.style.styleFloat
    return $B.JSObject.$factory(self.elt.style)
}
DOMNode.setSelectionRange = function(self){ // for TEXTAREA
    // Return a function(start[, end]) selecting the given character
    // range: the standard setSelectionRange when available, else IE's
    // createTextRange fallback.
    // Fix: the original tested "this", which is the DOMNode class
    // dictionary here (never the element), so both branches were dead;
    // the wrapped element self.elt must be tested and used instead.
    var elt = self.elt
    if(elt["setSelectionRange"] !== undefined){
        return function(){
            return elt.setSelectionRange.apply(elt, arguments)
        }
    }else if(elt["createTextRange"] !== undefined){
        return function(start_pos, end_pos){
            if(end_pos == undefined){end_pos = start_pos}
            var range = elt.createTextRange()
            range.collapse(true)
            range.moveEnd("character", start_pos)
            range.moveStart("character", end_pos)
            range.select()
        }
    }
}
DOMNode.set_class_name = function(self, arg){
    // Replace the element's "class" attribute with arg.
    self.elt.setAttribute("class", arg)
}
DOMNode.set_html = function(self, value){
    // Set innerHTML to str(value); for a document node, target its body.
    var target = self.elt
    if(target.nodeType == 9){
        target = target.body
    }
    target.innerHTML = _b_.str.$factory(value)
}
DOMNode.set_style = function(self, style){ // style is a dict
    // Apply the key/value pairs of the Python dict "style" to the
    // element's style object. Raises TypeError for a non-dict argument.
    if(!_b_.isinstance(style, _b_.dict)){
        throw _b_.TypeError.$factory("style must be dict, not " +
            $B.class_name(style))
    }
    var items = _b_.list.$factory(_b_.dict.items(style))
    for(var i = 0; i < items.length; i++){
        var key = items[i][0],
            value = items[i][1]
        if(key.toLowerCase() == "float"){
            // "float" needs both spellings for cross-browser support
            self.elt.style.cssFloat = value
            self.elt.style.styleFloat = value
        }else{
            switch(key) {
                case "top":
                case "left":
                case "width":
                case "borderWidth":
                    // integer values for these properties are pixels
                    if(_b_.isinstance(value,_b_.int)){value = value + "px"}
            }
            self.elt.style[key] = value
        }
    }
}
DOMNode.set_text = function(self,value){
    // Set the node's text to str(value); for a document node, target
    // its body. Both innerText and textContent are set for coverage
    // of all browsers.
    var target = self.elt
    if(target.nodeType == 9){
        target = target.body
    }
    var text = _b_.str.$factory(value)
    target.innerText = text
    target.textContent = text
}
DOMNode.set_value = function(self, value){
    // Set the element's value to str(value).
    self.elt.value = _b_.str.$factory(value)
}
DOMNode.submit = function(self){ // for FORM
    // Return a callable that submits the form.
    return function(){self.elt.submit()}
}
DOMNode.text = function(self){
    // Return the node's text (innerText or textContent), or None when
    // it is null. For a document node, read from its body.
    var node = self.elt
    if(node.nodeType == 9){
        node = node.body
    }
    var content = node.innerText || node.textContent
    return content === null ? _b_.None : content
}
DOMNode.toString = function(self){
    // Class-level repr when called without argument; otherwise the
    // wrapped element's node name.
    return self === undefined ? 'DOMNode' : self.elt.nodeName
}
DOMNode.trigger = function (self, etype){
    // Artificially fire the event type etype on this node.
    if(self.elt.fireEvent){
        // Legacy IE path
        self.elt.fireEvent("on" + etype)
    }else{
        var ev = document.createEvent("Events")
        ev.initEvent(etype, true, false)
        self.elt.dispatchEvent(ev)
    }
}
DOMNode.unbind = function(self, event){
    // unbind functions from the event (event = "click", "mouseover" etc.)
    // if no function is specified, remove all callback functions
    // If no event is specified, remove all callbacks for all events
    self.elt.$events = self.elt.$events || {}
    // Fix: the original compared $events to a fresh object literal
    // ({} === {} is always false), so this early exit never fired.
    // Test for an empty mapping instead.
    if(Object.keys(self.elt.$events).length == 0){return _b_.None}
    if(event === undefined){
        // no event specified: clear everything
        for(var event in self.elt.$events){
            DOMNode.unbind(self, event)
        }
        return _b_.None
    }
    if(self.elt.$events[event] === undefined ||
            self.elt.$events[event].length == 0){
        return _b_.None
    }
    var events = self.elt.$events[event]
    if(arguments.length == 2){
        // no function specified: remove all callbacks for this event
        for(var i = 0; i < events.length; i++){
            var callback = events[i][1]
            self.elt.removeEventListener(event, callback, false)
        }
        self.elt.$events[event] = []
        return _b_.None
    }
    // Remove only the callbacks passed as extra positional arguments
    for(var i = 2; i < arguments.length; i++){
        var callback = arguments[i],
            flag = false,
            func = callback.$func
        if(func === undefined){
            // If a callback is created by an assignment to an existing
            // function
            var found = false
            for(var j = 0; j < events.length; j++){
                if(events[j][0] === callback){
                    var func = callback,
                        found = true
                    break
                }
            }
            if(!found){
                throw _b_.TypeError.$factory("function is not an event callback")
            }
        }
        for(var j = 0; j < events.length; j++){
            if($B.$getattr(func, '__eq__')(events[j][0])){
                var callback = events[j][1]
                self.elt.removeEventListener(event, callback, false)
                events.splice(j, 1)
                flag = true
                break
            }
        }
        // The indicated func was not found, error is thrown
        if(!flag){
            throw _b_.KeyError.$factory('missing callback for event ' + event)
        }
    }
}
// Set the __name__/__qualname__ metadata of all DOMNode methods
$B.set_func_names(DOMNode, "browser")
// return query string as an object with methods to access keys and values
// same interface as cgi.FieldStorage, with getvalue / getlist / getfirst
// "Query" is the Brython class object; instances carry _keys (ordered
// list of keys) and _values (key -> list of values).
var Query = {
    __class__: _b_.type,
    __mro__: [_b_.object],
    $infos:{
        __name__: "query"
    }
}
Query.__contains__ = function(self, key){
    // Membership test: "key in query".
    var found = self._keys.indexOf(key)
    return found > -1
}
Query.__getitem__ = function(self, key){
    // Return the single value bound to key, or the list of values when
    // there are several. Raise KeyError for an unknown key.
    var values = self._values[key]
    if(values === undefined){
        throw _b_.KeyError.$factory(key)
    }
    return values.length == 1 ? values[0] : values
}
// Iterator class over the query string keys
var Query_iterator = $B.make_iterator_class("query string iterator")
Query.__iter__ = function(self){
    // Iterating a query yields its keys, like a dict.
    return Query_iterator.$factory(self._keys)
}
Query.__setitem__ = function(self, key, value){
    // Setting a key replaces any previous values with a single value.
    self._values[key] = [value]
    return _b_.None
}
Query.__str__ = Query.__repr__ = function(self){
    // Rebuild the query string ("?k=v&...") from keys/values; an
    // empty string when there are no parameters.
    var parts = []
    for(var key in self._values){
        self._values[key].forEach(function(val){
            parts.push(encodeURIComponent(key) + "=" + encodeURIComponent(val))
        })
    }
    return parts.length == 0 ? "" : "?" + parts.join("&")
}
Query.getfirst = function(self, key, _default){
    // First value bound to key; _default (or None) if key is absent.
    var values = self._values[key]
    if(values !== undefined){
        return values[0]
    }
    return _default === undefined ? _b_.None : _default
}
Query.getlist = function(self, key){
    // All values bound to key, always as a list (possibly empty).
    var values = self._values[key]
    return values === undefined ? [] : values
}
Query.getvalue = function(self, key, _default){
    // Like __getitem__, but returns _default (or None) instead of
    // raising KeyError for an unknown key.
    try{
        return Query.__getitem__(self, key)
    }catch(err){
        return _default === undefined ? _b_.None : _default
    }
}
Query.keys = function(self){
    // The list of keys present in the query string.
    return self._keys
}
DOMNode.query = function(self){
    // Parse location.search into a Query instance. Repeated keys
    // accumulate their values in a list.
    var res = {
        __class__: Query,
        _keys : [],
        _values : {}
    }
    var qs = location.search.substr(1).split('&')
    for(var i = 0; i < qs.length; i++){
        if(qs[i] === ""){
            // Fix: with an empty query string, split('&') yields [""],
            // which the original turned into a bogus ""/"" entry.
            continue
        }
        var pos = qs[i].search("="),
            elts = [qs[i].substr(0, pos), qs[i].substr(pos + 1)],
            key = decodeURIComponent(elts[0]),
            value = decodeURIComponent(elts[1])
        if(res._keys.indexOf(key) > -1){
            res._values[key].push(value)
        }else{
            res._keys.push(key)
            res._values[key] = [value]
        }
    }
    return res
}
// class used for tag sums (the result of adding two HTML tag objects)
var TagSum = {
    __class__ : _b_.type,
    // NOTE(review): "object" is a bare identifier here; presumably an
    // alias of _b_.object defined earlier in this file — confirm.
    __mro__: [object],
    $infos: {
        __module__: "<pydom>",
        __name__: "TagSum"
    }
}
TagSum.appendChild = function(self, child){
    // Append a node to the sum's list of children.
    self.children.push(child)
}
TagSum.__add__ = function(self, other){
    // Add other to the tag sum, in place, and return self.
    if($B.get_class(other) === TagSum){
        // other is also a TagSum: merge its children
        self.children = self.children.concat(other.children)
    }else if(_b_.isinstance(other, [_b_.str, _b_.int, _b_.float,
                                    _b_.dict, _b_.set, _b_.list])){
        // plain Python value: append it as a text node
        self.children = self.children.concat(
            DOMNode.$factory(document.createTextNode(other)))
    }else{
        self.children.push(other)
    }
    return self
}
TagSum.__radd__ = function(self, other){
    // other + self: return a new TagSum whose children are self's
    // children followed by a text node built from other.
    var result = TagSum.$factory()
    result.children = self.children.concat(
        DOMNode.$factory(document.createTextNode(other)))
    return result
}
TagSum.__repr__ = function(self){
    // Debug representation: lists the children; text nodes also show
    // their text content.
    var out = "<object TagSum> "
    self.children.forEach(function(child){
        out += child
        if(child.toString() == "[object Text]"){
            out += " [" + child.textContent + "]\n"
        }
    })
    return out
}
TagSum.__str__ = TagSum.toString = TagSum.__repr__
TagSum.clone = function(self){
    // Deep copy: a new TagSum with cloned child nodes.
    var copy = TagSum.$factory()
    self.children.forEach(function(child){
        copy.children.push(child.cloneNode(true))
    })
    return copy
}
TagSum.$factory = function(){
    // Create an empty TagSum instance.
    return {
        __class__: TagSum,
        children: [],
        toString: function(){return "(TagSum)"}
    }
}
$B.set_func_names(TagSum, "<dom>")
$B.TagSum = TagSum // used in _html.js and _svg.js
// Python-side wrapper of the JS window object
var win = JSObject.$factory(_window)
win.get_postMessage = function(msg,targetOrigin){
    // Wrapper around window.postMessage: a Python dict message is
    // first converted to a plain JS object tagged __class__: "dict".
    // NOTE(review): "dict" below is a bare identifier; presumably an
    // alias of _b_.dict defined earlier in this file — confirm.
    if(_b_.isinstance(msg, dict)){
        var temp = {__class__:"dict"},
            items = _b_.list.$factory(_b_.dict.items(msg))
        items.forEach(function(item){
            temp[item[0]] = item[1]
        })
        msg = temp
    }
    return _window.postMessage(msg, targetOrigin)
}
// Export to the Brython namespace
$B.DOMNode = DOMNode
$B.win = win
})(__BRYTHON__)
| kikocorreoso/brython | www/src/py_dom.js | JavaScript | bsd-3-clause | 54,564 |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './synced_device_manager.js';
import 'chrome://resources/cr_elements/cr_action_menu/cr_action_menu.m.js';
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_checkbox/cr_checkbox.m.js';
import 'chrome://resources/cr_elements/cr_dialog/cr_dialog.m.js';
import 'chrome://resources/cr_elements/cr_drawer/cr_drawer.m.js';
import 'chrome://resources/cr_elements/cr_icon_button/cr_icon_button.m.js';
import 'chrome://resources/cr_elements/cr_toolbar/cr_toolbar_selection_overlay.m.js';
| endlessm/chromium-browser | chrome/browser/resources/history/lazy_load.js | JavaScript | bsd-3-clause | 709 |
<?php
/**
* Copyright 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Cloud Storage Write Client implements the stream wrapper functions required
* to write to a Google Cloud Storage object.
*
*/
namespace google\appengine\ext\cloud_storage_streams;
// TODO: Retry on transient errors.
final class CloudStorageWriteClient extends CloudStorageClient {
  // GS requires all chunks of data written to be multiples of 256K except for
  // the last chunk.
  const WRITE_CHUNK_SIZE = 262144;
  // Conservative pattern for metadata headers name - could be relaxed
  const METADATA_KEY_REGEX = "/^[[:alnum:]-]+$/";
  // Metadata header value must be printable US ascii
  // http://tools.ietf.org/html/rfc2616#section-4.2
  const METADATA_VALUE_REGEX = "/^[[:print:]]*$/";
  // The array of bytes to be written to GS
  private $byte_buffer;
  // The resumable upload ID we are using for this upload.
  private $upload_id;
  // The offset in the file where the current buffer starts
  private $buffer_start_offset;
  // The number of bytes we've written to GS so far.
  private $total_bytes_uploaded;

  public function __construct($bucket, $object, $context) {
    parent::__construct($bucket, $object, $context);
  }

  /**
   * Called when the stream is being opened. Try and start a resumable upload
   * here by POSTing to the object URL with the resumable-upload header.
   *
   * @return bool true if the streamable upload started, false otherwise.
   */
  public function initialize() {
    $headers = parent::$upload_start_header;

    $token_header = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
    if ($token_header === false) {
      trigger_error("Unable to acquire OAuth token.", E_USER_WARNING);
      return false;
    }
    $headers = array_merge($headers, $token_header);

    // Optional stream-context settings: Content-Type, canned ACL and
    // x-goog-meta-* metadata headers.
    if (array_key_exists("Content-Type", $this->context_options)) {
      $headers["Content-Type"] = $this->context_options["Content-Type"];
      $this->content_type = $this->context_options["Content-Type"];
    }

    if (array_key_exists("acl", $this->context_options)) {
      $acl = $this->context_options["acl"];
      if (in_array($acl, parent::$valid_acl_values)) {
        $headers["x-goog-acl"] = $acl;
      } else {
        trigger_error(sprintf("Invalid ACL value: %s", $acl), E_USER_WARNING);
        return false;
      }
    }

    if (array_key_exists("metadata", $this->context_options)) {
      $metadata = $this->context_options["metadata"];
      foreach ($metadata as $name => $value) {
        if (!preg_match(self::METADATA_KEY_REGEX, $name)) {
          trigger_error(sprintf("Invalid metadata key: %s", $name),
                        E_USER_WARNING);
          return false;
        }
        // Bug fix: values were previously validated against the *key*
        // regex; use METADATA_VALUE_REGEX (printable US-ASCII) instead.
        if (!preg_match(self::METADATA_VALUE_REGEX, $value)) {
          trigger_error(sprintf("Invalid metadata value: %s", $value),
                        E_USER_WARNING);
          return false;
        }
        $headers['x-goog-meta-' . $name] = $value;
        $this->metadata[$name] = $value;
      }
    }

    $http_response = $this->makeHttpRequest($this->url,
                                            "POST",
                                            $headers);

    if ($http_response === false) {
      trigger_error("Unable to connect to Google Cloud Storage Service.",
                    E_USER_WARNING);
      return false;
    }

    $status_code = $http_response['status_code'];
    if ($status_code == HttpResponse::FORBIDDEN) {
      trigger_error("Access Denied", E_USER_WARNING);
      return false;
    }
    if ($status_code != HttpResponse::CREATED) {
      trigger_error($this->getErrorMessage($http_response['status_code'],
                                           $http_response['body']),
                    E_USER_WARNING);
      return false;
    }

    // The resumable-upload session id is returned in the query string of
    // the Location header.
    $location = $this->getHeaderValue("Location", $http_response['headers']);
    $query_str = parse_url($location)["query"];
    parse_str($query_str, $query_arr);
    $this->upload_id = $query_arr["upload_id"];

    if (!isset($this->upload_id)) {
      trigger_error(sprintf("Location Header was not returned (%s).",
                            implode(",",
                                    array_keys($http_response['headers']))),
                    E_USER_WARNING);
      return false;
    }
    $this->buffer_start_offset = 0;
    $this->total_bytes_uploaded = 0;
    $this->byte_buffer = "";
    return true;
  }

  /**
   * Buffer $data and flush complete 256K-aligned chunks to GS.
   *
   * @return int The number of bytes of $data accepted (0 on failure).
   */
  public function write($data) {
    $this->byte_buffer .= $data;
    $current_buffer_len = strlen($this->byte_buffer);
    $data_len = strlen($data);
    // If this data doesn't fill the buffer then write it and return.
    if ($current_buffer_len < self::WRITE_CHUNK_SIZE) {
      return $data_len;
    }
    // Write out this data
    if (!$this->writeBufferToGS()) {
      // Remove the bytes we added to the buffer
      $this->byte_buffer = substr($this->byte_buffer, 0, -strlen($data));
      return 0;
    }
    // We wrote the buffered content - but only return the amount of $data
    // we wrote as per the contract of write()
    return $data_len;
  }

  /**
   * Because of the write byte alignment required by GS we will not write any
   * data on a flush. If there is data remaining in the buffer we'll write it
   * during close.
   */
  public function flush() {
    return true;
  }

  /**
   * When closing the stream we need to complete the upload by writing the
   * remaining (possibly unaligned) buffer with a final Content-Range.
   */
  public function close() {
    $this->writeBufferToGS(true);
  }

  /**
   * @return array The metadata supplied in the stream context, or [].
   */
  public function getMetaData() {
    if (array_key_exists("metadata", $this->context_options)) {
      return $this->context_options["metadata"];
    }
    return [];
  }

  /**
   * @return string|null The Content-Type supplied in the stream context.
   */
  public function getContentType() {
    if (array_key_exists("Content-Type", $this->context_options)) {
      return $this->context_options["Content-Type"];
    }
    return null;
  }

  /**
   * PUT the buffered bytes to the resumable-upload URL.
   *
   * @param bool $complete true for the final chunk (sends the total object
   *     length in Content-Range and finalizes the upload).
   * @return bool true on success.
   */
  private function writeBufferToGS($complete = false) {
    $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
    if ($headers === false) {
      trigger_error("Unable to acquire OAuth token.", E_USER_ERROR);
      return false;
    }

    $buffer_len = strlen($this->byte_buffer);
    if ($complete) {
      $write_size = $buffer_len;
    } else {
      // Incomplete writes should never be less than WRITE_CHUNK_SIZE
      assert($buffer_len >= self::WRITE_CHUNK_SIZE);
      // Only whole WRITE_CHUNK_SIZE multiples may be sent mid-upload.
      $write_size =
          floor($buffer_len / self::WRITE_CHUNK_SIZE) * self::WRITE_CHUNK_SIZE;
    }

    // Determine the final byte of the buffer we're writing for Range header.
    if ($write_size !== 0) {
      $write_end_byte = $this->buffer_start_offset + $write_size - 1;
      $body = substr($this->byte_buffer, 0, $write_size);
    } else {
      $body = null;
    }

    if ($complete) {
      $object_length = $this->buffer_start_offset + $write_size;
      if ($write_size === 0) {
        $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_NO_DATA,
                                            $object_length);
      } else {
        $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_FORMAT,
                                            $this->buffer_start_offset,
                                            $write_end_byte,
                                            $object_length);
      }
    } else {
      $headers['Content-Range'] = sprintf(parent::PARTIAL_CONTENT_RANGE_FORMAT,
                                          $this->buffer_start_offset,
                                          $write_end_byte);
    }

    $url = sprintf("%s?upload_id=%s", $this->url, $this->upload_id);

    $http_response = $this->makeHttpRequest($url, "PUT", $headers, $body);
    $code = $http_response['status_code'];

    // Partial writes are acknowledged with 308 (RESUME_INCOMPLETE), the
    // final write with 200.
    // TODO: Retry on some status codes.
    if (($complete && $code != HttpResponse::OK) ||
        (!$complete && $code != HttpResponse::RESUME_INCOMPLETE)) {
      trigger_error($this->getErrorMessage($http_response['status_code'],
                                           $http_response['body']),
                    E_USER_WARNING);
      return false;
    }

    // Buffer flushed, update pointers if we actually wrote something.
    if ($write_size !== 0) {
      $this->buffer_start_offset = $write_end_byte + 1;
      $this->byte_buffer = substr($this->byte_buffer, $write_size);
    }

    // Invalidate any cached object with the same name. Note that there is a
    // potential race condition when using optimistic caching and invalidate
    // on write where the old version of an object can still be returned from
    // the cache.
    if ($complete && $this->context_options['enable_cache'] === true) {
      if ($object_length > 0) {
        $key_names = [];
        for ($i = 0; $i < $object_length; $i += parent::DEFAULT_READ_SIZE) {
          $range = $this->getRangeHeader($i,
                                         $i + parent::DEFAULT_READ_SIZE - 1);
          $key_names[] = sprintf(parent::MEMCACHE_KEY_FORMAT,
                                 $this->url,
                                 $range['Range']);
        }
        $memcached = new \Memcached();
        $memcached->deleteMulti($key_names);
      }
    }

    return true;
  }
}
| Kazade/NeHe-Website | google_appengine/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php | PHP | bsd-3-clause | 9,743 |
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using Thinktecture.IdentityServer.Core.Configuration;
namespace Thinktecture.IdentityServer.Tests.Configuration
{
[TestClass]
public class RegistrationTests
{
[TestMethod]
public void RegisterSingleton_NullInstance_Throws()
{
try
{
Registration.RegisterSingleton<object>(null);
Assert.Fail();
}
catch(ArgumentNullException ex)
{
Assert.AreEqual("instance", ex.ParamName);
}
}
[TestMethod]
public void RegisterSingleton_Instance_FactoryReturnsSameInstance()
{
object theSingleton = new object();
var reg = Registration.RegisterSingleton<object>(theSingleton);
var result = reg.ImplementationFactory();
Assert.AreSame(theSingleton, result);
}
[TestMethod]
public void RegisterFactory_NullFunc_Throws()
{
try
{
Registration.RegisterFactory<object>(null);
Assert.Fail();
}
catch (ArgumentNullException ex)
{
Assert.AreEqual("typeFunc", ex.ParamName);
}
}
[TestMethod]
public void RegisterFactory_FactoryInvokesFunc()
{
var wasCalled = false;
Func<object> f = () => { wasCalled = true; return new object(); };
var reg = Registration.RegisterFactory<object>(f);
var result = reg.ImplementationFactory();
Assert.IsTrue(wasCalled);
}
[TestMethod]
public void RegisterType_NullType_Throws()
{
try
{
Registration.RegisterType<object>(null);
Assert.Fail();
}
catch (ArgumentNullException ex)
{
Assert.AreEqual("type", ex.ParamName);
}
}
[TestMethod]
public void RegisterType_SetsTypeOnRegistration()
{
var result = Registration.RegisterType<object>(typeof(string));
Assert.AreEqual(typeof(string), result.ImplementationType);
}
}
}
| maz100/Thinktecture.IdentityServer.v3 | source/Tests/UnitTests/Configuration/RegistrationTests.cs | C# | bsd-3-clause | 2,304 |
{{-- Account settings tab: extends the shared settings layout. --}}
@section ('title')
Account Settings :: Mangapie
@endsection
@extends ('settings.layout')
@section ('tab-content')
{{-- Navigation pills between the three settings tabs; "Account" is active. --}}
<div class="card">
    <div class="card-header">
        <ul class="nav nav-pills card-header-pills">
            <li class="nav-item">
                <a class="nav-link active" href="{{ URL::action('UserSettingsController@account') }}">Account</a>
            </li>
            <li class="nav-item">
                <a class="nav-link" href="{{ URL::action('UserSettingsController@visuals') }}">Visuals</a>
            </li>
            <li class="nav-item">
                <a class="nav-link" href="{{ URL::action('UserSettingsController@profile') }}">Profile</a>
            </li>
        </ul>
    </div>

    <div class="card-body">
        {{-- Change-password form, PATCHed to UserSettingsController@patchPassword. --}}
        {{ Form::open(['action' => 'UserSettingsController@patchPassword', 'method' => 'patch']) }}
            <div class="row">
                <div class="col-12 col-md-6">
                    <label>Change Password</label>
                    <div class="form-group">
                        <div class="input-group">
                            <div class="input-group-prepend">
                                <span class="input-group-text">Current</span>
                            </div>
                            <input class="form-control" type="password" name="current" title="Your current password" placeholder="Your current password">
                        </div>
                    </div>
                    <div class="form-group">
                        <div class="input-group">
                            <div class="input-group-prepend">
                                <span class="input-group-text">New</span>
                            </div>
                            <input class="form-control" type="password" name="new" title="The new password" placeholder="The new password">
                        </div>
                    </div>
                    <div class="form-group">
                        <div class="input-group">
                            <div class="input-group-prepend">
                                <span class="input-group-text">Verify</span>
                            </div>
                            <input class="form-control" type="password" name="verify" title="Verify your new password" placeholder="Verify your new password">
                        </div>
                    </div>

                    <div class="form-group">
                        <button class="btn btn-primary form-control" type="submit">
                            <span class="fa fa-check"></span>
                            Set
                        </button>
                    </div>
                </div>
            </div>
        {{ Form::close() }}
    </div>
</div>
@endsection
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome_frame/urlmon_url_request.h"
#include <urlmon.h>
#include <wininet.h>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop.h"
#include "base/string_number_conversions.h"
#include "base/stringprintf.h"
#include "base/threading/platform_thread.h"
#include "base/threading/thread.h"
#include "base/utf_string_conversions.h"
#include "chrome/common/automation_messages.h"
#include "chrome_frame/bind_context_info.h"
#include "chrome_frame/chrome_frame_activex_base.h"
#include "chrome_frame/extra_system_apis.h"
#include "chrome_frame/html_utils.h"
#include "chrome_frame/urlmon_upload_data_stream.h"
#include "chrome_frame/urlmon_url_request_private.h"
#include "chrome_frame/utils.h"
#include "net/base/load_flags.h"
#include "net/http/http_response_headers.h"
#include "net/http/http_util.h"
#define IS_HTTP_SUCCESS_CODE(code) (code >= 200 && code <= 299)
// Initializes bookkeeping only; the request becomes active once Start()
// or InitPending() is called on the worker thread.
UrlmonUrlRequest::UrlmonUrlRequest()
    : pending_read_size_(0),
      headers_received_(false),
      calling_delegate_(0),
      thread_(NULL),
      parent_window_(NULL),
      privileged_mode_(false),
      pending_(false),
      is_expecting_download_(true),
      cleanup_transaction_(false) {
  DVLOG(1) << __FUNCTION__ << me();
}
// Destructor only logs; COM members release themselves via smart pointers.
UrlmonUrlRequest::~UrlmonUrlRequest() {
  DVLOG(1) << __FUNCTION__ << me();
}
// Short diagnostic tag ("id/this") used to identify this request in logs.
std::string UrlmonUrlRequest::me() const {
  return base::StringPrintf(" id: %i Obj: %X ", id(), this);
}
// Kicks off the asynchronous download. Must be (and stay) called on a
// single thread; always returns true, reporting failures asynchronously
// through the delegate.
bool UrlmonUrlRequest::Start() {
  DVLOG(1) << __FUNCTION__ << me() << url();
  DCHECK(thread_ == 0 || thread_ == base::PlatformThread::CurrentId());
  thread_ = base::PlatformThread::CurrentId();
  status_.Start();
  // Initialize the net::HostPortPair structure from the url initially. We may
  // not receive the ip address of the host if the request is satisfied from
  // the cache.
  socket_address_ = net::HostPortPair::FromURL(GURL(url()));
  // The UrlmonUrlRequest instance can get destroyed in the context of
  // StartAsyncDownload if BindToStorage finishes synchronously with an error.
  // Grab a reference to protect against this.
  scoped_refptr<UrlmonUrlRequest> ref(this);
  HRESULT hr = StartAsyncDownload();
  if (FAILED(hr) && status_.get_state() != UrlmonUrlRequest::Status::DONE) {
    // Synchronous failure: record the translated net error and notify.
    status_.Done();
    status_.set_result(net::URLRequestStatus::FAILED, HresultToNetError(hr));
    NotifyDelegateAndDie();
  }
  return true;
}
// Cancels the request. Detaches the delegate first so no further
// callbacks are delivered, then aborts or finalizes depending on the
// current binding state.
void UrlmonUrlRequest::Stop() {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DCHECK((status_.get_state() != Status::DONE) == (binding_ != NULL));
  Status::State state = status_.get_state();
  delegate_ = NULL;

  // If DownloadInHost is already requested, we will quit soon anyway.
  if (terminate_requested())
    return;

  switch (state) {
    case Status::WORKING:
      // Active binding: ask urlmon to abort; completion arrives later.
      status_.Cancel();
      if (binding_)
        binding_->Abort();
      break;

    case Status::ABORTING:
      // Abort already in flight; just record the cancellation.
      status_.Cancel();
      break;

    case Status::DONE:
      // Binding already finished; clean up immediately.
      status_.Cancel();
      NotifyDelegateAndDie();
      break;
  }
}
// Requests up to bytes_to_read bytes for the delegate. Data already
// buffered is delivered synchronously; otherwise the read is recorded as
// pending and satisfied from OnDataAvailable.
bool UrlmonUrlRequest::Read(int bytes_to_read) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DCHECK_GE(bytes_to_read, 0);
  DCHECK_EQ(0, calling_delegate_);
  DVLOG(1) << __FUNCTION__ << me();
  // A read from the delegate means this is not a download handed to the host.
  is_expecting_download_ = false;

  // Re-entrancy check. Thou shall not call Read() while process OnReadComplete!
  DCHECK_EQ(0u, pending_read_size_);
  if (pending_read_size_ != 0)
    return false;

  DCHECK((status_.get_state() != Status::DONE) == (binding_ != NULL));
  if (status_.get_state() == Status::ABORTING)
    return true;

  // Send data if available.
  size_t bytes_copied = 0;
  if ((bytes_copied = SendDataToDelegate(bytes_to_read))) {
    DVLOG(1) << __FUNCTION__ << me() << " bytes read: " << bytes_copied;
    return true;
  }

  if (status_.get_state() == Status::WORKING) {
    // Nothing buffered yet: remember the request for OnDataAvailable.
    DVLOG(1) << __FUNCTION__ << me() << " pending: " << bytes_to_read;
    pending_read_size_ = bytes_to_read;
  } else {
    DVLOG(1) << __FUNCTION__ << me() << " Response finished.";
    NotifyDelegateAndDie();
  }

  return true;
}
// Adopts a binding that was already started elsewhere (moniker +
// bind context), optionally with data already cached in |cache|.
// Must be called before the request is used on this thread.
HRESULT UrlmonUrlRequest::InitPending(const GURL& url, IMoniker* moniker,
                                      IBindCtx* bind_context,
                                      bool enable_frame_busting,
                                      bool privileged_mode,
                                      HWND notification_window,
                                      IStream* cache) {
  DVLOG(1) << __FUNCTION__ << me() << url.spec();
  DCHECK(bind_context_ == NULL);
  DCHECK(moniker_ == NULL);
  DCHECK(cache_ == NULL);
  DCHECK(thread_ == 0 || thread_ == base::PlatformThread::CurrentId());
  thread_ = base::PlatformThread::CurrentId();
  bind_context_ = bind_context;
  moniker_ = moniker;
  enable_frame_busting_ = enable_frame_busting;
  privileged_mode_ = privileged_mode;
  parent_window_ = notification_window;
  cache_ = cache;
  set_url(url.spec());
  set_pending(true);

  // Request has already started and data is fetched. We will get the
  // GetBindInfo call as per contract but the return values are
  // ignored. So just set "get" as a method to make our GetBindInfo
  // implementation happy.
  method_ = "get";
  return S_OK;
}
// Hands the binding over to the host for download. If the binding has
// already stopped, the callback fires immediately; otherwise it is saved
// and invoked from OnStopBinding after OnDataAvailable returns
// INET_E_TERMINATED_BIND.
void UrlmonUrlRequest::TerminateBind(const TerminateBindCallback& callback) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DVLOG(1) << __FUNCTION__ << me();
  cleanup_transaction_ = false;
  if (status_.get_state() == Status::DONE) {
    // Binding is stopped. Note result could be an error.
    callback.Run(moniker_, bind_context_, upload_data_,
                 request_headers_.c_str());
  } else {
    // WORKING (ABORTING?). Save the callback.
    // Now we will return INET_TERMINATE_BIND from ::OnDataAvailable() and in
    // ::OnStopBinding will invoke the callback passing our moniker and
    // bind context.
    terminate_bind_callback_ = callback;
    if (pending_data_) {
      // For downloads to work correctly, we must induce a call to
      // OnDataAvailable so that we can download INET_E_TERMINATED_BIND and
      // get IE into the correct state.
      // To accomplish this we read everything that's readily available in
      // the current stream. Once we've reached the end of the stream we
      // should get E_PENDING back and then later we'll get that call
      // to OnDataAvailable.
      std::string data;
      base::win::ScopedComPtr<IStream> read_stream(pending_data_);
      HRESULT hr;
      while ((hr = ReadStream(read_stream, 0xffff, &data)) == S_OK) {
        // Just drop the data.
      }
      DLOG_IF(WARNING, hr != E_PENDING) << __FUNCTION__ <<
          base::StringPrintf(" expected E_PENDING but got 0x%08X", hr);
    }
  }
}
// Delivers up to |bytes_to_read| bytes to the delegate via OnReadComplete,
// draining the host-supplied |cache_| stream first and then the pending
// urlmon data stream.  Returns the number of bytes actually delivered (0
// when nothing is ready yet).  Must run on the request's home thread.
size_t UrlmonUrlRequest::SendDataToDelegate(size_t bytes_to_read) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DCHECK_NE(id(), -1);
  DCHECK_GT(bytes_to_read, 0U);
  size_t bytes_copied = 0;
  if (delegate_) {
    std::string read_data;
    if (cache_) {
      HRESULT hr = ReadStream(cache_, bytes_to_read, &read_data);
      // A short or S_FALSE read means the cached prefix is exhausted; drop
      // the cache so subsequent reads come from the live stream.
      if (hr == S_FALSE || read_data.length() < bytes_to_read) {
        DVLOG(1) << __FUNCTION__ << me() << "all cached data read";
        cache_.Release();
      }
    }

    if (read_data.empty() && pending_data_) {
      // Remember the outstanding read size in case this attempt yields no
      // data; it is restored below so OnDataAvailable can retry later.
      size_t pending_data_read_save = pending_read_size_;
      pending_read_size_ = 0;

      // AddRef the stream while we call Read to avoid a potential issue
      // where we can get a call to OnDataAvailable while inside Read and
      // in our OnDataAvailable call, we can release the stream object
      // while still using it.
      base::win::ScopedComPtr<IStream> pending(pending_data_);
      HRESULT hr = ReadStream(pending, bytes_to_read, &read_data);
      if (read_data.empty())
        pending_read_size_ = pending_data_read_save;
      // If we received S_FALSE it indicates that there is no more data in the
      // stream. Clear it to ensure that OnStopBinding correctly sends over the
      // response end notification to chrome.
      if (hr == S_FALSE)
        pending_data_.Release();
    }

    bytes_copied = read_data.length();

    if (bytes_copied) {
      // Guard flag: tracks that we are inside a delegate callback so other
      // paths can detect re-entrancy.
      ++calling_delegate_;
      DCHECK_NE(id(), -1);
      // The delegate can go away in the middle of ReadStream
      if (delegate_)
        delegate_->OnReadComplete(id(), read_data);
      --calling_delegate_;
    }
  } else {
    DLOG(ERROR) << __FUNCTION__ << me() << "no delegate";
  }

  return bytes_copied;
}
// IBindStatusCallback::OnStartBinding.  Caches the IBinding for later use
// (status queries, Abort).  For a pending (host-initiated) request the
// response headers are already available from the binding and are captured
// here so they can be replayed to Chrome.
STDMETHODIMP UrlmonUrlRequest::OnStartBinding(DWORD reserved,
                                              IBinding* binding) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  binding_ = binding;
  if (pending_) {
    response_headers_ = GetHttpHeadersFromBinding(binding_);
    DCHECK(!response_headers_.empty());
  }
  return S_OK;
}
// IBindStatusCallback::GetPriority.  Always reports a normal thread
// priority for the bind.
STDMETHODIMP UrlmonUrlRequest::GetPriority(LONG *priority) {
  if (priority == NULL)
    return E_POINTER;

  *priority = THREAD_PRIORITY_NORMAL;
  return S_OK;
}
// IBindStatusCallback::OnLowResource.  No special low-resource handling;
// the notification is acknowledged and ignored.
STDMETHODIMP UrlmonUrlRequest::OnLowResource(DWORD reserved) {
  return S_OK;
}
// IBindStatusCallback::OnProgress.  Tracks connection and cookie events and
// intercepts redirects: urlmon is not allowed to follow them itself — the
// redirect is recorded in |status_| and the bind aborted (E_ABORT) so Chrome
// can be informed from OnStopBinding.
STDMETHODIMP UrlmonUrlRequest::OnProgress(ULONG progress, ULONG max_progress,
    ULONG status_code, LPCWSTR status_text) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());

  if (status_.get_state() != Status::WORKING)
    return S_OK;

  // Ignore any notifications received while we are in the pending state
  // waiting for the request to be initiated by Chrome.
  if (pending_ && status_code != BINDSTATUS_REDIRECTING)
    return S_OK;

  if (!delegate_) {
    DVLOG(1) << "Invalid delegate";
    return S_OK;
  }

  switch (status_code) {
    case BINDSTATUS_CONNECTING: {
      // |status_text| carries the host we are connecting to; record it for
      // the socket address reported to Chrome.
      if (status_text) {
        socket_address_.set_host(WideToUTF8(status_text));
      }
      break;
    }

    case BINDSTATUS_REDIRECTING: {
      // If we receive a redirect for the initial pending request initiated
      // when our document loads we should stash it away and inform Chrome
      // accordingly when it requests data for the original URL.
      base::win::ScopedComPtr<BindContextInfo> info;
      BindContextInfo::FromBindContext(bind_context_, info.Receive());
      DCHECK(info);
      GURL previously_redirected(info ? info->GetUrl() : std::wstring());
      if (GURL(status_text) != previously_redirected) {
        DVLOG(1) << __FUNCTION__ << me() << "redirect from " << url()
                 << " to " << status_text;
        // Fetch the redirect status as they aren't all equal (307 in particular
        // retains the HTTP request verb).
        int http_code = GetHttpResponseStatusFromBinding(binding_);
        status_.SetRedirected(http_code, WideToUTF8(status_text));
        // Abort. We will inform Chrome in OnStopBinding callback.
        binding_->Abort();
        return E_ABORT;
      }
      break;
    }

    // Forward cookie privacy events to the delegate for the privacy UI.
    case BINDSTATUS_COOKIE_SENT:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_READ);
      break;

    case BINDSTATUS_COOKIE_SUPPRESSED:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_SUPPRESS);
      break;

    case BINDSTATUS_COOKIE_STATE_ACCEPT:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_ACCEPT);
      break;

    case BINDSTATUS_COOKIE_STATE_REJECT:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_REJECT);
      break;

    case BINDSTATUS_COOKIE_STATE_LEASH:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_LEASH);
      break;

    case BINDSTATUS_COOKIE_STATE_DOWNGRADE:
      delegate_->AddPrivacyDataForUrl(url(), "", COOKIEACTION_DOWNGRADE);
      break;

    case BINDSTATUS_COOKIE_STATE_UNKNOWN:
      NOTREACHED() << L"Unknown cookie state received";
      break;

    default:
      DVLOG(1) << __FUNCTION__ << me()
               << base::StringPrintf(L"code: %i status: %ls", status_code,
                                     status_text);
      break;
  }

  return S_OK;
}
// IBindStatusCallback::OnStopBinding.  Final bind callback.  Reconciles the
// HRESULT with the real HTTP status, completes a pending download-in-host
// handoff (INET_E_TERMINATED_BIND), and decides whether to finish now
// (NotifyDelegateAndDie) or wait for Chrome to read the remaining data.
STDMETHODIMP UrlmonUrlRequest::OnStopBinding(HRESULT result, LPCWSTR error) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DVLOG(1) << __FUNCTION__ << me()
           << "- Request stopped, Result: " << std::hex << result;
  DCHECK(status_.get_state() == Status::WORKING ||
         status_.get_state() == Status::ABORTING);

  Status::State state = status_.get_state();

  // Mark that we are done.
  status_.Done();

  if (result == INET_E_TERMINATED_BIND) {
    if (terminate_requested()) {
      // DownloadRequestInHost asked for the bind to be handed off; deliver
      // the moniker/bind context to the saved callback.
      terminate_bind_callback_.Run(moniker_, bind_context_, upload_data_,
                                   request_headers_.c_str());
    } else {
      cleanup_transaction_ = true;
    }
    // We may have returned INET_E_TERMINATED_BIND from OnDataAvailable.
    result = S_OK;
  }

  if (state == Status::WORKING) {
    status_.set_result(result);

    if (FAILED(result)) {
      int http_code = GetHttpResponseStatusFromBinding(binding_);
      // For certain requests like empty POST requests the server can return
      // back a HTTP success code in the range 200 to 299. We need to flag
      // these requests as succeeded.
      if (IS_HTTP_SUCCESS_CODE(http_code)) {
        // If this DCHECK fires it means that the server returned a HTTP
        // success code outside the standard range 200-206. We need to confirm
        // if the following code path is correct.
        DCHECK_LE(http_code, 206);
        status_.set_result(S_OK);
        std::string headers = GetHttpHeadersFromBinding(binding_);
        OnResponse(0, UTF8ToWide(headers).c_str(), NULL, NULL);
      } else if (net::HttpResponseHeaders::IsRedirectResponseCode(http_code) &&
                 result == E_ACCESSDENIED) {
        // Special case. If the last request was a redirect and the current OS
        // error value is E_ACCESSDENIED, that means an unsafe redirect was
        // attempted. In that case, correct the OS error value to be the more
        // specific ERR_UNSAFE_REDIRECT error value.
        status_.set_result(net::URLRequestStatus::FAILED,
                           net::ERR_UNSAFE_REDIRECT);
      }
    }

    // The code below seems easy but it is not. :)
    // The network policy in Chrome network is that error code/end_of_stream
    // should be returned only as a result of read (or start) request.
    // Here are the possible cases:
    // pending_data_|pending_read
    // FALSE        |FALSE     => EndRequest if no headers, otherwise wait for Read.
    // FALSE        |TRUE      => EndRequest.
    // TRUE         |FALSE     => Wait for Read.
    // TRUE         |TRUE      => Something went wrong!!
    if (pending_data_) {
      DCHECK_EQ(pending_read_size_, 0UL);
      ReleaseBindings();
      return S_OK;
    }

    if (headers_received_ && pending_read_size_ == 0) {
      ReleaseBindings();
      return S_OK;
    }

    // No headers or there is a pending read from Chrome.
    NotifyDelegateAndDie();
    return S_OK;
  }

  // Status::ABORTING
  if (status_.was_redirected()) {
    // Just release bindings here. Chrome will issue EndRequest(request_id)
    // after processing headers we had provided.
    if (!pending_) {
      std::string headers = GetHttpHeadersFromBinding(binding_);
      OnResponse(0, UTF8ToWide(headers).c_str(), NULL, NULL);
    }
    ReleaseBindings();
    return S_OK;
  }

  // Stop invoked.
  NotifyDelegateAndDie();
  return S_OK;
}
// IBindStatusCallback::GetBindInfo.  Translates the request's HTTP method,
// load flags and upload data into urlmon BINDF_* flags and a populated
// BINDINFO.  Returns E_INVALIDARG for malformed arguments and E_OUTOFMEMORY
// when a custom verb string cannot be allocated.
STDMETHODIMP UrlmonUrlRequest::GetBindInfo(DWORD* bind_flags,
                                           BINDINFO* bind_info) {
  if ((bind_info == NULL) || (bind_info->cbSize == 0) || (bind_flags == NULL))
    return E_INVALIDARG;

  *bind_flags = BINDF_ASYNCHRONOUS | BINDF_ASYNCSTORAGE | BINDF_PULLDATA;

  // We handle redirects ourselves (see OnProgress); tell wininet not to
  // follow them automatically.
  bind_info->dwOptionsFlags = INTERNET_FLAG_NO_AUTO_REDIRECT;
  bind_info->dwOptions = BINDINFO_OPTIONS_WININETFLAG;

  // TODO(ananta)
  // Look into whether the other load flags need to be supported in chrome
  // frame.
  if (load_flags_ & net::LOAD_VALIDATE_CACHE)
    *bind_flags |= BINDF_RESYNCHRONIZE;

  if (load_flags_ & net::LOAD_BYPASS_CACHE)
    *bind_flags |= BINDF_GETNEWESTVERSION;

  if (LowerCaseEqualsASCII(method(), "get")) {
    bind_info->dwBindVerb = BINDVERB_GET;
  } else if (LowerCaseEqualsASCII(method(), "post")) {
    bind_info->dwBindVerb = BINDVERB_POST;
  } else if (LowerCaseEqualsASCII(method(), "put")) {
    bind_info->dwBindVerb = BINDVERB_PUT;
  } else {
    // Any other verb is passed through as a custom verb string, which per
    // the COM contract must be CoTaskMemAlloc'ed (urlmon frees it).
    std::wstring verb(ASCIIToWide(StringToUpperASCII(method())));
    bind_info->dwBindVerb = BINDVERB_CUSTOM;
    bind_info->szCustomVerb = reinterpret_cast<wchar_t*>(
        ::CoTaskMemAlloc((verb.length() + 1) * sizeof(wchar_t)));
    // Fix: the allocation was previously dereferenced unchecked; fail
    // cleanly instead of crashing if it returns NULL.
    if (bind_info->szCustomVerb == NULL)
      return E_OUTOFMEMORY;
    lstrcpyW(bind_info->szCustomVerb, verb.c_str());
  }

  if (bind_info->dwBindVerb == BINDVERB_POST ||
      bind_info->dwBindVerb == BINDVERB_PUT ||
      post_data_len() > 0) {
    // Bypass caching proxies on upload requests and avoid writing responses to
    // the browser's cache.
    *bind_flags |= BINDF_GETNEWESTVERSION | BINDF_PRAGMA_NO_CACHE;

    // Attempt to avoid storing the response for upload requests.
    // See http://crbug.com/55918
    if (resource_type_ != ResourceType::MAIN_FRAME)
      *bind_flags |= BINDF_NOWRITECACHE;

    // Initialize the STGMEDIUM.
    memset(&bind_info->stgmedData, 0, sizeof(STGMEDIUM));
    bind_info->grfBindInfoF = 0;

    if (bind_info->dwBindVerb != BINDVERB_CUSTOM)
      bind_info->szCustomVerb = NULL;

    if ((post_data_len() || is_chunked_upload()) &&
        get_upload_data(&bind_info->stgmedData.pstm) == S_OK) {
      bind_info->stgmedData.tymed = TYMED_ISTREAM;
      // Chunked uploads have no known length up front.
      if (!is_chunked_upload()) {
        bind_info->cbstgmedData = static_cast<DWORD>(post_data_len());
      }
      DVLOG(1) << __FUNCTION__ << me() << method()
               << " request with " << base::Int64ToString(post_data_len())
               << " bytes. url=" << url();
    } else {
      DVLOG(1) << __FUNCTION__ << me() << "POST request with no data!";
    }
  }

  return S_OK;
}
// IBindStatusCallback::OnDataAvailable.  Data arrived on |storage|'s stream.
// Keeps a reference to the stream, services any read Chrome already posted
// (pending_read_size_), and returns INET_E_TERMINATED_BIND when a host
// download was requested — or at end-of-stream for a potential download —
// so the transaction stays reusable (see TerminateBind).
STDMETHODIMP UrlmonUrlRequest::OnDataAvailable(DWORD flags, DWORD size,
                                               FORMATETC* formatetc,
                                               STGMEDIUM* storage) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DVLOG(1) << __FUNCTION__ << me() << "bytes available: " << size;

  if (terminate_requested()) {
    DVLOG(1) << " Download requested. INET_E_TERMINATED_BIND returned";
    return INET_E_TERMINATED_BIND;
  }

  if (!storage || (storage->tymed != TYMED_ISTREAM)) {
    NOTREACHED();
    return E_INVALIDARG;
  }

  IStream* read_stream = storage->pstm;
  if (!read_stream) {
    NOTREACHED();
    return E_UNEXPECTED;
  }

  // Some requests such as HEAD have zero data.
  if (size > 0)
    pending_data_ = read_stream;

  if (pending_read_size_) {
    // Chrome already asked for data before it arrived; satisfy that read now.
    size_t bytes_copied = SendDataToDelegate(pending_read_size_);
    DVLOG(1) << __FUNCTION__ << me() << "size read: " << bytes_copied;
  } else {
    DVLOG(1) << __FUNCTION__ << me() << "- waiting for remote read";
  }

  if (BSCF_LASTDATANOTIFICATION & flags) {
    if (!is_expecting_download_ || pending()) {
      DVLOG(1) << __FUNCTION__ << me() << "EOF";
      return S_OK;
    }
    // Always return INET_E_TERMINATED_BIND to allow bind context reuse
    // if DownloadToHost is suddenly requested.
    DVLOG(1) << __FUNCTION__ << " EOF: INET_E_TERMINATED_BIND returned";
    return INET_E_TERMINATED_BIND;
  }
  return S_OK;
}
// IBindStatusCallback::OnObjectAvailable.  This request always binds to
// storage (an IStream via BindToStorage), never to an object, so urlmon
// should never deliver this callback.
STDMETHODIMP UrlmonUrlRequest::OnObjectAvailable(REFIID iid, IUnknown* object) {
  NOTREACHED();
  return E_NOTIMPL;
}
// IHttpNegotiate::BeginningTransaction.  Builds the additional request
// headers urlmon should send: chunked transfer encoding, caller-supplied
// extra headers, the referrer, and the Chrome Frame user agent.
// *additional_headers is CoTaskMemAlloc'ed per the COM contract (the caller
// frees it).  Returns E_ABORT when the request is being aborted, E_POINTER /
// E_OUTOFMEMORY on argument or allocation failure, S_OK otherwise.
STDMETHODIMP UrlmonUrlRequest::BeginningTransaction(const wchar_t* url,
    const wchar_t* current_headers, DWORD reserved,
    wchar_t** additional_headers) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  if (!additional_headers) {
    NOTREACHED();
    return E_POINTER;
  }

  DVLOG(1) << __FUNCTION__ << me() << "headers: \n" << current_headers;

  if (status_.get_state() == Status::ABORTING) {
    // At times the BINDSTATUS_REDIRECTING notification which is sent to the
    // IBindStatusCallback interface does not have an accompanying HTTP
    // redirect status code, i.e. the attempt to query the HTTP status code
    // from the binding returns 0, 200, etc which are invalid redirect codes.
    // We don't want urlmon to follow redirects. We return E_ABORT in our
    // IBindStatusCallback::OnProgress function and also abort the binding.
    // However urlmon still tries to establish a transaction with the
    // redirected URL which confuses the web server.
    // Fix is to abort the attempted transaction.
    DLOG(WARNING) << __FUNCTION__ << me()
                  << ": Aborting connection to URL:"
                  << url
                  << " as the binding has been aborted";
    return E_ABORT;
  }

  HRESULT hr = S_OK;

  std::string new_headers;
  if (is_chunked_upload()) {
    new_headers = base::StringPrintf("Transfer-Encoding: chunked\r\n");
  }

  if (!extra_headers().empty()) {
    // TODO(robertshield): We may need to sanitize headers on POST here.
    new_headers += extra_headers();
  }

  if (!referrer().empty()) {
    // Referrer is famously misspelled in HTTP:
    new_headers += base::StringPrintf("Referer: %s\r\n", referrer().c_str());
  }

  // In the rare case if "User-Agent" string is already in |current_headers|.
  // We send Chrome's user agent in requests initiated within ChromeFrame to
  // enable third party content in pages rendered in ChromeFrame to correctly
  // send content for Chrome as the third party content may not be equipped to
  // identify chromeframe as the user agent. This also ensures that the user
  // agent reported in scripts in chrome frame is consistent with that sent
  // in outgoing requests.
  std::string user_agent = http_utils::AddChromeFrameToUserAgentValue(
      http_utils::GetChromeUserAgent());
  new_headers += ReplaceOrAddUserAgent(current_headers, user_agent);

  if (!new_headers.empty()) {
    *additional_headers = reinterpret_cast<wchar_t*>(
        CoTaskMemAlloc((new_headers.size() + 1) * sizeof(wchar_t)));
    if (*additional_headers == NULL) {
      NOTREACHED();
      hr = E_OUTOFMEMORY;
    } else {
      // Fix: lstrcpynW's size argument counts the terminating NUL, so the
      // previous value of new_headers.size() silently dropped the last
      // character of the header block.  The buffer holds size()+1 wchars.
      lstrcpynW(*additional_headers, ASCIIToWide(new_headers).c_str(),
                new_headers.size() + 1);
    }
  }
  request_headers_ = new_headers;
  return hr;
}
// IHttpNegotiate::OnResponse.  Receives the response headers from urlmon,
// applies the optional X-Frame-Options frame-busting check, and forwards
// the headers (plus any recorded redirect information) to the delegate via
// OnResponseStarted.  Returns E_FAIL to kill a frame-busted navigation.
STDMETHODIMP UrlmonUrlRequest::OnResponse(DWORD dwResponseCode,
    const wchar_t* response_headers, const wchar_t* request_headers,
    wchar_t** additional_headers) {
  DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
  DVLOG(1) << __FUNCTION__ << me() << "headers: \n"
           << (response_headers == NULL ? L"EMPTY" : response_headers);

  if (!delegate_) {
    DLOG(WARNING) << "Invalid delegate";
    return S_OK;
  }

  delegate_->AddPrivacyDataForUrl(url(), "", 0);

  std::string raw_headers;
  if (response_headers)
    raw_headers = WideToUTF8(response_headers);

  // Security check for frame busting headers. We don't honor the headers
  // as-such, but instead simply kill requests which we've been asked to
  // look for if they specify a value for "X-Frame-Options" other than
  // "ALLOWALL" (the others are "deny" and "sameorigin"). This puts the onus
  // on the user of the UrlRequest to specify whether or not requests should
  // be inspected. For ActiveDocuments, the answer is "no", since WebKit's
  // detection/handling is sufficient and since ActiveDocuments cannot be
  // hosted as iframes. For NPAPI and ActiveX documents, the Initialize()
  // function of the PluginUrlRequest object allows them to specify how they'd
  // like requests handled. Both should set enable_frame_busting_ to true to
  // avoid CSRF attacks. Should WebKit's handling of this ever change, we will
  // need to re-visit how and when frames are killed to better mirror a policy
  // which may do something other than kill the sub-document outright.
  // NOTE(slightlyoff): We don't use net::HttpResponseHeaders here because
  // of lingering ICU/base_noicu issues.
  if (enable_frame_busting_) {
    if (http_utils::HasFrameBustingHeader(raw_headers)) {
      DLOG(ERROR) << "X-Frame-Options header other than ALLOWALL " <<
          "detected, navigation canceled";
      return E_FAIL;
    }
  }

  DVLOG(1) << __FUNCTION__ << me() << "Calling OnResponseStarted";

  // Inform the delegate.
  headers_received_ = true;
  DCHECK_NE(id(), -1);
  delegate_->OnResponseStarted(id(),
                               "",                   // mime_type
                               raw_headers.c_str(),  // headers
                               0,                    // size
                               base::Time(),         // last_modified
                               status_.get_redirection().utf8_url,
                               status_.get_redirection().http_code,
                               socket_address_,
                               post_data_len());
  return S_OK;
}
// IWindowForBindingUI::GetWindow.  Supplies the parent window urlmon should
// use for any UI it needs to show (authentication prompts, certificate
// dialogs, etc.).  |guid_reason| identifies which interface triggered the
// query; in debug builds it is logged in readable form.
STDMETHODIMP UrlmonUrlRequest::GetWindow(const GUID& guid_reason,
                                         HWND* parent_window) {
  if (!parent_window)
    return E_INVALIDARG;

#ifndef NDEBUG
  wchar_t guid[40] = {0};
  ::StringFromGUID2(guid_reason, guid, arraysize(guid));
  const wchar_t* str = guid;
  if (guid_reason == IID_IAuthenticate)
    str = L"IAuthenticate";
  else if (guid_reason == IID_IHttpSecurity)
    str = L"IHttpSecurity";
  else if (guid_reason == IID_IWindowForBindingUI)
    str = L"IWindowForBindingUI";
  DVLOG(1) << __FUNCTION__ << me() << "GetWindow: " << str;
#endif
  // We should return a non-NULL HWND as parent. Otherwise no dialog is shown.
  // TODO(iyengar): This hits when running the URL request tests.
  DLOG_IF(WARNING, !::IsWindow(parent_window_))
      << "UrlmonUrlRequest::GetWindow - no window!";
  *parent_window = parent_window_;
  return S_OK;
}
// IAuthenticate::Authenticate.  Rather than supplying credentials directly,
// hand urlmon a parent window so it can prompt the user itself.  Refused
// outright (E_ACCESSDENIED) in privileged mode.
STDMETHODIMP UrlmonUrlRequest::Authenticate(HWND* parent_window,
                                            LPWSTR* user_name,
                                            LPWSTR* password) {
  if (parent_window == NULL)
    return E_INVALIDARG;

  if (privileged_mode_)
    return E_ACCESSDENIED;

  DCHECK(::IsWindow(parent_window_));
  *parent_window = parent_window_;
  return S_OK;
}
// IHttpSecurity::OnSecurityProblem.  Urlmon reports SSL/certificate and
// authentication problems by querying the IBindStatusCallback object for
// IHttpSecurity and calling this method; if the interface is absent it
// eventually fails the bind in OnStopBinding (0x800c0019), which MSHTML
// turns into an interstitial page.  Ideally Chrome would show its own SSL
// interstitial (which requires returning real certificate data to Chrome);
// until then we implement IHttpSecurity ourselves and replicate urlmon's
// checks, which makes urlmon show an IE6-style certificate-error dialog.
// Return values: S_FALSE = show the default dialog, RPC_E_RETRY = retry the
// operation, E_ABORT = fail the bind.
STDMETHODIMP UrlmonUrlRequest::OnSecurityProblem(DWORD problem) {
  DVLOG(1) << __FUNCTION__ << me() << "Security problem : " << problem;

  // IE6's default IBindStatusCallback does not implement IHttpSecurity,
  // which causes IE to put up its own certificate-error dialog.  Emulate
  // that behavior so sites with mismatched certificates keep working.
  if (GetIEVersion() == IE_6)
    return S_FALSE;

  if (problem == ERROR_INTERNET_SEC_CERT_REV_FAILED)
    return RPC_E_RETRY;

  if (problem == ERROR_INTERNET_SEC_CERT_DATE_INVALID ||
      problem == ERROR_INTERNET_SEC_CERT_CN_INVALID ||
      problem == ERROR_INTERNET_INVALID_CA) {
    return S_FALSE;
  }

  NOTREACHED() << "Unhandled security problem : " << problem;
  return E_ABORT;
}
// Kicks off the asynchronous urlmon bind for this request.  Creates the
// moniker and bind context when this is a fresh request (a pending request
// already has both), registers this object as the status callback and calls
// BindToStorage.  Note that BindToStorage may complete synchronously,
// running the full OnStartBinding/OnStopBinding sequence — and possibly
// destroying this object — before it returns.
HRESULT UrlmonUrlRequest::StartAsyncDownload() {
  DVLOG(1) << __FUNCTION__ << me() << url();
  HRESULT hr = E_FAIL;
  // Moniker and bind context must be created (or adopted) together.
  DCHECK((moniker_ && bind_context_) || (!moniker_ && !bind_context_));

  if (!moniker_.get()) {
    std::wstring wide_url = UTF8ToWide(url());
    hr = CreateURLMonikerEx(NULL, wide_url.c_str(), moniker_.Receive(),
                            URL_MK_UNIFORM);
    if (FAILED(hr)) {
      NOTREACHED() << "CreateURLMonikerEx failed. Error: " << hr;
      return hr;
    }
  }

  if (bind_context_.get() == NULL) {
    hr = ::CreateAsyncBindCtxEx(NULL, 0, this, NULL,
                                bind_context_.Receive(), 0);
    DCHECK(SUCCEEDED(hr)) << "CreateAsyncBindCtxEx failed. Error: " << hr;
  } else {
    // Use existing bind context.
    hr = ::RegisterBindStatusCallback(bind_context_, this, NULL, 0);
    DCHECK(SUCCEEDED(hr)) << "RegisterBindStatusCallback failed. Error: " << hr;
  }

  if (SUCCEEDED(hr)) {
    base::win::ScopedComPtr<IStream> stream;

    // BindToStorage may complete synchronously.
    // We still get all the callbacks - OnStart/StopBinding, this may result
    // in destruction of our object. It's fine but we access some members
    // below for debug info. :)
    base::win::ScopedComPtr<IHttpSecurity> self(this);

    // Inform our moniker patch this binding should not be tortured.
    base::win::ScopedComPtr<BindContextInfo> info;
    BindContextInfo::FromBindContext(bind_context_, info.Receive());
    DCHECK(info);
    if (info)
      info->set_chrome_request(true);

    hr = moniker_->BindToStorage(bind_context_, NULL, __uuidof(IStream),
                                 reinterpret_cast<void**>(stream.Receive()));
    if (hr == S_OK)
      DCHECK(binding_ != NULL || status_.get_state() == Status::DONE);

    if (FAILED(hr)) {
      // TODO(joshia): Look into. This currently fails for:
      // http://user2:secret@localhost:1337/auth-basic?set-cookie-if-challenged
      // when running the UrlRequest unit tests.
      DLOG(ERROR) << __FUNCTION__ << me() <<
          base::StringPrintf("IUrlMoniker::BindToStorage failed 0x%08X.", hr);
      // In most cases we'll get a MK_E_SYNTAX error here but if we abort
      // the navigation ourselves such as in the case of seeing something
      // else than ALLOWALL in X-Frame-Options.
    }
  }

  DLOG_IF(ERROR, FAILED(hr)) << me() <<
      base::StringPrintf(L"StartAsyncDownload failed: 0x%08X", hr);

  return hr;
}
void UrlmonUrlRequest::NotifyDelegateAndDie() {
DCHECK_EQ(thread_, base::PlatformThread::CurrentId());
DVLOG(1) << __FUNCTION__ << me();
PluginUrlRequestDelegate* delegate = delegate_;
delegate_ = NULL;
ReleaseBindings();
TerminateTransaction();
if (delegate && id() != -1) {
net::URLRequestStatus result = status_.get_result();
delegate->OnResponseEnd(id(), result);
} else {
DLOG(WARNING) << __FUNCTION__ << me() << "no delegate";
}
}
// Releases the bind context, first cleanly terminating urlmon's internal
// transaction when we previously kept it alive (cleanup_transaction_) by
// returning INET_E_TERMINATED_BIND from OnDataAvailable.
void UrlmonUrlRequest::TerminateTransaction() {
  if (cleanup_transaction_ && bind_context_ && moniker_) {
    // We return INET_E_TERMINATED_BIND from our OnDataAvailable implementation
    // to ensure that the transaction stays around if Chrome decides to issue
    // a download request when it finishes inspecting the headers received in
    // OnResponse. However this causes the urlmon transaction object to leak.
    // To workaround this we save away the IInternetProtocol interface which is
    // implemented by the urlmon CTransaction object in our BindContextInfo
    // instance which is maintained per bind context. Invoking Terminate
    // on this with the special flags 0x2000000 cleanly releases the
    // transaction.
    static const int kUrlmonTerminateTransactionFlags = 0x2000000;
    base::win::ScopedComPtr<BindContextInfo> info;
    BindContextInfo::FromBindContext(bind_context_, info.Receive());
    DCHECK(info);
    if (info && info->protocol()) {
      info->protocol()->Terminate(kUrlmonTerminateTransactionFlags);
    }
  }
  bind_context_.Release();
}
// Drops the IBinding reference and unregisters this object as the bind
// status callback.  The bind context itself is deliberately kept alive so a
// later DownloadToHost request can still reuse it (see TerminateTransaction
// for the final release).
void UrlmonUrlRequest::ReleaseBindings() {
  binding_.Release();
  if (bind_context_) {
    ::RevokeBindStatusCallback(bind_context_, this);
  }
}
// Maps an urlmon/COM HRESULT onto the closest net::Error value.
// Useful reference:
// http://msdn.microsoft.com/en-us/library/ms775145(VS.85).aspx
net::Error UrlmonUrlRequest::HresultToNetError(HRESULT hr) {
  // Observed for malformed host names.
  const int kInvalidHostName = 0x8007007b;

  switch (hr) {
    case S_OK:
      return net::OK;

    case MK_E_SYNTAX:
      return net::ERR_INVALID_URL;

    case INET_E_CANNOT_CONNECT:
      return net::ERR_CONNECTION_FAILED;

    case INET_E_DOWNLOAD_FAILURE:
    case INET_E_CONNECTION_TIMEOUT:
    case E_ABORT:
      return net::ERR_CONNECTION_ABORTED;

    case INET_E_DATA_NOT_AVAILABLE:
      return net::ERR_EMPTY_RESPONSE;

    case INET_E_RESOURCE_NOT_FOUND:
      // Translated as a tunnel failure to behave more closely to the Chrome
      // network stack; exercised by the ProxyTunnelRedirectTest and
      // UnexpectedServerAuthTest tests.
      return net::ERR_TUNNEL_CONNECTION_FAILED;

    // The following error codes can be returned while processing an invalid
    // url. http://msdn.microsoft.com/en-us/library/bb250493(v=vs.85).aspx
    case INET_E_INVALID_URL:
    case INET_E_UNKNOWN_PROTOCOL:
    case INET_E_REDIRECT_FAILED:
    case INET_E_SECURITY_PROBLEM:
    case kInvalidHostName:
    case E_INVALIDARG:
    case E_OUTOFMEMORY:
      return net::ERR_INVALID_URL;

    case INET_E_INVALID_CERTIFICATE:
      return net::ERR_CERT_INVALID;

    case E_ACCESSDENIED:
      return net::ERR_ACCESS_DENIED;

    default:
      DLOG(WARNING)
          << base::StringPrintf("TODO: translate HRESULT 0x%08X to net::Error",
                                hr);
      return net::ERR_UNEXPECTED;
  }
}
// This manager's entry points must all be called from a single thread; it
// advertises no thread-safe capabilities to the plugin framework.
PluginUrlRequestManager::ThreadSafeFlags
    UrlmonUrlRequestManager::GetThreadSafeFlags() {
  return PluginUrlRequestManager::NOT_THREADSAFE;
}
// Creates a "pending" UrlmonUrlRequest wrapping a bind the host browser has
// already started for |url|, so the in-flight data can later be adopted by
// StartRequestHelper when Chrome asks for the same URL.
void UrlmonUrlRequestManager::SetInfoForUrl(const std::wstring& url,
                                            IMoniker* moniker, LPBC bind_ctx) {
  CComObject<UrlmonUrlRequest>* new_request = NULL;
  CComObject<UrlmonUrlRequest>::CreateInstance(&new_request);
  if (new_request) {
    GURL start_url(url);
    DCHECK(start_url.is_valid());
    // Only one host-initiated request may be pending at a time.
    DCHECK(pending_request_ == NULL);

    base::win::ScopedComPtr<BindContextInfo> info;
    BindContextInfo::FromBindContext(bind_ctx, info.Receive());
    DCHECK(info);
    // Data the host already downloaded, if any, travels with the request.
    IStream* cache = info ? info->cache() : NULL;
    pending_request_ = new_request;
    pending_request_->InitPending(start_url, moniker, bind_ctx,
                                  enable_frame_busting_, privileged_mode_,
                                  notification_window_, cache);
    // Start the request
    bool is_started = pending_request_->Start();
    DCHECK(is_started);
  }
}
// Entry point for a request initiated by Chrome.  Frame requests are
// serviced on the current thread; when the background worker thread is
// enabled, sub-resource requests (images, scripts, etc.) are posted there
// and tracked in the lock-protected background map instead.
void UrlmonUrlRequestManager::StartRequest(int request_id,
    const AutomationURLRequest& request_info) {
  DVLOG(1) << __FUNCTION__ << " id: " << request_id;

  if (stopping_) {
    DLOG(WARNING) << __FUNCTION__ << " request not started (stopping)";
    return;
  }

  // |request_id| must not already be tracked in either map.
  DCHECK(request_map_.find(request_id) == request_map_.end());
#ifndef NDEBUG
  if (background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    DCHECK(background_request_map_.find(request_id) ==
           background_request_map_.end());
  }
#endif  // NDEBUG
  DCHECK(GURL(request_info.url).is_valid());

  // Non frame requests like sub resources, images, etc are handled on the
  // background thread.
  if (background_worker_thread_enabled_ &&
      !ResourceType::IsFrame(
          static_cast<ResourceType::Type>(request_info.resource_type))) {
    DLOG(INFO) << "Downloading resource type "
               << request_info.resource_type
               << " on background thread";
    background_thread_->message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&UrlmonUrlRequestManager::StartRequestHelper,
                   base::Unretained(this), request_id, request_info,
                   &background_request_map_, &background_resource_map_lock_));
    return;
  }
  StartRequestHelper(request_id, request_info, &request_map_, NULL);
}
// Creates — or adopts the host-initiated pending request for — |request_id|,
// rewrites chunked upload data into HTTP chunk framing, registers the
// request in |request_map| (under |request_map_lock| when supplied) and
// starts it.  For an adopted pending request the already-received response
// headers are replayed through OnResponse instead of restarting the bind.
void UrlmonUrlRequestManager::StartRequestHelper(
    int request_id,
    const AutomationURLRequest& request_info,
    RequestMap* request_map,
    base::Lock* request_map_lock) {
  DCHECK(request_map);
  scoped_refptr<UrlmonUrlRequest> new_request;
  bool is_started = false;
  if (pending_request_) {
    if (pending_request_->url() != request_info.url) {
      // Chrome is asking for a different URL than the host pre-fetched;
      // the pending bind is useless, so stop it and start fresh.
      DLOG(INFO) << __FUNCTION__
                 << "Received url request for url:"
                 << request_info.url
                 << ". Stopping pending url request for url:"
                 << pending_request_->url();
      pending_request_->Stop();
      pending_request_ = NULL;
    } else {
      new_request.swap(pending_request_);
      is_started = true;
      DVLOG(1) << __FUNCTION__ << new_request->me()
               << " assigned id " << request_id;
    }
  }

  if (!is_started) {
    CComObject<UrlmonUrlRequest>* created_request = NULL;
    CComObject<UrlmonUrlRequest>::CreateInstance(&created_request);
    new_request = created_request;
  }

  // Format upload data if it's chunked.
  if (request_info.upload_data && request_info.upload_data->is_chunked()) {
    ScopedVector<net::UploadElement>* elements =
        request_info.upload_data->elements_mutable();
    for (size_t i = 0; i < elements->size(); ++i) {
      net::UploadElement* element = (*elements)[i];
      DCHECK(element->type() == net::UploadElement::TYPE_BYTES);
      // Each element becomes: "<hex length>\r\n<data>\r\n".
      std::string chunk_length = base::StringPrintf(
          "%X\r\n", static_cast<unsigned int>(element->bytes_length()));
      std::vector<char> bytes;
      bytes.insert(bytes.end(), chunk_length.data(),
                   chunk_length.data() + chunk_length.length());
      const char* data = element->bytes();
      bytes.insert(bytes.end(), data, data + element->bytes_length());
      const char* crlf = "\r\n";
      bytes.insert(bytes.end(), crlf, crlf + strlen(crlf));
      if (i == elements->size() - 1) {
        // Terminating zero-length chunk.
        const char* end_of_data = "0\r\n\r\n";
        bytes.insert(bytes.end(), end_of_data,
                     end_of_data + strlen(end_of_data));
      }
      element->SetToBytes(&bytes[0], static_cast<int>(bytes.size()));
    }
  }

  new_request->Initialize(static_cast<PluginUrlRequestDelegate*>(this),
      request_id,
      request_info.url,
      request_info.method,
      request_info.referrer,
      request_info.extra_request_headers,
      request_info.upload_data,
      static_cast<ResourceType::Type>(request_info.resource_type),
      enable_frame_busting_,
      request_info.load_flags);
  new_request->set_parent_window(notification_window_);
  new_request->set_privileged_mode(privileged_mode_);

  if (request_map_lock)
    request_map_lock->Acquire();
  (*request_map)[request_id] = new_request;
  if (request_map_lock)
    request_map_lock->Release();

  if (!is_started) {
    // Freshly created, start now.
    new_request->Start();
  } else {
    // Request is already underway, call OnResponse so that the
    // other side can start reading.
    DCHECK(!new_request->response_headers().empty());
    new_request->OnResponse(
        0, UTF8ToWide(new_request->response_headers()).c_str(), NULL, NULL);
  }
}
// Forwards a read of |bytes_to_read| to the request identified by
// |request_id|.  Looks in the foreground map first; for background requests
// the read must be posted to the worker thread that owns them.
void UrlmonUrlRequestManager::ReadRequest(int request_id, int bytes_to_read) {
  DVLOG(1) << __FUNCTION__ << " id: " << request_id;
  // if we fail to find the request in the normal map and the background
  // request map, it may mean that the request could have failed with a
  // network error.
  scoped_refptr<UrlmonUrlRequest> request = LookupRequest(request_id,
                                                          &request_map_);
  if (request) {
    request->Read(bytes_to_read);
  } else if (background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    request = LookupRequest(request_id, &background_request_map_);
    if (request) {
      background_thread_->message_loop()->PostTask(
          FROM_HERE, base::Bind(base::IgnoreResult(&UrlmonUrlRequest::Read),
                                request.get(), bytes_to_read));
    }
  }
  if (!request)
    DLOG(ERROR) << __FUNCTION__ << " no request found for " << request_id;
}
// Hands the identified request's download off to the host browser.  As with
// ReadRequest, the request may live in the foreground map or (when the
// worker thread is enabled) in the background map, in which case the
// handoff is posted to that thread.
void UrlmonUrlRequestManager::DownloadRequestInHost(int request_id) {
  DVLOG(1) << __FUNCTION__ << " " << request_id;
  if (!IsWindow(notification_window_)) {
    NOTREACHED() << "Cannot handle download if we don't have anyone to hand it "
                    "to.";
    return;
  }

  scoped_refptr<UrlmonUrlRequest> request(LookupRequest(request_id,
                                                        &request_map_));
  if (request) {
    DownloadRequestInHostHelper(request);
  } else if (background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    request = LookupRequest(request_id, &background_request_map_);
    if (request) {
      background_thread_->message_loop()->PostTask(
          FROM_HERE,
          base::Bind(&UrlmonUrlRequestManager::DownloadRequestInHostHelper,
                     base::Unretained(this), request.get()));
    }
  }
  if (!request)
    DLOG(ERROR) << __FUNCTION__ << " no request found for " << request_id;
}
// Asks |request| to terminate its bind; BindTerminated will then receive
// the moniker/bind context needed to hand the download to the host browser.
void UrlmonUrlRequestManager::DownloadRequestInHostHelper(
    UrlmonUrlRequest* request) {
  DCHECK(request);
  request->TerminateBind(base::Bind(&UrlmonUrlRequestManager::BindTerminated,
                                    base::Unretained(this)));
}
// Callback run once a bind has been terminated for a download-in-host
// handoff.  Packages the moniker, bind context, upload stream and request
// headers and posts them to the notification window; ownership of the
// heap-allocated params passes to the WM_DOWNLOAD_IN_HOST handler.
void UrlmonUrlRequestManager::BindTerminated(IMoniker* moniker,
                                             IBindCtx* bind_ctx,
                                             IStream* post_data,
                                             const char* request_headers) {
  DownloadInHostParams* params = new DownloadInHostParams;
  params->moniker = moniker;
  params->bind_ctx = bind_ctx;
  params->post_data = post_data;
  if (request_headers != NULL)
    params->request_headers = request_headers;

  ::PostMessage(notification_window_, WM_DOWNLOAD_IN_HOST,
                reinterpret_cast<WPARAM>(params), 0);
}
// Reads the WinINet cookies for |url|, reports them to the delegate through
// OnCookiesRetrieved(), and records the outcome in the privacy data.
void UrlmonUrlRequestManager::GetCookiesForUrl(const GURL& url, int cookie_id) {
  DWORD cookie_size = 0;
  bool success = true;
  std::string cookie_string;
  int32 cookie_action = COOKIEACTION_READ;
  // First call passes a NULL buffer so WinINet reports the required size in
  // |cookie_size|. NOTE(review): |result| itself is never examined.
  BOOL result = InternetGetCookieA(url.spec().c_str(), NULL, NULL,
                                   &cookie_size);
  DWORD error = 0;
  if (cookie_size) {
    scoped_array<char> cookies(new char[cookie_size + 1]);
    if (!InternetGetCookieA(url.spec().c_str(), NULL, cookies.get(),
                            &cookie_size)) {
      success = false;
      error = GetLastError();
      NOTREACHED() << "InternetGetCookie failed. Error: " << error;
    } else {
      cookie_string = cookies.get();
    }
  } else {
    // Zero size: either there are no cookies or the call failed outright.
    success = false;
    error = GetLastError();
    DVLOG(1) << "InternetGetCookie failed. Error: " << error;
  }
  OnCookiesRetrieved(success, url, cookie_string, cookie_id);
  // Failure without a Win32 error code is recorded as COOKIEACTION_SUPPRESS
  // instead of a read.
  if (!success && !error)
    cookie_action = COOKIEACTION_SUPPRESS;
  AddPrivacyDataForUrl(url.spec(), "", cookie_action);
}
// Writes |cookie| for |url| through WinINet with P3P evaluation enabled and
// records the resulting cookie action in the privacy data.
void UrlmonUrlRequestManager::SetCookiesForUrl(const GURL& url,
                                               const std::string& cookie) {
  DCHECK(container_);
  // Grab a reference on the container to ensure that we don't get destroyed in
  // case the InternetSetCookie call below puts up a dialog box, which can
  // happen if the cookie policy is set to prompt.
  if (container_) {
    container_->AddRef();
  }
  InternetCookieState cookie_state = static_cast<InternetCookieState>(
      InternetSetCookieExA(url.spec().c_str(), NULL, cookie.c_str(),
                           INTERNET_COOKIE_EVALUATE_P3P, NULL));
  int32 cookie_action = MapCookieStateToCookieAction(cookie_state);
  AddPrivacyDataForUrl(url.spec(), "", cookie_action);
  if (container_) {
    container_->Release();
  }
}
// Stops and unregisters the request identified by |request_id|. Background
// requests are erased under the lock and stopped on the background thread.
void UrlmonUrlRequestManager::EndRequest(int request_id) {
  DVLOG(1) << __FUNCTION__ << " id: " << request_id;
  scoped_refptr<UrlmonUrlRequest> request = LookupRequest(request_id,
                                                          &request_map_);
  if (request) {
    request_map_.erase(request_id);
    request->Stop();
  } else if (background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    request = LookupRequest(request_id, &background_request_map_);
    if (request) {
      // The scoped_refptr in the posted task keeps the request alive until
      // Stop() has run on the background thread.
      background_request_map_.erase(request_id);
      background_thread_->message_loop()->PostTask(
          FROM_HERE, base::Bind(&UrlmonUrlRequest::Stop, request.get()));
    }
  }
  if (!request)
    DLOG(ERROR) << __FUNCTION__ << " no request found for " << request_id;
}
// Stops every outstanding request and shuts down the background thread.
// Idempotent: once |stopping_| is set, later calls return immediately.
void UrlmonUrlRequestManager::StopAll() {
  DVLOG(1) << __FUNCTION__;
  if (stopping_)
    return;
  stopping_ = true;
  DVLOG(1) << __FUNCTION__ << " stopping " << request_map_.size()
           << " requests";
  StopAllRequestsHelper(&request_map_, NULL);
  if (background_worker_thread_enabled_) {
    DCHECK(background_thread_.get());
    // NOTE(review): relies on base::Thread::Stop() draining the posted
    // helper task before the thread joins -- confirm base::Thread semantics.
    background_thread_->message_loop()->PostTask(
        FROM_HERE, base::Bind(&UrlmonUrlRequestManager::StopAllRequestsHelper,
                              base::Unretained(this), &background_request_map_,
                              &background_resource_map_lock_));
    background_thread_->Stop();
    background_thread_.reset();
  }
}
// Stops and clears every request in |request_map|. |request_map_lock| may be
// NULL (the main-thread map needs no lock); when non-NULL it guards
// |request_map| for the duration of the sweep.
void UrlmonUrlRequestManager::StopAllRequestsHelper(
    RequestMap* request_map,
    base::Lock* request_map_lock) {
  DCHECK(request_map);
  DVLOG(1) << __FUNCTION__ << " stopping " << request_map->size()
           << " requests";
  if (request_map_lock)
    request_map_lock->Acquire();
  for (RequestMap::iterator it = request_map->begin();
       it != request_map->end(); ++it) {
    DCHECK(it->second != NULL);
    it->second->Stop();
  }
  request_map->clear();
  if (request_map_lock)
    request_map_lock->Release();
}
// UrlmonUrlRequest delegate callback: forwards response-start data to our
// delegate. The map lookup below is debug-only sanity checking.
void UrlmonUrlRequestManager::OnResponseStarted(
    int request_id, const char* mime_type, const char* headers, int size,
    base::Time last_modified, const std::string& redirect_url,
    int redirect_status, const net::HostPortPair& socket_address,
    uint64 upload_size) {
  DCHECK_NE(request_id, -1);
  DVLOG(1) << __FUNCTION__;
#ifndef NDEBUG
  scoped_refptr<UrlmonUrlRequest> request = LookupRequest(request_id,
                                                          &request_map_);
  if (request == NULL && background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    request = LookupRequest(request_id, &background_request_map_);
  }
  DCHECK(request != NULL);
#endif  // NDEBUG
  delegate_->OnResponseStarted(
      request_id, mime_type, headers, size, last_modified, redirect_url,
      redirect_status, socket_address, upload_size);
}
// UrlmonUrlRequest delegate callback: forwards read data to our delegate.
// The map lookup below is debug-only sanity checking.
void UrlmonUrlRequestManager::OnReadComplete(int request_id,
                                             const std::string& data) {
  DCHECK_NE(request_id, -1);
  DVLOG(1) << __FUNCTION__ << " id: " << request_id;
#ifndef NDEBUG
  scoped_refptr<UrlmonUrlRequest> request = LookupRequest(request_id,
                                                          &request_map_);
  if (request == NULL && background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    request = LookupRequest(request_id, &background_request_map_);
  }
  DCHECK(request != NULL);
#endif  // NDEBUG
  delegate_->OnReadComplete(request_id, data);
  DVLOG(1) << __FUNCTION__ << " done id: " << request_id;
}
// UrlmonUrlRequest delegate callback: removes the finished request from
// whichever map owns it and forwards the final status to our delegate.
void UrlmonUrlRequestManager::OnResponseEnd(
    int request_id,
    const net::URLRequestStatus& status) {
  DCHECK_NE(request_id, -1);
  DVLOG(1) << __FUNCTION__;
  // Cancellations are handled elsewhere and must not reach this path.
  DCHECK(status.status() != net::URLRequestStatus::CANCELED);
  RequestMap::size_type erased_count = request_map_.erase(request_id);
  if (erased_count != 1u && background_worker_thread_enabled_) {
    base::AutoLock lock(background_resource_map_lock_);
    erased_count = background_request_map_.erase(request_id);
    if (erased_count != 1u) {
      DLOG(WARNING) << __FUNCTION__
                    << " Failed to find request id:"
                    << request_id;
    }
  }
  delegate_->OnResponseEnd(request_id, status);
}
// Forwards cookie-read results to the delegate. |success| is false when no
// cookie data could be read for |url| (see GetCookiesForUrl).
void UrlmonUrlRequestManager::OnCookiesRetrieved(bool success, const GURL& url,
    const std::string& cookie_string, int cookie_id) {
  DCHECK(url.is_valid());
  delegate_->OnCookiesRetrieved(success, url, cookie_string, cookie_id);
}
// Returns the request registered under |request_id| in |request_map|, or
// NULL when no such entry exists.
scoped_refptr<UrlmonUrlRequest> UrlmonUrlRequestManager::LookupRequest(
    int request_id, RequestMap* request_map) {
  RequestMap::iterator entry = request_map->find(request_id);
  if (entry == request_map->end())
    return NULL;
  return entry->second;
}
UrlmonUrlRequestManager::UrlmonUrlRequestManager()
    : stopping_(false), notification_window_(NULL),
      privileged_mode_(false),
      container_(NULL),
      background_worker_thread_enabled_(true) {
  // The thread object is created unconditionally (so |background_thread_| is
  // always valid to dereference) but only started when the config flag
  // enables background sub-resource handling.
  background_thread_.reset(new base::Thread("cf_iexplore_background_thread"));
  background_thread_->init_com_with_mta(false);
  background_worker_thread_enabled_ =
      GetConfigBool(true, kUseBackgroundThreadForSubResources);
  if (background_worker_thread_enabled_) {
    base::Thread::Options options;
    options.message_loop_type = MessageLoop::TYPE_UI;
    background_thread_->StartWithOptions(options);
  }
}
// Ensures every request is stopped and the background thread is joined
// before the manager goes away.
UrlmonUrlRequestManager::~UrlmonUrlRequestManager() {
  StopAll();
}
// Accumulates a per-URL privacy record and fires a change notification the
// first time a cookie action impacts privacy.
void UrlmonUrlRequestManager::AddPrivacyDataForUrl(
    const std::string& url, const std::string& policy_ref,
    int32 flags) {
  DCHECK(!url.empty());
  bool fire_privacy_event = false;
  // The first URL ever recorded is flagged as the top-level document.
  if (privacy_info_.privacy_records.empty())
    flags |= PRIVACY_URLISTOPLEVEL;
  if (!privacy_info_.privacy_impacted) {
    if (flags & (COOKIEACTION_ACCEPT | COOKIEACTION_REJECT |
                 COOKIEACTION_DOWNGRADE)) {
      privacy_info_.privacy_impacted = true;
      fire_privacy_event = true;
    }
  }
  // operator[] creates the entry on first sight of this URL; flags from
  // repeated visits are OR-ed together.
  PrivacyInfo::PrivacyEntry& privacy_entry =
      privacy_info_.privacy_records[UTF8ToWide(url)];
  privacy_entry.flags |= flags;
  privacy_entry.policy_ref = UTF8ToWide(policy_ref);
  if (fire_privacy_event && IsWindow(notification_window_)) {
    PostMessage(notification_window_, WM_FIRE_PRIVACY_CHANGE_NOTIFICATION, 1,
                0);
  }
}
| timopulkkinen/BubbleFish | chrome_frame/urlmon_url_request.cc | C++ | bsd-3-clause | 51,845 |
<?php namespace Exchange\EWSType; use Exchange\EWSType;
/**
* Contains EWSType_DeleteAttachmentResponseMessageType.
*/
/**
* Represents the status and result of a single DeleteAttachment operation
* request.
*
* @package php-ews\Types
*
* @todo Extends EWSType_ResponseMessageType.
*/
// Pure data holder populated from the SOAP response; ResponseClass and
// ResponseCode carry the outcome, RootItemId identifies the parent item of
// the deleted attachment.
class EWSType_DeleteAttachmentResponseMessageType extends EWSType
{
    /**
     * Currently unused and reserved for future use.
     *
     * This element contains a value of 0.
     *
     * @since Exchange 2007
     *
     * @var integer
     */
    public $DescriptiveLinkKey;
    /**
     * Provides a text description of the status of the response.
     *
     * @since Exchange 2007
     *
     * @var string
     */
    public $MessageText;
    /**
     * Provides additional error response information.
     *
     * Only present when an error occurred.
     *
     * @since Exchange 2007
     *
     * @var string
     *
     * @todo Determine if we can use SimpleXML or DOMDocument here.
     */
    public $MessageXml;
    /**
     * Describes the status of the response.
     *
     * @since Exchange 2007
     *
     * @var EWSType_ResponseClassType
     */
    public $ResponseClass;
    /**
     * Provides an error code that identifies the specific error that the
     * request encountered.
     *
     * @since Exchange 2007
     *
     * @var EWSType_ResponseCodeType
     */
    public $ResponseCode;
    /**
     * Identifies the parent item of a deleted attachment.
     *
     * @since Exchange 2007
     *
     * @var EWSType_RootItemIdType
     */
    public $RootItemId;
}
| segpacto/yii2-exchange | EWSType/EWSType_DeleteAttachmentResponseMessageType.php | PHP | bsd-3-clause | 1,549 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview Polymer element for displaying a summary of network states
* by type: Ethernet, WiFi, Cellular, and VPN.
*/
(function() {
const mojom = chromeos.networkConfig.mojom;
Polymer({
is: 'network-summary',
behaviors: [
NetworkListenerBehavior,
],
  properties: {
    /**
     * Highest priority connected network or null. Set here to update
     * internet-page which updates internet-subpage and internet-detail-page.
     * @type {?OncMojo.NetworkStateProperties}
     */
    defaultNetwork: {
      type: Object,
      value: null,
      // notify: true propagates changes upward to the embedding page.
      notify: true,
    },
    /**
     * The device state for each network device type. We initialize this to
     * include a disabled WiFi type since WiFi is always present. This reduces
     * the amount of visual change on first load.
     * Keyed by mojom.NetworkType values.
     * @private {!Object<!OncMojo.DeviceStateProperties>}
     */
    deviceStates: {
      type: Object,
      value() {
        const result = {};
        result[chromeos.networkConfig.mojom.NetworkType.kWiFi] = {
          deviceState: chromeos.networkConfig.mojom.DeviceStateType.kDisabled,
          type: chromeos.networkConfig.mojom.NetworkType.kWiFi,
        };
        return result;
      },
      notify: true,
    },
    /**
     * Array of active network states, one per device type. Initialized to
     * include a default WiFi state (see deviceStates comment).
     * @private {!Array<!OncMojo.NetworkStateProperties>}
     */
    activeNetworkStates_: {
      type: Array,
      value() {
        return [OncMojo.getDefaultNetworkState(mojom.NetworkType.kWiFi)];
      },
    },
    /**
     * List of network state data for each network type.
     * Keyed by mojom.NetworkType values.
     * @private {!Object<!Array<!OncMojo.NetworkStateProperties>>}
     */
    networkStateLists_: {
      type: Object,
      value() {
        const result = {};
        result[mojom.NetworkType.kWiFi] = [];
        return result;
      },
    },
  },
  /** @private {?chromeos.networkConfig.mojom.CrosNetworkConfigRemote} */
  networkConfig_: null,
  /**
   * Set of GUIDs identifying active networks, one for each type.
   * Null until the first network list has been processed.
   * @private {?Set<string>}
   */
  activeNetworkIds_: null,
  /** @override */
  created() {
    // Acquire the CrosNetworkConfig mojo remote once; all queries use it.
    this.networkConfig_ = network_config.MojoInterfaceProviderImpl.getInstance()
                              .getMojoServiceRemote();
  },
  /** @override */
  attached() {
    // Fetch initial device and network state when entering the DOM.
    this.getNetworkLists_();
  },
  /**
   * CrosNetworkConfigObserver impl
   * Updates any matching existing active networks. Note: newly active networks
   * will trigger onNetworkStateListChanged which triggers getNetworkLists_.
   * @param {!Array<OncMojo.NetworkStateProperties>} networks
   */
  onActiveNetworksChanged(networks) {
    if (!this.activeNetworkIds_) {
      // Initial list of networks not received yet.
      return;
    }
    networks.forEach(network => {
      const index = this.activeNetworkStates_.findIndex(
          state => state.guid == network.guid);
      if (index != -1) {
        // Use Polymer's path-based set() so data bindings observe the change.
        this.set(['activeNetworkStates_', index], network);
      }
    });
  },
  /** CrosNetworkConfigObserver impl */
  onNetworkStateListChanged() {
    // Network membership changed; rebuild all lists from scratch.
    this.getNetworkLists_();
  },
  /** CrosNetworkConfigObserver impl */
  onDeviceStateListChanged() {
    // Device enable/disable state changed; rebuild all lists from scratch.
    this.getNetworkLists_();
  },
  /**
   * Requests the list of device states and network states from Chrome.
   * Updates deviceStates, activeNetworkStates, and networkStateLists once the
   * results are returned from Chrome.
   * @private
   */
  getNetworkLists_() {
    // First get the device states.
    this.networkConfig_.getDeviceStateList().then(response => {
      // Second get the network states.
      this.getNetworkStates_(response.result);
    });
  },
  /**
   * Requests the list of network states from Chrome. Updates
   * activeNetworkStates and networkStateLists once the results are returned
   * from Chrome.
   * @param {!Array<!OncMojo.DeviceStateProperties>} deviceStateList
   * @private
   */
  getNetworkStates_(deviceStateList) {
    // Query all visible networks of every type, with no result limit.
    const filter = {
      filter: chromeos.networkConfig.mojom.FilterType.kVisible,
      limit: chromeos.networkConfig.mojom.NO_LIMIT,
      networkType: mojom.NetworkType.kAll,
    };
    this.networkConfig_.getNetworkStateList(filter).then(response => {
      this.updateNetworkStates_(response.result, deviceStateList);
    });
  },
  /**
   * Called after network states are received from getNetworks.
   * Rebuilds deviceStates, networkStateLists_, activeNetworkStates_ and
   * defaultNetwork from scratch.
   * @param {!Array<!OncMojo.NetworkStateProperties>} networkStates The state
   * properties for all visible networks.
   * @param {!Array<!OncMojo.DeviceStateProperties>} deviceStateList
   * @private
   */
  updateNetworkStates_(networkStates, deviceStateList) {
    const newDeviceStates = {};
    for (const device of deviceStateList) {
      newDeviceStates[device.type] = device;
    }
    const orderedNetworkTypes = [
      mojom.NetworkType.kEthernet,
      mojom.NetworkType.kWiFi,
      mojom.NetworkType.kCellular,
      mojom.NetworkType.kTether,
      mojom.NetworkType.kVPN,
    ];
    // Clear any current networks.
    const activeNetworkStatesByType =
        /** @type {!Map<mojom.NetworkType, !OncMojo.NetworkStateProperties>} */
        (new Map);
    // Complete list of states by type.
    const newNetworkStateLists = {};
    for (const type of orderedNetworkTypes) {
      newNetworkStateLists[type] = [];
    }
    // NOTE(review): assumes |networkStates| is priority-ordered so the first
    // entry per type is that type's active network -- confirm with the
    // CrosNetworkConfig API.
    let firstConnectedNetwork = null;
    networkStates.forEach(function(networkState) {
      const type = networkState.type;
      if (!activeNetworkStatesByType.has(type)) {
        activeNetworkStatesByType.set(type, networkState);
        if (!firstConnectedNetwork &&
            networkState.type != mojom.NetworkType.kVPN &&
            OncMojo.connectionStateIsConnected(networkState.connectionState)) {
          firstConnectedNetwork = networkState;
        }
      }
      newNetworkStateLists[type].push(networkState);
    }, this);
    this.defaultNetwork = firstConnectedNetwork;
    // Create a VPN entry in deviceStates if there are any VPN networks.
    if (newNetworkStateLists[mojom.NetworkType.kVPN].length > 0) {
      newDeviceStates[mojom.NetworkType.kVPN] = {
        type: mojom.NetworkType.kVPN,
        deviceState: chromeos.networkConfig.mojom.DeviceStateType.kEnabled,
      };
    }
    // Push the active networks onto newActiveNetworkStates in order based on
    // device priority, creating an empty state for devices with no networks.
    const newActiveNetworkStates = [];
    this.activeNetworkIds_ = new Set;
    for (const type of orderedNetworkTypes) {
      const device = newDeviceStates[type];
      if (!device) {
        continue;  // The technology for this device type is unavailable.
      }
      // If both 'Tether' and 'Cellular' technologies exist, merge the network
      // lists and do not add an active network for 'Tether' so that there is
      // only one 'Mobile data' section / subpage.
      if (type == mojom.NetworkType.kTether &&
          newDeviceStates[mojom.NetworkType.kCellular]) {
        newNetworkStateLists[mojom.NetworkType.kCellular] =
            newNetworkStateLists[mojom.NetworkType.kCellular].concat(
                newNetworkStateLists[mojom.NetworkType.kTether]);
        continue;
      }
      // Note: The active state for 'Cellular' may be a Tether network if both
      // types are enabled but no Cellular network exists (edge case).
      const networkState =
          this.getActiveStateForType_(activeNetworkStatesByType, type);
      if (networkState.source == mojom.OncSource.kNone &&
          device.deviceState == mojom.DeviceStateType.kProhibited) {
        // Prohibited technologies are enforced by the device policy.
        networkState.source =
            chromeos.networkConfig.mojom.OncSource.kDevicePolicy;
      }
      newActiveNetworkStates.push(networkState);
      this.activeNetworkIds_.add(networkState.guid);
    }
    this.deviceStates = newDeviceStates;
    this.networkStateLists_ = newNetworkStateLists;
    // Set activeNetworkStates last to rebuild the dom-repeat.
    this.activeNetworkStates_ = newActiveNetworkStates;
  },
  /**
   * Returns the active network state for |type| or a default network state.
   * If there is no 'Cellular' network, return the active 'Tether' network if
   * any since the two types are represented by the same section / subpage.
   * @param {!Map<mojom.NetworkType, !OncMojo.NetworkStateProperties>}
   *     activeStatesByType
   * @param {!mojom.NetworkType} type
   * @return {!OncMojo.NetworkStateProperties|undefined}
   * @private
   */
  getActiveStateForType_(activeStatesByType, type) {
    let activeState = activeStatesByType.get(type);
    // Cellular falls back to Tether for the combined 'Mobile data' row.
    if (!activeState && type == mojom.NetworkType.kCellular) {
      activeState = activeStatesByType.get(mojom.NetworkType.kTether);
    }
    return activeState || OncMojo.getDefaultNetworkState(type);
  },
  /**
   * Provides an id string for summary items (e.g. 'WiFi'). Used in tests.
   * @param {!OncMojo.NetworkStateProperties} network
   * @return {string}
   * @private
   */
  getTypeString_(network) {
    return OncMojo.getNetworkTypeString(network.type);
  },
/**
* @param {!Object<!OncMojo.DeviceStateProperties>} deviceStates
* @return {!OncMojo.DeviceStateProperties|undefined}
* @private
*/
getTetherDeviceState_(deviceStates) {
return this.deviceStates[mojom.NetworkType.kTether];
},
});
})();
| endlessm/chromium-browser | chrome/browser/resources/settings/chromeos/internet_page/network_summary.js | JavaScript | bsd-3-clause | 9,561 |
#!/usr/bin/env python
"""Demo of amino rotation types: quaternions, rotation matrices, transforms.

Uses print() function syntax so the script runs under both Python 2 and 3.
"""
from amino import *

# Compose two principal-axis rotations as quaternions.
print(Quat(XAngle(3.14)) * Quat(YAngle(3.14)))

# Other constructions the library supports:
# print(Quat((1, 2, 3, 4)))
# print(Quat(RotMat(YAngle(3.14))))
# print(Quat(RotMat(RotMat(YAngle(3.14)))))
# print(TfMat((XAngle(3.14), (0, 0, 1))).translation())
# print(aa.RotMat(aa.YAngle(3.14)).cx)
# print(aa.RotMat(aa.YAngle(3.14)).cy)
# print(aa.RotMat(aa.YAngle(3.14)).cz)
# print(aa.RotMat(1).cx)
# print(aa.RotMat(1).cy)
# print(aa.RotMat(1).cz)
print("end")
| golems/amino | demo/cpython/tf.py | Python | bsd-3-clause | 453 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/common/api/sockets/sockets_manifest_permission.h"
#include <memory>
#include "base/stl_util.h"
#include "base/strings/string_split.h"
#include "base/strings/utf_string_conversions.h"
#include "base/values.h"
#include "extensions/common/api/extensions_manifest_types.h"
#include "extensions/common/api/sockets/sockets_manifest_data.h"
#include "extensions/common/error_utils.h"
#include "extensions/common/manifest_constants.h"
#include "extensions/strings/grit/extensions_strings.h"
#include "ipc/ipc_message.h"
#include "ui/base/l10n/l10n_util.h"
namespace extensions {
namespace sockets_errors {
// Error template; ErrorUtils::FormatErrorMessageUTF16 replaces '*' with the
// offending host:port pattern.
const char kErrorInvalidHostPattern[] = "Invalid host:port pattern '*'";
}
using api::extensions_manifest_types::Sockets;
using api::extensions_manifest_types::SocketHostPatterns;
using content::SocketPermissionRequest;
namespace {
// Parses one host:port pattern for |operation_type| and registers it on
// |permission|. On failure sets |error| and returns false.
static bool ParseHostPattern(
    SocketsManifestPermission* permission,
    content::SocketPermissionRequest::OperationType operation_type,
    const std::string& host_pattern,
    base::string16* error) {
  SocketPermissionEntry entry;
  if (!SocketPermissionEntry::ParseHostPattern(
          operation_type, host_pattern, &entry)) {
    *error = ErrorUtils::FormatErrorMessageUTF16(
        sockets_errors::kErrorInvalidHostPattern, host_pattern);
    return false;
  }
  permission->AddPermission(entry);
  return true;
}
// Parses every host:port pattern in |host_patterns| (which holds either a
// single string or a list of strings) for |operation_type|, registering each
// on |permission|. Stops at the first invalid pattern, setting |error| and
// returning false. A null |host_patterns| is valid and means "no patterns".
static bool ParseHostPatterns(
    SocketsManifestPermission* permission,
    content::SocketPermissionRequest::OperationType operation_type,
    const std::unique_ptr<SocketHostPatterns>& host_patterns,
    base::string16* error) {
  if (!host_patterns)
    return true;
  if (host_patterns->as_string) {
    return ParseHostPattern(
        permission, operation_type, *host_patterns->as_string, error);
  }
  CHECK(host_patterns->as_strings);
  // Range-for matches the style used elsewhere in this file and drops the
  // iterator boilerplate.
  for (const std::string& pattern : *host_patterns->as_strings) {
    if (!ParseHostPattern(permission, operation_type, pattern, error))
      return false;
  }
  return true;
}
// Serializes every entry of |permission| whose operation matches
// |operation_type| into |host_patterns| as a list of pattern strings.
static void SetHostPatterns(
    std::unique_ptr<SocketHostPatterns>& host_patterns,
    const SocketsManifestPermission* permission,
    content::SocketPermissionRequest::OperationType operation_type) {
  host_patterns.reset(new SocketHostPatterns());
  host_patterns->as_strings.reset(new std::vector<std::string>());
  for (const auto& entry : permission->entries()) {
    if (entry.pattern().type == operation_type)
      host_patterns->as_strings->push_back(entry.GetHostPatternAsString());
  }
}
// Helper function for adding the 'any host' permission. Determines if the
// message is needed from |sockets|, and adds the permission to |ids|.
// Returns true if it added the message.
bool AddAnyHostMessage(const SocketPermissionEntrySet& sockets,
                       PermissionIDSet* ids) {
  for (const auto& socket : sockets) {
    // One address-bound any-host entry is enough; stop at the first match.
    if (socket.IsAddressBoundType() &&
        socket.GetHostType() == SocketPermissionEntry::ANY_HOST) {
      ids->insert(APIPermission::kSocketAnyHost);
      return true;
    }
  }
  return false;
}
// Helper function for adding subdomain socket permissions. Determines what
// messages are needed from |sockets|, and adds one permission per distinct
// domain to |ids|.
void AddSubdomainHostMessage(const SocketPermissionEntrySet& sockets,
                             PermissionIDSet* ids) {
  // De-duplicate the domains first so each yields a single permission.
  std::set<base::string16> domains;
  for (const auto& socket : sockets) {
    if (socket.GetHostType() == SocketPermissionEntry::HOSTS_IN_DOMAINS)
      domains.insert(base::UTF8ToUTF16(socket.pattern().host));
  }
  // No emptiness guard needed: iterating an empty set is a no-op.
  for (const auto& domain : domains)
    ids->insert(APIPermission::kSocketDomainHosts, domain);
}
// Helper function for adding specific host socket permissions. Determines
// what messages are needed from |sockets|, and adds one permission per
// distinct hostname to |ids|.
void AddSpecificHostMessage(const SocketPermissionEntrySet& sockets,
                            PermissionIDSet* ids) {
  // De-duplicate the hostnames first so each yields a single permission.
  std::set<base::string16> hostnames;
  for (const auto& socket : sockets) {
    if (socket.GetHostType() == SocketPermissionEntry::SPECIFIC_HOSTS)
      hostnames.insert(base::UTF8ToUTF16(socket.pattern().host));
  }
  // No emptiness guard needed: iterating an empty set is a no-op.
  for (const auto& hostname : hostnames)
    ids->insert(APIPermission::kSocketSpecificHosts, hostname);
}
// Helper function for adding the network list socket permission. Determines if
// the message is needed from |sockets|, and adds the permission to |ids|.
void AddNetworkListMessage(const SocketPermissionEntrySet& sockets,
                           PermissionIDSet* ids) {
  for (const auto& socket : sockets) {
    // Repeated inserts are presumably de-duplicated by PermissionIDSet --
    // confirm; otherwise an early break would be warranted here.
    if (socket.pattern().type == SocketPermissionRequest::NETWORK_STATE) {
      ids->insert(APIPermission::kNetworkState);
    }
  }
}
} // namespace
// A default-constructed permission carries no socket entries.
SocketsManifestPermission::SocketsManifestPermission() {}
SocketsManifestPermission::~SocketsManifestPermission() {}
// Builds a SocketsManifestPermission from the manifest's "sockets" value.
// Returns null (with |error| set) when the value does not match the schema
// or any host:port pattern fails to parse.
// static
std::unique_ptr<SocketsManifestPermission> SocketsManifestPermission::FromValue(
    const base::Value& value,
    base::string16* error) {
  std::unique_ptr<Sockets> sockets = Sockets::FromValue(value, error);
  if (!sockets)
    return std::unique_ptr<SocketsManifestPermission>();
  std::unique_ptr<SocketsManifestPermission> result(
      new SocketsManifestPermission());
  // Each section ("udp", "tcp", "tcpServer") is optional; parse whichever
  // operation lists it provides, failing fast on the first bad pattern.
  if (sockets->udp) {
    if (!ParseHostPatterns(result.get(),
                           SocketPermissionRequest::UDP_BIND,
                           sockets->udp->bind,
                           error)) {
      return std::unique_ptr<SocketsManifestPermission>();
    }
    if (!ParseHostPatterns(result.get(),
                           SocketPermissionRequest::UDP_SEND_TO,
                           sockets->udp->send,
                           error)) {
      return std::unique_ptr<SocketsManifestPermission>();
    }
    if (!ParseHostPatterns(result.get(),
                           SocketPermissionRequest::UDP_MULTICAST_MEMBERSHIP,
                           sockets->udp->multicast_membership,
                           error)) {
      return std::unique_ptr<SocketsManifestPermission>();
    }
  }
  if (sockets->tcp) {
    if (!ParseHostPatterns(result.get(),
                           SocketPermissionRequest::TCP_CONNECT,
                           sockets->tcp->connect,
                           error)) {
      return std::unique_ptr<SocketsManifestPermission>();
    }
  }
  if (sockets->tcp_server) {
    if (!ParseHostPatterns(result.get(),
                           SocketPermissionRequest::TCP_LISTEN,
                           sockets->tcp_server->listen,
                           error)) {
      return std::unique_ptr<SocketsManifestPermission>();
    }
  }
  return result;
}
// Returns true when any registered entry matches |request|. |extension| is
// unused in this check.
bool SocketsManifestPermission::CheckRequest(
    const Extension* extension,
    const SocketPermissionRequest& request) const {
  for (const auto& entry : permissions_) {
    if (entry.Check(request))
      return true;
  }
  return false;
}
// Returns the manifest key ("sockets") this permission is stored under.
std::string SocketsManifestPermission::name() const {
  return manifest_keys::kSockets;
}
// The id of this permission is identical to its name.
std::string SocketsManifestPermission::id() const { return name(); }
// Returns the permission-message IDs implied by the registered entries.
PermissionIDSet SocketsManifestPermission::GetPermissions() const {
  PermissionIDSet ids;
  AddSocketHostPermissions(permissions_, &ids);
  return ids;
}
// ManifestPermission override: parses |value| and, on success, replaces this
// object's entries. Returns false when |value| is null or fails to parse;
// the existing entries are left untouched in that case.
bool SocketsManifestPermission::FromValue(const base::Value* value) {
  if (!value)
    return false;
  base::string16 error;
  std::unique_ptr<SocketsManifestPermission> manifest_permission(
      SocketsManifestPermission::FromValue(*value, &error));
  if (!manifest_permission)
    return false;
  permissions_ = manifest_permission->permissions_;
  return true;
}
// ManifestPermission override: serializes the entries back into the
// manifest's "sockets" dictionary shape. Sections that end up with no host
// patterns are dropped so the emitted value stays minimal.
std::unique_ptr<base::Value> SocketsManifestPermission::ToValue() const {
  Sockets sockets;
  sockets.udp.reset(new Sockets::Udp());
  SetHostPatterns(sockets.udp->bind, this, SocketPermissionRequest::UDP_BIND);
  SetHostPatterns(
      sockets.udp->send, this, SocketPermissionRequest::UDP_SEND_TO);
  SetHostPatterns(sockets.udp->multicast_membership,
                  this,
                  SocketPermissionRequest::UDP_MULTICAST_MEMBERSHIP);
  if (sockets.udp->bind->as_strings->size() == 0 &&
      sockets.udp->send->as_strings->size() == 0 &&
      sockets.udp->multicast_membership->as_strings->size() == 0) {
    sockets.udp.reset(NULL);
  }
  sockets.tcp.reset(new Sockets::Tcp());
  SetHostPatterns(
      sockets.tcp->connect, this, SocketPermissionRequest::TCP_CONNECT);
  if (sockets.tcp->connect->as_strings->size() == 0) {
    sockets.tcp.reset(NULL);
  }
  sockets.tcp_server.reset(new Sockets::TcpServer());
  SetHostPatterns(
      sockets.tcp_server->listen, this, SocketPermissionRequest::TCP_LISTEN);
  if (sockets.tcp_server->listen->as_strings->size() == 0) {
    sockets.tcp_server.reset(NULL);
  }
  return std::unique_ptr<base::Value>(sockets.ToValue().release());
}
// Set-algebra overrides: each returns a new permission whose entries are the
// difference / union / intersection of this object's entries and |rhs|'s.
// |rhs| is assumed to be a SocketsManifestPermission (static_cast).
std::unique_ptr<ManifestPermission> SocketsManifestPermission::Diff(
    const ManifestPermission* rhs) const {
  const SocketsManifestPermission* other =
      static_cast<const SocketsManifestPermission*>(rhs);
  auto result = std::make_unique<SocketsManifestPermission>();
  result->permissions_ = base::STLSetDifference<SocketPermissionEntrySet>(
      permissions_, other->permissions_);
  return result;
}
std::unique_ptr<ManifestPermission> SocketsManifestPermission::Union(
    const ManifestPermission* rhs) const {
  const SocketsManifestPermission* other =
      static_cast<const SocketsManifestPermission*>(rhs);
  auto result = std::make_unique<SocketsManifestPermission>();
  result->permissions_ = base::STLSetUnion<SocketPermissionEntrySet>(
      permissions_, other->permissions_);
  return result;
}
std::unique_ptr<ManifestPermission> SocketsManifestPermission::Intersect(
    const ManifestPermission* rhs) const {
  const SocketsManifestPermission* other =
      static_cast<const SocketsManifestPermission*>(rhs);
  auto result = std::make_unique<SocketsManifestPermission>();
  result->permissions_ = base::STLSetIntersection<SocketPermissionEntrySet>(
      permissions_, other->permissions_);
  return result;
}
// Registers a parsed host-pattern entry; duplicates collapse in the set.
void SocketsManifestPermission::AddPermission(
    const SocketPermissionEntry& entry) {
  permissions_.insert(entry);
}
// static
void SocketsManifestPermission::AddSocketHostPermissions(
    const SocketPermissionEntrySet& sockets,
    PermissionIDSet* ids) {
  // The "any host" message subsumes the per-host messages, so specific and
  // subdomain messages are only added when it is absent.
  if (!AddAnyHostMessage(sockets, ids)) {
    AddSpecificHostMessage(sockets, ids);
    AddSubdomainHostMessage(sockets, ids);
  }
  AddNetworkListMessage(sockets, ids);
}
// No additional management UI warning is required for socket permissions.
bool SocketsManifestPermission::RequiresManagementUIWarning() const {
  return false;
}
// Likewise, no managed-session full-login warning is required.
bool SocketsManifestPermission::RequiresManagedSessionFullLoginWarning() const {
  return false;
}
} // namespace extensions
| endlessm/chromium-browser | extensions/common/api/sockets/sockets_manifest_permission.cc | C++ | bsd-3-clause | 11,080 |
/*
Copyright (c) 2011-2012, Brandon Aubie
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "stdafx.h"
#include "uiFileDetailsTreeView.h"
// Builds the file-details list view: creates the ListStore model, adds the
// visible columns, enables multi-row selection, and wires up the right-click
// context menu.
uiFileDetailsTreeView::uiFileDetailsTreeView(sqlite3 **db, Gtk::Window *parent)
{
    m_parent = parent;
    this->db = db;
    mrp_ListStore = Gtk::ListStore::create(m_Columns);
    this->linkModel();
    this->append_column("", m_Columns.m_col_props);
    this->append_column("AnimalID", m_Columns.m_col_animalID);
    this->append_column("CellID", m_Columns.m_col_cellID);
    this->append_column("#", m_Columns.m_col_filenum);
    this->append_column("X-Var", m_Columns.m_col_xaxis);
    this->append_column("Type", m_Columns.m_col_type);
    this->append_column("Trials", m_Columns.m_col_trials);
    this->append_column("CarFreq (Hz)", m_Columns.m_col_freq);
    this->append_column("FreqDev (Hz)", m_Columns.m_col_freqdev);
    this->append_column("Dur (ms)", m_Columns.m_col_dur);
    this->append_column("Onset (ms)", m_Columns.m_col_onset);
    this->append_column("Atten (db)", m_Columns.m_col_atten);
    mrp_Selection = this->get_selection();
    mrp_Selection->set_mode(Gtk::SELECTION_MULTIPLE);
    this->set_rules_hint(false);
    // Setup right click handling
    this->signal_button_press_event().connect_notify(
        sigc::mem_fun(*this, &uiFileDetailsTreeView::on_file_details_button_press_event)
    );
    //Fill menu
    {
        Gtk::Menu::MenuList& menulist = m_Menu_FileDetails.items();
        menulist.push_back( Gtk::Menu_Helpers::MenuElem("_View Details",
            sigc::mem_fun(*this, &uiFileDetailsTreeView::on_view_file_details)));
    }
    // No rows yet; newrow() uses this flag to choose append vs insert_after.
    hasRows = false;
}
uiFileDetailsTreeView::~uiFileDetailsTreeView() {}
// Exposes the (multi-select) selection object for callers to inspect.
Glib::RefPtr<Gtk::TreeSelection> uiFileDetailsTreeView::treeSelection()
{
    return mrp_Selection;
}
// Attaches the backing ListStore to this view (clear() detaches it).
void uiFileDetailsTreeView::linkModel()
{
    this->set_model(mrp_ListStore);
}
// Empties the ListStore, detaches it from the view, and resets insertion
// state so the next newrow() starts a fresh list.
void uiFileDetailsTreeView::clear()
{
    mrp_ListStore->clear();
    this->unset_model();
    hasRows = false;
}
// Adds a row and returns it. The first row is appended; later rows are
// inserted after the previously inserted one -- presumably cheaper than
// append() on large stores (TODO confirm).
Gtk::TreeModel::Row uiFileDetailsTreeView::newrow()
{
    if (!hasRows) {
        m_lastInsertedRow = mrp_ListStore->append();
        hasRows = true;
    } else {
        m_lastInsertedRow = mrp_ListStore->insert_after(m_lastInsertedRow);
    }
    return *m_lastInsertedRow;
}
// Column accessors: read the animal / cell / file identifiers out of the row
// referenced by |iter|.
Glib::ustring uiFileDetailsTreeView::animalID(const Gtk::TreeModel::iterator& iter)
{
    Gtk::TreeModel::Row row = *iter;
    return row.get_value(m_Columns.m_col_animalID);
}
int uiFileDetailsTreeView::cellID(const Gtk::TreeModel::iterator& iter)
{
    Gtk::TreeModel::Row row = *iter;
    return row.get_value(m_Columns.m_col_cellID);
}
int uiFileDetailsTreeView::fileID(const Gtk::TreeModel::iterator& iter)
{
    Gtk::TreeModel::Row row = *iter;
    return row.get_value(m_Columns.m_col_filenum);
}
// Right-click handler: notify listeners the context menu is about to show,
// adjust item sensitivity for the current selection, then pop up the menu.
void uiFileDetailsTreeView::on_file_details_button_press_event(GdkEventButton* event)
{
    const bool right_click =
        (event->type == GDK_BUTTON_PRESS) && (event->button == 3);
    if (!right_click)
        return;

    m_signal_menu_will_show.emit();

    // "View Details" (item 0) only makes sense for a single selected row.
    Gtk::Menu::MenuList& menulist = m_Menu_FileDetails.items();
    menulist[0].set_sensitive(mrp_Selection->count_selected_rows() <= 1);

    m_Menu_FileDetails.popup(event->button, event->time);
}
// Show a modal "File Details" dialog for the row at @iter: recording time,
// a "hide file" checkbox and the file's tag list, all loaded from sqlite.
// Emits m_signal_file_set_hidden when the user saves a changed hidden flag.
void uiFileDetailsTreeView::show_file_details(const Gtk::TreeModel::iterator& iter)
{
    Gtk::TreeModel::Row row = *iter;
    // Fetch the raw spike-file header blob for this (animal, cell, file) key.
    sqlite3_stmt *stmt = 0;
    const char query[] = "SELECT header FROM files WHERE animalID=? AND cellID=? AND fileID=?";
    // NOTE(review): sqlite3_prepare_v2's return code is not checked; a
    // prepare failure only surfaces below as a non-SQLITE_ROW step result.
    sqlite3_prepare_v2(*db, query, strlen(query), &stmt, NULL);
    sqlite3_bind_text(stmt, 1, row.get_value(m_Columns.m_col_animalID).c_str(), -1, SQLITE_TRANSIENT);
    sqlite3_bind_int(stmt, 2, row.get_value(m_Columns.m_col_cellID));
    sqlite3_bind_int(stmt, 3, row.get_value(m_Columns.m_col_filenum));
    int r = sqlite3_step(stmt);
    if (r == SQLITE_ROW) {
        SpikeData sd;
        // NOTE(review): the blob is assumed non-NULL and a valid header for
        // SpikeData::setHeader -- confirm against the writer of this column.
        void *header = (void*)sqlite3_column_blob(stmt, 0);
        sd.setHeader(header);
        // Modal dialog with Cancel/Save buttons.
        Gtk::Dialog dialog("File Details", true);
        dialog.set_transient_for(*m_parent);
        dialog.set_resizable(false);
        dialog.add_button(Gtk::Stock::CANCEL, Gtk::RESPONSE_CANCEL);
        dialog.add_button(Gtk::Stock::SAVE, Gtk::RESPONSE_OK);
        // Recording time row.
        Gtk::HBox hbTime;
        Gtk::Label lblTimeName("Recording Time: ");
        Gtk::Label lblTime(sd.iso8601(sd.m_head.cDateTime));
        hbTime.pack_start(lblTimeName);
        hbTime.pack_start(lblTime);
        dialog.get_vbox()->pack_start(hbTime);
        // Hidden-file checkbox, pre-set from the row's current state.
        Gtk::CheckButton cbHidden("Hide file in file list");
        cbHidden.set_active(row.get_value(m_Columns.m_col_hidden));
        dialog.get_vbox()->pack_start(cbHidden);
        // Load the tags attached to this file.
        sqlite3_stmt *stmt2 = 0;
        const char query2[] = "SELECT tag FROM tags WHERE animalID=? AND cellID=? AND fileID=?";
        sqlite3_prepare_v2(*db, query2, -1, &stmt2, 0);
        sqlite3_bind_text(stmt2, 1, row.get_value(m_Columns.m_col_animalID).c_str(), -1, SQLITE_TRANSIENT);
        sqlite3_bind_int(stmt2, 2, row.get_value(m_Columns.m_col_cellID));
        sqlite3_bind_int(stmt2, 3, row.get_value(m_Columns.m_col_filenum));
        std::vector<Glib::ustring> tags;
        while (sqlite3_step(stmt2) == SQLITE_ROW) {
            Glib::ustring t = (char*)sqlite3_column_text(stmt2, 0);
            tags.push_back(t);
        }
        sqlite3_finalize(stmt2);
        // Editable tag widget inside its own frame.
        Gtk::Frame frameTags("File Tags");
        uiTags tagsFile(m_parent);
        tagsFile.tags(tags);
        tagsFile.delete_assist = true;
        frameTags.add(tagsFile);
        dialog.get_vbox()->pack_start(frameTags);
        // Forward tag add/delete events from the widget to this view's signals.
        tagsFile.signal_deleted().connect(sigc::mem_fun(*this, &uiFileDetailsTreeView::on_tag_deleted));
        tagsFile.signal_added().connect(sigc::mem_fun(*this, &uiFileDetailsTreeView::on_tag_added));
        dialog.show_all_children();
        int result = dialog.run();
        switch (result) {
            case (Gtk::RESPONSE_OK):
                // Only emit when the hidden flag actually changed.
                if (row.get_value(m_Columns.m_col_hidden) != cbHidden.get_active())
                    m_signal_file_set_hidden.emit(cbHidden.get_active());
                break;
        }
    } else {
        Gtk::MessageDialog dialog(*m_parent, "Error loading file from database.", false, Gtk::MESSAGE_ERROR, Gtk::BUTTONS_OK);
        dialog.set_secondary_text(sqlite3_errmsg(*db));
        dialog.run();
    }
    sqlite3_finalize(stmt);
}
void uiFileDetailsTreeView::on_view_file_details()
{
std::vector<Gtk::TreeModel::Path> sel = mrp_Selection->get_selected_rows();
if (sel.size() == 1)
{
show_file_details(mrp_ListStore->get_iter(sel[0]));
}
}
// Relay a tag deletion from the uiTags widget to this view's public signal.
void uiFileDetailsTreeView::on_tag_deleted(Glib::ustring tag)
{
    m_signal_tag_deleted.emit(tag);
}
// Relay a tag addition; the boolean result from the outer handler tells the
// uiTags widget whether the add was accepted.
bool uiFileDetailsTreeView::on_tag_added(Glib::ustring tag)
{
    return m_signal_tag_added.emit(tag);
}
// Signal accessor: emitted when the user toggles a file's hidden flag.
uiFileDetailsTreeView::type_signal_file_set_hidden uiFileDetailsTreeView::signal_file_set_hidden()
{
    return m_signal_file_set_hidden;
}
// Signal accessor: emitted when a tag is added via the details dialog.
uiFileDetailsTreeView::type_signal_tag_added uiFileDetailsTreeView::signal_tag_added()
{
    return m_signal_tag_added;
}
// Signal accessor: emitted when a tag is deleted via the details dialog.
uiFileDetailsTreeView::type_signal_tag_deleted uiFileDetailsTreeView::signal_tag_deleted()
{
    return m_signal_tag_deleted;
}
// Signal accessor: emitted just before the right-click context menu pops up.
uiFileDetailsTreeView::type_signal_menu_will_show uiFileDetailsTreeView::signal_menu_will_show()
{
    return m_signal_menu_will_show;
}
| baubie/SpikeDB | src/uiFileDetailsTreeView.cpp | C++ | bsd-3-clause | 8,226 |
# Generated test harness: train an AdaBoostClassifier on the FourClass_10
# dataset and verify the generated SQL against a sqlite backend.
from sklearn2sql_heroku.tests.classification import generic as class_gen

class_gen.test_model("AdaBoostClassifier" , "FourClass_10" , "sqlite")
| antoinecarme/sklearn2sql_heroku | tests/classification/FourClass_10/ws_FourClass_10_AdaBoostClassifier_sqlite_code_gen.py | Python | bsd-3-clause | 146 |
/**
* $LastChangedRevision$
* $HeadURL$
* $LastChangedDate$
* $LastChangedBy$
*/
package net.sf.cafemocha.validation.string;
import java.util.regex.Pattern;
import net.sf.cafemocha.validation.Constraint;
import org.apache.commons.lang.Validate;
/**
* @author computerguy5
*
*/
public class NoLowercaseConstraint implements Constraint<String> {
private static final NoLowercaseConstraint NO_LOWERCASE = new NoLowercaseConstraint();
public static NoLowercaseConstraint noLowercase() {
return NO_LOWERCASE;
}
/**
* Matches a lowercase alphabetic character.
*/
protected static final Pattern LOWERCASE_PATTERN = Pattern
.compile("\\p{Lower}");
public void validate(String t) throws IllegalArgumentException {
if (t != null) {
Validate.isTrue(!LOWERCASE_PATTERN.matcher(t).find(),
"May not contain lowercase characters");
}
}
}
| jbeard6/CafeMocha | validation/validation-api/src/main/java/net/sf/cafemocha/validation/string/NoLowercaseConstraint.java | Java | bsd-3-clause | 909 |
import resource
import numpy as np
from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.conditions import InCondition
from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
UniformIntegerHyperparameter, CategoricalHyperparameter, \
UnParametrizedHyperparameter
from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
from autosklearn.pipeline.constants import *
class LibSVM_SVR(AutoSklearnRegressionAlgorithm):
    """Epsilon Support Vector Regression backed by ``sklearn.svm.SVR`` (libsvm).

    Targets are standard-scaled before fitting and predictions are mapped
    back through the inverse transform, so callers see values on the
    original target scale.
    """

    def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0,
                 degree=3, coef0=0.0, verbose=False,
                 max_iter=-1, random_state=None):
        self.kernel = kernel
        self.C = C
        self.epsilon = epsilon
        self.tol = tol
        self.shrinking = shrinking
        self.degree = degree
        self.gamma = gamma
        self.coef0 = coef0
        self.verbose = verbose
        self.max_iter = max_iter
        self.random_state = random_state
        self.estimator = None
        # Bug fix: initialize the target scaler so predict() before fit()
        # raises NotImplementedError instead of AttributeError.
        self.scaler = None

    def fit(self, X, Y):
        import sklearn.svm
        # Bug fix: sklearn.preprocessing was used below without being
        # imported; sklearn submodules are not implicitly importable via
        # the parent package.
        import sklearn.preprocessing

        # Size the libsvm kernel cache (in MB) from the remaining
        # address-space budget (soft RLIMIT_AS minus current RSS), falling
        # back to the 200 MB default when no limit is set or on any error.
        try:
            soft, hard = resource.getrlimit(resource.RLIMIT_AS)
            if soft > 0:
                soft /= 1024 * 1024
                maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024
                cache_size = (soft - maxrss) / 1.5
            else:
                cache_size = 200
        except Exception:
            cache_size = 200

        # Hyperparameters arrive as strings from the configuration space;
        # coerce them to their native types before handing them to sklearn.
        self.C = float(self.C)
        self.epsilon = float(self.epsilon)
        self.tol = float(self.tol)
        self.shrinking = self.shrinking == 'True'
        self.degree = int(self.degree)
        self.gamma = float(self.gamma)
        if self.coef0 is None:
            self.coef0 = 0.0
        else:
            self.coef0 = float(self.coef0)
        self.verbose = int(self.verbose)
        self.max_iter = int(self.max_iter)

        self.estimator = sklearn.svm.SVR(
            kernel=self.kernel,
            C=self.C,
            epsilon=self.epsilon,
            tol=self.tol,
            shrinking=self.shrinking,
            degree=self.degree,
            gamma=self.gamma,
            coef0=self.coef0,
            cache_size=cache_size,
            verbose=self.verbose,
            max_iter=self.max_iter
        )
        # Standard-scale the targets; predictions are inverse-transformed
        # in predict().
        self.scaler = sklearn.preprocessing.StandardScaler(copy=True)
        self.scaler.fit(Y)
        Y_scaled = self.scaler.transform(Y)
        self.estimator.fit(X, Y_scaled)
        return self

    def predict(self, X):
        # Both the estimator and the scaler must exist (i.e. fit() ran).
        if self.estimator is None:
            raise NotImplementedError
        if self.scaler is None:
            raise NotImplementedError
        Y_pred = self.estimator.predict(X)
        return self.scaler.inverse_transform(Y_pred)

    @staticmethod
    def get_properties(dataset_properties=None):
        return {'shortname': 'SVR',
                'name': 'Support Vector Regression',
                'handles_regression': True,
                'handles_classification': False,
                'handles_multiclass': False,
                'handles_multilabel': False,
                'prefers_data_normalized': True,
                'is_deterministic': True,
                'input': (SPARSE, DENSE, UNSIGNED_DATA),
                'output': (PREDICTIONS,)}

    @staticmethod
    def get_hyperparameter_search_space(dataset_properties=None):
        # Copied from libsvm_c
        C = UniformFloatHyperparameter(
            name="C", lower=0.03125, upper=32768, log=True, default=1.0)
        kernel = CategoricalHyperparameter(
            name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'],
            default="rbf")
        degree = UniformIntegerHyperparameter(
            name="degree", lower=1, upper=5, default=3)
        # Changed the gamma value to 0.0 (is 0.1 for classification)
        gamma = UniformFloatHyperparameter(
            name="gamma", lower=3.0517578125e-05, upper=8, log=True, default=0.1)
        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter(
            name="coef0", lower=-1, upper=1, default=0)
        # probability is no hyperparameter, but an argument to the SVM algo
        shrinking = CategoricalHyperparameter(
            name="shrinking", choices=["True", "False"], default="True")
        tol = UniformFloatHyperparameter(
            name="tol", lower=1e-5, upper=1e-1, default=1e-3, log=True)
        max_iter = UnParametrizedHyperparameter("max_iter", -1)
        # Random Guess
        epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001,
                                             upper=1, default=0.1, log=True)

        cs = ConfigurationSpace()
        cs.add_hyperparameter(C)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(shrinking)
        cs.add_hyperparameter(tol)
        cs.add_hyperparameter(max_iter)
        cs.add_hyperparameter(epsilon)

        # degree/gamma/coef0 only apply to some kernels.
        degree_depends_on_kernel = InCondition(child=degree, parent=kernel,
                                               values=('poly', 'rbf', 'sigmoid'))
        gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel,
                                              values=('poly', 'rbf'))
        coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel,
                                              values=('poly', 'sigmoid'))
        cs.add_condition(degree_depends_on_kernel)
        cs.add_condition(gamma_depends_on_kernel)
        cs.add_condition(coef0_depends_on_kernel)
        return cs
| hmendozap/auto-sklearn | autosklearn/pipeline/components/regression/libsvm_svr.py | Python | bsd-3-clause | 5,653 |
from django.conf import settings
from paymentexpress.gateway import (
AUTH, COMPLETE, PURCHASE, REFUND, VALIDATE, Gateway
)
from paymentexpress.models import OrderTransaction
from oscar.apps.payment.exceptions import (UnableToTakePayment,
InvalidGatewayRequestError)
import random
class Facade(object):
    """
    A bridge between oscar's objects and the core gateway object
    """
    def __init__(self):
        # Gateway endpoint and credentials come from Django settings;
        # currency falls back to AUD when PAYMENTEXPRESS_CURRENCY is unset.
        self.gateway = Gateway(
            settings.PAYMENTEXPRESS_POST_URL,
            settings.PAYMENTEXPRESS_USERNAME,
            settings.PAYMENTEXPRESS_PASSWORD,
            getattr(settings, 'PAYMENTEXPRESS_CURRENCY', 'AUD')
        )

    def _check_amount(self, amount):
        # Reject zero/None amounts before any gateway round-trip.
        if amount == 0 or amount is None:
            raise UnableToTakePayment("Order amount must be non-zero")

    def _get_merchant_reference(self, order_number, txn_type):
        # Build a unique reference: <order>_<TXNTYPE>_<attempt#>_<4 random digits>.
        num_previous = OrderTransaction.objects.filter(
            order_number=order_number,
            txn_type=txn_type).count()
        # Get a random number to append to the end. This solves the problem
        # where a previous request crashed out and didn't save a model instance
        # Hence we can get a clash of merchant references.
        rand = "%04.f" % (random.random() * 10000)
        return u'%s_%s_%d_%s' % (
            order_number, txn_type.upper(), num_previous + 1, rand
        )

    def _get_friendly_decline_message(self):
        # Customer-facing text for declined transactions.
        return ('The transaction was declined by your bank - ' +
                'please check your bankcard details and try again')

    def _handle_response(self, txn_type, order_number, amount, response):
        # Persist an audit record for every gateway response, then map the
        # outcome onto oscar's exception model.
        OrderTransaction.objects.create(
            order_number=order_number,
            txn_type=txn_type,
            txn_ref=response['dps_txn_ref'],
            amount=amount,
            response_code=response['response_code'],
            response_message=response.get_message(),
            request_xml=response.request_xml,
            response_xml=response.response_xml
        )
        if response.is_successful():
            return {
                'txn_reference': response['dps_txn_ref'],
                'partner_reference': response['dps_billing_id'],
            }
        elif response.is_declined():
            # Recoverable from the customer's point of view.
            raise UnableToTakePayment(self._get_friendly_decline_message())
        else:
            # Anything else indicates a malformed/failed gateway exchange.
            raise InvalidGatewayRequestError(response.get_message())

    def _format_card_date(self, str_date):
        # Dirty hack so that Oscar's BankcardForm doesn't need to be overridden
        if str_date is None:
            return None
        return str_date.replace('/', '')

    def authorise(self, order_number, amount, bankcard):
        """
        Authorizes a transaction.
        Must be completed within 7 days using the "Complete" TxnType
        """
        self._check_amount(amount)

        card_issue_date = self._format_card_date(bankcard.start_date)
        card_expiry_date = self._format_card_date(bankcard.expiry_date)
        merchant_ref = self._get_merchant_reference(order_number, AUTH)
        res = self.gateway.authorise(card_holder=bankcard.card_holder_name,
                                     card_number=bankcard.card_number,
                                     card_issue_date=card_issue_date,
                                     card_expiry=card_expiry_date,
                                     cvc2=bankcard.cvv,
                                     amount=amount,
                                     merchant_ref=merchant_ref)
        return self._handle_response(AUTH, order_number, amount, res)

    def complete(self, order_number, amount, dps_txn_ref):
        """
        Completes (settles) a pre-approved Auth Transaction.
        The DpsTxnRef value returned by the original approved Auth transaction
        must be supplied.
        """
        self._check_amount(amount)

        merchant_ref = self._get_merchant_reference(order_number, COMPLETE)
        res = self.gateway.complete(amount=amount,
                                    dps_txn_ref=dps_txn_ref,
                                    merchant_ref=merchant_ref)
        return self._handle_response(COMPLETE, order_number, amount, res)

    def purchase(self, order_number, amount, billing_id=None, bankcard=None):
        """
        Purchase - Funds are transferred immediately.
        """
        self._check_amount(amount)
        res = None
        merchant_ref = self._get_merchant_reference(order_number, PURCHASE)
        # Either charge a stored card (billing_id) or raw card details;
        # exactly one source must be provided.
        if billing_id:
            res = self.gateway.purchase(amount=amount,
                                        dps_billing_id=billing_id,
                                        merchant_ref=merchant_ref)
        elif bankcard:
            card_issue_date = self._format_card_date(bankcard.start_date)
            card_expiry_date = self._format_card_date(bankcard.expiry_date)
            # enable_add_bill_card=1 stores the card for later token billing.
            res = self.gateway.purchase(amount=amount,
                                        card_holder=bankcard.card_holder_name,
                                        card_number=bankcard.card_number,
                                        card_issue_date=card_issue_date,
                                        card_expiry=card_expiry_date,
                                        cvc2=bankcard.cvv,
                                        merchant_ref=merchant_ref,
                                        enable_add_bill_card=1)
        else:
            raise ValueError("You must specify either a billing id or " +
                             "a merchant reference")
        return self._handle_response(PURCHASE, order_number, amount, res)

    def refund(self, order_number, amount, dps_txn_ref):
        """
        Refund - Funds transferred immediately.
        Must be enabled as a special option.
        """
        self._check_amount(amount)
        merchant_ref = self._get_merchant_reference(order_number, REFUND)
        res = self.gateway.refund(amount=amount,
                                  dps_txn_ref=dps_txn_ref,
                                  merchant_ref=merchant_ref)
        return self._handle_response(REFUND, order_number, amount, res)

    def validate(self, bankcard):
        """
        Validation Transaction.
        Effects a $1.00 Auth to validate card details including expiry date.
        Often utilised with the EnableAddBillCard property set to 1 to
        automatically add to Billing Database if the transaction is approved.
        """
        amount = 1.00
        card_issue_date = self._format_card_date(bankcard.start_date)
        card_expiry_date = self._format_card_date(bankcard.expiry_date)
        res = self.gateway.validate(amount=amount,
                                    card_holder=bankcard.card_holder_name,
                                    card_number=bankcard.card_number,
                                    card_issue_date=card_issue_date,
                                    card_expiry=card_expiry_date,
                                    cvc2=bankcard.cvv,
                                    enable_add_bill_card=1)
        # No order number: validation is not tied to an order.
        return self._handle_response(VALIDATE, None, amount, res)
| django-oscar/django-oscar-paymentexpress | paymentexpress/facade.py | Python | bsd-3-clause | 7,203 |
from django.apps import AppConfig
class WagtailLinkchekerAppConfig(AppConfig):
    # Django application configuration for the wagtail-linkchecker plugin.
    # (The "Linkcheker" spelling in the class name is kept as-is: renaming
    # would break any INSTALLED_APPS entries referencing it.)
    name = 'wagtaillinkchecker'
    verbose_name = "Wagtail Link Checker"
| takeflight/wagtail-linkchecker | wagtaillinkchecker/apps.py | Python | bsd-3-clause | 155 |
////////////////
// control_atk.h
// This file is a portion of the immature engine.
// It is distributed under the BSD license.
// Copyright 2015 Huang Yiting (http://endrollex.com)
////////////////
////////////////
#include "control_atk.h"
#include "imm_ptr.h"
namespace imm
{
////////////////
// combo_data
////////////////
////////////////
// Animation timings in build()'s tables are authored in frames; they are
// converted to seconds using this frame rate.
combo_data::combo_data():
    frame_rate(24.0f)
{
    ;
}
//
// Populate the per-model combo tables (clip names, end/turn frames, playback
// speeds, active hit boxes).  Frame values are converted from frames to
// seconds at the end.  Currently only the "sinon" model is defined.
void combo_data::build(const std::string &name)
{
    if (name == "sinon") {
        atk.push_back("Atk01");
        atk.push_back("Atk02");
        frame_end.push_back(9.0f);
        frame_end.push_back(11.0f);
        // NOTE(review): the -2.0f offsets on the turn frames look like
        // hand-tuned input windows -- confirm against the animation data.
        frame_turn.push_back(4.5f-2.0f);
        frame_turn.push_back(5.5f-2.0f);
        frame_speed.push_back(1.0f);
        frame_speed.push_back(0.5f);
        // Hit boxes active during each stage: stage 1 left hand, stage 2
        // right foot.
        std::vector<std::string> box_name;
        box_name.push_back("hand_L");
        atk_box.push_back(box_name);
        box_name.clear();
        box_name.push_back("foot_R");
        atk_box.push_back(box_name);
    }
    // Convert authored frame counts to seconds.
    for (auto &end: frame_end) end /= frame_rate;
    for (auto &turn: frame_turn) turn /= frame_rate;
}
//
// Start the attack stage pa.combo_ix for instance pa.inst_ix: turn off the
// previous stage's hit boxes, arm the countdown, switch the animation clip
// and speed, mark the instance busy, and activate this stage's hit boxes.
void combo_data::current_apply(combo_para &pa)
{
    // Deactivate the previous combo stage's boxes, if any.
    if (pa.combo_ix > 0) PTR->m_Attack.set_active_box(pa.inst_ix, atk_box[pa.combo_ix-1], false);
    pa.time_count_down = frame_end[pa.combo_ix];
    PTR->m_Inst.m_Stat[pa.inst_ix].check_set_ClipName(atk[pa.combo_ix], true);
    math::set_instance_speed(pa.inst_ix, frame_speed[pa.combo_ix]);
    pa.is_busy = true;
    PTR->m_Attack.set_active_box(pa.inst_ix, atk_box[pa.combo_ix], true);
}
//
// End the current attack stage: clear the busy flag and deactivate all of
// the instance's hit boxes.
void combo_data::current_over(combo_para &pa)
{
    pa.is_busy = false;
    PTR->m_Attack.deactive_box(pa.inst_ix);
}
//
// Handle an attack input.  Either starts the first combo stage (combo_ix
// -1 -> 0) or, if pressed within the current stage's "turn" window, queues
// the next stage (picked up by update()).
void combo_data::strike(combo_para &pa)
{
    // Cooldown after a finished combo: ignore input until it expires.
    if (pa.combo_ix < 0 && pa.time_count_down > 0.0f) return;
    if (pa.combo_ix == -1) {
        // Begin a new combo from stage 0.
        ++pa.combo_ix;
        current_apply(pa);
        return;
    }
    // Queue the next stage if input arrives before the turn window opens.
    if (!pa.is_turn_next && pa.time_count_down > frame_turn[pa.combo_ix]) {
        ++pa.combo_ix;
        pa.is_turn_next = true;
        return;
    }
}
//
// Per-frame combo state machine: counts the timer down, finishes stages,
// transitions to queued stages, and returns the instance to idle.  The
// negative timer values (-2, -5, -6) act as post-combo cooldown sentinels --
// NOTE(review): confirm their exact meaning against the pose state machine.
void combo_data::update(const float &dt, combo_para &pa)
{
    auto &tro = PTR->m_Inst.m_Troll[pa.inst_ix];
    if (pa.time_count_down > -5.0f) pa.time_count_down -= dt;
    // Current stage finished with nothing queued: go idle.
    if (pa.is_busy && pa.time_count_down < 0.0f) {
        tro.order |= ORDER_IDLE;
        current_over(pa);
        pa.combo_ix = -1;
        return;
    }
    // Cooldown elapsed: restore the idle clip once.
    if (!pa.is_busy && pa.time_count_down < -2.0f) {
        if (tro.current_state == pose_Idle::instance())
            PTR->m_Inst.m_Stat[pa.inst_ix].check_set_ClipName(act::Idle);
        pa.time_count_down = -6.0f;
        return;
    }
    // A queued stage starts once the turn window is reached.
    if (pa.is_turn_next && pa.time_count_down < frame_turn[pa.combo_ix]) {
        current_apply(pa);
        pa.is_turn_next = false;
        // NOTE(review): signed/unsigned comparison (int vs size_t); fine
        // while atk is non-empty, which build() guarantees for known models.
        if (pa.combo_ix == atk.size()-1) pa.combo_ix = -1;
        return;
    }
}
////////////////
// control_atk
////////////////
////////////////
// Construct with no bound application; init() must be called before use.
template <typename T_app>
control_atk<T_app>::control_atk():
    app(nullptr)
{
    ;
}
//
// Bind the application pointer and build the combo tables for all models
// that support combos (currently only "sinon").
template <typename T_app>
void control_atk<T_app>::init(T_app *app_in)
{
    app = app_in;
    combo["sinon"].build("sinon");
}
//
// Lazily create the per-instance combo parameters, capturing the instance's
// model name and index.
template <typename T_app>
void control_atk<T_app>::init_combo_para(const size_t &index_in)
{
    combo_p[index_in];  // default-construct the entry in the map
    combo_p[index_in].model_name = *app->m_Inst.m_Stat[index_in].get_ModelName();
    combo_p[index_in].inst_ix = index_in;
}
//
// Entry point for an attack input on the given instance.  Models without
// combo data simply get an idle order; otherwise the input is forwarded to
// the model's combo state machine.
template <typename T_app>
void control_atk<T_app>::perform(const size_t &index_in)
{
    if (!combo.count(*app->m_Inst.m_Stat[index_in].get_ModelName())) {
        PTR->m_Inst.m_Troll[index_in].order |= ORDER_IDLE;
        return;
    }
    if (!combo_p.count(index_in)) init_combo_para(index_in);
    combo[combo_p[index_in].model_name].strike(combo_p[index_in]);
}
//
// Per-frame tick: advance every instance's combo state machine.
template <typename T_app>
void control_atk<T_app>::update(const float &dt)
{
    for (auto &para_it: combo_p) {
        combo[para_it.second.model_name].update(dt, para_it.second);
    }
}
//
} | RealRui/imm_engine | imm_include/control_atk.cc | C++ | bsd-3-clause | 3,584 |
# Horizontal bar chart of the median observation count per weekday.
# Assumes `count_weekday_years` is a DataFrame with one row per year and one
# column per weekday (0=Monday .. 6=Sunday) -- TODO confirm against the
# notebook cell that builds it.
fig, ax = plt.subplots()
count_weekday_years.median(axis=0).plot(kind='barh', ax=ax, color='#66b266')
xticks = ax.set_yticklabels(['Monday', 'Tuesday', 'Wednesday', "Thursday", "Friday", "Saturday", "Sunday"])
// Copyright (c) 2018, Smart Projects Holdings Ltd
// All rights reserved.
// See LICENSE file for license details.
/*
* Properties class implementation.
*/
#include <ugcs/vsm/properties.h>
#include <ugcs/vsm/platform.h>
#include <ugcs/vsm/debug.h>
#include <ugcs/vsm/utils.h>
#include <climits>
#include <cmath>
#include <functional>
#include <memory>
#include <sstream>
#include <map>
#include <algorithm>
using namespace ugcs::vsm;
/* Properties::Property class. */
/* Parse a textual property value, pre-computing the integer and floating
 * point representations when the entire (trimmed) string converts cleanly.
 */
Properties::Property::Property(std::string &&value):
    str_repr(std::move(value)),
    description(LINE_TERMINATOR)
{
    std::string trimmed(str_repr);
    Trim(trimmed);
    try {
        size_t pos;
        // Base 0: accepts decimal, 0x-hex and 0-octal prefixes.
        // NOTE(review): std::stol returns long -- confirm int_repr is at
        // least as wide on all target platforms.
        int_repr = std::stol(trimmed, &pos, 0);
        // Valid only if the whole trimmed string was consumed.
        if (pos == trimmed.size()) {
            int_valid = true;
        } else {
            int_valid = false;
        }
    } catch(...) {
        int_valid = false;
    }
    try {
        size_t pos;
        float_repr = std::stod(trimmed, &pos);
        if (pos == trimmed.size()) {
            float_valid = true;
        } else {
            float_valid = false;
        }
    } catch (...) {
        float_valid = false;
    }
    /* Create integer representation from float if possible. */
    if (!int_valid && float_valid &&
        float_repr >= LLONG_MIN && float_repr <= LLONG_MAX) {
        int_repr = std::lrint(float_repr);
        int_valid = true;
    }
}
/* Construct from an integer value; all three representations are valid. */
Properties::Property::Property(int32_t value):
    str_repr(std::to_string(value)), int_repr(value), float_repr(value),
    int_valid(true), float_valid(true),
    description(LINE_TERMINATOR)
{
}
/* Construct from a floating point value.  An integer representation is
 * derived when the value fits the long long range.
 */
Properties::Property::Property(double value):
    str_repr(std::to_string(value)), float_repr(value), float_valid(true),
    description(LINE_TERMINATOR)
{
    if (value >= LLONG_MIN && value <= LLONG_MAX) {
        // NOTE(review): std::lrint returns long; on platforms where long is
        // 32-bit this can overflow even though the guard above passes --
        // confirm the intended width of int_repr.
        int_repr = std::lrint(value);
        int_valid = true;
    } else {
        int_valid = false;
    }
}
/* Properties class. */
Singleton<Properties> Properties::singleton;
/* Default-construct an empty properties table. */
Properties::Properties()
{}
/* Parse properties from the given stream into the table, replacing any
 * previous contents.
 *
 * Implemented as a character-driven automaton: the outer loop feeds one
 * character at a time into a current State (each State may host a nested
 * substate, forming a stack).  When a state completes it yields a Token;
 * PROPERTY tokens are inserted into the table, while all other consumed
 * text accumulates as the free-form "description" attached to the next
 * property (or to the trailer after the last one).  A position tracker
 * keeps line/column context so Parse_exception reports point at the
 * offending character.
 */
void
Properties::Load(std::istream &stream)
{
    table.clear();
    /* State machine. */
    /* Token value available as a result from state handler. */
    class Token {
    public:
        /* Token type. */
        enum class Type {
            /* Empty token. */
            NONE,
            /* One character. */
            CHAR,
            /* Characters string. */
            STRING,
            /* Full property. */
            PROPERTY
        };
        /* Token type. */
        Type type = Type::NONE;
        /* Character value. */
        int v_char;
        /* String value. */
        std::string v_string;
        /* Property value. */
        struct {
            std::string key, value;
        } v_property;

        Token() = default;

        /* Move: the source token is reset to NONE so it cannot be reused. */
        Token(Token &&token):
            type(token.type), v_char(token.v_char),
            v_string(std::move(token.v_string)),
            v_property(std::move(token.v_property))
        {
            token.type = Type::NONE;
        }

        void
        Set(int c)
        {
            type = Type::CHAR;
            v_char = c;
        }

        void
        Set(const std::string &str)
        {
            type = Type::STRING;
            v_string = str;
        }

        void
        Set(const std::string &key, const std::string &value)
        {
            type = Type::PROPERTY;
            v_property.key = key;
            v_property.value = value;
        }

        /* True when the token carries a value. */
        explicit operator bool() const
        {
            return type != Type::NONE;
        }
    };

    /* Indicates that character was consumed by a state. */
    static constexpr int CHAR_CONSUMED = 0;

    /* Represents parser FA state. */
    class State {
    public:
        typedef std::unique_ptr<State> Ptr;

        virtual
        ~State()
        {}

        /* Feed token for the state.
         * @param token Token to feed.
         * @param next_state Receives next state if a next one is created.
         * @return true if state is processed and a token can be obtained.
         */
        bool
        Feed(Token &&token, std::unique_ptr<State> &next_state)
        {
            /* Let nested substates transform the token first. */
            std::unique_ptr<State> next_substate;
            while (substate && token && substate->Feed(std::move(token), next_substate)) {
                substate->Get_token(token);
                substate = std::move(next_substate);
            }
            if (substate) {
                return false;
            }
            if (token) {
                return On_token(std::move(token), next_state);
            }
            return false;
        }

        /* Feed next character for the state.
         * @param c Next character. Set to CHAR_CONSUMED if consumed.
         * @param next_state Receives next state if a next one is created.
         * @return true if state is processed and a token can be obtained.
         */
        bool
        Feed(int &c, std::unique_ptr<State> &next_state)
        {
            if (substate) {
                std::unique_ptr<State> next_substate;
                if (substate->Feed(c, next_substate)) {
                    /* Substate finished: take its token and bubble it up. */
                    Token token;
                    substate->Get_token(token);
                    substate = std::move(next_substate);
                    return Feed(std::move(token), next_state);
                }
            } else {
                return On_char(c, next_state);
            }
            return false;
        }

        /* Get token after state is processed. */
        virtual void
        Get_token(Token &token __UNUSED)
        {}

    protected:
        /* Called on each character fed.
         * @param c Next character, should be set to CHAR_CONSUMED if consumed.
         * @param next_state Receives next state if a next one is created.
         * @return true if state is processed and a token can be obtained.
         */
        virtual bool
        On_char(int &c __UNUSED, std::unique_ptr<State> &next_state __UNUSED)
        {
            return true;
        }

        /* Token retrieved from substate or previous state.
         * @param token Retrieved token.
         * @param next_state Receives next state if a next one is created.
         * @return true if state is processed and a token can be obtained.
         */
        virtual bool
        On_token(Token &&token __UNUSED, std::unique_ptr<State> &next_state __UNUSED)
        {
            return true;
        }

        void
        Set_substate(std::unique_ptr<State> &&state)
        {
            substate = std::move(state);
        }

    private:
        /* Current sub-state. */
        std::unique_ptr<State> substate;
    };

    /* Allowed whitespaces. */
    static auto Is_whitespace = [](int c)
    {
        return c == ' ' || c == '\t' || c == '\f';
    };

    /* Line terminator started and should be skipped. */
    class Line_terminator_state: public State {
        bool cr_seen = false;

        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state __UNUSED) override
        {
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            /* Skip either CR, CR+LF or LF. */
            if (c == '\r') {
                if (cr_seen) {
                    return true;
                }
                cr_seen = true;
                c = CHAR_CONSUMED;
                return false;
            }
            if (c == '\n') {
                c = CHAR_CONSUMED;
                return true;
            }
            return true;
        }
    };

    /* Comment encountered and should be skipped. */
    class Comment_state: public State {
        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state) override
        {
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            /* Comment terminated by new line. */
            if (c == '\n' || c == '\r') {
                next_state = Ptr(new Line_terminator_state);
                return true;
            }
            /* All the rest considered as comment and consumed. */
            c = CHAR_CONSUMED;
            return false;
        }
    };

    /* Line is wrapped by '\' in the end. Skip new line and leading whitespaces. */
    class Line_break_state: public State {
    public:
        Line_break_state()
        {
            /* Skip new line first. */
            Set_substate(Ptr(new Line_terminator_state));
        }

    private:
        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state __UNUSED) override
        {
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            /* Skip all leading whitespaces. */
            if (Is_whitespace(c)) {
                c = CHAR_CONSUMED;
                return false;
            }
            return true;
        }
    };

    /* Read and parse escaped character. */
    class Escape_state: public State {
    public:
        /* @param in_key Indicates whether escape code was encountered in key
         * identifier and thus '\=' and '\:' are allowed.
         */
        Escape_state(bool in_key = false): in_key(in_key)
        {}

    private:
        bool in_key;
        bool backslash_seen = false;
        /* Resulting unescaped character; 0 means "no character" (e.g. a
         * line-continuation escape).
         */
        int escaped_char = 0;
        /* Currently reading 4-digits character code. */
        bool reading_code = false;
        int num_digits_read = 0;

        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state) override
        {
            if (c == std::istream::traits_type::eof()) {
                VSM_EXCEPTION(Parse_exception, "Unexpected EOF - unclosed escape");
            }
            if (!backslash_seen) {
                ASSERT(c == '\\');
                backslash_seen = true;
                c = CHAR_CONSUMED;
                return false;
            }
            /* Parse unicode escape. */
            if (reading_code) {
                ASSERT(num_digits_read < 4);
                int digit;
                if (c >= '0' && c <= '9') {
                    digit = c - '0';
                } else if (c >= 'a' && c <= 'f') {
                    digit = c - 'a' + 10;
                } else if (c >= 'A' && c <= 'F') {
                    digit = c - 'A' + 10;
                } else {
                    VSM_EXCEPTION(Parse_exception, "Invalid digit in unicode escape");
                }
                escaped_char = (escaped_char << 4) | digit;
                num_digits_read++;
                c = CHAR_CONSUMED;
                return num_digits_read == 4;
            }
            switch (c) {
            case ' ':
                escaped_char = ' ';
                break;
            case '\t':
            case 't':
                escaped_char = '\t';
                break;
            case '\f':
            case 'f':
                escaped_char = '\f';
                break;
            case 'r':
                escaped_char = '\r';
                break;
            case 'n':
                escaped_char = '\n';
                break;
            case '\\':
                escaped_char = '\\';
                break;
            case '\r':
            case '\n':
                /* Backslash-newline: line continuation, yields no char. */
                next_state = Ptr(new Line_break_state());
                break;
            case 'u':
                reading_code = true;
                c = CHAR_CONSUMED;
                return false;
            default:
                if (in_key) {
                    switch (c) {
                    case '=':
                    case ':':
                        escaped_char = c;
                        break;
                    default:
                        VSM_EXCEPTION(Parse_exception,
                                      "Invalid escape character: %c", c);
                    }
                } else {
                    VSM_EXCEPTION(Parse_exception,
                                  "Invalid escape character: %c", c);
                }
            }
            c = CHAR_CONSUMED;
            return true;
        }

        virtual void
        Get_token(Token &token) override
        {
            if (escaped_char) {
                token.Set(escaped_char);
            }
        }
    };

    /* Read string (either key name or value string). */
    class Read_string_state: public State {
    public:
        Read_string_state(bool is_key): is_key(is_key)
        {}

    private:
        bool is_key;
        std::string str;

        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state __UNUSED) override
        {
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            if (c == '\\') {
                Set_substate(Ptr(new Escape_state(is_key)));
                return false;
            }
            /* New line terminates the token. */
            if (c == '\r' || c == '\n') {
                return true;
            }
            /* Whitespace, '=' and ':' terminates key token. */
            if (is_key && (Is_whitespace(c) || c == '=' || c == ':')) {
                return true;
            }
            str += c;
            c = CHAR_CONSUMED;
            return false;
        }

        virtual bool
        On_token(Token &&token, std::unique_ptr<State> &next_state __UNUSED) override
        {
            /* Unescaped characters arrive from the Escape_state substate. */
            if (token.type == Token::Type::CHAR) {
                str += token.v_char;
            } else {
                ASSERT(false);
            }
            return false;
        }

        virtual void
        Get_token(Token &token) override
        {
            token.Set(str);
        }
    };

    /* Reading key-value string. */
    class Key_value_state: public State {
        std::string key, value;
        bool assignment_seen = false;

        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state __UNUSED) override
        {
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            /* Skip whitespaces. */
            if (Is_whitespace(c)) {
                c = CHAR_CONSUMED;
                return false;
            }
            /* Skip assignment token. */
            if (!key.empty() && !assignment_seen &&
                (c == '=' || c == ':')) {
                assignment_seen = true;
                c = CHAR_CONSUMED;
                return false;
            }
            Set_substate(Ptr(new Read_string_state(key.empty())));
            return false;
        }

        virtual bool
        On_token(Token &&token, std::unique_ptr<State> &next_state __UNUSED) override
        {
            if (token.type == Token::Type::STRING) {
                if (key.empty()) {
                    if (token.v_string.empty()) {
                        VSM_EXCEPTION(Parse_exception, "Empty key name");
                    }
                    key = std::move(token.v_string);
                } else {
                    /* Value parsed, state is terminated. */
                    value = std::move(token.v_string);
                    return true;
                }
            } else {
                ASSERT(false);
            }
            return false;
        }

        virtual void
        Get_token(Token &token) override
        {
            token.Set(key, value);
        }
    };

    /* New line of new property just started. Skipping whitespaces. */
    class Initial_state: public State {
        virtual bool
        On_char(int &c, std::unique_ptr<State> &next_state) override
        {
            /* Can safely terminate on EOF. */
            if (c == std::istream::traits_type::eof()) {
                return true;
            }
            /* Skip whitespaces. */
            if (Is_whitespace(c)) {
                c = CHAR_CONSUMED;
                return false;
            }
            /* Detect comments. */
            if (c == '!' || c == '#') {
                next_state = Ptr(new Comment_state());
                return true;
            }
            /* New line does nothing. */
            if (c == '\n' || c == '\r') {
                Set_substate(Ptr(new Line_terminator_state()));
                return false;
            }
            /* Key-value string started. */
            next_state = Ptr(new Key_value_state());
            return true;
        }
    };

    /* Tracks line/column and current line text for error reporting. */
    class Position_tracker {
    public:
        /* Feed character to the tracker.
         * @param trailing Indicates whether it is next parsed character or one
         * from trailing line after exception.
         * @return true if need more characters to display position information.
         */
        bool
        Feed(int c, bool trailing = false)
        {
            if (c == '\t' || c == '\f') {
                c = ' ';
            }
            if (trailing) {
                if (c == '\r' || c == '\n' || c == std::istream::traits_type::eof()) {
                    return false;
                }
                cur_line += c;
            } else if (c != std::istream::traits_type::eof()) {
                if (c == '\r' || c == '\n') {
                    cur_line.clear();
                    col_idx = 0;
                    line_idx++;
                } else {
                    cur_line += c;
                    col_idx++;
                }
            }
            return true;
        }

        /* Render "Line N column M" plus the line text and a caret marker. */
        std::string
        Get_position() const
        {
            std::stringstream result;
            result << "Line " << line_idx << " column " << col_idx << ":\n";
            result << cur_line << '\n';
            for (size_t idx = 0; idx + 1 < col_idx; idx++) {
                result << '-';
            }
            result << '^';
            return result.str();
        }

    private:
        std::string cur_line;
        size_t line_idx = 1, col_idx = 0;
    };

    State::Ptr cur_state;
    int original_c, pocessed_c;
    Position_tracker pos_tracker;
    /* Free-form text preceding the next property; stored as its description. */
    std::string cur_description;

    while (true) {
        original_c = stream.get();
        pocessed_c = original_c;
        pos_tracker.Feed(original_c);
        State::Ptr next_state;
        try {
            /* Re-feed the same character until some state consumes it. */
            do {
                if (!cur_state) {
                    cur_state = State::Ptr(new Initial_state());
                }
                if (cur_state->Feed(pocessed_c, next_state)) {
                    Token token;
                    cur_state->Get_token(token);
                    if (token.type == Token::Type::PROPERTY) {
                        /* New property parsed, check for duplicate and add. */
                        if (table.find(token.v_property.key) != table.end()) {
                            VSM_EXCEPTION(Parse_exception, "Duplicated entry: %s",
                                          token.v_property.key.c_str());
                        }
                        auto result = table.insert(std::pair<std::string, Property>(
                                std::move(token.v_property.key),
                                Property(std::move(token.v_property.value))));
                        result.first->second.seq_number = last_sequence_number++;
                        result.first->second.description = cur_description;
                        cur_description.clear();
                    }
                    cur_state = std::move(next_state);
                }
                /* End-of-file encountered. */
                if (pocessed_c == std::istream::traits_type::eof()) {
                    if (cur_state) {
                        VSM_EXCEPTION(Parse_exception, "Unexpected end of stream");
                    } else {
                        break;
                    }
                }
            } while (pocessed_c != CHAR_CONSUMED);
        } catch(Parse_exception &e) {
            /* Read the rest of the offending line for the error message. */
            while (pos_tracker.Feed(stream.get(), true)) {}
            LOG_WARNING("Exception thrown during properties parsing:\n%s\n%s",
                        e.what(), pos_tracker.Get_position().c_str());
            throw;
        }
        if (original_c == std::istream::traits_type::eof()) {
            break;
        } else {
            /* Characters not belonging to a parsed property accumulate as
             * the description of the next property.
             */
            if (cur_state) {
                Token token;
                cur_state->Get_token(token);
                if (token.type != Token::Type::PROPERTY) {
                    cur_description += original_c;
                }
            } else {
                cur_description += original_c;
            }
        }
    }
    trailer = cur_description;
}
/* Look up a property by key.
 * @throw Not_found_exception when no property with this key exists. */
const Properties::Property &
Properties::Find_property(const std::string &key) const
{
    auto entry = table.find(key);
    if (entry != table.end()) {
        return entry->second;
    }
    VSM_EXCEPTION(Not_found_exception, "Specified key not found: %s",
                  key.c_str());
}
/* Look up a property by key.
 * Non-throwing counterpart of the const overload.
 * @return Pointer to the property, or nullptr when absent. */
Properties::Property *
Properties::Find_property(const std::string &key)
{
    auto entry = table.find(key);
    return entry == table.end() ? nullptr : &entry->second;
}
std::string
Properties::Get(const std::string &key) const
{
const Property &prop = Find_property(key);
return prop.str_repr;
}
/** Check if the property with the specified key exists. */
bool
Properties::Exists(const std::string &key) const
{
return const_cast<Properties *>(this)->Find_property(key) != nullptr;
}
/* Return the property value as a 32-bit integer.
 * @throw Not_found_exception for a missing key.
 * @throw Not_convertible_exception when the stored string does not parse
 * as an integer. */
int32_t
Properties::Get_int(const std::string &key) const
{
    const Property &prop = Find_property(key);
    if (prop.int_valid) {
        return prop.int_repr;
    }
    VSM_EXCEPTION(Not_convertible_exception,
                  "Property value '%s' cannot be represented as integer value",
                  prop.str_repr.c_str());
}
/* Return the property value as a floating point number.
 * @throw Not_found_exception for a missing key.
 * @throw Not_convertible_exception when the stored string does not parse
 * as a floating point number. */
double
Properties::Get_float(const std::string &key) const
{
    const Property &prop = Find_property(key);
    if (prop.float_valid) {
        return prop.float_repr;
    }
    VSM_EXCEPTION(Not_convertible_exception,
                  "Property value '%s' cannot be represented as floating point number value",
                  prop.str_repr.c_str());
}
/* Remove the property with the given key.
 * @throw Not_found_exception when no such key exists. */
void
Properties::Delete(const std::string &key)
{
    /* erase-by-key reports how many entries were removed; zero means the
     * key was never present. */
    if (table.erase(key) == 0) {
        VSM_EXCEPTION(Not_found_exception, "Specified key not found: %s",
                      key.c_str());
    }
}
namespace {

// do not make this public.
/* Replace every occurrence of |search| in |str| with |replace|, scanning
 * left to right and stepping over the freshly inserted text so the
 * replacement itself is never rescanned. */
void
String_replace(std::string& str, const std::string& search, const std::string& replace)
{
    size_t pos = 0;
    while ((pos = str.find(search, pos)) != std::string::npos) {
        str.replace(pos, search.length(), replace);
        pos += replace.length();
    }
}

} // namespace
/* Attach a human-readable description to a property. If the key does not
 * exist yet, an empty-valued placeholder entry is created so the comment
 * survives a subsequent Store(). Every line of the description is turned
 * into a '#'-prefixed comment line in the stored file.
 */
void
Properties::Set_description(const std::string &key, const std::string &desc)
{
    Property *prop = Find_property(key);
    if (prop == nullptr) {
        /* Create a placeholder entry with an empty value and assign it the
         * next output sequence number. */
        prop = &table.insert(std::pair<std::string, Property>(key, std::string(""))).first->second;
        prop->seq_number = last_sequence_number++;
    }
    /* Lead with a line break, then comment-prefix each inner line (the
     * two adjacent literals concatenate at compile time). */
    prop->description = LINE_TERMINATOR + desc;
    String_replace(prop->description, LINE_TERMINATOR, LINE_TERMINATOR "# ");
    prop->description += LINE_TERMINATOR;
}
void
Properties::Set(const std::string &key, const std::string &value)
{
Property *prop = Find_property(key);
if (prop) {
*prop = Property(std::string(value));
} else {
auto result = table.insert(std::pair<std::string, Property>(key, std::string(value)));
result.first->second.seq_number = last_sequence_number++;
}
}
void
Properties::Set(const std::string &key, int32_t value)
{
Property *prop = Find_property(key);
if (prop) {
*prop = Property(value);
} else {
auto result = table.insert(std::pair<std::string, Property>(key, value));
result.first->second.seq_number = last_sequence_number++;
}
}
void
Properties::Set(const std::string &key, double value)
{
Property *prop = Find_property(key);
if (prop) {
*prop = Property(value);
} else {
auto result = table.insert(std::pair<std::string, Property>(key, value));
result.first->second.seq_number = last_sequence_number++;
}
}
/* Escape characters which have special meaning in the properties file
 * format. Line breaks and backslashes are always escaped; when |is_key|
 * is true, whitespace and key/value separators are escaped as well so the
 * key round-trips through parsing. */
std::string
Properties::Escape(const std::string &str, bool is_key)
{
    std::string result;
    result.reserve(str.size());
    for (char ch : str) {
        if (ch == '\n') {
            result += "\\n";
        } else if (ch == '\r') {
            result += "\\r";
        } else if (ch == '\\') {
            result += "\\\\";
        } else if (is_key && ch == ' ') {
            result += "\\ ";
        } else if (is_key && ch == '\t') {
            result += "\\t";
        } else if (is_key && ch == '\f') {
            result += "\\f";
        } else if (is_key && (ch == '=' || ch == ':')) {
            result += '\\';
            result += ch;
        } else {
            result += ch;
        }
    }
    return result;
}
void
Properties::Store(std::ostream &stream)
{
/* Want sorted by seq_ids. */
std::map<int, std::string> map;
for (auto pair : table) {
map.insert({pair.second.seq_number, pair.first});
}
for (auto pair : map) {
auto item = table.find(pair.second);
stream << item->second.description;
stream << Escape(item->first, true);
if (item->second.str_repr.size()) {
stream << " = " << Escape(item->second.str_repr);
}
}
stream << trailer;
}
/* Advance table_iterator to the next entry whose key starts with the
 * configured prefix. With an empty prefix every entry matches, so the
 * iterator is left untouched. */
void
Properties::Iterator::_NextProp()
{
    if (prefix.empty()) {
        /* No filtering requested. */
        return;
    }
    const size_t prefix_len = prefix.size();
    for (; table_iterator != table_end; table_iterator++) {
        const std::string &key = table_iterator->first;
        if (key.size() >= prefix_len &&
            key.compare(0, prefix_len, prefix, 0, prefix_len) == 0) {
            /* Current entry matches the prefix — stop here. */
            return;
        }
    }
}
/* Step to the next property which matches the prefix filter.
 * @throw Internal_error_exception when already at the end. */
void
Properties::Iterator::operator ++()
{
    if (table_iterator == table_end) {
        VSM_EXCEPTION(Internal_error_exception, "Iterated past the end");
    }
    ++table_iterator;
    /* Skip over entries which do not match the configured prefix. */
    _NextProp();
}
/* Number of separator-delimited components in the current key, or zero
 * when the iterator is exhausted. */
int
Properties::Iterator::Get_count()
{
    if (table_iterator == table_end) {
        return 0;
    }
    const std::string &key = table_iterator->first;
    /* A key always has one more component than it has separators. */
    int count = 1;
    for (size_t pos = key.find(separator); pos != std::string::npos;
         pos = key.find(separator, pos + 1)) {
        count++;
    }
    return count;
}
/* Return component |comp_idx| (0-based) of the current key, where
 * components are delimited by the configured separator.
 * @throw Internal_error_exception when the iterator is at the end.
 * @throw Invalid_param_exception when the index is out of range. */
std::string
Properties::Iterator::operator[](size_t comp_idx)
{
    if (table_iterator == table_end) {
        VSM_EXCEPTION(Internal_error_exception, "Accessing end iterator");
    }
    const std::string &key = table_iterator->first;
    /* Skip comp_idx separators to find the component start. */
    size_t start = 0;
    for (; comp_idx; comp_idx--) {
        start = key.find(separator, start);
        if (start == std::string::npos) {
            VSM_EXCEPTION(Invalid_param_exception, "Component index out of range");
        }
        start++;
    }
    /* Component extends until the next separator or end of key. */
    size_t end = key.find(separator, start);
    return end == std::string::npos ?
        key.substr(start) : key.substr(start, end - start);
}
| UgCS/vsm-cpp-sdk | src/properties.cpp | C++ | bsd-3-clause | 27,390 |
/*
* Copyright (C) 2009 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/workers/SharedWorkerThread.h"
#include "core/workers/SharedWorkerGlobalScope.h"
#include "core/workers/WorkerThreadStartupData.h"
namespace blink {
// Factory for a shared worker thread. |name| is the SharedWorker's
// user-visible name; the loader proxy and reporting proxy bridge back to
// the document side. Returns an owning pointer to the new thread object.
PassOwnPtr<SharedWorkerThread> SharedWorkerThread::create(const String& name, PassRefPtr<WorkerLoaderProxy> workerLoaderProxy, WorkerReportingProxy& workerReportingProxy)
{
    return adoptPtr(new SharedWorkerThread(name, workerLoaderProxy, workerReportingProxy));
}
SharedWorkerThread::SharedWorkerThread(const String& name, PassRefPtr<WorkerLoaderProxy> workerLoaderProxy, WorkerReportingProxy& workerReportingProxy)
    : WorkerThread(workerLoaderProxy, workerReportingProxy)
    // isolatedCopy(): the name crosses threads, so detach it from the
    // original String's shared buffer.
    , m_name(name.isolatedCopy())
{
}
// Out-of-line destructor; members clean themselves up via their owning
// smart pointers.
SharedWorkerThread::~SharedWorkerThread()
{
}
// Creates the SharedWorkerGlobalScope for this thread, forwarding the
// worker's name and the startup data captured on the main thread.
WorkerGlobalScope* SharedWorkerThread::createWorkerGlobalScope(PassOwnPtr<WorkerThreadStartupData> startupData)
{
    return SharedWorkerGlobalScope::create(m_name, this, startupData);
}
// Lazily creates the GC-supporting backing thread on first use; later
// calls return the same thread instance.
WebThreadSupportingGC& SharedWorkerThread::backingThread()
{
    if (!m_thread)
        m_thread = WebThreadSupportingGC::create("SharedWorker Thread");
    return *m_thread.get();
}
} // namespace blink
| was4444/chromium.src | third_party/WebKit/Source/core/workers/SharedWorkerThread.cpp | C++ | bsd-3-clause | 2,689 |
# -*- coding: utf-8 -*-
"""STOMP client
.. module:: network.jms.stomp_client
:platform: Unix
:synopsis: STOMP client
.. moduleauthor:: Petr Rašek <bowman@hydratk.org>
"""
"""
Events:
-------
jms_before_connect
jms_after_connect
jms_before_send
jms_after_send
jms_before_receive
jms_after_receive
jms_before_browse
jms_after_browse
"""
from hydratk.core.masterhead import MasterHead
from hydratk.core import event
from logging import basicConfig, getLogger, DEBUG, CRITICAL
from stompest.config import StompConfig
from stompest.sync import Stomp
from stompest.protocol import StompSpec
from stompest.error import StompError
from sys import version_info
# Silence stompest's internal client logger; only CRITICAL records pass.
getLogger('stompest.sync.client').setLevel(CRITICAL)

# Translation table between JMS header names and their STOMP frame header
# equivalents; used in both directions by send/receive/browse.
mapping = {
    'JMSCorrelationID': 'correlation-id',
    'JMSExpiration': 'expires',
    'JMSDeliveryMode': 'persistent',
    'JMSPriority': 'priority',
    'JMSReplyTo': 'reply-to',
    'JMSType': 'type',
    'JMSMessageID': 'message-id',
    'JMSDestination': 'destination',
    'JMSTimestamp': 'timestamp',
    'JMSRedelivered': 'redelivered'
}
class JMSClient(object):
    """Class JMSClient

    Synchronous JMS-over-STOMP client built on stompest. Every operation
    fires hydratk events (which may rewrite its arguments) and reports
    progress through the MasterHead debug/translation machinery.
    """

    _mh = None            # hydratk MasterHead singleton (events, i18n, logging)
    _client = None        # underlying stompest synchronous Stomp client
    _host = None          # broker hostname (set by connect())
    _port = None          # broker port (set by connect())
    _user = None          # login name (set by connect())
    _passw = None         # login password (set by connect())
    _verbose = None       # verbose (DEBUG logging) flag
    _is_connected = None  # True after successful connect(), False after disconnect()

    def __init__(self, verbose=False):
        """Class constructor

        Called when the object is initialized

        Args:
           verbose (bool): verbose mode

        """

        try:
            self._mh = MasterHead.get_head()

            self._verbose = verbose
            if (self._verbose):
                # Route library debug output through the root logger.
                basicConfig()
                getLogger().setLevel(DEBUG)
        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())

    @property
    def client(self):
        """ STOMP client property getter """

        return self._client

    @property
    def host(self):
        """ server host property getter """

        return self._host

    @property
    def port(self):
        """ server port property getter """

        return self._port

    @property
    def user(self):
        """ username property getter """

        return self._user

    @property
    def passw(self):
        """ user password property getter """

        return self._passw

    @property
    def verbose(self):
        """ verbose mode property getter """

        return self._verbose

    @property
    def is_connected(self):
        """ is_connected property getter """

        return self._is_connected

    def connect(self, host, port=61613, user=None, passw=None, timeout=10):
        """Method connects to server

        Args:
           host (str): hostname
           port (str): port
           user (str): username
           passw (str): password
           timeout (int): timeout

        Returns:
           bool: result

        Raises:
           event: jms_before_connect
           event: jms_after_connected

        """

        try:
            msg = 'host:{0}, port:{1}, user:{2}, passw:{3}, timeout:{4}'.format(
                host, port, user, passw, timeout)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_connecting', msg), self._mh.fromhere())

            # Event listeners may rewrite any of the connection parameters.
            ev = event.Event(
                'jms_before_connect', host, port, user, passw, timeout)
            if (self._mh.fire_event(ev) > 0):
                host = ev.argv(0)
                port = ev.argv(1)
                user = ev.argv(2)
                passw = ev.argv(3)
                timeout = ev.argv(4)

            self._host = host
            self._port = port
            self._user = user
            self._passw = passw

            if (ev.will_run_default()):
                self._client = Stomp(StompConfig('tcp://{0}:{1}'.format(self._host, self._port),
                                                 login=self._user, passcode=self._passw))
                self._client.connect(
                    connectTimeout=timeout, connectedTimeout=timeout)
                self._is_connected = True

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_connected'), self._mh.fromhere())
            ev = event.Event('jms_after_connect')
            self._mh.fire_event(ev)

            return True

        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())
            return False

    def disconnect(self):
        """Method disconnects from server

        Args:
           none

        Returns:
           bool: result

        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_disconnecting'), self._mh.fromhere())

            if (not self._is_connected):
                # Nothing to tear down; warn and bail out.
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_jms_not_connected'), self._mh.fromhere())
                return False
            else:
                self._client.disconnect()
                self._client.close()
                self._is_connected = False
                self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                    'htk_jms_disconnected'), self._mh.fromhere())
                return True

        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())
            return False

    def send(self, destination_name, message, destination_type='queue', headers={}):
        """Method sends message

        JMS headers - JMSCorrelationID, JMSExpiration, JMSDeliveryMode, JMSPriority,
        JMSReplyTo, JMSType

        Args:
           destination_name (str): queue|topic name
           message (str): message
           destination_type (str): queue|topic
           headers (dict): JMS headers, key - title, value - string

        Returns:
           bool: result

        Raises:
           event: jms_before_send
           event: jms_after_send

        """

        try:
            msg = 'destination_name:{0}, message:{1}, destination_type:{2}, headers:{3}'.format(
                destination_name, message, destination_type, headers)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_sending_msg', msg), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_jms_not_connected'), self._mh.fromhere())
                return False

            # Event listeners may rewrite destination, payload and headers.
            ev = event.Event(
                'jms_before_send', destination_name, message, destination_type, headers)
            if (self._mh.fire_event(ev) > 0):
                destination_name = ev.argv(0)
                message = ev.argv(1)
                destination_type = ev.argv(2)
                headers = ev.argv(3)

            if (ev.will_run_default()):
                # Translate JMS header names to STOMP frame header names;
                # unknown headers are silently dropped.
                headers_new = {}
                for key, value in headers.items():
                    if (key in mapping):
                        headers_new[mapping[key]] = value

                # Python 2 sends the string as-is, Python 3 needs bytes.
                self._client.send('/{0}/{1}'.format(destination_type, destination_name), message if (
                    version_info[0] == 2) else message.encode('utf-8'), headers_new)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_msg_sent'), self._mh.fromhere())
            ev = event.Event('jms_after_send')
            self._mh.fire_event(ev)

            return True

        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())
            return False

    def receive(self, destination_name, cnt=1):
        """Method receives messages

        Args:
           destination_name (str): queue name
           cnt (int): count of messages

        Returns:
           list: messages as dictionary {'message', JMS headers}

        Raises:
           event: jms_before_receive
           event: jms_after_receive

        """

        try:
            msg = 'destination_name:{0}, count:{1}'.format(
                destination_name, cnt)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_receiving_msg', msg), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_jms_not_connected'), self._mh.fromhere())
                return None

            ev = event.Event('jms_before_receive', destination_name, cnt)
            if (self._mh.fire_event(ev) > 0):
                destination_name = ev.argv(0)
                cnt = ev.argv(1)

            if (ev.will_run_default()):
                # Individual ACK mode: each consumed frame is acknowledged
                # separately below.
                token = self._client.subscribe('/queue/{0}'.format(destination_name),
                                               {StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL})

                msgs = []
                i = 0
                # canRead(1): wait up to 1s for the next frame before giving up.
                while (i < cnt and self._client.canRead(1)):
                    frame = self._client.receiveFrame()
                    if (frame.command != 'MESSAGE'):
                        break
                    self._client.ack(frame)
                    msgs.append(frame)
                    i = i + 1

                self._client.unsubscribe(token)

                # Convert frames to dicts, mapping STOMP headers back to
                # their JMS names.
                messages = []
                for msg in msgs:
                    message = {}
                    message['message'] = msg.body.decode()
                    for header in msg.rawHeaders:
                        if (header[0] in mapping.values()):
                            message[
                                list(mapping.keys())[list(mapping.values()).index(header[0])]] = header[1]
                    messages.append(message)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_msg_received', len(messages)), self._mh.fromhere())
            ev = event.Event('jms_after_receive')
            self._mh.fire_event(ev)

            return messages

        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())
            return None

    def browse(self, destination_name, cnt=100, jms_correlation_id=None, jms_type=None):
        """Method browses queue

        Args:
           destination_name (str): queue name
           cnt (int): count of messages
           jms_correlation_id (str): requested JMSCorrelationID
           jms_type (str): requested JMSType

        Returns:
           list: messages as dictionary {'message', JMS headers}

        Raises:
           event: jms_before_browse
           event: jms_after_browse

        """

        try:
            msg = 'destination_name:{0}, count:{1}, jms_correlation_id:{2}, jms_type:{3}'.format(
                destination_name, cnt, jms_correlation_id, jms_type)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_browsing', msg), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_jms_not_connected'), self._mh.fromhere())
                return None

            ev = event.Event(
                'jms_before_browse', destination_name, cnt, jms_correlation_id, jms_type)
            if (self._mh.fire_event(ev) > 0):
                destination_name = ev.argv(0)
                cnt = ev.argv(1)
                jms_correlation_id = ev.argv(2)
                jms_type = ev.argv(3)

            if (ev.will_run_default()):
                # NOTE: unlike receive(), frames are never ack'd here, so the
                # messages are not consumed from the queue.
                token = self._client.subscribe('/queue/{0}'.format(destination_name),
                                               {StompSpec.ACK_HEADER: StompSpec.ACK_CLIENT_INDIVIDUAL})

                msgs = []
                i = 0
                while (i < cnt and self._client.canRead(1)):
                    frame = self._client.receiveFrame()

                    # Optional filtering on correlation id and/or type; only
                    # matching frames count toward cnt.
                    correlation_id = None
                    type = None
                    for header in frame.rawHeaders:
                        if (header[0] == 'correlation-id'):
                            correlation_id = header[1]
                        elif (header[0] == 'type'):
                            type = header[1]

                    if ((jms_correlation_id == None or jms_correlation_id == correlation_id) and
                            (jms_type == None or jms_type == type)):
                        msgs.append(frame)
                        i = i + 1

                self._client.unsubscribe(token)

                # Convert frames to dicts, mapping STOMP headers back to
                # their JMS names.
                messages = []
                for msg in msgs:
                    message = {}
                    message['message'] = msg.body.decode()
                    for header in msg.rawHeaders:
                        if (header[0] in mapping.values()):
                            message[
                                list(mapping.keys())[list(mapping.values()).index(header[0])]] = header[1]
                    messages.append(message)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_jms_msg_received', len(messages)), self._mh.fromhere())
            ev = event.Event('jms_after_browse')
            self._mh.fire_event(ev)

            return messages

        except StompError as ex:
            self._mh.demsg('htk_on_error', ex, self._mh.fromhere())
            return None
| hydratk/hydratk-lib-network | src/hydratk/lib/network/jms/stomp_client.py | Python | bsd-3-clause | 13,434 |
#!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Lin, Wanming <wanming.lin@intel.com>
import unittest
import os
import sys
import commands
import comm
import time
class TestWebAppFunctions(unittest.TestCase):
    """Functional test: stop (close) the installed helloworld webapp."""

    def test_close(self):
        # Prepare the test environment (device/adb configuration).
        comm.setUp()
        app_name = "helloworld"
        pkg_name = "com.example." + app_name.lower()

        # Install the app first if it is not already on the device.
        if not comm.check_app_installed(pkg_name, self):
            comm.app_install(app_name, pkg_name, self)

        # The app must be running before it can be stopped; the short sleep
        # gives the launch time to settle.
        if not comm.check_app_launched(pkg_name, self):
            print "Close app ---------------->%s App haven't launched, need to launch it!" % app_name
            comm.app_launch(app_name, pkg_name, self)
            time.sleep(1)

        comm.app_stop(pkg_name, self)

if __name__ == '__main__':
    unittest.main()
| jiajiax/crosswalk-test-suite | cordova/cordova-webapp-android-tests/webapp/webapp_close.py | Python | bsd-3-clause | 2,249 |
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.db import transaction
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from greenmine.core.generic import GenericView
from greenmine.core.decorators import login_required, staff_required
from greenmine import models
from greenmine.wiki import models as wiki_models
import datetime
import subprocess
import shutil
import pickle
import base64
import zlib
import copy
import sys
import os
import re
import io
class BinaryFile(object):
    """Context manager that opens ``name`` for binary writing (``'w+b'``)
    on entry and flushes + closes it on exit. Exceptions raised inside the
    ``with`` block are not suppressed."""

    def __init__(self, name):
        # Target path; the file itself is opened lazily in __enter__.
        self.name = name

    def __enter__(self):
        self._handle = io.open(self.name, mode='w+b')
        return self._handle

    def __exit__(self, exc_type, exc_value, traceback):
        # Flush explicitly so buffered data hits disk before close.
        self._handle.flush()
        self._handle.close()
class ProjectExportView(GenericView):
    """Settings page listing the cached export (backup) files of a project."""

    template_path = 'config/project-export.html'
    menu = ['settings', 'export']

    @login_required
    def get(self, request, pslug):
        project = get_object_or_404(models.Project, slug=pslug)
        context = {
            'project': project,
            # Cached backup-directory listing, refreshed by
            # RehashExportsDirectory.
            'flist': models.ExportDirectoryCache.objects.all()
        }
        return self.render_to_response(self.template_path, context)
class RehashExportsDirectory(GenericView):
    """Rebuild the ExportDirectoryCache table from the ``.xz`` archives
    currently present in ``settings.BACKUP_PATH``."""

    def backup_path_list(self):
        # Yield absolute paths of files whose extension is ".xz"
        # (".tar.xz" qualifies: splitext returns the last extension).
        for path in os.listdir(settings.BACKUP_PATH):
            if os.path.splitext(path)[1] != '.xz':
                continue
            yield os.path.join(settings.BACKUP_PATH, path)

    def backup_file_list(self):
        # Yield (absolute path, base name, size in bytes) triples.
        for path in self.backup_path_list():
            yield path, os.path.basename(path), os.path.getsize(path)

    @login_required
    def get(self, request, pslug):
        # Lookup only validates the slug (404 on unknown project); the
        # cache itself is global, not per-project.
        project = get_object_or_404(models.Project, slug=pslug)

        # Drop the whole cache and rebuild it from the directory contents.
        models.ExportDirectoryCache.objects.all().delete()

        for path, name, size in self.backup_file_list():
            models.ExportDirectoryCache.objects.create(
                path = name,
                size = size,
            )
        return self.redirect_referer(_(u"Now rehashed"))
class PerojectImportNow(GenericView):
    # NOTE(review): class name looks like a typo of "ProjectImportNow";
    # kept as-is because URL conf or other modules may reference it.
    @login_required
    def get(self, request, project, iid):
        # NOTE(review): `pslug` is undefined in this scope (the parameters
        # are `project` and `iid`), so this line raises NameError when the
        # view is called, and nothing is returned afterwards — this looks
        # like an unfinished stub. TODO confirm intended behavior before
        # wiring the view up.
        project = get_object_or_404(models.Project, slug=pslug)
class ProjectExportNow(GenericView):
    """Dump a whole project (core data, user roles, milestones, user
    stories, tasks, questions and wiki pages) into pickle files and pack
    them into a ``.tar.xz`` tarball under ``settings.BACKUP_PATH``."""

    def _clean_copy(self, obj):
        """Return a deep copy of a model ``__dict__`` with Django's
        internal ``_state`` entry removed so it can be pickled safely."""
        new_object = copy.deepcopy(obj)
        if "_state" in new_object:
            del new_object["_state"]
        return new_object

    def create_tempdir_for_project(self, project):
        """Create (or wipe and recreate) the working directory for this
        project's backup; remembers it in ``self.path``/``self.dirname``."""
        self.dirname = u"{0}_backup".format(project.slug)
        self.path = os.path.join(settings.BACKUP_PATH, self.dirname)
        if os.path.exists(self.path):
            shutil.rmtree(self.path)
        os.mkdir(self.path)

    def _make_subdir(self, name):
        # Helper: (re)create a clean subdirectory of the backup directory
        # and return its path.
        path = os.path.join(self.path, name)
        if os.path.exists(path):
            shutil.rmtree(path)
        os.mkdir(path)
        return path

    def _dump(self, filepath, obj):
        # Helper: pickle one object into filepath (highest protocol).
        with BinaryFile(filepath) as f:
            pickle.dump(obj, f, -1)

    def _pack_file(self, obj, field_file):
        # Helper: embed a compressed, base64-encoded copy of an attached
        # file into the serialized dict under '__raw_file_data'.
        raw_file_data = field_file.read()
        raw_file_data = zlib.compress(raw_file_data, 9)
        obj['__raw_file_data'] = base64.b64encode(raw_file_data)
        return obj

    def _backup_project_data(self, project):
        """Serialize the project row itself and its owner."""
        self._dump(os.path.join(self.path, "project-data.data"),
                   self._clean_copy(project.__dict__))
        self._dump(os.path.join(self.path, "project-owner.data"),
                   self._clean_copy(project.owner.__dict__))

    def _backup_user_roles(self, project):
        """Serialize every ProjectUserRole attached to the project."""
        path = self._make_subdir("user_roles")
        for pur in models.ProjectUserRole.objects.filter(project=project):
            obj = self._clean_copy(pur.__dict__)
            filename = "{0}_{1}.data".format(pur.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _backup_milestones(self, project):
        """Serialize all project milestones."""
        path = self._make_subdir("milestones")
        for milestone in project.milestones.all():
            obj = self._clean_copy(milestone.__dict__)
            filename = "{0}_{1}.data".format(milestone.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _backup_user_story(self, project):
        """Serialize user stories, including their watcher id lists."""
        path = self._make_subdir("user_stories")
        for user_story in project.user_stories.all():
            obj = self._clean_copy(user_story.__dict__)
            obj['watchers'] = [o.id for o in user_story.watchers.all().distinct()]
            filename = "{0}_{1}.data".format(user_story.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _backup_tasks(self, project):
        """Serialize tasks, including their watcher id lists.

        Task responses and attached files are intentionally not exported
        (previously present code for them was disabled)."""
        path = self._make_subdir("tasks")
        for task in project.tasks.all():
            obj = self._clean_copy(task.__dict__)
            obj['watchers'] = [o.id for o in task.watchers.all()]
            filename = "task_{0}_{1}.data".format(task.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _backup_questions(self, project):
        """Serialize questions and their responses (with attached files)."""
        path = self._make_subdir("questions")
        for question in project.questions.all():
            obj = self._clean_copy(question.__dict__)
            obj['watchers'] = [o.id for o in question.watchers.all()]
            filename = "{0}_{1}.data".format(question.id, project.id)
            self._dump(os.path.join(path, filename), obj)

        for response in models.QuestionResponse.objects\
                .filter(question__in=project.questions.all()):
            # BUG FIX: the original serialized `question.__dict__` here
            # (the leftover loop variable from above), silently losing the
            # response data; serialize the response itself.
            obj = self._clean_copy(response.__dict__)
            obj = self._pack_file(obj, response.attached_file)
            filename = "file_response_{0}_{1}.data".format(response.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _backup_wiki(self, project):
        """Serialize wiki pages and their attachments."""
        path = self._make_subdir("wiki")
        for wikipage in project.wiki_pages.all():
            obj = self._clean_copy(wikipage.__dict__)
            obj['watchers'] = [o.id for o in wikipage.watchers.all()]
            filename = "{0}_{1}.data".format(wikipage.id, project.id)
            self._dump(os.path.join(path, filename), obj)

        for fattached in wiki_models.WikiPageAttachment.objects\
                .filter(wikipage__in=project.wiki_pages.all()):
            obj = self._clean_copy(fattached.__dict__)
            obj = self._pack_file(obj, fattached.attached_file)
            filename = "file_response_{0}_{1}.data".format(fattached.id, project.id)
            self._dump(os.path.join(path, filename), obj)

    def _create_tarball(self, project):
        """Pack the working directory into ``<slug>-<timestamp>.tar.xz``.

        NOTE(review): the "%s" directive in the timestamp is the
        glibc-specific Unix epoch (not "%S" seconds); kept for
        backward-compatible file names — confirm before changing.
        """
        current_date = datetime.datetime.now().strftime("%Y-%m-%d-%H%M%s")
        filename = "{0}-{1}.tar.xz".format(project.slug, current_date)

        current_pwd = os.getcwd()
        os.chdir(settings.BACKUP_PATH)
        try:
            command = "tar cvJf {0} {1}".format(filename, self.dirname)
            p = subprocess.Popen(command.split(), stdout=sys.stdout)
            # Wait for tar to finish: the original fired-and-forgot, so the
            # view could return (and the file list be rehashed) before the
            # tarball was completely written.
            p.wait()
        finally:
            # Always restore the working directory, even if tar fails.
            os.chdir(current_pwd)

    @login_required
    def get(self, request, pslug):
        """Run a full export of the project identified by ``pslug`` and
        redirect back to the referring page."""
        project = get_object_or_404(models.Project, slug=pslug)

        self.create_tempdir_for_project(project)
        self._backup_project_data(project)
        self._backup_user_roles(project)
        self._backup_milestones(project)
        self._backup_user_story(project)
        self._backup_tasks(project)
        self._backup_questions(project)
        self._backup_wiki(project)
        self._create_tarball(project)
        return self.redirect_referer("Now exported, rehash directory!")
| niwinz/Green-Mine | src/greenmine/base/views/export.py | Python | bsd-3-clause | 10,254 |
var apiResourcesUrl = '#';

/*
 * X-Editable configurations
 *
 * Global defaults applied to every x-editable widget on the page.
 */
$.fn.editable.defaults.mode = 'inline';
$.fn.editable.defaults.send = 'always';
// Submit edits as JSON PATCH requests (REST-style partial update).
$.fn.editable.defaults.ajaxOptions = {type: "patch", contentType: "application/json"};
// Extract a readable message from an error response: prefer "detail",
// then the non-field "__all__" errors, else a generic fallback.
$.fn.editable.defaults.error = function(response){
    if(response.responseJSON.detail != null){
        return response.responseJSON.detail;
    }
    if(response.responseJSON["__all__"] != null){
        return response.responseJSON["__all__"] + '';
    }
    return "Error";
};
// Serialize the edited field as a single-key JSON object:
// {"<field name>": <new value>}.
$.fn.editable.defaults.params = function(params) {
    var r = {};
    r[params.name] = params.value;
    return JSON.stringify(r);
};
$.fn.editable.defaults.select2 = {
    placeholder: "None",
    allowClear: true,
};
// Initialize every element carrying .xeditable as an x-editable widget.
(function addXEditable(){
    // Add standard editable to all elements with .xeditable
    $(".xeditable").each(function(){
        if ($(this).is($('#phone1')) || $(this).is($('#phone2'))) {
            // NOTE(review): the original author questioned this branch
            // ("purpose? seems redundant?"). The custom display() mirrors
            // the server-returned field value into the element text;
            // confirm it differs from the default before removing.
            $(this).editable({display: function(value, sourceData) {
                if (sourceData) {
                    $(this).text(sourceData[$(this).attr('id')]);
                }
            }});
        } else if ($(this).data("type") === "select2"){
            // populate xeditable.select2 autocompletion values from the
            // API endpoint given in the element's data-select attribute
            var that = this;
            var url = $(this).data("select");
            $.getJSON(url, function(apidata){
                var results = [];
                for(var i=0; i<apidata.length; i++){
                    results.push({id: apidata[i].name, text: apidata[i].name});
                }
                $(that).editable({source: results});
            });
        } else {
            $(this).editable({display: null});
        }
    });
})();
// A password is acceptable when it is long enough AND spans enough
// distinct character groups. Length is checked first (short-circuit).
function validatePassword(password) {
    if (!validatePasswordLength(password)) {
        return false;
    }
    return validatePasswordCharacterGroups(password);
}
function validatePasswordLength(password) {
return password.length >= 10;
}
function validatePasswordCharacterGroups(password) {
lower_case = new RegExp('[a-z]').test(password);
upper_case = new RegExp('[A-Z]').test(password);
numbers = new RegExp('[0-9]').test(password);
special = new RegExp('[^a-zA-Z0-9]').test(password);
return (lower_case + upper_case + numbers + special) >= 3
}
// Page bootstrap: wires up DataTables filtering, the password-change modal,
// sudo (superuser) session handling, email-alias management, group deletion,
// chosen.js selects and marcopolo autocomplete fields.
$(document).ready(function(){
    // Datatables for tables
    var dt = $(".listtable").dataTable({
        "bPaginate": false,
        "bLengthChange": false,
        "bFilter": true,
        "bSort": true,
        "bInfo": false,
        "bAutoWidth": false,
        "sDom": "t"
    });
    // Live filtering: forward the search box contents to DataTables.
    $(".object-search").keyup(function() {
        dt.fnFilter( $(this).val() );
    });
    /* ##############################################
     * User's portrait image upload & download begins
     */
    // in photo.js
    //
    /* User's portrait image upload & download ends
     * ############################################
     */
    /* ###############################
     * Password changing stuff begins
     */
    // Reset validation hints each time the modal is opened.
    $('#password-modal').on('shown', function() {
        $('#password-modal input:visible').first().focus();
        $('#password-length').show();
        $('#password-character-groups').show();
        $('#passwords-matching').hide();
    });
    // Clear all fields and error state when the modal closes.
    $('#password-modal').on('hide', function() {
        $('#password-new, #password-new-again, #password-current').val('');
        $('#password-status, #password-status-again').html('');
        $('#password-new-again').change();
        $('#wrong-password-alert').hide();
    });
    /* validations */
    // Toggle the per-rule hints as the new password is typed.
    $('#password-new').bind("change paste keyup", function() {
        if(!validatePasswordLength($(this).val())){
            $('#password-length').show();
        }else{
            $('#password-length').hide();
        }
        if(!validatePasswordCharacterGroups($(this).val())){
            $('#password-character-groups').show();
        }else{
            $('#password-character-groups').hide();
        }
        // Re-run the match check since the reference value changed.
        $('#password-new-again').change();
    });
    // Enable the submit button only when both fields match and validate.
    $('#password-new-again').bind("change paste keyup", function() {
        if ($(this).val() === $('#password-new').val() && $(this).val().length > 0) {
            $('#passwords-matching').hide();
            if (validatePassword($('#password-new').val())) {
                $('#password-change').removeClass('btn-warning').addClass('btn-success').removeAttr('disabled');
            }
        } else {
            $('#passwords-matching').show();
            $('#password-change').removeClass('btn-success').addClass('btn-warning').attr('disabled', 'disabled');
        }
        if ($(this).val().length < 1) {
            $('#passwords-matching').hide();
        }
    });
    /* custom ajax post */
    // Submit the password change; re-check validity as a last guard.
    $('#password-change').click(function() {
        if ($('#password-new').val() === $('#password-new-again').val() && validatePassword($('#password-new').val())) {
            $.post($(this).attr('data-url'), { 'password': $('#password-new').val(), 'old_password': $('#password-current').val() || "" })
            .done(function() { $('#password-cancel').click(); })
            .fail(function(data) {
                // Strip quotes from the JSON-encoded error string.
                $('#wrong-password-alert').html(data.responseText.replace(/\"/g, ""));
                $('#wrong-password-alert').show();
            });
        } else {
            return;
        }
    });
    /* Password changing stuff ends
     * ############################
     */
    /* #########################
     * Sudo-button stuff begins
     */
    (function(){
        'use strict';
        $('#confirmPassword').on('shown', function () {
            $("#sudoPassword").focus()
        });
        // Confirm password to start a sudo (superuser) session.
        $('#confirmPasswordForm').submit(function(e){
            e.preventDefault();
            var password = $("#sudoPassword").val();
            $.ajax({
                url: url('enable_superuser'),
                type: 'POST',
                data: {password: password},
                error: function(data){
                    $("#confirmPassError").html(data.responseJSON.desc).addClass("alert").addClass("alert-error");
                },
                success: function(data){
                    window.location.reload();
                },
            });
        });
        // End the sudo session immediately.
        $('#endSudo').click(function(e){
            e.preventDefault();
            $.ajax({
                url: url('end_superuser'),
                type: 'POST',
                error: function(data){
                    $("#errorMessage").html(data.responseJSON.desc).addClass("alert").addClass("alert-error");
                },
                success: function(data){
                    window.location.reload();
                },
            });
        });
        // Unix timestamp (seconds) at which the sudo session expires,
        // rendered into the page by the server.
        var sudoEnds = parseInt($('#sudotime-ends').html());
        var interval = 1; // Interval to update in seconds
        // Extend the sudo session; server replies with the new end time.
        $('#extendSudo').click(function(e){
            e.preventDefault();
            $.post(url('enable_superuser'), function(data) {
                sudoEnds = parseInt(data.desc);
                updateSudoTimer();
            })
            .fail(function(data) {
                $("#errorMessage").html(data.responseJSON.desc).addClass("alert").addClass("alert-error");
            });
        });
        // Refresh the countdown and escalate warnings as expiry nears.
        function updateSudoTimer(){
            var timeLeft = (sudoEnds - Math.floor(new Date().getTime()/1000));
            var error = $('#errorMessage');
            if (timeLeft <= 0) {
                fumErrors.set("sudo", "Your sudo session has expired. Please refresh the page.", "danger");
                $('#sudotimeleft').html("0");
                clearInterval(sudoTimerId);
            } else if (timeLeft <= 60){
                // Below a minute the counter switches to seconds.
                fumErrors.set("sudo", "Sudo session will expire in less than a minute.", "warning");
                $('#sudotimeleft').html(Math.ceil(timeLeft));
            } else if (timeLeft < 5*60){
                fumErrors.set("sudo","Sudo session will expire in "+Math.ceil(timeLeft/60)+" minutes.", "warning");
                $('#sudotimeleft').html(Math.ceil(timeLeft/60));
            } else {
                $('#sudotimeleft').html(Math.ceil(timeLeft/60));
            }
        };
        if(sudoEnds > 0) {
            updateSudoTimer();
            var sudoTimerId = setInterval(updateSudoTimer, interval*1000);
        }
    })();
    /* Sudo-button stuff ends
     * #######################
     */
    /* #######################
     * Aliases-field stuff begins
     */
    (function(){
        'use strict';
        var input = $("#aliases-input");
        var table = $("#aliases-table");
        var url = table.data('url');
        var error = $("#errorMessage");
        // NOTE(review): showError is defined but never called in this
        // IIFE — errors go through fumErrors instead. Confirm it is dead.
        function showError(data){
            error.html(data.responseJSON.desc).addClass("alert").addClass("alert-error");
        }
        // Rebuild the alias table from the server's alias list. Inside the
        // each() callback, `this` is the alias string itself.
        function updateAliases(data){
            table.html("");
            $(data).each(function(alias){
                var delicon = '<i class="icon-remove pull-right"></i>';
                if ($('#aliases-input').length === 0){
                    // The field is not editable, so don't show the delete icon
                    delicon = '';
                }
                var aliaselement = $('<tr><td class="email-alias"><a href="mailto:'+this+'">'+this+'</a>'+delicon+'</td><td></td></tr>');
                var that = this;
                // Clicking the icon deletes that alias server-side.
                aliaselement.find('i').click(function(e){
                    $.ajax({
                        url: url,
                        type: 'DELETE',
                        data: JSON.stringify({items: [that]}),
                        contentType: 'application/json',
                        error: function(){
                            fumErrors.set('aliasnotset', 'Unable to delete alias.', 'error');
                        },
                        success: updateAliases
                    });
                });
                table.append(aliaselement);
            });
            fumErrors.remove('aliasnotset');
        }
        // Post the typed alias and refresh the table on success.
        function addAlias(e){
            e.preventDefault();
            var alias = input.val();
            input.val("");
            if (alias.length > 0){
                $.ajax({
                    url: url,
                    type: 'POST',
                    data: JSON.stringify({items: [alias]}),
                    contentType: 'application/json',
                    error: function(data){
                        fumErrors.set('aliasnotset', 'Unable to add alias: '+data.responseText, 'error');
                        input.addClass('fail');
                    },
                    success: function(data){updateAliases(data);input.removeClass('fail')}
                });
            }
        }
        // Can't use .submit(...) because forms are not allowed in tables.
        $("#add-aliases").click(addAlias);
        // Enter key in the input also adds the alias.
        input.keypress(function(e) {
            if(e.which == 13) {
                addAlias(e);
            }
        });
        // Initial load, only when the alias table is present on the page.
        if(table.length>0) {
            $.ajax({
                url: url,
                type: 'GET',
                success: updateAliases
            });
        }
    })();
    /* Aliases stuff ends
     * #####################
     */
    /* Delete group
     * #############
     */
    // Confirmation modal: actually delete the group on confirm.
    $("#delete-group-modal .confirm").click(function(){
        $.ajax({
            url: $("#delete-group").data("url"),
            type: "DELETE",
            success: function(data, status){fumErrors.set("deletegroup", "Group deleted.", "success");},
            error: function(data, status){fumErrors.set("deletegroup", "Unable to delete item. " + data.statusText, "danger");},
            complete: function(data, status){$("#delete-group-modal").modal('hide');},
        });
    });
    $("#delete-group").click(function(){
        $("#delete-group-modal").modal('show');
    });
    /*
     * Enable chosen.js when adding groups, projects or servers.
     */
    $(".chosen-select").each(function(){
        $(this).chosen();
    })
    // Attach marcopolo autocomplete to each tagged field.
    $('.marcopolofield').each(function() {
        marcopoloField2($(this));
    });
});
// Global (intentionally so: shared with other scripts on the page) registry
// of page-level alert messages, rendered into the #errorMessage container.
fumErrors = {
    // Each item: {id: string, text: string, type: bootstrap alert suffix}.
    items: [],
    // Add a message, or replace an existing message with the same id.
    set: function(id, text, type){
        for (var i = 0; i < this.items.length; i++){
            if (this.items[i].id === id){
                this.items[i] = {id: id, text: text, type: type};
                this.update();
                return;
            }
        }
        this.items.push({id: id, text: text, type: type});
        this.update();
    },
    // Remove every message with the given id.
    remove: function(id){
        // Iterate backwards: the original walked forwards while splicing,
        // which skips the element shifted into the removed slot when two
        // consecutive items share an id.
        for (var i = this.items.length - 1; i >= 0; i--){
            if (this.items[i].id === id){
                this.items.splice(i, 1);
            }
        }
        this.update();
    },
    // Re-render all messages and scroll to the top so they are visible.
    update: function(){
        var errors = $("#errorMessage");
        errors.html("");
        $(this.items).each(function(){
            errors.append("<p class='alert alert-"+this.type+"'>"+this.text+"</p>");
        });
        document.body.scrollTop = document.documentElement.scrollTop = 0;
    }
};
// Join the current user (global `request_user`) to the parent object that
// `el` describes, then reload the page. `el` carries data-field,
// data-parentid, data-parent and data-child attributes; `url(...)` is the
// global route helper defined elsewhere in this file.
function join_this(el) {
    var ctx = {};
    ctx [el.data('field')] = el.data('parentid');
    // Endpoint name is "<parent>-<child>", e.g. "groups-users".
    var apiUrl = url(el.data('parent')+'-'+el.data('child'), ctx);
    $.ajax({
        url: apiUrl,
        type: 'POST',
        data: JSON.stringify({items: [request_user]}),
        contentType: 'application/json',
        error: function(data) {
            // NOTE(review): `data` is the jqXHR object, not a message
            // string — confirm fumErrors renders it sensibly.
            fumErrors.set('marcopolo', data, 'error')
        },
        success: function(data){
            window.location.reload();
        }
    });
}
| futurice/futurice-ldap-user-manager | fum/common/static/js/main.js | JavaScript | bsd-3-clause | 13,070 |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Xml;
using FriendInfo = OpenSim.Services.Interfaces.FriendInfo;
namespace OpenSim.Server.Handlers.Friends
{
/// <summary>
/// HTTP handler for POST /friends. Parses the url-encoded body, dispatches
/// on its METHOD field and replies with an XML ServerResponse document.
/// </summary>
public class FriendsServerPostHandler : BaseStreamHandler
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private IFriendsService m_FriendsService;

    public FriendsServerPostHandler(IFriendsService service) :
            base("POST", "/friends")
    {
        m_FriendsService = service;
    }

    /// <summary>
    /// Entry point: reads the request body, dispatches to the matching
    /// method-specific handler, and returns FailureResult() for unknown
    /// methods or on any exception.
    /// </summary>
    protected override byte[] ProcessRequest(string path, Stream requestData,
            IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
    {
        string body;
        // using: the original leaked the StreamReader (Close was skipped on
        // exceptions thrown by ReadToEnd).
        using (StreamReader sr = new StreamReader(requestData))
            body = sr.ReadToEnd();
        body = body.Trim();

        //m_log.DebugFormat("[XXX]: query String: {0}", body);

        try
        {
            Dictionary<string, object> request =
                    ServerUtils.ParseQueryString(body);

            if (!request.ContainsKey("METHOD"))
                return FailureResult();

            string method = request["METHOD"].ToString();

            switch (method)
            {
                case "getfriends":
                    return GetFriends(request);

                case "getfriends_string":
                    return GetFriendsString(request);

                case "storefriend":
                    return StoreFriend(request);

                case "deletefriend":
                    return DeleteFriend(request);

                case "deletefriend_string":
                    return DeleteFriendString(request);
            }

            m_log.DebugFormat("[FRIENDS HANDLER]: unknown method request {0}", method);
        }
        catch (Exception e)
        {
            m_log.DebugFormat("[FRIENDS HANDLER]: Exception {0}", e);
        }

        return FailureResult();
    }

    #region Method-specific handlers

    /// <summary>Deletes a friendship keyed by a UUID principal.</summary>
    private byte[] DeleteFriend(Dictionary<string, object> request)
    {
        UUID principalID = UUID.Zero;
        if (request.ContainsKey("PRINCIPALID"))
            UUID.TryParse(request["PRINCIPALID"].ToString(), out principalID);
        else
            m_log.WarnFormat("[FRIENDS HANDLER]: no principalID in request to delete friend");
        string friend = string.Empty;
        if (request.ContainsKey("FRIEND"))
            friend = request["FRIEND"].ToString();

        bool success = m_FriendsService.Delete(principalID, friend);
        if (success)
            return SuccessResult();
        else
            return FailureResult();
    }

    /// <summary>Deletes a friendship keyed by a string principal id.</summary>
    private byte[] DeleteFriendString(Dictionary<string, object> request)
    {
        string principalID = string.Empty;
        if (request.ContainsKey("PRINCIPALID"))
            principalID = request["PRINCIPALID"].ToString();
        else
            m_log.WarnFormat("[FRIENDS HANDLER]: no principalID in request to delete friend");
        string friend = string.Empty;
        if (request.ContainsKey("FRIEND"))
            friend = request["FRIEND"].ToString();

        bool success = m_FriendsService.Delete(principalID, friend);
        if (success)
            return SuccessResult();
        else
            return FailureResult();
    }

    /// <summary>Returns the friend list for a UUID principal.</summary>
    private byte[] GetFriends(Dictionary<string, object> request)
    {
        UUID principalID = UUID.Zero;
        if (request.ContainsKey("PRINCIPALID"))
            UUID.TryParse(request["PRINCIPALID"].ToString(), out principalID);
        else
            m_log.WarnFormat("[FRIENDS HANDLER]: no principalID in request to get friends");

        FriendInfo[] finfos = m_FriendsService.GetFriends(principalID);

        return PackageFriends(finfos);
    }

    /// <summary>Returns the friend list for a string principal id.</summary>
    private byte[] GetFriendsString(Dictionary<string, object> request)
    {
        string principalID = string.Empty;
        if (request.ContainsKey("PRINCIPALID"))
            principalID = request["PRINCIPALID"].ToString();
        else
            m_log.WarnFormat("[FRIENDS HANDLER]: no principalID in request to get friends");

        FriendInfo[] finfos = m_FriendsService.GetFriends(principalID);

        return PackageFriends(finfos);
    }

    /// <summary>
    /// Serializes the friend list into the XML response format: either
    /// result=null, or friend0..friendN key/value dictionaries.
    /// </summary>
    private byte[] PackageFriends(FriendInfo[] finfos)
    {
        Dictionary<string, object> result = new Dictionary<string, object>();
        // Simplified from `(finfos == null) || ((finfos != null) && ...)`:
        // the second null test was redundant after short-circuiting.
        if (finfos == null || finfos.Length == 0)
            result["result"] = "null";
        else
        {
            int i = 0;
            foreach (FriendInfo finfo in finfos)
            {
                Dictionary<string, object> rinfoDict = finfo.ToKeyValuePairs();
                result["friend" + i] = rinfoDict;
                i++;
            }
        }

        string xmlString = ServerUtils.BuildXmlResponse(result);

        //m_log.DebugFormat("[FRIENDS HANDLER]: resp string: {0}", xmlString);
        return Util.UTF8NoBomEncoding.GetBytes(xmlString);
    }

    /// <summary>Stores (creates or updates) a friendship.</summary>
    private byte[] StoreFriend(Dictionary<string, object> request)
    {
        string principalID = string.Empty, friend = string.Empty; int flags = 0;
        FromKeyValuePairs(request, out principalID, out friend, out flags);
        bool success = m_FriendsService.StoreFriend(principalID, friend, flags);
        if (success)
            return SuccessResult();
        else
            return FailureResult();
    }

    #endregion Method-specific handlers

    #region Misc

    /// <summary>Renders an XML document to an indented UTF byte array.</summary>
    private byte[] DocToBytes(XmlDocument doc)
    {
        // using: the original never disposed either stream/writer.
        // MemoryStream.ToArray remains valid after disposal.
        using (MemoryStream ms = new MemoryStream())
        using (XmlTextWriter xw = new XmlTextWriter(ms, null))
        {
            xw.Formatting = Formatting.Indented;
            doc.WriteTo(xw);
            xw.Flush();
            return ms.ToArray();
        }
    }

    private byte[] FailureResult()
    {
        return FailureResult(String.Empty);
    }

    /// <summary>Builds a ServerResponse document with Result=Failure.</summary>
    private byte[] FailureResult(string msg)
    {
        XmlDocument doc = new XmlDocument();

        XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration,
                "", "");

        doc.AppendChild(xmlnode);

        XmlElement rootElement = doc.CreateElement("", "ServerResponse",
                "");

        doc.AppendChild(rootElement);

        XmlElement result = doc.CreateElement("", "Result", "");
        result.AppendChild(doc.CreateTextNode("Failure"));

        rootElement.AppendChild(result);

        XmlElement message = doc.CreateElement("", "Message", "");
        message.AppendChild(doc.CreateTextNode(msg));

        rootElement.AppendChild(message);

        return DocToBytes(doc);
    }

    /// <summary>Extracts PrincipalID/Friend/MyFlags from the request map,
    /// defaulting to empty strings and 0 when absent.</summary>
    private void FromKeyValuePairs(Dictionary<string, object> kvp, out string principalID, out string friend, out int flags)
    {
        principalID = string.Empty;
        if (kvp.ContainsKey("PrincipalID") && kvp["PrincipalID"] != null)
            principalID = kvp["PrincipalID"].ToString();
        friend = string.Empty;
        if (kvp.ContainsKey("Friend") && kvp["Friend"] != null)
            friend = kvp["Friend"].ToString();
        flags = 0;
        if (kvp.ContainsKey("MyFlags") && kvp["MyFlags"] != null)
            Int32.TryParse(kvp["MyFlags"].ToString(), out flags);
    }

    /// <summary>Builds a ServerResponse document with Result=Success.</summary>
    private byte[] SuccessResult()
    {
        XmlDocument doc = new XmlDocument();

        XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration,
                "", "");

        doc.AppendChild(xmlnode);

        XmlElement rootElement = doc.CreateElement("", "ServerResponse",
                "");

        doc.AppendChild(rootElement);

        XmlElement result = doc.CreateElement("", "Result", "");
        result.AppendChild(doc.CreateTextNode("Success"));

        rootElement.AppendChild(result);

        return DocToBytes(doc);
    }

    #endregion Misc
}
} | ft-/opensim-optimizations-wip | OpenSim/Server/Handlers/Friends/FriendsServerPostHandler.cs | C# | bsd-3-clause | 10,305 |
using System.Collections.Generic;

namespace Swartz.Mvc.ModelBinders
{
    /// <summary>
    /// Dependency-injected extension point that contributes model-binder
    /// descriptors to the MVC model-binding pipeline.
    /// </summary>
    public interface IModelBinderProvider : IDependency
    {
        /// <summary>Returns the model binders supplied by this provider.</summary>
        IEnumerable<ModelBinderDescriptor> GetModelBinders();
    }
}
<?php

// Environment-local overrides for the Yii web application config
// (merged over the shared main config at bootstrap time).
$config = [
    'components' => [
        'request' => [
            // !!! insert a secret key in the following (if it is empty) - this is required by cookie validation
            'cookieValidationKey' => '5cri1v5pxN2TdTRRs6mP5Zv3TvFyS4QC',
        ],
    ],
];

// NOTE(review): the block below is dev-only debug/gii tooling that has been
// commented out; confirm it is still wanted before deleting.
/*
if (!YII_ENV_TEST) {
    // configuration adjustments for 'dev' environment
    $config['bootstrap'][] = 'debug';
    $config['modules']['debug'] = 'yii\debug\Module';

    $config['bootstrap'][] = 'gii';
    //$config['modules']['gii'] = 'yii\gii\Module';
    $config['modules']['gii'] = [
        'class' => 'yii\gii\Module',
        'generators'=>[
            'controller' => [
                'class' => 'zc\gii\controller\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/controller/default',
                ]
            ],
            'crud' => [
                //'class' => 'yii\gii\generators\crud\Generator',
                'class' => 'zc\gii\crud\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/crud/default',
                ]
            ],
            'module' => [
                'class' => 'zc\gii\module\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/module/default',
                ]
            ],
            'form' => [
                'class' => 'zc\gii\form\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/form/default',
                ]
            ],
            'model' => [
                'class' => 'zc\gii\model\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/model/default',
                ]
            ],
            'extension' => [
                'class' => 'zc\gii\extension\Generator',
                'templates' => [
                    'zc-gii' => '@vendor/zc/gii/extension/default',
                ]
            ],
        ]
    ];
}*/

return $config;
/* ========================================================================== */
/* === Source/Mongoose_EdgeCut.cpp ========================================== */
/* ========================================================================== */
/* -----------------------------------------------------------------------------
* Mongoose Graph Partitioning Library Copyright (C) 2017-2018,
* Scott P. Kolodziej, Nuri S. Yeralan, Timothy A. Davis, William W. Hager
* Mongoose is licensed under Version 3 of the GNU General Public License.
* Mongoose is also available under other licenses; contact authors for details.
* -------------------------------------------------------------------------- */
#include "Mongoose_EdgeCut.hpp"
#include "Mongoose_EdgeCutProblem.hpp"
#include "Mongoose_Coarsening.hpp"
#include "Mongoose_GuessCut.hpp"
#include "Mongoose_Internal.hpp"
#include "Mongoose_Logger.hpp"
#include "Mongoose_Random.hpp"
#include "Mongoose_Refinement.hpp"
#include "Mongoose_Waterdance.hpp"
#include <algorithm>
namespace Mongoose
{
bool optionsAreValid(const EdgeCut_Options *options);
void cleanup(EdgeCutProblem *graph);
// Destructor: releases the partition array and then the EdgeCut struct
// itself. Both were allocated with SuiteSparse_malloc, so they must be
// freed with SuiteSparse_free rather than delete.
EdgeCut::~EdgeCut()
{
    SuiteSparse_free(partition);
    SuiteSparse_free(this);
}
// Convenience overload: partition `graph` with freshly-created default
// options. Returns NULL if the options could not be allocated or the
// underlying cut failed.
EdgeCut *edge_cut(const Graph *graph)
{
    // use default options if not present
    EdgeCut_Options *options = EdgeCut_Options::create();

    if (!options)
        return NULL;

    EdgeCut *result = edge_cut(graph, options);

    // Options are only borrowed for this call; destroy them before return.
    options->~EdgeCut_Options();

    return (result);
}
// Partition `graph` using caller-supplied options. Wraps the graph in a
// temporary EdgeCutProblem, runs the core algorithm, and returns NULL on
// invalid options, NULL graph or allocation failure.
EdgeCut *edge_cut(const Graph *graph, const EdgeCut_Options *options)
{
    // Check inputs
    if (!optionsAreValid(options))
        return NULL;

    // Seed the RNG up front so results are reproducible per random_seed.
    setRandomSeed(options->random_seed);

    if (!graph)
        return NULL;

    // Create an EdgeCutProblem
    EdgeCutProblem *problem = EdgeCutProblem::create(graph);

    if (!problem)
        return NULL;

    EdgeCut *result = edge_cut(problem, options);

    // The problem wrapper is local to this call.
    problem->~EdgeCutProblem();

    return result;
}
// Core multilevel edge-cut driver: coarsen the graph until it is below
// options->coarsen_limit, compute a guess cut on the coarsest graph, then
// refine the cut back up through each coarsening level. On allocation
// failure at any stage, the chain of coarsened graphs is unwound (freed)
// down to the caller's `problem` and NULL is returned.
EdgeCut *edge_cut(EdgeCutProblem *problem, const EdgeCut_Options *options)
{
    // Check inputs
    if (!optionsAreValid(options))
        return NULL;

    setRandomSeed(options->random_seed);

    if (!problem)
        return NULL;

    /* Finish initialization */
    problem->initialize(options);

    /* Keep track of what the current graph is at any stage */
    EdgeCutProblem *current = problem;

    /* If we need to coarsen the graph, do the coarsening. */
    while (current->n >= options->coarsen_limit)
    {
        match(current, options);
        EdgeCutProblem *next = coarsen(current, options);

        /* If we ran out of memory during coarsening, unwind the stack. */
        if (!next)
        {
            while (current != problem)
            {
                next = current->parent;
                current->~EdgeCutProblem();
                current = next;
            }
            return NULL;
        }

        current = next;
    }

    /*
     * Generate a guess cut and do FM refinement.
     * On failure, unwind the stack.
     */
    if (!guessCut(current, options))
    {
        while (current != problem)
        {
            EdgeCutProblem *next = current->parent;
            current->~EdgeCutProblem();
            current = next;
        }
        return NULL;
    }

    /*
     * Refine the guess cut back to the beginning.
     */
    while (current->parent != NULL)
    {
        // refine() projects the cut one level finer and frees the coarser
        // level; waterdance() then improves the projected cut.
        current = refine(current, options);
        waterdance(current, options);
    }

    // Recompute final cut statistics on the finest graph.
    cleanup(current);

    EdgeCut *result = (EdgeCut*)SuiteSparse_malloc(1, sizeof(EdgeCut));
    if (!result)
    {
        return NULL;
    }

    // Transfer ownership of the partition array to the result.
    result->partition = current->partition;
    current->partition = NULL; // Unlink pointer
    result->n = current->n;
    result->cut_cost = current->cutCost;
    result->cut_size = current->cutSize;
    result->w0 = current->W0;
    result->w1 = current->W1;
    result->imbalance = current->imbalance;

    return result;
}
bool optionsAreValid(const EdgeCut_Options *options)
{
if (!options)
{
LogError("Fatal Error: options struct cannot be NULL.");
return (false);
}
if (options->coarsen_limit < 1)
{
LogError("Fatal Error: options->coarsen_limit cannot be less than one.");
return (false);
}
if (options->high_degree_threshold < 0)
{
LogError("Fatal Error: options->high_degree_threshold cannot be less "
"than zero.");
return (false);
}
if (options->num_dances < 0)
{
LogError("Fatal Error: options->num_dances cannot be less than zero.");
return (false);
}
if (options->FM_search_depth < 0)
{
LogError(
"Fatal Error: options->fmSearchDepth cannot be less than zero.");
return (false);
}
if (options->FM_consider_count < 0)
{
LogError(
"Fatal Error: options->FM_consider_count cannot be less than zero.");
return (false);
}
if (options->FM_max_num_refinements < 0)
{
LogError("Fatal Error: options->FM_max_num_refinements cannot be less "
"than zero.");
return (false);
}
if (options->gradproj_tolerance < 0)
{
LogError("Fatal Error: options->gradproj_tolerance cannot be less than "
"zero.");
return (false);
}
if (options->gradproj_iteration_limit < 0)
{
LogError("Fatal Error: options->gradProjIterationLimit cannot be less "
"than zero.");
return (false);
}
if (options->target_split < 0 || options->target_split > 1)
{
LogError(
"Fatal Error: options->target_split must be in the range [0, 1].");
return (false);
}
if (options->soft_split_tolerance < 0)
{
LogError("Fatal Error: options->soft_split_tolerance cannot be less than "
"zero.");
return (false);
}
return (true);
}
// Finalize cut statistics on the finest graph: recompute the cut size by
// summing the external degrees of all boundary-heap vertices on both sides
// (each cut edge is counted once from each endpoint, hence the /2), halve
// the doubly-counted cut cost, and make the imbalance non-negative.
void cleanup(EdgeCutProblem *G)
{
    Int cutSize = 0;
    for (Int p = 0; p < 2; p++)
    {
        Int *bhHeap = G->bhHeap[p];
        for (Int i = 0; i < G->bhSize[p]; i++)
        {
            cutSize += G->externalDegree[bhHeap[i]];
        }
    }

    G->imbalance = fabs(G->imbalance);
    G->cutSize = cutSize / 2;
    G->cutCost = G->cutCost / 2;
}
} // end namespace Mongoose
| jlblancoc/suitesparse-metis-for-windows | SuiteSparse/Mongoose/Source/Mongoose_EdgeCut.cpp | C++ | bsd-3-clause | 6,402 |
# -*- coding: utf-8 -*-
import json
import os
from datetime import datetime, timedelta
from urllib.parse import urlencode
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.test import RequestFactory
from django.urls import reverse
from django.utils.encoding import force_str
from django.utils.translation import trim_whitespace
from unittest import mock
import pytest
import responses
from pyquery import PyQuery as pq
from waffle.testutils import override_switch
from olympia import amo, core
from olympia.accounts.views import API_TOKEN_COOKIE
from olympia.activity.models import ActivityLog
from olympia.addons.models import Addon, AddonCategory, AddonUser
from olympia.amo.storage_utils import copy_stored_file
from olympia.amo.templatetags.jinja_helpers import (
format_date,
url as url_reverse,
urlparams,
)
from olympia.amo.tests import TestCase, addon_factory, user_factory, version_factory
from olympia.amo.tests.test_helpers import get_image_path
from olympia.api.models import SYMMETRIC_JWT_TYPE, APIKey, APIKeyConfirmation
from olympia.applications.models import AppVersion
from olympia.constants.promoted import RECOMMENDED
from olympia.devhub.decorators import dev_required
from olympia.devhub.models import BlogPost
from olympia.devhub.views import get_next_version_number
from olympia.files.models import FileUpload
from olympia.files.tests.test_models import UploadTest as BaseUploadTest
from olympia.ratings.models import Rating
from olympia.translations.models import Translation, delete_translation
from olympia.users.models import IPNetworkUserRestriction, UserProfile
from olympia.users.tests.test_views import UserViewBase
from olympia.versions.models import ApplicationsVersions, Version, VersionPreview
from olympia.zadmin.models import set_config
class HubTest(TestCase):
    """Base TestCase for devhub tests: logs in a regular user, loads their
    profile, and seeds an add-on whose authorship the user was deleted from
    (which must therefore never appear in their listings)."""

    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        super(HubTest, self).setUp()
        self.url = reverse('devhub.index')
        assert self.client.login(email='regular@mozilla.com')
        assert self.client.get(self.url).status_code == 200
        self.user_profile = UserProfile.objects.get(id=999)
        # Add-on the user used to co-own; their AddonUser row is soft-deleted
        # so it should be excluded from all dashboard queries.
        not_their_addon = addon_factory(users=[user_factory()])
        AddonUser.unfiltered.create(
            addon=not_their_addon, user=self.user_profile, role=amo.AUTHOR_ROLE_DELETED
        )

    def clone_addon(self, num, addon_id=3615):
        """Create `num` add-ons copying type/status from `addon_id`, owned
        by self.user_profile; return the created add-ons."""
        addons = []
        source = Addon.objects.get(id=addon_id)
        for i in range(num):
            data = {
                'type': source.type,
                'status': source.status,
                'name': 'cloned-addon-%s-%s' % (addon_id, i),
                'users': [self.user_profile],
            }
            addons.append(addon_factory(**data))
        return addons
class TestDashboard(HubTest):
    """Tests for the devhub dashboard add-on and theme listings."""

    def setUp(self):
        super(TestDashboard, self).setUp()
        self.url = reverse('devhub.addons')
        self.themes_url = reverse('devhub.themes')
        assert self.client.get(self.url).status_code == 200
        self.addon = Addon.objects.get(pk=3615)
        # Make the logged-in user an author so the add-on shows up.
        self.addon.addonuser_set.create(user=self.user_profile)

    def test_addons_layout(self):
        doc = pq(self.client.get(self.url).content)
        assert doc('title').text() == (
            'Manage My Submissions :: Developer Hub :: Add-ons for Firefox'
        )
        assert doc('.links-footer').length == 1
        assert doc('#copyright').length == 1
        assert doc('#footer-links .mobile-link').length == 0

    def get_action_links(self, addon_id):
        """Helper: stripped text of each action link in the add-on's row."""
        response = self.client.get(self.url)
        doc = pq(response.content)
        selector = '.item[data-addonid="%s"] .item-actions li > a' % addon_id
        links = [a.text.strip() for a in doc(selector)]
        return links

    def test_no_addons(self):
        """Check that no add-ons are displayed for this user."""
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.item item').length == 0

    def test_addon_pagination(self):
        """Check that the correct info. is displayed for each add-on:
        namely, that add-ons are paginated at 10 items per page, and that
        when there is more than one page, the 'Sort by' header and pagination
        footer appear.
        """
        # Create 10 add-ons. We going to make the existing one from the setUp
        # and a static theme which shouldn't show up as an addon in this list.
        addons = self.clone_addon(10)
        self.addon.update(type=amo.ADDON_STATICTHEME)
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert len(doc('.item .item-info')) == 10
        assert len(doc('.item .info.extension')) == 10
        assert doc('nav.paginator').length == 0
        for addon in addons:
            assert addon.get_icon_url(64) in doc('.item .info h3 a').html()

        # Create 5 add-ons -have to change self.addon back to clone extensions.
        self.addon.update(type=amo.ADDON_EXTENSION)
        self.clone_addon(5)
        self.addon.update(type=amo.ADDON_STATICTHEME)
        response = self.client.get(self.url, {'page': 2})
        doc = pq(response.content)
        assert len(doc('.item .item-info')) == 5
        assert doc('nav.paginator').length == 1

    def test_themes(self):
        """Check themes show on dashboard."""
        # Create 2 themes.
        staticthemes = []
        for x in range(2):
            addon = addon_factory(type=amo.ADDON_STATICTHEME, users=[self.user_profile])
            VersionPreview.objects.create(version=addon.current_version)
            staticthemes.append(addon)
        response = self.client.get(self.themes_url)
        doc = pq(response.content)
        assert len(doc('.item .item-info')) == 2
        assert len(doc('.item .info.statictheme')) == 2
        for addon in staticthemes:
            assert addon.current_previews[0].thumbnail_url in [
                img.attrib['src'] for img in doc('.info.statictheme h3 img')
            ]

    def test_show_hide_statistics_and_new_version_for_disabled(self):
        # Not disabled: show statistics and new version links.
        self.addon.update(disabled_by_user=False)
        links = self.get_action_links(self.addon.pk)
        assert 'Statistics' in links, 'Unexpected: %r' % links
        assert 'New Version' in links, 'Unexpected: %r' % links

        # Disabled (user): hide new version link.
        self.addon.update(disabled_by_user=True)
        links = self.get_action_links(self.addon.pk)
        assert 'New Version' not in links, 'Unexpected: %r' % links

        # Disabled (admin): hide statistics and new version links.
        self.addon.update(disabled_by_user=False, status=amo.STATUS_DISABLED)
        links = self.get_action_links(self.addon.pk)
        assert 'Statistics' not in links, 'Unexpected: %r' % links
        assert 'New Version' not in links, 'Unexpected: %r' % links

    def test_public_addon(self):
        assert self.addon.status == amo.STATUS_APPROVED
        doc = pq(self.client.get(self.url).content)
        item = doc('.item[data-addonid="%s"]' % self.addon.id)
        assert item.find('h3 a').attr('href') == self.addon.get_dev_url()
        assert item.find('p.downloads'), 'Expected weekly downloads'
        assert item.find('p.users'), 'Expected ADU'
        assert item.find('.item-details'), 'Expected item details'
        assert not item.find(
            'p.incomplete'
        ), 'Unexpected message about incomplete add-on'
        # NOTE(review): nothing is asserted after this deletion — looks like
        # leftover setup for a removed assertion; confirm intent.
        appver = self.addon.current_version.apps.all()[0]
        appver.delete()

    def test_dev_news(self):
        # Seven posts exist but only the five most recent should render.
        for i in range(7):
            bp = BlogPost(
                title='hi %s' % i, date_posted=datetime.now() - timedelta(days=i)
            )
            bp.save()
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.blog-posts').length == 1
        assert doc('.blog-posts li').length == 5
        assert doc('.blog-posts li a').eq(0).text() == 'hi 0'
        assert doc('.blog-posts li a').eq(4).text() == 'hi 4'

    def test_sort_created_filter(self):
        response = self.client.get(self.url + '?sort=created')
        doc = pq(response.content)
        assert doc('.item-details').length == 1
        elm = doc('.item-details .date-created')
        assert elm.length == 1
        assert elm.remove('strong').text() == (format_date(self.addon.created))

    def test_sort_updated_filter(self):
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.item-details').length == 1
        elm = doc('.item-details .date-updated')
        assert elm.length == 1
        assert elm.remove('strong').text() == (
            trim_whitespace(format_date(self.addon.last_updated))
        )

    def test_purely_unlisted_addon_are_not_shown_as_incomplete(self):
        self.make_addon_unlisted(self.addon)
        assert self.addon.has_complete_metadata()

        response = self.client.get(self.url)
        doc = pq(response.content)
        # It should not be considered incomplete despite having STATUS_NULL,
        # since it's purely unlisted.
        assert not doc('.incomplete')

        # Rest of the details should be shown, but not the AMO-specific stuff.
        assert not doc('.item-info')
        assert doc('.item-details')

    def test_mixed_versions_addon_with_incomplete_metadata(self):
        self.make_addon_unlisted(self.addon)
        version = version_factory(addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED)
        version.update(license=None)
        self.addon.reload()
        assert not self.addon.has_complete_metadata()

        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.incomplete').text() == (
            'This add-on is missing some required information before it can be'
            ' submitted for publication.'
        )
        assert doc('form.resume').attr('action') == (
            url_reverse('devhub.request-review', self.addon.slug)
        )
        assert doc('button.link').text() == 'Resume'

    def test_no_versions_addon(self):
        self.addon.current_version.delete()

        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.incomplete').text() == ("This add-on doesn't have any versions.")
class TestUpdateCompatibility(TestCase):
    """Tests for the compatibility status/update widgets shown on the
    developer dashboard for each add-on item."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.url = reverse('devhub.addons')
        # These aren't realistic but work with existing tests and the 3615 addon
        self.create_appversion('android', '3.7a1pre')
        self.create_appversion('android', '4.0')
    def create_appversion(self, name, version):
        # Helper: ensure an AppVersion row exists for the named application.
        return AppVersion.objects.get_or_create(
            application=amo.APPS[name].id, version=version
        )
    def test_no_compat(self):
        # Dictionaries don't get a compat row, and the ajax compat
        # endpoints 404 for them.
        addon = Addon.objects.get(pk=3615)
        addon.update(type=amo.ADDON_DICT)
        self.client.logout()
        assert self.client.login(email='admin@mozilla.com')
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert not doc('.item[data-addonid="3615"] li.compat')
        response = self.client.get(
            reverse(
                'devhub.ajax.compat.update', args=[addon.slug, addon.current_version.id]
            )
        )
        assert response.status_code == 404
        response = self.client.get(
            reverse('devhub.ajax.compat.status', args=[addon.slug])
        )
        assert response.status_code == 404
    def test_compat(self):
        # The compat-update tooltip only appears once the current version's
        # files use strict compatibility; it must link to the ajax
        # update/status endpoints.
        addon = Addon.objects.get(pk=3615)
        response = self.client.get(self.url)
        doc = pq(response.content)
        cu = doc('.item[data-addonid="3615"] .tooltip.compat-update')
        assert not cu
        addon.current_version.files.update(strict_compatibility=True)
        response = self.client.get(self.url)
        doc = pq(response.content)
        cu = doc('.item[data-addonid="3615"] .tooltip.compat-update')
        assert cu
        update_url = reverse(
            'devhub.ajax.compat.update', args=[addon.slug, addon.current_version.id]
        )
        assert cu.attr('data-updateurl') == update_url
        status_url = reverse('devhub.ajax.compat.status', args=[addon.slug])
        selector = '.item[data-addonid="3615"] li.compat'
        assert doc(selector).attr('data-src') == status_url
        assert doc('.item[data-addonid="3615"] .compat-update-modal')
    def test_incompat_firefox(self):
        # A max app version older than the current Firefox triggers the
        # compat-error tooltip.
        addon = Addon.objects.get(pk=3615)
        addon.current_version.files.update(strict_compatibility=True)
        versions = ApplicationsVersions.objects.all()[0]
        versions.max = AppVersion.objects.get(version='2.0')
        versions.save()
        doc = pq(self.client.get(self.url).content)
        assert doc('.item[data-addonid="3615"] .tooltip.compat-error')
    def test_incompat_android(self):
        # Same as above but for an Android-targeted ApplicationsVersions row.
        addon = Addon.objects.get(pk=3615)
        addon.current_version.files.update(strict_compatibility=True)
        appver = AppVersion.objects.get(version='2.0')
        appver.update(application=amo.ANDROID.id)
        av = ApplicationsVersions.objects.all()[0]
        av.application = amo.ANDROID.id
        av.max = appver
        av.save()
        doc = pq(self.client.get(self.url).content)
        assert doc('.item[data-addonid="3615"] .tooltip.compat-error')
class TestDevRequired(TestCase):
    """Access-control tests for developer-only pages: anonymous users are
    redirected to login, developers can GET/POST their own add-on pages,
    and POSTs against a mozilla-disabled add-on are forbidden except for
    admins."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        self.addon = Addon.objects.get(id=3615)
        self.edit_page_url = self.addon.get_dev_url('edit')
        self.get_url = self.addon.get_dev_url('versions')
        self.post_url = self.addon.get_dev_url('delete')
        assert self.client.login(email='del@icio.us')
        self.au = self.addon.addonuser_set.get(user__email='del@icio.us')
        assert self.au.role == amo.AUTHOR_ROLE_OWNER

    def test_anon(self):
        # Anonymous requests are redirected to login, preserving the target.
        self.client.logout()
        self.assertLoginRedirects(self.client.get(self.get_url), self.get_url)
        self.assertLoginRedirects(
            self.client.get(self.edit_page_url), self.edit_page_url
        )

    def test_dev_get(self):
        # The owner can view both the versions and edit pages.
        assert self.client.get(self.get_url).status_code == 200
        assert self.client.get(self.edit_page_url).status_code == 200

    def test_dev_post(self):
        # A POST to the delete URL redirects back to the versions page.
        self.assert3xx(self.client.post(self.post_url), self.get_url)

    def test_disabled_post_dev(self):
        # Developers cannot POST once the add-on is disabled by Mozilla.
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.post(self.get_url).status_code == 403

    def test_disabled_post_admin(self):
        # Admins can still POST to a mozilla-disabled add-on.
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.login(email='admin@mozilla.com')
        self.assert3xx(self.client.post(self.post_url), self.get_url)
class TestVersionStats(TestCase):
    """Tests for the JSON per-version stats endpoint
    (devhub.versions.stats)."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        assert self.client.login(email='admin@mozilla.com')

    def test_counts(self):
        addon = Addon.objects.get(id=3615)
        version = addon.current_version
        user = UserProfile.objects.get(email='admin@mozilla.com')
        # Create 10 ratings on the current version so the endpoint has
        # something to count.
        for _ in range(10):
            Rating.objects.create(addon=addon, user=user, version=addon.current_version)
        url = reverse('devhub.versions.stats', args=[addon.slug])
        data = json.loads(force_str(self.client.get(url).content))
        # The response maps version id -> review/file counts and metadata.
        exp = {
            str(version.id): {
                'reviews': 10,
                'files': 1,
                'version': version.version,
                'id': version.id,
            }
        }
        self.assertDictEqual(data, exp)
class TestDelete(TestCase):
    """Tests for add-on/theme deletion from the developer hub, which
    requires re-typing the add-on's slug as confirmation."""

    fixtures = ['base/addon_3615']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        # Lambdas so the queryset/URL are re-evaluated after mutations.
        self.get_addon = lambda: Addon.objects.filter(id=3615)
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.get_url = lambda: self.get_addon()[0].get_dev_url('delete')

    def test_post_not(self):
        # POST without the slug confirmation: nothing is deleted.
        response = self.client.post(self.get_url(), follow=True)
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert self.get_addon().exists()
        self.assert3xx(response, self.get_addon()[0].get_dev_url('versions'))

    def test_post(self):
        # POST with the correct slug deletes the add-on.
        self.get_addon().get().update(slug='addon-slug')
        response = self.client.post(self.get_url(), {'slug': 'addon-slug'}, follow=True)
        assert pq(response.content)('.notification-box').text() == ('Add-on deleted.')
        assert not self.get_addon().exists()
        self.assert3xx(response, reverse('devhub.addons'))

    def test_post_wrong_slug(self):
        # POST with a wrong slug: nothing is deleted.
        self.get_addon().get().update(slug='addon-slug')
        response = self.client.post(self.get_url(), {'slug': 'theme-slug'}, follow=True)
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert self.get_addon().exists()
        self.assert3xx(response, self.get_addon()[0].get_dev_url('versions'))

    def test_post_statictheme(self):
        # Static themes use theme-specific wording and redirect to the
        # themes dashboard.
        theme = addon_factory(
            name='xpi name',
            type=amo.ADDON_STATICTHEME,
            slug='stheme-slug',
            users=[self.user],
        )
        response = self.client.post(
            theme.get_dev_url('delete'), {'slug': 'stheme-slug'}, follow=True
        )
        assert pq(response.content)('.notification-box').text() == ('Theme deleted.')
        assert not Addon.objects.filter(id=theme.id).exists()
        self.assert3xx(response, reverse('devhub.themes'))

    def test_post_statictheme_wrong_slug(self):
        # Wrong slug for a static theme: not deleted, theme wording used.
        theme = addon_factory(
            name='xpi name',
            type=amo.ADDON_STATICTHEME,
            slug='stheme-slug',
            users=[self.user],
        )
        response = self.client.post(
            theme.get_dev_url('delete'), {'slug': 'foo-slug'}, follow=True
        )
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Theme was not deleted.'
        )
        assert Addon.objects.filter(id=theme.id).exists()
        self.assert3xx(response, theme.get_dev_url('versions'))
class TestHome(TestCase):
    """Tests for the developer hub landing page (devhub.index)."""

    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.url = reverse('devhub.index')
        self.addon = Addon.objects.get(pk=3615)

    def get_pq(self):
        """GET the page, assert success, return a PyQuery of the body."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        return pq(response.content)

    def _check_my_addons_item(self, statuses):
        """Cycle the add-on/file through each (addon_status, file_status,
        expected_label) triple and verify the "My Add-ons" item each time;
        finally delete all add-ons and check the list is empty.

        Shared by test_my_addons and test_my_addons_recommended, which only
        differ in the status/label combinations they exercise."""
        latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
        latest_file = latest_version.files.all()[0]
        for addon_status, file_status, status_str in statuses:
            latest_file.update(status=file_status)
            self.addon.update(status=addon_status)
            doc = self.get_pq()
            addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
            assert addon_item.length == 1
            assert addon_item.find('.DevHub-MyAddons-item-edit').attr(
                'href'
            ) == self.addon.get_dev_url('edit')
            # Extensions show their icon; static themes show a preview.
            if self.addon.type != amo.ADDON_STATICTHEME:
                assert self.addon.get_icon_url(64) in addon_item.html()
            else:
                assert self.addon.current_previews[0].thumbnail_url in (
                    addon_item.html()
                )
            assert (
                status_str == addon_item.find('.DevHub-MyAddons-VersionStatus').text()
            )
        Addon.objects.all().delete()
        assert self.get_pq()('.DevHub-MyAddons-list .DevHub-MyAddons-item').length == 0

    def test_basic_logged_out(self):
        self.client.logout()
        response = self.client.get(self.url)
        assert response.status_code == 200
        self.assertTemplateUsed(response, 'devhub/index.html')
        assert b'Customize Firefox' in response.content

    def test_default_lang_selected(self):
        # The language switcher defaults to en-us for anonymous visitors.
        self.client.logout()
        doc = self.get_pq()
        selected_value = doc('#language option:selected').attr('value')
        assert selected_value == 'en-us'

    def test_basic_logged_in(self):
        response = self.client.get(self.url)
        assert response.status_code == 200
        self.assertTemplateUsed(response, 'devhub/index.html')
        assert b'My Add-ons' in response.content

    def test_my_addons_addon_versions_link(self):
        assert self.client.login(email='del@icio.us')
        doc = self.get_pq()
        addon_list = doc('.DevHub-MyAddons-list')
        href = addon_list.find('.DevHub-MyAddons-item-versions a').attr('href')
        assert href == self.addon.get_dev_url('versions')

    def test_my_addons(self):
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW, 'Awaiting Review'),
            (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW, 'Approved'),
            (amo.STATUS_DISABLED, amo.STATUS_APPROVED, 'Disabled by Mozilla'),
        ]
        self._check_my_addons_item(statuses)

    def test_my_addons_recommended(self):
        # Same as test_my_addons, but a recommended add-on shows
        # "Approved and Recommended" when approved.
        self.make_addon_promoted(self.addon, RECOMMENDED, approve_version=True)
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW, 'Awaiting Review'),
            (
                amo.STATUS_APPROVED,
                amo.STATUS_AWAITING_REVIEW,
                'Approved and Recommended',
            ),
            (amo.STATUS_DISABLED, amo.STATUS_APPROVED, 'Disabled by Mozilla'),
        ]
        self._check_my_addons_item(statuses)

    def test_my_addons_with_static_theme(self):
        # Re-run the status checks with a static theme (preview image path).
        self.addon.update(type=amo.ADDON_STATICTHEME)
        VersionPreview.objects.create(version=self.addon.current_version)
        self.test_my_addons()

    def test_my_addons_incomplete(self):
        self.addon.update(status=amo.STATUS_NULL)
        # Make add-on incomplete
        AddonCategory.objects.filter(addon=self.addon).delete()
        doc = self.get_pq()
        addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
        assert addon_item.length == 1
        assert addon_item.find('.DevHub-MyAddons-item-edit').attr(
            'href'
        ) == self.addon.get_dev_url('edit')

    def test_my_addons_no_disabled_or_deleted(self):
        # A user-disabled (invisible) add-on is still listed, with the
        # "Invisible" status label.
        self.addon.update(status=amo.STATUS_APPROVED, disabled_by_user=True)
        doc = self.get_pq()
        addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
        assert addon_item.length == 1
        assert addon_item.find('.DevHub-MyAddons-VersionStatus').text() == 'Invisible'
class TestActivityFeed(TestCase):
    """Tests for the developer activity feed (devhub.feed / devhub.feed_all)
    and the recent-activity widget on the dashboard."""

    fixtures = ('base/users', 'base/addon_3615')

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.versions.first()
        self.action_user = UserProfile.objects.get(email='reviewer@mozilla.com')
        # Start each test from a clean activity log.
        ActivityLog.objects.all().delete()

    def test_feed_for_all(self):
        response = self.client.get(reverse('devhub.feed_all'))
        assert response.status_code == 200
        doc = pq(response.content)
        assert doc('header h2').text() == 'Recent Activity for My Add-ons'

    def test_feed_for_addon(self):
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 200
        doc = pq(response.content)
        assert doc('header h2').text() == ('Recent Activity for %s' % self.addon.name)

    def test_feed_disabled(self):
        # The owner can still see the feed of a mozilla-disabled add-on.
        self.addon.update(status=amo.STATUS_DISABLED)
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 200

    def test_feed_disabled_anon(self):
        # Anonymous users are redirected away from the feed.
        self.client.logout()
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 302

    def add_log(self, action=amo.LOG.ADD_RATING):
        """Create an ActivityLog entry for self.addon as self.action_user."""
        core.set_user(self.action_user)
        ActivityLog.create(action, self.addon, self.version)

    def add_hidden_log(self, action=amo.LOG.COMMENT_VERSION):
        """Create a log entry whose action is hidden from developers."""
        self.add_log(action=action)

    def test_feed_hidden(self):
        # Hidden actions must not appear in the per-add-on feed.
        self.add_hidden_log()
        self.add_hidden_log(amo.LOG.OBJECT_ADDED)
        res = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(res.content)
        assert len(doc('#recent-activity li.item')) == 0

    def test_addons_hidden(self):
        # Hidden actions must not appear on the dashboard widget either.
        self.add_hidden_log()
        self.add_hidden_log(amo.LOG.OBJECT_ADDED)
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        assert len(doc('.recent-activity li.item')) == 0

    def test_unlisted_addons_dashboard(self):
        """Unlisted addons are displayed in the feed on the dashboard page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        assert len(doc('.recent-activity li.item')) == 2

    def test_unlisted_addons_feed_sidebar(self):
        """Unlisted addons are displayed in the left side in the feed page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed_all'))
        doc = pq(res.content)
        # First li is "All My Add-ons".
        assert len(doc('#refine-addon li')) == 2

    def test_unlisted_addons_feed(self):
        """Unlisted addons are displayed in the feed page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed_all'))
        doc = pq(res.content)
        assert len(doc('#recent-activity .item')) == 2

    def test_unlisted_addons_feed_filter(self):
        """Feed page can be filtered on unlisted addon."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(res.content)
        assert len(doc('#recent-activity .item')) == 2

    def test_reviewer_name_is_used_for_reviewer_actions(self):
        # Reviewer actions show the reviewer_name alias, never the
        # reviewer's real display name.
        self.action_user.update(display_name='HîdeMe', reviewer_name='ShöwMe')
        self.add_log(action=amo.LOG.APPROVE_VERSION)
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(response.content)
        assert len(doc('#recent-activity .item')) == 1
        content = force_str(response.content)
        assert self.action_user.reviewer_name in content
        assert self.action_user.name not in content

    def test_regular_name_is_used_for_non_reviewer_actions(self):
        # Fields are inverted compared to the test above.
        self.action_user.update(reviewer_name='HîdeMe', display_name='ShöwMe')
        self.add_log(action=amo.LOG.ADD_RATING)  # not a reviewer action.
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(response.content)
        assert len(doc('#recent-activity .item')) == 1
        content = force_str(response.content)
        # Assertions are inverted compared to the test above.
        assert self.action_user.reviewer_name not in content
        assert self.action_user.name in content

    def test_addons_dashboard_name(self):
        # The dashboard timestamp shows the actor's name as plain text,
        # without a profile link.
        self.add_log()
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        timestamp = doc('.recent-activity li.item span.activity-timestamp')
        assert len(timestamp) == 1
        assert self.action_user.name
        assert self.action_user.name in timestamp.html()
        assert '<a href=' not in timestamp.html()

    def test_addons_dashboard_reviewer_name(self):
        # For reviewer actions the dashboard shows the reviewer alias.
        self.action_user.update(reviewer_name='bob')
        self.add_log(action=amo.LOG.APPROVE_VERSION)
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        timestamp = doc('.recent-activity li.item span.activity-timestamp')
        assert len(timestamp) == 1
        assert self.action_user.name
        assert self.action_user.name not in timestamp.html()
        assert self.action_user.reviewer_name in timestamp.html()
        assert '<a href=' not in timestamp.html()
class TestAPIAgreement(TestCase):
    """Tests for the API key agreement page (devhub.api_key_agreement):
    redirect behaviour, optional captcha, submission, and developer
    restrictions."""

    fixtures = ['base/addon_3615', 'base/addon_5579', 'base/users']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.user.update(last_login_ip='192.168.1.1')

    def test_agreement_read(self):
        # Users who already read the agreement skip straight to the keys page.
        self.user.update(read_dev_agreement=self.days_ago(0))
        response = self.client.get(reverse('devhub.api_key_agreement'))
        self.assert3xx(response, reverse('devhub.api_key'))

    def test_agreement_unread_captcha_inactive(self):
        # Without the waffle switch, the form has no recaptcha field.
        self.user.update(read_dev_agreement=None)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
        form = response.context['agreement_form']
        assert 'recaptcha' not in form.fields
        doc = pq(response.content)
        assert doc('.g-recaptcha') == []

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_unread_captcha_active(self):
        # With the waffle switch on, the recaptcha field is rendered.
        self.user.update(read_dev_agreement=None)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
        form = response.context['agreement_form']
        assert 'recaptcha' in form.fields
        doc = pq(response.content)
        assert doc('.g-recaptcha')

    def test_agreement_submit_success(self):
        # Accepting the agreement records the read timestamp and redirects
        # to the keys page.
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 302
        assert response['Location'] == reverse('devhub.api_key')
        self.user.reload()
        self.assertCloseToNow(self.user.read_dev_agreement)

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_submit_captcha_active_error(self):
        # Submitting without the captcha response re-renders with an error.
        self.user.update(read_dev_agreement=None)
        response = self.client.post(reverse('devhub.api_key_agreement'))
        # Captcha is properly rendered
        doc = pq(response.content)
        assert doc('.g-recaptcha')
        assert 'recaptcha' in response.context['agreement_form'].errors

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_submit_captcha_active_success(self):
        # Mock Google's siteverify endpoint so the captcha validates.
        self.user.update(read_dev_agreement=None)
        verify_data = urlencode(
            {
                'secret': '',
                'remoteip': '127.0.0.1',
                'response': 'test',
            }
        )
        responses.add(
            responses.GET,
            'https://www.google.com/recaptcha/api/siteverify?' + verify_data,
            json={'error-codes': [], 'success': True},
        )
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'g-recaptcha-response': 'test',
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 302
        assert response['Location'] == reverse('devhub.api_key')
        self.user.reload()
        self.assertCloseToNow(self.user.read_dev_agreement)

    def test_agreement_read_but_too_long_ago(self):
        # If the agreement changed after the user last read it, the
        # agreement page is shown again instead of redirecting.
        set_config('last_dev_agreement_change_date', '2018-01-01 12:00')
        before_agreement_last_changed = datetime(2018, 1, 1, 12, 0) - timedelta(days=1)
        self.user.update(read_dev_agreement=before_agreement_last_changed)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context

    @mock.patch('olympia.addons.utils.RestrictionChecker.is_submission_allowed')
    def test_cant_submit_agreement_if_restricted(self, is_submission_allowed_mock):
        # Restricted users cannot accept the agreement.
        is_submission_allowed_mock.return_value = False
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 200
        assert response.context['agreement_form'].is_valid() is False
        self.user.reload()
        assert self.user.read_dev_agreement is None
        assert is_submission_allowed_mock.call_count == 2
        # First call is from the form, and it's not checking the agreement,
        # it's just to see if the user is restricted.
        assert is_submission_allowed_mock.call_args_list[0] == (
            (),
            {'check_dev_agreement': False},
        )
        # Second call is from the view itself, no arguments
        assert is_submission_allowed_mock.call_args_list[1] == ((), {})

    def test_cant_submit_agreement_if_restricted_functional(self):
        # Like test_cant_submit_agreement_if_restricted() but with no mocks,
        # picking a single restriction and making sure it's working properly.
        IPNetworkUserRestriction.objects.create(network='127.0.0.1/32')
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 200
        assert response.context['agreement_form'].is_valid() is False
        doc = pq(response.content)
        assert doc('.addon-submission-process').text() == (
            'Multiple add-ons violating our policies have been submitted '
            'from your location. The IP address has been blocked.\n'
            'More information on Developer Accounts'
        )

    @mock.patch('olympia.addons.utils.RestrictionChecker.is_submission_allowed')
    def test_agreement_page_shown_if_restricted(self, is_submission_allowed_mock):
        # Like test_agreement_read() above, but with a restricted user: they
        # are shown the agreement page again instead of redirecting to the
        # api keys page.
        is_submission_allowed_mock.return_value = False
        self.user.update(read_dev_agreement=self.days_ago(0))
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
class TestAPIKeyPage(TestCase):
    """Tests for the API key management page (devhub.api_key): viewing,
    generating (with email confirmation flow), regenerating and revoking
    JWT credentials."""

    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        self.url = reverse('devhub.api_key')
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.user.update(last_login_ip='192.168.1.1')

    def test_key_redirect(self):
        # Users who haven't read the dev agreement are sent there first.
        self.user.update(read_dev_agreement=None)
        response = self.client.get(reverse('devhub.api_key'))
        self.assert3xx(response, reverse('devhub.api_key_agreement'))

    def test_redirect_if_restricted(self):
        # Restricted users are bounced to the agreement page.
        IPNetworkUserRestriction.objects.create(network='127.0.0.1/32')
        response = self.client.get(reverse('devhub.api_key'))
        self.assert3xx(response, reverse('devhub.api_key_agreement'))

    def test_view_without_credentials_not_confirmed_yet(self):
        # No key and no confirmation requested: the generate button shows.
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        submit = doc('#generate-key')
        assert submit.text() == 'Generate new credentials'
        inputs = doc('.api-input input')
        assert len(inputs) == 0, 'Inputs should be absent before keys exist'
        assert not doc('input[name=confirmation_token]')

    def test_view_with_credentials(self):
        # An existing key is displayed with revoke/regenerate actions.
        APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        submit = doc('#generate-key')
        assert submit.text() == 'Revoke and regenerate credentials'
        assert doc('#revoke-key').text() == 'Revoke'
        key_input = doc('.key-input input').val()
        assert key_input == 'some-jwt-key'

    def test_view_without_credentials_confirmation_requested_no_token(self):
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=False
        )
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        # Since confirmation has already been requested, there shouldn't be
        # any buttons on the page if no token was passed in the URL - the user
        # needs to follow the link in the email to continue.
        assert not doc('input[name=confirmation_token]')
        assert not doc('input[name=action]')

    def test_view_without_credentials_confirmation_requested_with_token(self):
        # With the token in the URL, a hidden input and a confirm button
        # are rendered.
        APIKeyConfirmation.objects.create(
            user=self.user, token='secrettoken', confirmed_once=False
        )
        self.url += '?token=secrettoken'
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        assert len(doc('input[name=confirmation_token]')) == 1
        token_input = doc('input[name=confirmation_token]')[0]
        assert token_input.value == 'secrettoken'
        submit = doc('#generate-key')
        assert submit.text() == 'Confirm and generate new credentials'

    def test_view_no_credentials_has_been_confirmed_once(self):
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        # Should look similar to when there are no credentials and no
        # confirmation has been requested yet, the post action is where it
        # will differ.
        self.test_view_without_credentials_not_confirmed_yet()

    def test_create_new_credentials_has_been_confirmed_once(self):
        # Once confirmed, generating creates credentials and emails the user.
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        patch = mock.patch('olympia.devhub.views.APIKey.new_jwt_credentials')
        with patch as mock_creator:
            response = self.client.post(self.url, data={'action': 'generate'})
        mock_creator.assert_called_with(self.user)
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert message.subject == 'New API key created'
        assert reverse('devhub.api_key') in message.body
        self.assert3xx(response, self.url)

    def test_create_new_credentials_confirming_with_token(self):
        # A valid token both confirms and generates credentials.
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='secrettoken', confirmed_once=False
        )
        patch = mock.patch('olympia.devhub.views.APIKey.new_jwt_credentials')
        with patch as mock_creator:
            response = self.client.post(
                self.url,
                data={'action': 'generate', 'confirmation_token': 'secrettoken'},
            )
        mock_creator.assert_called_with(self.user)
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert message.subject == 'New API key created'
        assert reverse('devhub.api_key') in message.body
        confirmation.reload()
        assert confirmation.confirmed_once
        self.assert3xx(response, self.url)

    def test_create_new_credentials_not_confirmed_yet(self):
        assert not APIKey.objects.filter(user=self.user).exists()
        assert not APIKeyConfirmation.objects.filter(user=self.user).exists()
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        # Since there was no credentials are no confirmation yet, this should
        # create a confirmation, send an email with the token, but not create
        # credentials yet.
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert not APIKey.objects.filter(user=self.user).exists()
        assert APIKeyConfirmation.objects.filter(user=self.user).exists()
        confirmation = APIKeyConfirmation.objects.filter(user=self.user).get()
        assert confirmation.token
        assert not confirmation.confirmed_once
        token = confirmation.token
        expected_url = (
            f'http://testserver/en-US/developers/addon/api/key/?token={token}'
        )
        assert message.subject == 'Confirmation for developer API keys'
        assert expected_url in message.body

    def test_create_new_credentials_confirmation_exists_no_token_passed(self):
        # Without the token, nothing happens: no email, no key, no change.
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=False
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        assert len(mail.outbox) == 0
        assert not APIKey.objects.filter(user=self.user).exists()
        confirmation.reload()
        assert not confirmation.confirmed_once  # Unchanged
        self.assert3xx(response, self.url)

    def test_create_new_credentials_confirmation_exists_token_is_wrong(self):
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='sometoken', confirmed_once=False
        )
        response = self.client.post(
            self.url, data={'action': 'generate', 'confirmation_token': 'wrong'}
        )
        # Nothing should have happened, the user will just be redirect to the
        # page.
        assert len(mail.outbox) == 0
        assert not APIKey.objects.filter(user=self.user).exists()
        confirmation.reload()
        assert not confirmation.confirmed_once
        self.assert3xx(response, self.url)

    def test_delete_and_recreate_credentials_has_been_confirmed_once(self):
        # Regenerating deactivates the old key and creates a fresh one.
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        new_key = APIKey.get_jwt_key(user=self.user)
        assert new_key.key != old_key.key
        assert new_key.secret != old_key.secret

    def test_delete_and_recreate_credentials_has_not_been_confirmed_yet(self):
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        # Since there was no confirmation, this should create a one, send an
        # email with the token, but not create credentials yet. (Would happen
        # for an user that had api keys from before we introduced confirmation
        # mechanism, but decided to regenerate).
        assert len(mail.outbox) == 2  # 2 because of key revocation email.
        assert 'revoked' in mail.outbox[0].body
        message = mail.outbox[1]
        assert message.to == [self.user.email]
        assert not APIKey.objects.filter(user=self.user, is_active=True).exists()
        assert APIKeyConfirmation.objects.filter(user=self.user).exists()
        confirmation = APIKeyConfirmation.objects.filter(user=self.user).get()
        assert confirmation.token
        assert not confirmation.confirmed_once
        token = confirmation.token
        expected_url = (
            f'http://testserver/en-US/developers/addon/api/key/?token={token}'
        )
        assert message.subject == 'Confirmation for developer API keys'
        assert expected_url in message.body

    def test_delete_credentials(self):
        # Revoking deactivates the key and notifies the user by email.
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'revoke'})
        self.assert3xx(response, self.url)
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        assert len(mail.outbox) == 1
        assert 'revoked' in mail.outbox[0].body
class TestUpload(BaseUploadTest):
    """Tests for the devhub file upload endpoint (devhub.upload)."""

    fixtures = ['base/users']

    def setUp(self):
        # Zero-argument super() — consistent with the rest of the file.
        super().setUp()
        assert self.client.login(email='regular@mozilla.com')
        self.url = reverse('devhub.upload')
        self.image_path = get_image_path('animated.png')

    def post(self, **kwargs):
        """POST an upload of a binary, non-xpi file; return the response."""
        # Has to be a binary, non xpi file. Use a context manager so the
        # file handle is closed instead of leaking (the test client encodes
        # the payload during the call).
        with open(self.image_path, 'rb') as data:
            return self.client.post(self.url, {'upload': data}, **kwargs)

    def test_login_required(self):
        self.client.logout()
        response = self.post()
        assert response.status_code == 302

    def test_create_fileupload(self):
        # The upload is recorded and its stored bytes match the source file.
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        assert 'animated.png' in upload.name
        # Read the expected bytes with a context manager (no leaked handle).
        with open(self.image_path, 'rb') as source:
            data = source.read()
        with storage.open(upload.path) as stored:
            assert stored.read() == data

    def test_fileupload_metadata(self):
        # The upload records the user, source and the client IP address.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.client.login(email=user.email)
        self.post(REMOTE_ADDR='4.8.15.16.23.42')
        upload = FileUpload.objects.get()
        assert upload.user == user
        assert upload.source == amo.UPLOAD_SOURCE_DEVHUB
        assert upload.ip_address == '4.8.15.16.23.42'

    def test_fileupload_validation(self):
        # A non-addon file produces a failed validation with one error.
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        assert upload.validation
        validation = json.loads(upload.validation)
        assert not validation['success']
        # The current interface depends on this JSON structure:
        assert validation['errors'] == 1
        assert validation['warnings'] == 0
        assert len(validation['messages'])
        msg = validation['messages'][0]
        assert msg['type'] == 'error'
        assert msg['message'] == (
            'Unsupported file type, please upload a supported file '
            '(.crx, .xpi, .zip).'
        )
        assert not msg['description']

    def test_redirect(self):
        # After upload the client is redirected to the JSON detail view.
        response = self.post()
        upload = FileUpload.objects.get()
        url = reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        self.assert3xx(response, url)

    def test_not_an_uuid(self):
        response = self.client.get(
            reverse('devhub.upload_detail', args=['garbage', 'json'])
        )
        assert response.status_code == 404

    @mock.patch('olympia.devhub.tasks.validate')
    def test_upload_unlisted_addon(self, validate_mock):
        """Unlisted addons are validated as "self hosted" addons."""
        validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
        self.url = reverse('devhub.upload_unlisted')
        self.post()
        # Make sure it was called with listed=False.
        assert not validate_mock.call_args[1]['listed']
class TestUploadDetail(BaseUploadTest):
    """Tests for the upload-detail views (JSON and HTML validation results),
    including permission gating for special add-on types: experiments,
    system add-ons and Mozilla-signed extensions."""
    fixtures = ['base/appversion', 'base/users']
    @classmethod
    def setUpTestData(cls):
        # AppVersion rows needed so uploaded webextensions can validate
        # against known Firefox/Android versions.
        versions = {
            '51.0a1',
            amo.DEFAULT_WEBEXT_MIN_VERSION,
            amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID,
            amo.DEFAULT_WEBEXT_MAX_VERSION,
        }
        for version in versions:
            cls.create_appversion('firefox', version)
            cls.create_appversion('android', version)
    def setUp(self):
        super(TestUploadDetail, self).setUp()
        assert self.client.login(email='regular@mozilla.com')
    @classmethod
    def create_appversion(cls, application_name, version):
        # Helper: register one AppVersion for the named application.
        return AppVersion.objects.create(
            application=amo.APPS[application_name].id, version=version
        )
    def post(self):
        # Has to be a binary, non xpi file.
        data = open(get_image_path('animated.png'), 'rb')
        return self.client.post(reverse('devhub.upload'), {'upload': data})
    def validation_ok(self):
        # Minimal "everything passed" validation payload, used to mock the
        # linter so permission checks can be tested in isolation.
        return {
            'errors': 0,
            'success': True,
            'warnings': 0,
            'notices': 0,
            'message_tree': {},
            'messages': [],
            'rejected': False,
            'metadata': {},
        }
    def upload_file(self, file, url='devhub.upload'):
        # Upload a fixture from the devhub tests addons directory (relative
        # paths may escape into other fixture dirs) and expect a redirect.
        addon = os.path.join(
            settings.ROOT, 'src', 'olympia', 'devhub', 'tests', 'addons', file
        )
        with open(addon, 'rb') as f:
            response = self.client.post(reverse(url), {'upload': f})
        assert response.status_code == 302
    def test_detail_json(self):
        # The JSON detail view exposes validation results plus navigation
        # URLs for the upload.
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data['validation']['errors'] == 1
        assert data['url'] == (
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        assert data['full_report_url'] == (
            reverse('devhub.upload_detail', args=[upload.uuid.hex])
        )
        # We must have tiers
        assert len(data['validation']['messages'])
        msg = data['validation']['messages'][0]
        assert msg['tier'] == 1
    def test_upload_detail_for_version(self):
        # An add-on author can view upload details for their own add-on.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200
    def test_upload_detail_for_version_not_an_uuid(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        url = reverse('devhub.upload_detail_for_version', args=[addon.slug, 'garbage'])
        response = self.client.get(url)
        assert response.status_code == 404
    def test_upload_detail_for_version_unlisted(self):
        # Works for add-ons whose only version is unlisted.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory(version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED})
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200
    def test_upload_detail_for_version_deleted(self):
        # Deleted add-ons 404 even for their own authors.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        addon.delete()
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 404
    def test_detail_view(self):
        # HTML detail page: title mentions the file, and the validator
        # suite element points back at the standalone JSON endpoint.
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex])
        )
        assert response.status_code == 200
        doc = pq(response.content)
        expected = 'Validation Results for animated.png'
        assert doc('header h2').text() == expected
        suite = doc('#addon-validator-suite')
        expected = reverse('devhub.standalone_upload_detail', args=[upload.uuid.hex])
        assert suite.attr('data-validateurl') == expected
    def test_not_an_uuid_standalon_upload_detail(self):
        url = reverse('devhub.standalone_upload_detail', args=['garbage'])
        response = self.client.get(url)
        assert response.status_code == 404
    def test_no_servererror_on_missing_version(self):
        """https://github.com/mozilla/addons-server/issues/3779
        addons-linter and amo-validator both add proper errors if the version
        is missing but we shouldn't fail on that but properly show the
        validation results.
        """
        self.upload_file('valid_webextension_no_version.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        message = [
            (m['message'], m.get('type') == 'error')
            for m in data['validation']['messages']
        ]
        expected = [('"/version" is a required property', True)]
        assert message == expected
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_not_a_valid_xpi(self, run_addons_linter_mock):
        run_addons_linter_mock.return_value = json.dumps(self.validation_ok())
        self.upload_file('unopenable.xpi')
        # We never even reach the linter (we can't: because we're repacking
        # zip files, we should raise an error if the zip is invalid before
        # calling the linter, even though the linter has a perfectly good error
        # message for this kind of situation).
        assert not run_addons_linter_mock.called
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        message = [
            (m['message'], m.get('fatal', False))
            for m in data['validation']['messages']
        ]
        # We do raise a specific error message explaining that the archive is
        # not valid instead of a generic exception.
        assert message == [
            ('Invalid or corrupt add-on file.', True),
        ]
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_experiment_xpi_allowed(self, mock_validator):
        # Experiments:submit permission allows experiment uploads.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.grant_permission(user, 'Experiments:submit')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file(
            '../../../files/fixtures/files/experiment_inside_webextension.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_experiment_xpi_not_allowed(self, mock_validator):
        # Without the permission a fatal error is injected into the results.
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file(
            '../../../files/fixtures/files/experiment_inside_webextension.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit this type of add-on',
                'fatal': True,
                'type': 'error',
            }
        ]
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_allowed(self, mock_validator):
        # SystemAddon:Submit permission allows mozilla-guid uploads.
        user = user_factory()
        self.grant_permission(user, 'SystemAddon:Submit')
        assert self.client.login(email=user.email)
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_not_allowed_not_allowed(self, mock_validator):
        # Note: having a mozilla.com email is not enough — the permission is
        # what matters.
        user_factory(email='redpanda@mozilla.com')
        assert self.client.login(email='redpanda@mozilla.com')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit an add-on using an ID ending with '
                '"@mozilla.com" or "@mozilla.org" or '
                '"@pioneer.mozilla.org" or "@search.mozilla.org" or '
                '"@shield.mozilla.com" or "@shield.mozilla.org" or '
                '"@mozillaonline.com" or "@mozillafoundation.org" or '
                '"@rally.mozilla.org"',
                'fatal': True,
                'type': 'error',
            }
        ]
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    @mock.patch('olympia.files.utils.get_signer_organizational_unit_name')
    def test_mozilla_signed_allowed(self, mock_get_signature, mock_validator):
        # Already-signed Mozilla extensions need SystemAddon:Submit too.
        user = user_factory()
        assert self.client.login(email=user.email)
        self.grant_permission(user, 'SystemAddon:Submit')
        mock_validator.return_value = json.dumps(self.validation_ok())
        mock_get_signature.return_value = 'Mozilla Extensions'
        self.upload_file(
            '../../../files/fixtures/files/webextension_signed_already.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []
    @mock.patch('olympia.files.utils.get_signer_organizational_unit_name')
    def test_mozilla_signed_not_allowed_not_allowed(self, mock_get_signature):
        user_factory(email='redpanda@mozilla.com')
        assert self.client.login(email='redpanda@mozilla.com')
        mock_get_signature.return_value = 'Mozilla Extensions'
        self.upload_file(
            '../../../files/fixtures/files/webextension_signed_already.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit a Mozilla Signed Extension',
                'fatal': True,
                'type': 'error',
            }
        ]
    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_update_allowed(self, mock_validator):
        """Updates to system addons are allowed from anyone."""
        user = user_factory(email='pinkpanda@notzilla.com')
        addon = addon_factory(guid='systemaddon@mozilla.org')
        AddonUser.objects.create(addon=addon, user=user)
        assert self.client.login(email='pinkpanda@notzilla.com')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []
    def test_no_redirect_for_metadata(self):
        # Incomplete metadata must not redirect away from the upload detail.
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory(status=amo.STATUS_NULL)
        AddonCategory.objects.filter(addon=addon).delete()
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200
def assert_json_error(request, field, msg):
    """Assert that ``request`` is a JSON 400 response whose body maps
    ``field`` (or ``'__all__'`` when ``field`` is None) to the single
    error message ``msg``."""
    assert request.status_code == 400
    assert request['Content-Type'] == 'application/json'
    key = '__all__' if field is None else field
    payload = json.loads(request.content)
    assert key in payload, '%r not in %r' % (key, payload)
    assert payload[key] == [msg]
def assert_json_field(request, field, msg):
    """Assert that ``request`` is a JSON 200 response whose body maps
    ``field`` to the value ``msg``."""
    assert request.status_code == 200
    assert request['Content-Type'] == 'application/json'
    payload = json.loads(request.content)
    assert field in payload, '%r not in %r' % (field, payload)
    assert payload[field] == msg
class TestQueuePosition(TestCase):
    """Check the "Queue Position: x of y" indicator on the versions page.

    Static themes have their own review queue, so extensions and themes
    must be counted separately.
    """
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestQueuePosition, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version
        self.addon.update(guid='guid@xpi')
        assert self.client.login(email='del@icio.us')
        self.edit_url = reverse(
            'devhub.versions.edit', args=[self.addon.slug, self.version.id]
        )
        # Add a second one also awaiting review in each queue
        addon_factory(
            status=amo.STATUS_NOMINATED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}
        )
        version_factory(
            addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}
        )
        # And some static themes that shouldn't be counted
        addon_factory(
            status=amo.STATUS_NOMINATED,
            type=amo.ADDON_STATICTHEME,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        version_factory(
            addon=addon_factory(type=amo.ADDON_STATICTHEME),
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        addon_factory(
            status=amo.STATUS_NOMINATED,
            type=amo.ADDON_STATICTHEME,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        version_factory(
            addon=addon_factory(type=amo.ADDON_STATICTHEME),
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
    def test_not_in_queue(self):
        # Approved add-ons show no queue indicator at all.
        response = self.client.get(self.addon.get_dev_url('versions'))
        assert self.addon.status == amo.STATUS_APPROVED
        assert pq(response.content)('.version-status-actions .dark').length == 0
    def test_in_queue(self):
        # Both the "new addon" and "new version" queues are exercised.
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW),
            (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW),
        ]
        for (addon_status, file_status) in statuses:
            latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
            latest_version.files.all()[0].update(status=file_status)
            self.addon.update(status=addon_status)
            response = self.client.get(self.addon.get_dev_url('versions'))
            doc = pq(response.content)
            span = doc('.queue-position')
            assert span.length
            # Ours plus the one extension created for each queue in setUp.
            assert 'Queue Position: 1 of 2' in span.text()
    def test_static_themes_in_queue(self):
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW),
            (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW),
        ]
        # Turn our add-on into a static theme so it is counted in the
        # theme queue instead.
        self.addon.update(type=amo.ADDON_STATICTHEME)
        for (addon_status, file_status) in statuses:
            latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
            latest_version.files.all()[0].update(status=file_status)
            self.addon.update(status=addon_status)
            response = self.client.get(self.addon.get_dev_url('versions'))
            doc = pq(response.content)
            span = doc('.queue-position')
            assert span.length
            # Ours plus the two static themes created per queue in setUp.
            assert 'Queue Position: 1 of 3' in span.text()
class TestVersionXSS(TestCase):
    """A version number containing markup must be escaped in devhub pages."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestVersionXSS, self).setUp()
        self.version = Addon.objects.get(id=3615).current_version
        assert self.client.login(email='del@icio.us')
    def test_unique_version_num(self):
        # Can't use a "/" to close the tag, as we're doing a get_url_path on
        # it, which uses addons.versions, which consumes up to the first "/"
        # encountered.
        self.version.update(version='<script>alert("Happy XSS-Xmas");<script>')
        response = self.client.get(reverse('devhub.addons'))
        assert response.status_code == 200
        # The raw markup must never appear...
        assert b'<script>alert' not in response.content
        # ...only its HTML-escaped rendering. (The previous second assertion
        # checked for the raw bytes again, directly contradicting the line
        # above and making the test unpassable.)
        assert b'&lt;script&gt;alert' in response.content
class TestDeleteAddon(TestCase):
    """Deleting an add-on requires typing its slug as confirmation."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestDeleteAddon, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.url = self.addon.get_dev_url('delete')
        self.client.login(email='admin@mozilla.com')
    def test_bad_password(self):
        # NOTE(review): despite the method name, the confirmation token is
        # the add-on slug, not a password (see the message asserted below).
        response = self.client.post(self.url, {'slug': 'nope'})
        self.assert3xx(response, self.addon.get_dev_url('versions'))
        assert response.context['title'] == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert Addon.objects.count() == 1
    def test_success(self):
        # Posting the correct slug deletes the add-on.
        response = self.client.post(self.url, {'slug': 'a3615'})
        self.assert3xx(response, reverse('devhub.addons'))
        assert response.context['title'] == 'Add-on deleted.'
        assert Addon.objects.count() == 0
class TestRequestReview(TestCase):
    """Tests for the developer "request review" endpoint."""
    fixtures = ['base/users']
    def setUp(self):
        super(TestRequestReview, self).setUp()
        self.addon = addon_factory()
        self.version = self.addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_LISTED
        )
        self.redirect_url = self.addon.get_dev_url('versions')
        self.public_url = reverse('devhub.request-review', args=[self.addon.slug])
        assert self.client.login(email='admin@mozilla.com')
    def get_addon(self):
        # Re-fetch to observe status changes made by the view.
        return Addon.objects.get(id=self.addon.id)
    def get_version(self):
        # Re-fetch to observe nomination-date changes made by the view.
        return Version.objects.get(pk=self.version.id)
    def check_400(self, url):
        response = self.client.post(url)
        assert response.status_code == 400
    def test_public(self):
        # Requesting review for an already-approved add-on is a 400.
        self.addon.update(status=amo.STATUS_APPROVED)
        self.check_400(self.public_url)
    @mock.patch('olympia.addons.models.Addon.has_complete_metadata')
    def test_renominate_for_full_review(self, mock_has_complete_metadata):
        # When a version is rejected, the addon is disabled.
        # The author must upload a new version and re-nominate.
        # Renominating the same version resets the nomination date.
        mock_has_complete_metadata.return_value = True
        orig_date = datetime.now() - timedelta(days=30)
        # Pretend it was nominated in the past:
        self.version.update(nomination=orig_date)
        self.addon.update(status=amo.STATUS_NULL)
        response = self.client.post(self.public_url)
        self.assert3xx(response, self.redirect_url)
        assert self.get_addon().status == amo.STATUS_NOMINATED
        # Compare down to the minute: the nomination must have been reset.
        assert self.get_version().nomination.timetuple()[0:5] != (
            orig_date.timetuple()[0:5]
        )
class TestRedirects(TestCase):
    """Legacy devhub URLs must permanently redirect to their new homes."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestRedirects, self).setUp()
        self.base = reverse('devhub.index')
        assert self.client.login(email='admin@mozilla.com')
        self.user = UserProfile.objects.get(email='admin@mozilla.com')
        self.user.update(last_login_ip='192.168.1.1')
    def test_edit(self):
        # Old edit URL, with and without trailing slash.
        url = self.base + 'addon/edit/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.edit', args=['a3615']), 301)
        url = self.base + 'addon/edit/3615/'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.edit', args=['a3615']), 301)
    def test_status(self):
        # Old "status" page now lives at the versions page.
        url = self.base + 'addon/status/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.versions', args=['a3615']), 301)
    def test_versions(self):
        url = self.base + 'versions/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.versions', args=['a3615']), 301)
    def test_lwt_submit_redirects_to_addon_submit(self):
        # Lightweight-theme submission is gone; send users to the generic
        # submission flow (temporary redirect).
        url = reverse('devhub.themes.submit')
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.submit.distribution'), 302)
class TestHasCompleteMetadataRedirects(TestCase):
    """Make sure Addons that are not complete in some way are correctly
    redirected to the right view (and don't end up in a redirect loop)."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestHasCompleteMetadataRedirects, self).setUp()
        # A mock view function wrapped by the dev_required decorator.
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = RequestFactory().get('developers/addon/a3615/edit')
        self.request.user = UserProfile.objects.get(email='admin@mozilla.com')
        self.addon = Addon.objects.get(id=3615)
        self.addon.update(status=amo.STATUS_NULL)
        # Re-fetch after the status update so cached properties are fresh.
        self.addon = Addon.objects.get(id=3615)
        assert self.addon.has_complete_metadata(), self.addon.get_required_metadata()
        assert not self.addon.should_redirect_to_submit_flow()
        # We need to be logged in for any redirection into real views.
        assert self.client.login(email='admin@mozilla.com')
    def _test_redirect(self):
        # The wrapped view must not run; instead we are redirected to the
        # submission "details" step -- and that target must render (200),
        # i.e. no redirect loop.
        func = dev_required(self.f)
        response = func(self.request, addon_id='a3615')
        assert not self.f.called
        assert response.status_code == 302
        assert response['Location'] == ('/en-US/developers/addon/a3615/submit/details')
        # Check the redirection doesn't redirect also.
        redirection = self.client.get(response['Location'])
        assert redirection.status_code == 200
    def test_default(self):
        func = dev_required(self.f)
        func(self.request, addon_id='a3615')
        # Don't redirect if there is no metadata to collect.
        assert self.f.called
    def test_no_summary(self):
        delete_translation(self.addon, 'summary')
        self._test_redirect()
    def test_no_license(self):
        self.addon.current_version.update(license=None)
        self._test_redirect()
    def test_no_license_no_summary(self):
        self.addon.current_version.update(license=None)
        delete_translation(self.addon, 'summary')
        self._test_redirect()
class TestDocs(TestCase):
    """Sanity-check the devhub documentation URLs and their redirects."""
    def test_doc_urls(self):
        assert '/en-US/developers/docs/' == reverse('devhub.docs', args=[])
        assert '/en-US/developers/docs/te' == reverse('devhub.docs', args=['te'])
        # This previously read `assert <string>, reverse(...)`: a comma
        # makes the second expression the assert *message*, so the equality
        # was never checked (the non-empty string is always truthy).
        assert '/en-US/developers/docs/te/st' == reverse('devhub.docs', args=['te/st'])
        # (URL, expected status) pairs: known pages redirect permanently,
        # unknown pages 404.
        urls = [
            (reverse('devhub.docs', args=['getting-started']), 301),
            (reverse('devhub.docs', args=['how-to']), 301),
            (reverse('devhub.docs', args=['how-to/other-addons']), 301),
            (reverse('devhub.docs', args=['fake-page']), 404),
            (reverse('devhub.docs', args=['how-to/fake-page']), 404),
            (reverse('devhub.docs'), 301),
        ]
        index = reverse('devhub.index')
        for url in urls:
            response = self.client.get(url[0])
            assert response.status_code == url[1]
            if url[1] == 302:  # Redirect to the index page
                self.assert3xx(response, index)
class TestRemoveLocale(TestCase):
    """Tests for the devhub endpoint that strips a locale's translations."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        super(TestRemoveLocale, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.url = reverse('devhub.addons.remove-locale', args=['a3615'])
        assert self.client.login(email='del@icio.us')
    def test_bad_request(self):
        # Missing 'locale' parameter.
        response = self.client.post(self.url)
        assert response.status_code == 400
    def test_success(self):
        self.addon.name = {'en-US': 'woo', 'el': 'yeah'}
        self.addon.save()
        self.addon.remove_locale('el')
        qs = Translation.objects.filter(localized_string__isnull=False).values_list(
            'locale', flat=True
        )
        response = self.client.post(self.url, {'locale': 'el'})
        assert response.status_code == 200
        # Only the en-US translation of the name remains.
        assert sorted(qs.filter(id=self.addon.name_id)) == ['en-US']
    def test_delete_default_locale(self):
        # The add-on's default locale can never be removed.
        response = self.client.post(self.url, {'locale': self.addon.default_locale})
        assert response.status_code == 400
    def test_remove_version_locale(self):
        # Removing a locale also drops version release-notes translations.
        version = self.addon.versions.all()[0]
        version.release_notes = {'fr': 'oui'}
        version.save()
        self.client.post(self.url, {'locale': 'fr'})
        res = self.client.get(
            reverse('devhub.versions.edit', args=[self.addon.slug, version.pk])
        )
        doc = pq(res.content)
        # There's 2 fields, one for en-us, one for init.
        assert len(doc('div.trans textarea')) == 2
class TestXssOnAddonName(amo.tests.TestXss):
    """The add-on name must be escaped on every devhub page that renders it."""
    def test_devhub_feed_page(self):
        self.assertNameAndNoXSS(reverse('devhub.feed', args=[self.addon.slug]))
    def test_devhub_addon_edit_page(self):
        self.assertNameAndNoXSS(
            reverse('devhub.addons.edit', args=[self.addon.slug])
        )
    def test_devhub_version_edit_page(self):
        self.assertNameAndNoXSS(
            reverse(
                'devhub.versions.edit',
                args=[self.addon.slug, self.addon.current_version.id],
            )
        )
    def test_devhub_version_list_page(self):
        self.assertNameAndNoXSS(
            reverse('devhub.addons.versions', args=[self.addon.slug])
        )
@pytest.mark.django_db
def test_get_next_version_number():
    """get_next_version_number bumps the major version past every version
    the add-on has ever had, including disabled and deleted ones."""
    addon = addon_factory(version_kw={'version': '1.0'})
    # Easy case - 1.0 to 2.0
    assert get_next_version_number(addon) == '2.0'
    # version numbers without minor numbers should be okay too.
    version_factory(addon=addon, version='2')
    assert get_next_version_number(addon) == '3.0'
    # We just iterate the major version number
    addon.current_version.update(version='34.45.0a1pre')
    addon.current_version.save()
    assert get_next_version_number(addon) == '35.0'
    # "Take" 35.0
    version_factory(
        addon=addon, version='35.0', file_kw={'status': amo.STATUS_DISABLED}
    )
    assert get_next_version_number(addon) == '36.0'
    # And 36.0, even though it's deleted.
    version_factory(addon=addon, version='36.0').delete()
    assert addon.current_version.version == '34.45.0a1pre'
    assert get_next_version_number(addon) == '37.0'
class TestThemeBackgroundImage(TestCase):
    """The previous-background endpoint returns a theme's existing
    background image(s) base64-encoded, for reuse in a new version."""
    def setUp(self):
        user = user_factory(email='regular@mozilla.com')
        assert self.client.login(email='regular@mozilla.com')
        self.addon = addon_factory(users=[user])
        self.url = reverse(
            'devhub.submit.version.previous_background',
            args=[self.addon.slug, 'listed'],
        )
    def test_wrong_user(self):
        # Only the add-on's own developers may fetch its background.
        user_factory(email='irregular@mozilla.com')
        assert self.client.login(email='irregular@mozilla.com')
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 403
    def test_no_header_image(self):
        # No theme file on disk -> empty JSON object.
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data == {}
    def test_header_image(self):
        # Drop a real static-theme zip where the current file lives, then
        # expect its background image back, keyed by filename.
        destination = self.addon.current_version.all_files[0].current_file_path
        zip_file = os.path.join(
            settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme.zip'
        )
        copy_stored_file(zip_file, destination)
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data
        assert len(data.items()) == 1
        assert 'weta.png' in data
        assert len(data['weta.png']) == 168596  # base64-encoded size
class TestLogout(UserViewBase):
    """Tests for the devhub logout view: session teardown, redirect
    handling and cookie invalidation."""
    def test_success(self):
        user = UserProfile.objects.get(email='jbalogh@mozilla.com')
        self.client.login(email=user.email)
        assert user.auth_id
        # While logged in the avatar widget is rendered.
        response = self.client.get(reverse('devhub.index'), follow=True)
        assert pq(response.content)('li a.avatar').attr('href') == (user.get_url_path())
        assert pq(response.content)('li a.avatar img').attr('src') == (user.picture_url)
        response = self.client.get('/en-US/developers/logout', follow=False)
        self.assert3xx(response, '/en-US/firefox/', status_code=302)
        # After logout the avatar is gone and the auth_id is cleared, which
        # invalidates any outstanding sessions for the user.
        response = self.client.get(reverse('devhub.index'), follow=True)
        assert not pq(response.content)('li a.avatar')
        user.reload()
        assert not user.auth_id
    def test_redirect(self):
        self.client.login(email='jbalogh@mozilla.com')
        self.client.get(reverse('devhub.index'), follow=True)
        url = '/en-US/about'
        response = self.client.get(
            urlparams(reverse('devhub.logout'), to=url), follow=True
        )
        self.assert3xx(response, url, status_code=302)
        # Test an invalid domain
        url = urlparams(
            reverse('devhub.logout'), to='/en-US/about', domain='http://evil.com'
        )
        response = self.client.get(url, follow=False)
        self.assert3xx(response, '/en-US/about', status_code=302)
    def test_session_cookie_deleted_on_logout(self):
        # Both the Django session cookie and the API JWT cookie must be
        # expired (epoch date) on logout.
        self.client.login(email='jbalogh@mozilla.com')
        self.client.cookies[API_TOKEN_COOKIE] = 'some.token.value'
        response = self.client.get(reverse('devhub.logout'))
        cookie = response.cookies[settings.SESSION_COOKIE_NAME]
        cookie_date_string = 'Thu, 01 Jan 1970 00:00:00 GMT'
        assert cookie.value == ''
        # in django2.1+ changed to django.utils.http.http_date from cookie_date
        assert cookie['expires'].replace('-', ' ') == cookie_date_string
        jwt_cookie = response.cookies[API_TOKEN_COOKIE]
        assert jwt_cookie.value == ''
        assert jwt_cookie['expires'].replace('-', ' ') == cookie_date_string
class TestStatsLinksInManageMySubmissionsPage(TestCase):
    """The stats link should appear for every add-on type/state except
    those disabled by Mozilla."""
    def setUp(self):
        super().setUp()
        self.user = user_factory()
        self.addon = addon_factory(users=[self.user])
        self.url = reverse('devhub.addons')
        self.client.login(email=self.user.email)
    def test_link_to_stats(self):
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )
    def test_link_to_stats_for_addon_disabled_by_user(self):
        # Developer-disabled add-ons still show their stats link.
        self.addon.update(disabled_by_user=True)
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )
    def test_link_to_stats_for_unlisted_addon(self):
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )
    def test_no_link_for_addon_disabled_by_mozilla(self):
        # Mozilla-disabled add-ons are the one case without a stats link.
        self.addon.update(status=amo.STATUS_DISABLED)
        self.make_addon_unlisted(self.addon)
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) not in str(
            response.content
        )
    def test_link_to_stats_for_langpacks(self):
        self.addon.update(type=amo.ADDON_LPAPP)
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )
    def test_link_to_stats_for_dictionaries(self):
        self.addon.update(type=amo.ADDON_DICT)
        response = self.client.get(self.url)
        assert reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )
| bqbn/addons-server | src/olympia/devhub/tests/test_views.py | Python | bsd-3-clause | 83,689 |
import itertools
import functools
import operator
import warnings
from distutils.version import LooseVersion
import numpy as np
from pandas import compat
from pandas._libs import tslib, algos, lib
from pandas.core.dtypes.common import (
_get_dtype,
is_float, is_scalar,
is_integer, is_complex, is_float_dtype,
is_complex_dtype, is_integer_dtype,
is_bool_dtype, is_object_dtype,
is_numeric_dtype,
is_datetime64_dtype, is_timedelta64_dtype,
is_datetime_or_timedelta_dtype,
is_int_or_datetime_dtype, is_any_int_dtype)
from pandas.core.dtypes.cast import _int64_max, maybe_upcast_putmask
from pandas.core.dtypes.missing import isna, notna, na_value_for_dtype
from pandas.core.config import get_option
from pandas.core.common import _values_from_object
# Optional bottleneck acceleration: detect it at import time and require a
# minimum version; too-old installs are ignored with a warning.
_BOTTLENECK_INSTALLED = False
_MIN_BOTTLENECK_VERSION = '1.0.0'
try:
    import bottleneck as bn
    ver = bn.__version__
    _BOTTLENECK_INSTALLED = (LooseVersion(ver) >=
                             LooseVersion(_MIN_BOTTLENECK_VERSION))
    if not _BOTTLENECK_INSTALLED:
        warnings.warn(
            "The installed version of bottleneck {ver} is not supported "
            "in pandas and will be not be used\nThe minimum supported "
            "version is {min_ver}\n".format(
                ver=ver, min_ver=_MIN_BOTTLENECK_VERSION), UserWarning)
except ImportError:  # pragma: no cover
    pass
_USE_BOTTLENECK = False
def set_use_bottleneck(v=True):
    """Enable/disable bottleneck (only takes effect when it is installed
    with a supported version)."""
    # set/unset to use bottleneck
    global _USE_BOTTLENECK
    if _BOTTLENECK_INSTALLED:
        _USE_BOTTLENECK = v
# Initialize from the pandas option so users can opt out via config.
set_use_bottleneck(get_option('compute.use_bottleneck'))
class disallow(object):
    """Decorator: raise TypeError when any argument to the wrapped
    reduction has one of the given dtypes."""
    def __init__(self, *dtypes):
        super(disallow, self).__init__()
        # Normalize to numpy scalar types so issubclass checks work.
        self.dtypes = tuple(np.dtype(dtype).type for dtype in dtypes)
    def check(self, obj):
        # True when ``obj`` is array-like with one of the forbidden dtypes.
        return hasattr(obj, 'dtype') and issubclass(obj.dtype.type,
                                                    self.dtypes)
    def __call__(self, f):
        @functools.wraps(f)
        def _f(*args, **kwargs):
            # Scan positional and keyword arguments alike.
            obj_iter = itertools.chain(args, compat.itervalues(kwargs))
            if any(self.check(obj) for obj in obj_iter):
                msg = 'reduction operation {name!r} not allowed for this dtype'
                raise TypeError(msg.format(name=f.__name__.replace('nan', '')))
            try:
                with np.errstate(invalid='ignore'):
                    return f(*args, **kwargs)
            except ValueError as e:
                # we want to transform an object array
                # ValueError message to the more typical TypeError
                # e.g. this is normally a disallowed function on
                # object arrays that contain strings
                if is_object_dtype(args[0]):
                    raise TypeError(e)
                raise
        return _f
class bottleneck_switch(object):
    """Decorator: dispatch a nan-reduction to the same-named bottleneck
    function when possible, falling back to the decorated (``alt``)
    implementation otherwise."""
    def __init__(self, **kwargs):
        # Default keyword arguments injected into every call.
        self.kwargs = kwargs
    def __call__(self, alt):
        # Look up the bottleneck function with the same name as ``alt``.
        bn_name = alt.__name__
        try:
            bn_func = getattr(bn, bn_name)
        except (AttributeError, NameError):  # pragma: no cover
            bn_func = None
        @functools.wraps(alt)
        def f(values, axis=None, skipna=True, **kwds):
            # Apply the decorator's default kwargs without clobbering
            # caller-supplied ones.
            if len(self.kwargs) > 0:
                for k, v in compat.iteritems(self.kwargs):
                    if k not in kwds:
                        kwds[k] = v
            try:
                if values.size == 0:
                    # we either return np.nan or pd.NaT
                    if is_numeric_dtype(values):
                        values = values.astype('float64')
                    fill_value = na_value_for_dtype(values.dtype)
                    if values.ndim == 1:
                        return fill_value
                    else:
                        # N-d empty input: an empty result of the reduced
                        # shape, filled with the NA value.
                        result_shape = (values.shape[:axis] +
                                        values.shape[axis + 1:])
                        result = np.empty(result_shape, dtype=values.dtype)
                        result.fill(fill_value)
                        return result
                if (_USE_BOTTLENECK and skipna and
                        _bn_ok_dtype(values.dtype, bn_name)):
                    result = bn_func(values, axis=axis, **kwds)
                    # prefer to treat inf/-inf as NA, but must compute the func
                    # twice :(
                    if _has_infs(result):
                        result = alt(values, axis=axis, skipna=skipna, **kwds)
                else:
                    result = alt(values, axis=axis, skipna=skipna, **kwds)
            except Exception:
                # Bottleneck (or the empty-input path) failed: retry with
                # the pure-python/numpy implementation.
                try:
                    result = alt(values, axis=axis, skipna=skipna, **kwds)
                except ValueError as e:
                    # we want to transform an object array
                    # ValueError message to the more typical TypeError
                    # e.g. this is normally a disallowed function on
                    # object arrays that contain strings
                    if is_object_dtype(values):
                        raise TypeError(e)
                    raise
            return result
        return f
def _bn_ok_dtype(dt, name):
    """Return True when bottleneck may be used for ``name`` on dtype ``dt``."""
    # Bottleneck chokes on datetime64 and object dtypes
    if is_object_dtype(dt) or is_datetime_or_timedelta_dtype(dt):
        return False

    # GH 15507: bottleneck does not properly upcast during the sum
    # so can overflow.
    # GH 9422: we also want to preserve NaN when all elements are NaN,
    # unlike bottleneck/numpy which consider this to be 0.
    return name not in ('nansum', 'nanprod')
def _has_infs(result):
if isinstance(result, np.ndarray):
if result.dtype == 'f8':
return lib.has_infs_f8(result.ravel())
elif result.dtype == 'f4':
return lib.has_infs_f4(result.ravel())
try:
return np.isinf(result).any()
except (TypeError, NotImplementedError):
# if it doesn't support infs, then it can't have infs
return False
def _get_fill_value(dtype, fill_value=None, fill_value_typ=None):
    """ return the correct fill value for the dtype of the values """
    if fill_value is not None:
        return fill_value

    if _na_ok_dtype(dtype):
        # NaN-capable dtypes: NaN by default, +/-inf when requested
        if fill_value_typ is None:
            return np.nan
        return np.inf if fill_value_typ == '+inf' else -np.inf

    # integer / datetime-like dtypes cannot hold NaN; use sentinels
    if fill_value_typ == '+inf':
        # need the max int here
        return _int64_max
    return tslib.iNaT
def _get_values(values, skipna, fill_value=None, fill_value_typ=None,
                isfinite=False, copy=True):
    """ utility to get the values view, mask, dtype
    if necessary copy and mask using the specified fill_value
    copy = True will force the copy

    Returns ``(values, mask, dtype, dtype_max)`` where ``dtype_max`` is a
    platform-independent accumulation dtype for the original ``dtype``.
    """
    values = _values_from_object(values)
    if isfinite:
        # treat +/-inf as missing too
        mask = _isfinite(values)
    else:
        mask = isna(values)

    dtype = values.dtype
    dtype_ok = _na_ok_dtype(dtype)

    # get our fill value (in case we need to provide an alternative
    # dtype for it)
    fill_value = _get_fill_value(dtype, fill_value=fill_value,
                                 fill_value_typ=fill_value_typ)

    if skipna:
        if copy:
            values = values.copy()
        if dtype_ok:
            np.putmask(values, mask, fill_value)

        # promote if needed
        else:
            values, changed = maybe_upcast_putmask(values, mask, fill_value)

    elif copy:
        values = values.copy()

    # datetime-likes are reinterpreted as int64 for computation
    values = _view_if_needed(values)

    # return a platform independent precision dtype
    dtype_max = dtype
    if is_integer_dtype(dtype) or is_bool_dtype(dtype):
        dtype_max = np.int64
    elif is_float_dtype(dtype):
        dtype_max = np.float64

    return values, mask, dtype, dtype_max
def _isfinite(values):
    """Mask of entries that are NA or +/-inf."""
    if is_datetime_or_timedelta_dtype(values):
        # datetime-likes cannot hold inf; NA detection is enough
        return isna(values)
    numeric_like = (is_complex_dtype(values) or is_float_dtype(values) or
                    is_integer_dtype(values) or is_bool_dtype(values))
    if numeric_like:
        return ~np.isfinite(values)
    # fall back to a float view for other dtypes (e.g. object)
    return ~np.isfinite(values.astype('float64'))
def _na_ok_dtype(dtype):
    # NaN can only be stored directly in inexact dtypes; integer and
    # datetime-like dtypes need a sentinel (iNaT) instead
    return not is_int_or_datetime_dtype(dtype)
def _view_if_needed(values):
    """Reinterpret datetime-like data as int64 so reductions can run."""
    if not is_datetime_or_timedelta_dtype(values):
        return values
    return values.view(np.int64)
def _wrap_results(result, dtype):
    """ wrap our results if needed

    Re-box datetime64/timedelta64 results (which were computed on int64
    views) as Timestamp/Timedelta scalars or dtype-correct arrays.
    """
    if is_datetime64_dtype(dtype):
        if not isinstance(result, np.ndarray):
            result = lib.Timestamp(result)
        else:
            result = result.view(dtype)
    elif is_timedelta64_dtype(dtype):
        if not isinstance(result, np.ndarray):

            # raise if we have a timedelta64[ns] which is too large
            if np.fabs(result) > _int64_max:
                raise ValueError("overflow in timedelta operation")

            result = lib.Timedelta(result, unit='ns')
        else:
            result = result.astype('i8').view(dtype)

    return result
def nanany(values, axis=None, skipna=True):
    """True when any element along ``axis`` is truthy.

    With ``skipna``, missing values are filled with False so they can
    never count as a hit.
    """
    values, _, _, _ = _get_values(values, skipna, False, copy=skipna)
    return values.any(axis)
def nanall(values, axis=None, skipna=True):
    """True when all elements along ``axis`` are truthy.

    With ``skipna``, missing values are filled with True so they can
    never break the conjunction.
    """
    values, _, _, _ = _get_values(values, skipna, True, copy=skipna)
    return values.all(axis)
@disallow('M8')
@bottleneck_switch()
def nansum(values, axis=None, skipna=True):
    """Sum along ``axis``; NA positions are filled with 0 first."""
    values, mask, dtype, dtype_max = _get_values(values, skipna, 0)

    # floats keep their own precision; timedeltas accumulate as float64;
    # everything else uses the platform-independent max precision
    if is_float_dtype(dtype):
        dtype_sum = dtype
    elif is_timedelta64_dtype(dtype):
        dtype_sum = np.float64
    else:
        dtype_sum = dtype_max

    the_sum = values.sum(axis, dtype=dtype_sum)
    the_sum = _maybe_null_out(the_sum, axis, mask)
    return _wrap_results(the_sum, dtype)
@disallow('M8')
@bottleneck_switch()
def nanmean(values, axis=None, skipna=True):
    """Mean along ``axis``; NAs are filled with 0 and excluded from the
    count, so they do not influence the result."""
    values, mask, dtype, dtype_max = _get_values(values, skipna, 0)

    # ints/timedeltas accumulate in float64; floats keep their precision
    dtype_sum = dtype_max
    dtype_count = np.float64
    if is_integer_dtype(dtype) or is_timedelta64_dtype(dtype):
        dtype_sum = np.float64
    elif is_float_dtype(dtype):
        dtype_sum = dtype
        dtype_count = dtype
    count = _get_counts(mask, axis, dtype=dtype_count)
    the_sum = _ensure_numeric(values.sum(axis, dtype=dtype_sum))

    if axis is not None and getattr(the_sum, 'ndim', False):
        # array result: guard against division by a zero count
        the_mean = the_sum / count
        ct_mask = count == 0
        if ct_mask.any():
            the_mean[ct_mask] = np.nan
    else:
        the_mean = the_sum / count if count > 0 else np.nan

    return _wrap_results(the_mean, dtype)
@disallow('M8')
@bottleneck_switch()
def nanmedian(values, axis=None, skipna=True):
    """Median along ``axis``; NA handling is done per 1-d slice."""
    values, mask, dtype, dtype_max = _get_values(values, skipna)

    def get_median(x):
        # median of a single 1-d slice; NaN when NAs are present and
        # skipna is False
        mask = notna(x)
        if not skipna and not mask.all():
            return np.nan
        return algos.median(_values_from_object(x[mask]))

    if not is_float_dtype(values):
        values = values.astype('f8')
        values[mask] = np.nan

    if axis is None:
        values = values.ravel()

    notempty = values.size

    # an array from a frame
    if values.ndim > 1:
        # there's a non-empty array to apply over otherwise numpy raises
        if notempty:
            return _wrap_results(
                np.apply_along_axis(get_median, axis, values), dtype)

        # must return the correct shape, but median is not defined for the
        # empty set so return nans of shape "everything but the passed axis"
        # since "axis" is where the reduction would occur if we had a nonempty
        # array
        shp = np.array(values.shape)
        dims = np.arange(values.ndim)
        ret = np.empty(shp[dims != axis])
        ret.fill(np.nan)
        return _wrap_results(ret, dtype)

    # otherwise return a scalar value
    return _wrap_results(get_median(values) if notempty else np.nan, dtype)
def _get_counts_nanvar(mask, axis, ddof, dtype=float):
    """Return (count, degrees-of-freedom); both become NaN wherever the
    count does not exceed ``ddof``."""
    dtype = _get_dtype(dtype)
    count = _get_counts(mask, axis, dtype=dtype)
    d = count - dtype.type(ddof)

    # always return NaN, never inf
    if is_scalar(count):
        if count <= ddof:
            count, d = np.nan, np.nan
    else:
        degenerate = count <= ddof
        if degenerate.any():
            np.putmask(d, degenerate, np.nan)
            np.putmask(count, degenerate, np.nan)
    return count, d
@disallow('M8')
@bottleneck_switch(ddof=1)
def nanstd(values, axis=None, skipna=True, ddof=1):
    """Standard deviation: square root of the NA-aware variance."""
    variance = nanvar(values, axis=axis, skipna=skipna, ddof=ddof)
    return _wrap_results(np.sqrt(variance), values.dtype)
@disallow('M8')
@bottleneck_switch(ddof=1)
def nanvar(values, axis=None, skipna=True, ddof=1):
    """Variance along ``axis`` with ``ddof`` delta degrees of freedom."""
    values = _values_from_object(values)
    dtype = values.dtype
    mask = isna(values)
    if is_any_int_dtype(values):
        # ints cannot hold NaN; work in float64
        values = values.astype('f8')
        values[mask] = np.nan

    if is_float_dtype(values):
        count, d = _get_counts_nanvar(mask, axis, ddof, values.dtype)
    else:
        count, d = _get_counts_nanvar(mask, axis, ddof)

    if skipna:
        values = values.copy()
        np.putmask(values, mask, 0)

    # xref GH10242
    # Compute variance via two-pass algorithm, which is stable against
    # cancellation errors and relatively accurate for small numbers of
    # observations.
    #
    # See https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
    avg = _ensure_numeric(values.sum(axis=axis, dtype=np.float64)) / count
    if axis is not None:
        avg = np.expand_dims(avg, axis)

    sqr = _ensure_numeric((avg - values)**2)
    np.putmask(sqr, mask, 0)
    result = sqr.sum(axis=axis, dtype=np.float64) / d

    # Return variance as np.float64 (the datatype used in the accumulator),
    # unless we were dealing with a float array, in which case use the same
    # precision as the original values array.
    if is_float_dtype(dtype):
        result = result.astype(dtype)
    return _wrap_results(result, values.dtype)
@disallow('M8', 'm8')
def nansem(values, axis=None, skipna=True, ddof=1):
    """Standard error of the mean along ``axis``.

    Previously the first ``nanvar`` result was assigned to ``var`` and then
    immediately shadowed by the second call (a dead store).  The initial
    call is kept only for its side effect: it raises TypeError early for
    non-numeric input, before the ``astype`` below can mask the problem.
    """
    nanvar(values, axis, skipna, ddof=ddof)

    mask = isna(values)
    if not is_float_dtype(values.dtype):
        values = values.astype('f8')
    count, _ = _get_counts_nanvar(mask, axis, ddof, values.dtype)
    var = nanvar(values, axis, skipna, ddof=ddof)

    return np.sqrt(var) / np.sqrt(count)
def _nanminmax(meth, fill_value_typ):
    """Build nanmin/nanmax: mask NA with +/-inf, reduce, then null out
    all-NA slices."""

    @bottleneck_switch()
    def reduction(values, axis=None, skipna=True):
        values, mask, dtype, dtype_max = _get_values(
            values, skipna, fill_value_typ=fill_value_typ, )

        if ((axis is not None and values.shape[axis] == 0) or
                values.size == 0):
            # empty reduction: produce NaN(s) of the right shape.
            # The bare ``except:`` used previously would also have
            # swallowed KeyboardInterrupt/SystemExit.
            try:
                result = getattr(values, meth)(axis, dtype=dtype_max)
                result.fill(np.nan)
            except (AttributeError, TypeError, ValueError):
                result = np.nan
        else:
            result = getattr(values, meth)(axis)

        result = _wrap_results(result, dtype)
        return _maybe_null_out(result, axis, mask)

    reduction.__name__ = 'nan' + meth
    return reduction


nanmin = _nanminmax('min', fill_value_typ='+inf')
nanmax = _nanminmax('max', fill_value_typ='-inf')
@disallow('O')
def nanargmax(values, axis=None, skipna=True):
    """
    Returns -1 in the NA case
    """
    # NA positions are filled with -inf so they can never win the argmax
    values, mask, _, _ = _get_values(values, skipna, fill_value_typ='-inf')
    return _maybe_arg_null_out(values.argmax(axis), axis, mask, skipna)
@disallow('O')
def nanargmin(values, axis=None, skipna=True):
    """
    Returns -1 in the NA case
    """
    # NA positions are filled with +inf so they can never win the argmin
    values, mask, _, _ = _get_values(values, skipna, fill_value_typ='+inf')
    return _maybe_arg_null_out(values.argmin(axis), axis, mask, skipna)
@disallow('M8', 'm8')
def nanskew(values, axis=None, skipna=True):
    """ Compute the sample skewness.
    The statistic computed here is the adjusted Fisher-Pearson standardized
    moment coefficient G1. The algorithm computes this coefficient directly
    from the second and third central moment.
    """
    values = _values_from_object(values)
    mask = isna(values)
    if not is_float_dtype(values.dtype):
        values = values.astype('f8')
        count = _get_counts(mask, axis)
    else:
        count = _get_counts(mask, axis, dtype=values.dtype)

    if skipna:
        values = values.copy()
        np.putmask(values, mask, 0)

    # second and third central moments
    mean = values.sum(axis, dtype=np.float64) / count
    if axis is not None:
        mean = np.expand_dims(mean, axis)

    adjusted = values - mean
    if skipna:
        np.putmask(adjusted, mask, 0)
    adjusted2 = adjusted ** 2
    adjusted3 = adjusted2 * adjusted
    m2 = adjusted2.sum(axis, dtype=np.float64)
    m3 = adjusted3.sum(axis, dtype=np.float64)

    # floating point error
    #
    # #18044 in _libs/windows.pyx calc_skew follow this behavior
    # to fix the fperr to treat m2 <1e-14 as zero
    m2 = _zero_out_fperr(m2)
    m3 = _zero_out_fperr(m3)

    with np.errstate(invalid='ignore', divide='ignore'):
        result = (count * (count - 1) ** 0.5 / (count - 2)) * (m3 / m2 ** 1.5)

    dtype = values.dtype
    if is_float_dtype(dtype):
        result = result.astype(dtype)

    # skew is 0 for zero variance and undefined (NaN) for count < 3
    if isinstance(result, np.ndarray):
        result = np.where(m2 == 0, 0, result)
        result[count < 3] = np.nan
        return result
    else:
        result = 0 if m2 == 0 else result
        if count < 3:
            return np.nan
        return result
@disallow('M8', 'm8')
def nankurt(values, axis=None, skipna=True):
    """ Compute the sample excess kurtosis.
    The statistic computed here is the adjusted Fisher-Pearson standardized
    moment coefficient G2, computed directly from the second and fourth
    central moment.
    """
    values = _values_from_object(values)
    mask = isna(values)
    if not is_float_dtype(values.dtype):
        values = values.astype('f8')
        count = _get_counts(mask, axis)
    else:
        count = _get_counts(mask, axis, dtype=values.dtype)

    if skipna:
        values = values.copy()
        np.putmask(values, mask, 0)

    # second and fourth central moments
    mean = values.sum(axis, dtype=np.float64) / count
    if axis is not None:
        mean = np.expand_dims(mean, axis)

    adjusted = values - mean
    if skipna:
        np.putmask(adjusted, mask, 0)
    adjusted2 = adjusted ** 2
    adjusted4 = adjusted2 ** 2
    m2 = adjusted2.sum(axis, dtype=np.float64)
    m4 = adjusted4.sum(axis, dtype=np.float64)

    with np.errstate(invalid='ignore', divide='ignore'):
        adj = 3 * (count - 1) ** 2 / ((count - 2) * (count - 3))
        numer = count * (count + 1) * (count - 1) * m4
        denom = (count - 2) * (count - 3) * m2**2
        # NOTE: ``result`` used to be computed here as well; that value was
        # always overwritten below (a dead store), so the division is now
        # performed only once, after the fperr cleanup.

    # floating point error
    #
    # #18044 in _libs/windows.pyx calc_kurt follow this behavior
    # to fix the fperr to treat denom <1e-14 as zero
    numer = _zero_out_fperr(numer)
    denom = _zero_out_fperr(denom)

    if not isinstance(denom, np.ndarray):
        # if ``denom`` is a scalar, check these corner cases first before
        # doing division
        if count < 4:
            return np.nan
        if denom == 0:
            return 0

    with np.errstate(invalid='ignore', divide='ignore'):
        result = numer / denom - adj

    dtype = values.dtype
    if is_float_dtype(dtype):
        result = result.astype(dtype)

    if isinstance(result, np.ndarray):
        result = np.where(denom == 0, 0, result)
        result[count < 4] = np.nan

    return result
@disallow('M8', 'm8')
def nanprod(values, axis=None, skipna=True):
    """Product along ``axis``; NAs are replaced with the multiplicative
    identity (1) when skipped."""
    mask = isna(values)
    if skipna and not is_any_int_dtype(values):
        values = values.copy()
        values[mask] = 1  # identity element for a product
    return _maybe_null_out(values.prod(axis), axis, mask)
def _maybe_arg_null_out(result, axis, mask, skipna):
# helper function for nanargmin/nanargmax
if axis is None or not getattr(result, 'ndim', False):
if skipna:
if mask.all():
result = -1
else:
if mask.any():
result = -1
else:
if skipna:
na_mask = mask.all(axis)
else:
na_mask = mask.any(axis)
if na_mask.any():
result[na_mask] = -1
return result
def _get_counts(mask, axis, dtype=float):
    """Count non-missing entries along ``axis`` (or overall when ``axis``
    is None), returned as ``dtype``."""
    dtype = _get_dtype(dtype)
    if axis is None:
        return dtype.type(mask.size - mask.sum())

    count = mask.shape[axis] - mask.sum(axis)
    if is_scalar(count):
        return dtype.type(count)
    try:
        return count.astype(dtype)
    except AttributeError:
        # ``count`` can be a plain Python scalar with no .astype
        return np.array(count, dtype=dtype)
def _maybe_null_out(result, axis, mask):
    """Replace result entries for all-NA slices with NaN/None."""
    if axis is not None and getattr(result, 'ndim', False):
        # array case: a slice is all-NA when its non-null count is zero
        null_mask = (mask.shape[axis] - mask.sum(axis)) == 0
        if np.any(null_mask):
            if is_numeric_dtype(result):
                if np.iscomplexobj(result):
                    result = result.astype('c16')
                else:
                    result = result.astype('f8')
                result[null_mask] = np.nan
            else:
                # GH12941, use None to auto cast null
                result[null_mask] = None
    elif result is not tslib.NaT:
        # scalar case: here ``null_mask`` is the count of valid entries
        null_mask = mask.size - mask.sum()
        if null_mask == 0:
            result = np.nan

    return result
def _zero_out_fperr(arg):
# #18044 reference this behavior to fix rolling skew/kurt issue
if isinstance(arg, np.ndarray):
with np.errstate(invalid='ignore'):
return np.where(np.abs(arg) < 1e-14, 0, arg)
else:
return arg.dtype.type(0) if np.abs(arg) < 1e-14 else arg
@disallow('M8', 'm8')
def nancorr(a, b, method='pearson', min_periods=None):
    """
    a, b: ndarrays
    """
    if len(a) != len(b):
        raise AssertionError('Operands to nancorr must have same size')

    if min_periods is None:
        min_periods = 1

    # drop positions where either input is NA
    valid = notna(a) & notna(b)
    if not valid.all():
        a, b = a[valid], b[valid]

    if len(a) < min_periods:
        return np.nan

    return get_corr_func(method)(a, b)
def get_corr_func(method):
    """Return a callable computing the requested correlation of two 1-d
    arrays ('pearson', 'kendall' or 'spearman')."""
    if method in ('kendall', 'spearman'):
        # scipy is only required for the rank-based methods
        from scipy.stats import kendalltau, spearmanr

    def _pearson(a, b):
        return np.corrcoef(a, b)[0, 1]

    def _kendall(a, b):
        rs = kendalltau(a, b)
        return rs[0] if isinstance(rs, tuple) else rs

    def _spearman(a, b):
        return spearmanr(a, b)[0]

    return {
        'pearson': _pearson,
        'kendall': _kendall,
        'spearman': _spearman,
    }[method]
@disallow('M8', 'm8')
def nancov(a, b, min_periods=None):
    """Covariance of two 1-d arrays, ignoring pairwise-NA positions."""
    if len(a) != len(b):
        raise AssertionError('Operands to nancov must have same size')

    if min_periods is None:
        min_periods = 1

    # keep only positions where both operands are valid
    valid = notna(a) & notna(b)
    if not valid.all():
        a, b = a[valid], b[valid]

    if len(a) < min_periods:
        return np.nan

    return np.cov(a, b)[0, 1]
def _ensure_numeric(x):
    """Coerce ``x`` to a numeric ndarray or scalar.

    Raises TypeError when no numeric interpretation exists.
    """
    if isinstance(x, np.ndarray):
        if is_integer_dtype(x) or is_bool_dtype(x):
            x = x.astype(np.float64)
        elif is_object_dtype(x):
            # try complex first so complex-valued strings survive, then
            # fall back to float.  The bare ``except:`` used previously
            # would also have swallowed KeyboardInterrupt/SystemExit.
            try:
                x = x.astype(np.complex128)
            except (TypeError, ValueError):
                x = x.astype(np.float64)
            else:
                # drop an all-zero imaginary part
                if not np.any(x.imag):
                    x = x.real
    elif not (is_float(x) or is_integer(x) or is_complex(x)):
        try:
            x = float(x)
        except Exception:
            try:
                x = complex(x)
            except Exception:
                raise TypeError('Could not convert {value!s} to numeric'
                                .format(value=x))
    return x
# NA-friendly array comparisons
def make_nancomp(op):
    """Wrap comparison ``op`` so positions where either operand is NA come
    out as np.nan (upcasting bool results to object as needed)."""
    def f(x, y):
        xmask = isna(x)
        ymask = isna(y)
        mask = xmask | ymask

        with np.errstate(all='ignore'):
            result = op(x, y)

        if mask.any():
            if is_bool_dtype(result):
                # object dtype is needed so np.nan can coexist with bools
                result = result.astype('O')
            np.putmask(result, mask, np.nan)

        return result

    return f


nangt = make_nancomp(operator.gt)
nange = make_nancomp(operator.ge)
nanlt = make_nancomp(operator.lt)
nanle = make_nancomp(operator.le)
naneq = make_nancomp(operator.eq)
nanne = make_nancomp(operator.ne)
| winklerand/pandas | pandas/core/nanops.py | Python | bsd-3-clause | 24,684 |
# -*- coding: utf-8 -*-
#
# malepierre documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'malepierre'
copyright = u"2015, Eliot Berriot"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'malepierredoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'malepierre.tex',
u'malepierre Documentation',
u"Eliot Berriot", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'malepierre', u'malepierre Documentation',
[u"Eliot Berriot"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Texinfo output grouping. Fixes the user-facing typo "campaing" in the
# project description ("Warhammer campaign manager").
texinfo_documents = [
    ('index', 'malepierre', u'malepierre Documentation',
     u"Eliot Berriot", 'malepierre',
     'Warhammer campaign manager', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| EliotBerriot/malepierre | docs/conf.py | Python | bsd-3-clause | 7,814 |
// Copyright 2017 The Cobalt Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "starboard/speech_recognizer.h"
#if SB_API_VERSION >= 12 || SB_HAS(SPEECH_RECOGNIZER)
// Stub implementation for platforms without speech recognition support:
// the |handler| is ignored and creation always fails by returning the
// invalid handle, which callers must check for.
SbSpeechRecognizer SbSpeechRecognizerCreate(
    const SbSpeechRecognizerHandler* handler) {
  return kSbSpeechRecognizerInvalid;
}
#endif // SB_API_VERSION >= 12 || SB_HAS(SPEECH_RECOGNIZER)
| youtube/cobalt | starboard/shared/stub/speech_recognizer_create.cc | C++ | bsd-3-clause | 909 |
<?php

use yii\helpers\Html;

/* @var $this yii\web\View */
/* @var $model common\models\VendorRoundTripRate */

// Page title and breadcrumb trail for the "update" action.
$this->title = 'Update Vendor Round Trip Rate: ' . ' ' . $model->id;
$this->params['breadcrumbs'][] = ['label' => 'Vendor Round Trip Rates', 'url' => ['index']];
$this->params['breadcrumbs'][] = ['label' => $model->id, 'url' => ['view', 'id' => $model->id]];
$this->params['breadcrumbs'][] = 'Update';
?>
<div class="vendor-round-trip-rate-update">

    <h1><?= Html::encode($this->title) ?></h1>

    <?php // The shared _form partial is reused by both create and update. ?>
    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
| brijeshm4013/test | common/modules/vendorManagement/views/vendor-round-trip-rate/update.php | PHP | bsd-3-clause | 589 |
<?php
namespace PhpInk\Nami\CoreBundle\Model\Orm\Analytics;
use Doctrine\ORM\Mapping as ORM;
use JMS\Serializer\Annotation as JMS;
use PhpInk\Nami\CoreBundle\Model\Orm\Core;
/**
 * Base analytics record: tracks the visitor IP, user agent
 * and creation timestamp of a page view.
 *
 * @ORM\MappedSuperclass
 */
class BaseAnalytics extends Core\Entity
{
    /**
     * Primary Key
     * @var int
     * @ORM\Id
     * @ORM\Column(type="integer", options={"unsigned"=true})
     * @ORM\GeneratedValue(strategy="AUTO")
     * @JMS\Expose
     */
    protected $id;

    /**
     * The ip who have seen the page
     *
     * @var string
     * @ORM\Column(name="ip", type="string", length=255)
     * @JMS\Expose
     */
    protected $ip;

    /**
     * @var \DateTime
     * @ORM\Column(name="created_at", type="datetime")
     * @JMS\Expose
     */
    protected $createdAt;

    /**
     * The user agent
     *
     * @var string
     * @ORM\Column(name="user_agent", type="string", length=255)
     * @JMS\Expose
     */
    protected $userAgent;

    /**
     * Constructor
     *
     * Sets createdAt to "now"; ip and user agent are only
     * assigned when given as strings.
     *
     * @param string $ip        [optional]
     * @param string $userAgent [optional]
     */
    public function __construct($ip = null, $userAgent = null)
    {
        $this->setCreatedAt(new \DateTime());
        if (is_string($ip)) {
            $this->setIp($ip);
        }
        if (is_string($userAgent)) {
            $this->setUserAgent($userAgent);
        }
    }

    /**
     * Get the value of id.
     *
     * @return integer
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Set the value of id.
     *
     * @param integer
     * @return BaseAnalytics
     */
    public function setId($id)
    {
        $this->id = $id;
        return $this;
    }

    /**
     * Set the visitor IP address (plain string column,
     * not a relation despite the previous docblock).
     *
     * @param string $ip
     * @return BaseAnalytics
     */
    public function setIp($ip)
    {
        $this->ip = $ip;
        return $this;
    }

    /**
     * Get the visitor IP address.
     *
     * @return string
     */
    public function getIp()
    {
        return $this->ip;
    }

    /**
     * Get the value of userAgent
     * @return string
     */
    public function getUserAgent()
    {
        return $this->userAgent;
    }

    /**
     * Set the value of userAgent
     * @param string $userAgent
     * @return BaseAnalytics
     */
    public function setUserAgent($userAgent)
    {
        $this->userAgent = $userAgent;
        return $this;
    }

    /**
     * Set createdAt
     *
     * @param \DateTime $createdAt
     * @return BaseAnalytics
     */
    public function setCreatedAt($createdAt)
    {
        $this->createdAt = $createdAt;
        return $this;
    }

    /**
     * Get createdAt
     *
     * @return \DateTime
     */
    public function getCreatedAt()
    {
        return $this->createdAt;
    }
}
| phpink/nami-core-bundle | Model/Orm/Analytics/BaseAnalytics.php | PHP | bsd-3-clause | 2,875 |
var functions_dup =
[
[ "_", "functions.html", null ],
[ "a", "functions_a.html", null ],
[ "b", "functions_b.html", null ],
[ "c", "functions_c.html", null ],
[ "d", "functions_d.html", null ],
[ "e", "functions_e.html", null ],
[ "f", "functions_f.html", null ],
[ "g", "functions_g.html", null ],
[ "j", "functions_j.html", null ],
[ "l", "functions_l.html", null ],
[ "m", "functions_m.html", null ],
[ "n", "functions_n.html", null ],
[ "o", "functions_o.html", null ],
[ "p", "functions_p.html", null ],
[ "r", "functions_r.html", null ],
[ "s", "functions_s.html", null ],
[ "t", "functions_t.html", null ],
[ "u", "functions_u.html", null ],
[ "v", "functions_v.html", null ]
]; | improve-project/platform | doc/database_handler/functions_dup.js | JavaScript | bsd-3-clause | 762 |
//go:build ios
// +build ios
package attachments
/*
#cgo LDFLAGS: -framework MobileCoreServices
*/
import "C"
| keybase/client | go/chat/attachments/preview_darwin_ios.go | GO | bsd-3-clause | 112 |
/**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* The examples provided by Facebook are for non-commercial testing and
* evaluation purposes only.
*
* Facebook reserves all rights not expressly granted.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
* FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* @flow
* @providesModule MultiColumnExample
*/
'use strict';
const React = require('react');
const ReactNative = require('react-native');
const {
FlatList,
StyleSheet,
Text,
View,
} = ReactNative;
const UIExplorerPage = require('./UIExplorerPage');
const infoLog = require('infoLog');
const {
FooterComponent,
HeaderComponent,
ItemComponent,
PlainInput,
SeparatorComponent,
genItemData,
getItemLayout,
pressItem,
renderSmallSwitchOption,
} = require('./ListExampleShared');
class MultiColumnExample extends React.PureComponent {
static title = '<FlatList> - MultiColumn';
static description = 'Performant, scrollable grid of data.';
state = {
data: genItemData(1000),
filterText: '',
fixedHeight: true,
logViewable: false,
numColumns: 2,
virtualized: true,
};
_onChangeFilterText = (filterText) => {
this.setState(() => ({filterText}));
};
_onChangeNumColumns = (numColumns) => {
this.setState(() => ({numColumns: Number(numColumns)}));
};
render() {
const filterRegex = new RegExp(String(this.state.filterText), 'i');
const filter = (item) => (filterRegex.test(item.text) || filterRegex.test(item.title));
const filteredData = this.state.data.filter(filter);
return (
<UIExplorerPage
title={this.props.navigator ? null : '<FlatList> - MultiColumn'}
noSpacer={true}
noScroll={true}>
<View style={styles.searchRow}>
<View style={styles.row}>
<PlainInput
onChangeText={this._onChangeFilterText}
placeholder="Search..."
value={this.state.filterText}
/>
<Text> numColumns: </Text>
<PlainInput
clearButtonMode="never"
onChangeText={this._onChangeNumColumns}
value={this.state.numColumns ? String(this.state.numColumns) : ''}
/>
</View>
<View style={styles.row}>
{renderSmallSwitchOption(this, 'virtualized')}
{renderSmallSwitchOption(this, 'fixedHeight')}
{renderSmallSwitchOption(this, 'logViewable')}
</View>
</View>
<SeparatorComponent />
<FlatList
ItemSeparatorComponent={SeparatorComponent}
ListFooterComponent={FooterComponent}
ListHeaderComponent={HeaderComponent}
getItemLayout={this.state.fixedHeight ? this._getItemLayout : undefined}
data={filteredData}
key={this.state.numColumns + (this.state.fixedHeight ? 'f' : 'v')}
numColumns={this.state.numColumns || 1}
onRefresh={() => alert('onRefresh: nothing to refresh :P')}
refreshing={false}
renderItem={this._renderItemComponent}
shouldItemUpdate={this._shouldItemUpdate}
disableVirtualization={!this.state.virtualized}
onViewableItemsChanged={this._onViewableItemsChanged}
legacyImplementation={false}
/>
</UIExplorerPage>
);
}
_getItemLayout(data: any, index: number): {length: number, offset: number, index: number} {
return getItemLayout(data, index);
}
_renderItemComponent = ({item}) => {
return (
<ItemComponent
item={item}
fixedHeight={this.state.fixedHeight}
onPress={this._pressItem}
/>
);
};
_shouldItemUpdate(prev, next) {
// Note that this does not check state.fixedHeight because we blow away the whole list by
// changing the key anyway.
return prev.item !== next.item;
}
  // This is called when items change viewability by scrolling into or out of the viewable area.
  _onViewableItemsChanged = (info: {
    changed: Array<{
      key: string, isViewable: boolean, item: {columns: Array<*>}, index: ?number, section?: any
    }>},
  ) => {
    // Impressions can be logged here
    if (this.state.logViewable) {
      // Each entry's item is summarized as '...' to keep the log readable.
      infoLog('onViewableItemsChanged: ', info.changed.map((v) => ({...v, item: '...'})));
    }
  };
  // Forwards a cell press to the module-level pressItem helper.
  _pressItem = (key: number) => {
    pressItem(this, key);
  };
}
// Local styles for the search/controls header above the list.
const styles = StyleSheet.create({
  row: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  searchRow: {
    padding: 10,
  },
});
// Exported so the UIExplorer registry can mount this example.
module.exports = MultiColumnExample;
| shrutic/react-native | Examples/UIExplorer/js/MultiColumnExample.js | JavaScript | bsd-3-clause | 5,181 |
#!/usr/bin/env python
"""Packaging script for sdict (distutils-based)."""
from distutils.core import setup


def _read(path):
    """Return the full text of a file shipped alongside setup.py."""
    with open(path) as handle:
        return handle.read()


readme = _read('README.rst')
changes = _read('CHANGES.rst')

setup(
    name='sdict',
    version='0.1.0',
    description='dict subclass with slicing and insertion.',
    author='Jared Suttles',
    url='https://github.com/jaredks/sdict',
    py_modules=['sdict'],
    package_data={'': ['LICENSE', 'README.rst', 'CHANGES.rst']},
    long_description=readme + '\n\n' + changes,
    license='BSD License'
)
| jaredks/sdict | setup.py | Python | bsd-3-clause | 511 |
import { LightningElement, api, track, wire } from "lwc";
//labels
import stgColAccountRecordType from "@salesforce/label/c.stgColAccountRecordType";
import stgColAutoEnrollmentStatus from "@salesforce/label/c.stgColAutoEnrollmentStatus";
import stgColAutoEnrollmentRole from "@salesforce/label/c.stgColAutoEnrollmentRole";
import stgOptSelect from "@salesforce/label/c.stgOptSelect";
import stgAutoEnrollmentEditModalBody from "@salesforce/label/c.stgAutoEnrollmentEditModalBody";
import stgApiNameLabel from "@salesforce/label/c.stgApiNameLabel";
import stgTellMeMoreLink from "@salesforce/label/c.stgTellMeMoreLink";
import stgAutoEnrollmentNewModalBody from "@salesforce/label/c.stgAutoEnrollmentNewModalBody";
import stgAccountRecordTypeHelp from "@salesforce/label/c.stgAccountRecordTypeHelp";
import stgAutoEnrollmentDeleteModalBody from "@salesforce/label/c.stgAutoEnrollmentDeleteModalBody";
//apex
import getAccountRecordTypeComboboxVModel from "@salesforce/apex/ProgramSettingsController.getAccountRecordTypeComboboxVModel";
import getAutoEnrollmentMappingStatusComboboxVModel from "@salesforce/apex/ProgramSettingsController.getAutoEnrollmentMappingStatusComboboxVModel";
import getAutoEnrollmentMappingRoleComboboxVModel from "@salesforce/apex/ProgramSettingsController.getAutoEnrollmentMappingRoleComboboxVModel";
/**
 * Modal body used to create, edit, or delete an auto-enrollment mapping
 * (account record type -> program enrollment status/role).
 * The parent modal supplies the action name and current values via @api;
 * combobox view-models are loaded reactively through @wire adapters and
 * user edits are bubbled up as composed CustomEvents.
 */
export default class autoEnrollmentMappingModalBody extends LightningElement {
    // "edit" | "create" | "delete" — selects the modal body variant rendered
    @api actionName;
    @api oldAccountRecordType;
    @api newAccountRecordType;
    @api autoProgramEnrollmentStatus;
    @api autoProgramEnrollmentRole;
    // View-models backing the three comboboxes, plus their raw wire results
    // (kept so the wires can be refreshed if needed).
    @track accountRecordTypeComboboxVModel;
    @track accountRecordTypeComboboxWireResult;
    @track autoEnrollmentMappingStatusComboboxVModel;
    @track autoEnrollmentMappingStatusComboboxVModelWireResult;
    @track autoEnrollmentMappingRoleComboboxVModel;
    @track autoEnrollmentMappingRoleComboboxVModelWireResult;
    // Custom labels, grouped for template access.
    labelReference = {
        accountRecordTypeCombobox: stgColAccountRecordType,
        statusCombobox: stgColAutoEnrollmentStatus,
        roleCombobox: stgColAutoEnrollmentRole,
        comboboxPlaceholderText: stgOptSelect,
        modalBodyEdit: stgAutoEnrollmentEditModalBody,
        modalBodyCreate: stgAutoEnrollmentNewModalBody,
        modalBodyDelete: stgAutoEnrollmentDeleteModalBody,
        apiNameDisplay: stgApiNameLabel,
        tellMeMoreLink: stgTellMeMoreLink,
        stgAccountRecordTypeHelp: stgAccountRecordTypeHelp,
    };
    // data-* attribute names used to identify inputs in the template.
    inputAttributeReference = {
        accountRecordType: "accountRecordType",
        autoProgramEnrollmentStatus: "autoProgramEnrollmentStatus",
        autoProgramEnrollmentRole: "autoProgramEnrollmentRole",
    };
    // Reactively reloads the record-type combobox whenever the selected
    // newAccountRecordType changes.
    @wire(getAccountRecordTypeComboboxVModel, {
        accountRecordType: "$newAccountRecordType",
    })
    accountRecordTypeComboboxVModelWire(result) {
        this.accountRecordTypeComboboxWireResult = result;
        if (result.data) {
            this.accountRecordTypeComboboxVModel = result.data;
        } else if (result.error) {
            //console.log("error retrieving accountRecordTypeComboboxVModel");
        }
    }
    // Reactively reloads the status combobox for the current status value.
    @wire(getAutoEnrollmentMappingStatusComboboxVModel, {
        autoProgramEnrollmentStatus: "$autoProgramEnrollmentStatus",
    })
    autoEnrollmentMappingStatusComboboxVModelWire(result) {
        this.autoEnrollmentMappingStatusComboboxVModelWireResult = result;
        if (result.data) {
            this.autoEnrollmentMappingStatusComboboxVModel = result.data;
        } else if (result.error) {
            //console.log("error retrieving autoEnrollmentMappingStatusComboboxVModel");
        }
    }
    // Reactively reloads the role combobox for the current role value.
    @wire(getAutoEnrollmentMappingRoleComboboxVModel, {
        autoProgramEnrollmentRole: "$autoProgramEnrollmentRole",
    })
    autoEnrollmentMappingRoleComboboxVModelWire(result) {
        this.autoEnrollmentMappingRoleComboboxVModelWireResult = result;
        if (result.data) {
            this.autoEnrollmentMappingRoleComboboxVModel = result.data;
        } else if (result.error) {
            //console.log("error retrieving autoEnrollmentMappingRoleComboboxVModel");
        }
    }
    handleAccountRecordTypeChange(event) {
        this.dispatchAccountRecordTypeChangeEvent(event.detail.value);
    }
    // Bubbles the selected record type to the parent modal (composed so it
    // crosses shadow-DOM boundaries).
    dispatchAccountRecordTypeChangeEvent(newAccountRecordType) {
        const accountRecordTypeDetails = {
            newAccountRecordType: newAccountRecordType,
        };
        const accountRecordTypeChangeEvent = new CustomEvent("autoenrollmentmappingaccountrecordtypechange", {
            detail: accountRecordTypeDetails,
            bubbles: true,
            composed: true,
        });
        this.dispatchEvent(accountRecordTypeChangeEvent);
    }
    handleAutoEnrollmentMappingStatusChange(event) {
        this.dispatchAutoEnrollmentMappingStatusChangeEvent(event.detail.value);
    }
    // Bubbles the selected enrollment status to the parent modal.
    dispatchAutoEnrollmentMappingStatusChangeEvent(autoProgramEnrollmentStatus) {
        const autoEnrollmentMappingStatusDetails = {
            autoProgramEnrollmentStatus: autoProgramEnrollmentStatus,
        };
        const autoEnrollmentMappingStatusChangeEvent = new CustomEvent("autoenrollmentmappingstatuschange", {
            detail: autoEnrollmentMappingStatusDetails,
            bubbles: true,
            composed: true,
        });
        this.dispatchEvent(autoEnrollmentMappingStatusChangeEvent);
    }
    handleAutoEnrollmentMappingRoleChange(event) {
        this.dispatchAutoEnrollmentMappingRoleChangeEvent(event.detail.value);
    }
    // Bubbles the selected enrollment role to the parent modal.
    dispatchAutoEnrollmentMappingRoleChangeEvent(autoProgramEnrollmentRole) {
        const autoEnrollmentMappingRoleDetails = {
            autoProgramEnrollmentRole: autoProgramEnrollmentRole,
        };
        const autoEnrollmentMappingRoleChangeEvent = new CustomEvent("autoenrollmentmappingrolechange", {
            detail: autoEnrollmentMappingRoleDetails,
            bubbles: true,
            composed: true,
        });
        this.dispatchEvent(autoEnrollmentMappingRoleChangeEvent);
    }
    // Modal description text; for delete it substitutes the mapping values,
    // otherwise it appends the "tell me more" hyperlink.
    // NOTE(review): actions other than edit/create/delete fall through and
    // return undefined — confirm callers only pass these three.
    get autoEnrollmentMappingModalDesc() {
        switch (this.actionName) {
            case "edit":
                return this.labelReference.modalBodyEdit + " " + this.autoEnrollmentHyperLink;
            case "create":
                return this.labelReference.modalBodyCreate + " " + this.autoEnrollmentHyperLink;
            case "delete":
                return this.labelReference.modalBodyDelete
                    .replace("{0}", this.oldAccountRecordType)
                    .replace("{1}", this.autoProgramEnrollmentStatus)
                    .replace("{2}", this.autoProgramEnrollmentRole);
        }
    }
    // True when the modal shows editable comboboxes (edit/create).
    get modifyRecords() {
        return this.actionName === "edit" || this.actionName === "create";
    }
    // True when the modal shows the delete confirmation body.
    get deleteRecords() {
        return this.actionName === "delete";
    }
    // Raw anchor markup for the documentation link (rendered via
    // lightning-formatted-rich-text or equivalent in the template).
    get autoEnrollmentHyperLink() {
        return (
            '<a href="https://powerofus.force.com/EDA-Configure-Affiliations-Settings">' +
            this.labelReference.tellMeMoreLink +
            "</a>"
        );
    }
    // "API Name: <value>" helper text under the record-type combobox.
    get accountRecordTypeApiNameLabel() {
        return this.labelReference.apiNameDisplay.replace("{0}", this.accountRecordTypeComboboxVModel.value);
    }
}
| SalesforceFoundation/HEDAP | force-app/main/default/lwc/autoEnrollmentMappingModalBody/autoEnrollmentMappingModalBody.js | JavaScript | bsd-3-clause | 7,220 |
<?php
// Yii2 view: "Create Salary" page; sets the title/breadcrumbs and
// delegates the actual input fields to the shared _form.php partial.
use yii\helpers\Html;
/* @var $this yii\web\View */
/* @var $model app\models\Salary */
$this->title = 'Create Salary';
$this->params['breadcrumbs'][] = ['label' => 'Salaries', 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="salary-create">
    <h1><?= Html::encode($this->title) ?></h1>
    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>
</div>
| kimniyom/transport | views/salary/create.php | PHP | bsd-3-clause | 413 |
<?php

namespace core\control;

use core\util\param\Validator as Validator;

/**
 * Simple timer class. If the timer is started one can call Timer::get()
 * to check if the timeout already passed.
 *
 * Fix: the standalone flag is now stored in the declared property
 * $timer_standalone. Previously the constructor and start() wrote to an
 * undeclared $this->timer_standalone (a dynamic property, deprecated in
 * PHP 8.2) while the declared $standalone property stayed false and was
 * the one reported by __toString(), so the printed state never matched
 * the actual flag.
 *
 * @author Marc Bredt
 */
class Timer {

  /**
   * Default timer timeout in seconds.
   */
  const timer_default_timeout = 60;

  /**
   * Default standalone flag.
   */
  const timer_default_standalone = false;

  /**
   * Timeout in seconds set during creation.
   */
  private $timer_timeout = null;

  /**
   * Start timestamp in seconds set upon start.
   */
  private $timer_start_ts = null;

  /**
   * End timestamp in seconds set upon start.
   */
  private $timer_end_ts = null;

  /**
   * Standalone flag: when true this timer blocks in start() until the
   * timeout passes.
   */
  private $timer_standalone = false;

  /**
   * Indicates if the timer timed out.
   */
  private $timed_out = false;

  /**
   * Create a timer and set the timeout.
   * @param int  $timeout    timeout in seconds; falls back to the default
   *                         when it does not validate as an integer
   * @param bool $standalone blocking flag; falls back to the default when
   *                         it does not validate as a boolean
   */
  public function __construct($timeout = self::timer_default_timeout,
                              $standalone = self::timer_default_standalone) {
    // set timeout, falling back to the default on invalid input
    if(Validator::isa($timeout,"integer",7))
      $this->timer_timeout = $timeout;
    else
      $this->timer_timeout = self::timer_default_timeout;
    // set standalone flag, falling back to the default on invalid input
    if(Validator::isa($standalone,"boolean"))
      $this->timer_standalone = $standalone;
    else
      $this->timer_standalone = self::timer_default_standalone;
  }

  /**
   * Start the timer. Sets start and end timestamps.
   * If it is a standalone timer it blocks for $this->timer_timeout
   * seconds, otherwise it just initializes the start and end timestamps.
   */
  public function start() {
    $this->timed_out = false;
    $this->timer_start_ts = time();
    $this->timer_end_ts = time() + $this->timer_timeout;
    if($this->timer_standalone) {
      // poll once per second until the timeout has fully elapsed
      while($this->get()>0) { sleep(1); }
    }
  }

  /**
   * Check if the timeout already passed.
   * @return int seconds until the timeout passes, 0 once timed out
   */
  public function get() {
    if(time()>=$this->timer_end_ts) { $this->timed_out = true; }
    return ($this->timed_out ? 0 : $this->timer_end_ts - time());
  }

  /**
   * Check if the timer timed out.
   * Functions get() and get_timed_out() should be distinct to
   * avoid modifying the timer state after acquiring a semaphore.
   * @return bool $this->timed_out
   * @see SharedMemoryHandler
   */
  public function get_timed_out() {
    return $this->timed_out;
  }

  /**
   * Get a string representation for this timer.
   * @return string timer as string
   */
  public function __toString() {
    return __CLASS__." (to=".$this->timer_timeout.
           ", start=".$this->timer_start_ts.
           ", end=".$this->timer_end_ts.
           ", standalone=".var_export($this->timer_standalone,true).
           ", time_left=".$this->get().
           ", timedout=".var_export($this->get_timed_out(),true).
           ")";
  }

}
?>
| marcbredt/heili | trunk/src/core/control/timer.class.php | PHP | bsd-3-clause | 3,021 |
<?php
// Access guard: this tag library must be included through the CMS entry
// point (which defines SLINEINC); direct HTTP requests are rejected.
if(!defined('SLINEINC'))
{
	exit("Request Error!");
}
/**
 * Tag-library callback: renders the visible child comments (replies) of
 * the current comment using the tag's inner template.
 *
 * Fix: the author-nickname fallback previously checked
 * $userinfo['nick'] while reading $userinfo['nickname']; it now checks
 * 'nickname', consistent with the reply-member handling below.
 *
 * @param object $ctag   parsed tag carrying attributes and the inner template
 * @param object $refObj enclosing template object; supplies the current
 *                       commentid/articleid fields
 * @return string rendered markup for all visible child comments
 */
function lib_getchildcomment(&$ctag,&$refObj)
{
	global $dsql;
	$attlist="row|8,typeid|,groupname|";
	FillAttsDefault($ctag->CAttribute->Items,$attlist);
	extract($ctag->CAttribute->Items, EXTR_SKIP);
	$innertext = trim($ctag->GetInnertext());
	$revalue = '';
	$commentid=$refObj->Fields['commentid'];
	$articleid=$refObj->Fields['articleid'];
	if(empty($commentid))
		return '';
	// Only approved (isshow=1) replies, oldest first.
	$sql="select * from #@__comment where dockid='$commentid' and isshow=1 order by addtime asc";
	$dsql->SetQuery($sql);
	$dsql->Execute();
	$ctp = new STTagParse();
	$ctp->SetNameSpace("field","[","]");
	$ctp->LoadSource($innertext);
	$GLOBALS['autoindex'] = 0;
	while($row = $dsql->GetArray())
	{
		// Author avatar/nickname with anonymous fallbacks.
		$userinfo=$GLOBALS['User']->getInfoByMid($row['memberid']);
		$row['litpic']=$userinfo['litpic'] ? $userinfo['litpic'] : '/templets/smore/images/member_default.gif';
		$row['nickname']=empty($userinfo['nickname'])?'匿名':$userinfo['nickname'];
		// Member being replied to (parent comment's author).
		$replymember=loc_getCommentMemberInfo($row['pid']);
		$row['replylitpic']=getUploadFileUrl($replymember['litpic']);
		$row['replynickname']=empty($replymember['nickname'])?'匿名':$replymember['nickname'];
		$row['replymemberid']=$replymember['id'];
		$row['articleid']=$articleid;
		// Bind the row into the inner template's [field] tags.
		foreach($ctp->CTags as $tagid=>$ctag)
		{
			if($ctag->GetName()=='array')
			{
				$ctp->Assign($tagid, $row);
			}
			else
			{
				if( !empty($row[$ctag->GetName()])) $ctp->Assign($tagid,$row[$ctag->GetName()]);
			}
		}
		$revalue .= $ctp->GetResult();
		$GLOBALS['autoindex']++;
	}
	return $revalue;
}
/**
 * Load the member record behind a given comment.
 *
 * @param mixed $commentid comment primary key; interpolated directly into
 *        SQL — assumed to be a trusted internal id (it comes from the
 *        comment table's pid column), not raw user input. TODO confirm.
 * @return array member row for the comment's author
 */
function loc_getCommentMemberInfo($commentid)
{
	global $dsql;
	$memberid=$dsql->GetOne("select memberid from #@__comment where id='$commentid'");
	$memberinfo=$dsql->GetOne("select * from #@__member where mid='{$memberid['memberid']}'");
	return $memberinfo;
}
| lz1988/stourwebcms | include/taglib/smore/getchildcomment.lib.php | PHP | bsd-3-clause | 1,998 |
require 'active_record'
require 'pure_record/pure_class'
require 'pure_record/actions'
module PureRecord
  # Factory shorthands for the action objects in PureRecord::Actions.
  def self.Create(*args)
    PureRecord::Actions::Create.new(*args)
  end

  def self.Update(*args)
    PureRecord::Actions::Update.new(*args)
  end

  def self.Delete(*args)
    PureRecord::Actions::Delete.new(*args)
  end

  # Builds an anonymous PureClass subclass mirroring the given ActiveRecord
  # class: one accessor per column plus lazy readers for each association.
  def self.generate_pure_class(active_record_class)
    if !active_record_class.ancestors.include?(ActiveRecord::Base)
      raise ArgumentError.new("Invalid argument to 'pure'. #{active_record_class.name} is not a subclass of ActiveRecord::Base, but it very well should be.")
    end

    Class.new PureClass do
      self.attributes          = active_record_class.columns.map(&:name)
      self.associations        = active_record_class.reflect_on_all_associations.map(&:name)
      self.active_record_class = active_record_class

      attr_accessor *attributes

      associations.each do |assoc_name|
        define_method(assoc_name) { fetch_association(assoc_name) }
      end
    end
  end

  # Convert AR record(s) to pure record(s). Accepts a single record or an
  # Array; the shape of the input is preserved.
  def self.purify(record_s)
    cached_purify(record_s, {})
  end

  # Convert pure record(s) back to AR record(s); inverse of purify.
  def self.impurify(record_s)
    cached_impurify(record_s, {})
  end

  # Run AR validations by round-tripping each record through impurify.
  def self.validate(record_s)
    Array(record_s).all? do |record|
      impurify(record).valid?
    end
  end

  private

  # Recursive worker for purify. The association_cache (object_id => pure
  # record) breaks cycles between mutually-referencing associations.
  def self.cached_purify(record_s, association_cache)
    one_or_many(record_s, 'purify') do |records|
      records.map do |record|
        # NOTE(review): `return` here exits cached_purify itself, not just
        # this map iteration — for an Array input whose element hits one of
        # these branches the whole call short-circuits to a single record.
        # Looks like a fast path for the single-record case; confirm.
        if record.kind_of?(PureRecord::PureClass)
          return record.dup
        end

        if !record.class.respond_to?(:pure_class)
          raise ArgumentError.new("#{record.class.name} does not have a pure class. Perhaps you forgot to define the 'pure_class' method for #{record.class.name}.")
        end

        if association_cache[record.object_id]
          return association_cache[record.object_id]
        end

        attrs       = record.attributes.slice(*record.class.pure_class.attributes)
        attrs       = attrs.merge(options: {already_persisted: !record.new_record?})
        pure_record = record.class.pure_class.new(attrs)
        association_cache[record.object_id] = pure_record

        # Only associations that were already loaded on the AR side are
        # carried over; unloaded ones stay absent.
        assoc_hash = record.class.pure_class.associations.each_with_object({}) do |assoc_name, hash|
          assoc = record.association(assoc_name)
          if assoc.loaded? && assoc.target
            hash[assoc_name] = cached_purify(assoc.target, association_cache)
          elsif assoc.loaded?
            hash[assoc_name] = nil
          end
        end

        pure_record.add_associations(assoc_hash)
        pure_record
      end
    end
  end

  # Recursive worker for impurify; mirrors cached_purify including the
  # cycle-breaking cache and the early-return behavior noted above.
  def self.cached_impurify(record_s, association_cache)
    one_or_many(record_s, 'impurify') do |records|
      records.map do |record|
        if record.kind_of?(ActiveRecord::Base)
          return impurify(purify(record))
        end

        if association_cache[record.object_id]
          return association_cache[record.object_id]
        end

        instance = record.class.active_record_class.new
        association_cache[record.object_id] = instance

        record.class.attributes.each do |attr|
          instance.send("#{attr}=", record.send(attr))
        end

        record.loaded_associations.each do |assoc_name, pure_associations|
          # NOTE(review): loaded! is invoked twice for each association
          # (here and two lines below) and this first result is unused.
          assoc = instance.association(assoc_name).loaded!
          impure_assoc = pure_associations ? cached_impurify(pure_associations, association_cache) : nil
          instance.association(assoc_name).loaded!
          instance.association(assoc_name).writer(impure_assoc)
        end

        # Restore persistence state bypassing AR callbacks.
        instance.instance_variable_set("@new_record", !record.already_persisted?)
        instance
      end
    end
  end

  ValidClasses = [Array, ActiveRecord::Base, PureRecord::PureClass]

  # Normalizes single-record vs Array input: yields an Array either way and
  # unwraps the result when the caller passed a single record.
  def self.one_or_many(record_s, method_name, &block)
    if !ValidClasses.any? { |klass| record_s.kind_of?(klass) }
      raise ArgumentError.new("You cannot use '#{method_name}' with #{record_s.class.name}. '#{method_name}' can only be used on an instance of ActiveRecord::Base, PureRecord::PureClass, or Array.")
    end

    is_collection = record_s.kind_of?(Array)
    records       = is_collection ? record_s : [record_s]
    results       = block.call(records)
    is_collection ? results : results.first
  end
end
| MichaelBaker/pure_record | lib/pure_record.rb | Ruby | bsd-3-clause | 4,267 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Goal: Implement simple tasks executed during deployment with deploy.sh
#
# @authors
# Andrei Sura <sura.andrei@gmail.com>
# Taeber Rapczak <taeber@ufl.edu>
"""
Fabric deployment file.
@see
http://fabric-docs.readthedocs.org/en/latest/
http://docs.fabfile.org/en/latest/usage/fab.html#cmdoption--show
http://docs.fabfile.org/en/latest/api/core/operations.html
"""
import imp
import sys
import os.path
from fabric import colors
from fabric.api import cd
from fabric.api import env, local, lcd
from fabric.context_managers import hide, prefix, settings
from fabric.contrib.console import confirm
from fabric.contrib.files import exists, upload_template
from fabric.operations import require, run, sudo
from fabric.utils import abort
# from pprint import pprint
def help():
    """Print all available fab tasks (delegates to `fab --list`)."""
    local('fab --list')
# =========================================================================
# Deployment repos
# =========================================================================
def load_environ(target, new_settings={}):
    """Load an environment properties file 'environ/fabric.py'.

    :param target: environment name; also the folder containing fabric.py
    :param new_settings: overrides merged into the loaded settings
    """
    # pprint(sys.path)
    fab_conf_file = os.path.join(target, 'fabric.py')
    if not os.path.isfile(fab_conf_file):
        abort("Please create the '{}' file".format(fab_conf_file))
    try:
        # imp.load_source executes the module (Python-2-era API)
        fabric = imp.load_source('fabric', fab_conf_file)
    except ImportError:
        abort("Can't load '{}' environ; is PYTHONPATH exported?".format(target))
    # Merge the environment-specific settings into fabric's global env
    env.update(fabric.get_settings(new_settings))
    env.environment = target
def production(new_settings=None):
    """Work on the production environment.

    :param new_settings: optional dict of setting overrides. A None
        sentinel replaces the old mutable-default ``{}`` argument
        (callers passing a dict or nothing behave exactly as before).
    """
    load_environ('production', new_settings if new_settings is not None else {})
def staging(new_settings=None):
    """Work on the staging environment.

    :param new_settings: optional dict of setting overrides. A None
        sentinel replaces the old mutable-default ``{}`` argument
        (callers passing a dict or nothing behave exactly as before).
    """
    load_environ('staging', new_settings if new_settings is not None else {})
def _remove_directories():
    """Remove the top project directory"""
    print('\n\nRemoving directories...')
    # `exists` is fabric.contrib.files.exists — a check on the REMOTE host
    if exists('%(project_path)s' % env):
        sudo('rm -rf %(project_path)s' % env)
    else:
        print('Path %(project_path)s does not exist' % env)
def _init_directories():
    """Create initial directories"""
    # @TODO: create a backup if directory exists
    print('\n\nCreating initial directories...')
    # Wipes any previous deployment before recreating the tree
    _remove_directories()
    sudo('mkdir -p %(project_path)s/logs' % env)
    # sudo('do something as user', user=notme)
    sudo('chown -R %(user)s:%(server_group)s %(project_path)s' % env)
    # Let group members to delete files
    sudo('chmod -R 770 %(project_path)s' % env)
def _fix_perms(folder):
    """ Fix permissions for a specified folder:
        $ chgrp authorized-group some-folder
        $ chmod -R g+w,o-rwx some-folder
    """
    # setgid (g+s) keeps new files group-owned by the server group
    sudo('chgrp -R {} {}'.format(env.server_group, folder))
    sudo('chmod -R g+sw,o-rwx {}'.format(folder))
def _init_virtualenv():
    """Create initial virtualenv"""
    print('\n\nCreating virtualenv...')
    run('virtualenv -p %(python)s --no-site-packages %(env_path)s' % env)
    # pip is bootstrapped via easy_install inside the fresh virtualenv
    with prefix('source %(env_path)s/bin/activate' % env):
        run('easy_install pip')
    _fix_perms(env.env_path)
def _install_requirements():
    """Install dependencies defined in the requirements file"""
    print('\n\nInstalling requirements...')
    # Installs inside the project virtualenv on the remote host
    with prefix('source %(env_path)s/bin/activate' % env):
        run('pip install -r '
            ' %(project_repo_path)s/app/requirements/deploy.txt'
            % env)
    _fix_perms(env.env_path)
def _update_requirements():
    """Update dependencies defined in the requirements file"""
    print('\n\nUpdating requirements...')
    # Same as _install_requirements but upgrades already-installed packages
    with prefix('source %(env_path)s/bin/activate' % env):
        run('pip install -U -r '
            ' %(project_repo_path)s/app/requirements/deploy.txt' % env)
    _fix_perms(env.env_path)
def _is_prod():
    """ Check if env.environment == 'production'

    :return: True when the active environment is production
    """
    require('environment', provided_by=[production, staging])
    return env.environment == 'production'
def bootstrap(tag='master'):
    """Bootstrap the deployment using the specified branch"""
    require('environment', provided_by=[production, staging])
    print(MOTD_PROD if _is_prod() else MOTD_STAG)
    msg = colors.red('\n%(project_path)s exists. '
                     'Do you want to continue anyway?' % env)
    # Proceed when the remote path is missing, or the operator confirms
    # overwriting an existing deployment.
    if (not exists('%(project_path)s' % env)
            or confirm(msg, default=False)):
        with settings(hide('stdout', 'stderr')):
            _init_directories()
            _init_virtualenv()
            _git_clone_tag(tag=tag)
            _install_requirements()
            update_config(tag=tag)  # upload new config files
            enable_site()
    else:
        sys.exit('\nAborting.')
def deploy(tag='master'):
    """Update the code, config, requirements, and enable the site
    """
    require('environment', provided_by=[production, staging])
    with settings(hide('stdout', 'stderr')):
        # Site is disabled for the duration of the rollout
        disable_site()
        _git_clone_tag(tag=tag)
        _install_requirements()
        _update_requirements()
        update_config(tag=tag)  # upload new config files
        enable_site()
def mysql_conf():
    """ Store mysql login credentials to the encrypted file
    ~/.mylogin.cnf

    Once created you can connect to the database without typing the password.

    Example:
        $ mysql_config_editor set --login-path=local --user=root --password \
            --host=localhost
        $ mysql --login-path=local

    For more details see:
        https://dev.mysql.com/doc/refman/5.6/en/mysql-config-editor.html
    """
    require('environment', provided_by=[production, staging])
    print("Storing the database credentials to ~/.mylogin.cnf")
    print(colors.yellow("⚠ Plese note that if you have a '#' in your password"
                        " then you have to specify the password in quotes."))
    # Runs LOCALLY and prompts interactively for the password
    cmd = ("mysql_config_editor set "
           " --login-path=fabric_%(db_host)s "
           " --user=%(db_user)s "
           " --password "
           " --host=%(db_host)s"
           % env)
    local(cmd, capture=True)
def _mysql_login_path():
    """ Create a string to be used for storing credentials to ~/.mylogin.cnf

    :return: the login-path name, e.g. "fabric_dbhost"
    @see #mysql_conf()
    """
    require('environment', provided_by=[production, staging])
    return "fabric_%(db_host)s" % env
def mysql_conf_test():
    """ Check if a configuration was created for the host"""
    require('environment', provided_by=[production, staging])
    from subprocess import Popen, PIPE
    login_path = _mysql_login_path()
    cmd = ("mysql_config_editor print --login-path={} 2> /dev/null"
           .format(login_path) % env)
    proc = Popen(cmd, shell=True, stdout=PIPE)
    (out, err) = proc.communicate()
    # print("Checking mysql login path: {}".format(login_path))
    # NOTE(review): Python-2 assumption — on Python 3 `out` is bytes and
    # "" != b"..." is always True; this file is py2 (see iteritems below).
    has_config = ("" != out)
    if not has_config:
        print("There are no mysql credentials stored in ~/.mylogin.cnf file."
              " Please store the database credentials by running: \n\t"
              " fab {} mysql_conf".format(env.environment))
        sys.exit('\nAborting.')
def mysql_check_db_exists():
    """ Check if the specified database was already created.

    :return: True when the schema exists, False otherwise.

    Fix: the previous implementation returned the raw "0"/"1" string
    captured from mysql. A non-empty string is always truthy, so every
    caller's ``if not exists:`` guard could never fire; we now convert
    the captured count to a real boolean.
    """
    require('environment', provided_by=[production, staging])
    mysql_conf_test()
    cmd = ("echo 'SELECT COUNT(*) FROM information_schema.SCHEMATA "
           " WHERE SCHEMA_NAME = \"%(db_name)s\" ' "
           " | mysql --login-path=fabric_%(db_host)s "
           " | sort | head -1"
           % env)
    result = local(cmd, capture=True)
    # print("check_db_exists: {}".format(result))
    return result.strip() == '1'
def mysql_count_tables():
    """ Return the number of tables in the database """
    require('environment', provided_by=[production, staging])
    # Relies on mysql_check_db_exists() returning a falsy value when the
    # database is missing. Note: this local shadows the imported
    # fabric.contrib.files.exists within this function.
    exists = mysql_check_db_exists()
    if not exists:
        abort(colors.red("Unable to list database '%(db_name)s' tables."
                         "The database does not exist." % env))
    login_path = _mysql_login_path()
    cmd = ("echo 'SELECT COUNT(*) FROM INFORMATION_SCHEMA.TABLES "
           " WHERE TABLE_SCHEMA = \"%(db_name)s\" ' "
           " | mysql --login-path={}"
           " | sort | head -1".format(login_path)
           % env)
    result = local(cmd, capture=True)
    return int(result)
def mysql_list_tables():
    """ Show the list of tables with row counts """
    require('environment', provided_by=[production, staging])
    # Relies on mysql_check_db_exists() returning a falsy value when the
    # database is missing (local shadows fabric.contrib.files.exists).
    exists = mysql_check_db_exists()
    if not exists:
        abort(colors.red("Unable to list database '%(db_name)s' tables."
                         "The database does not exist." % env))
    login_path = _mysql_login_path()
    cmd = ("echo 'SELECT table_name, table_rows FROM INFORMATION_SCHEMA.TABLES "
           " WHERE TABLE_SCHEMA = \"%(db_name)s\" ' "
           " | mysql --login-path={}".format(login_path)
           % env)
    result = local(cmd, capture=True)
    print(result)
def mysql_create_tables():
    """ Create the application tables.
    Assumes that the database was already created and
    an user was granted `create` privileges.
    """
    require('environment', provided_by=[production, staging])
    # Relies on mysql_check_db_exists() returning a falsy value when the
    # database is missing (local shadows fabric.contrib.files.exists).
    exists = mysql_check_db_exists()
    if not exists:
        abort(colors.red("Unable to create tables in database '%(db_name)s'."
                         "The database does not exist" % env))
    total_tables = mysql_count_tables()
    if total_tables > 0:
        print(colors.red("The database already contains {} tables."
                         .format(total_tables)))
        sys.exit("If you need to re-create the tables please run: "
                 "\n\t fab {} mysql_reset_tables"
                 .format(env.environment))
    login_path = _mysql_login_path()
    # Migration scripts are applied in order from the sibling db/ folder
    files = ['001/upgrade.sql', '002/upgrade.sql', '002/data.sql']
    with lcd('../db/'):
        for sql in files:
            cmd = ("mysql --login-path={} %(db_name)s < {}"
                   .format(login_path, sql)
                   % env)
            local(cmd)
def mysql_drop_tables():
    """ Drop the application tables"""
    require('environment', provided_by=[production, staging])
    total_tables = mysql_count_tables()
    question = ("Do you want to drop the {} tables in '%(db_name)s'?"
                .format(total_tables) % env)
    if not confirm(question):
        abort(colors.yellow("Aborting at user request."))
    # Relies on mysql_check_db_exists() returning a falsy value when the
    # database is missing (local shadows fabric.contrib.files.exists).
    exists = mysql_check_db_exists()
    if not exists:
        abort(colors.red("Unable to drop tables in database '%(db_name)s'."
                         "The database does not exist" % env))
    # Downgrade scripts run in reverse migration order
    files = ['002/downgrade.sql', '001/downgrade.sql']
    with lcd('../db/'):
        for sql in files:
            cmd = ("mysql --login-path=fabric_%(db_host)s %(db_name)s < {}"
                   .format(sql)
                   % env)
            local(cmd)
def mysql_reset_tables():
    """Drop (when any exist) and re-create the application tables."""
    if mysql_count_tables() > 0:
        mysql_drop_tables()
    mysql_create_tables()
def _toggle_apache_site(state):
    """Switch site's status to enabled or disabled

    :param state: True enables the vhosts (a2ensite), False disables them
    Note: the `project_name` is used for referencing the config files
    """
    action = "Enabling" if state else "Disabling"
    print('\n%s site...' % action)
    env.apache_command = 'a2ensite' if state else 'a2dissite'
    sudo('%(apache_command)s %(project_name)s' % env)
    # We have to have the ssl config too because we use the NetScaler
    sudo('%(apache_command)s %(project_name)s-ssl' % env)
    sudo('service apache2 reload')
def check_syntax_apache():
    """Check the syntax of apache configurations"""
    require('environment', provided_by=[production, staging])
    # `apache2ctl -S` dumps the parsed vhost configuration
    out = sudo('apache2ctl -S')
    print("\n ==> Apache syntax check: \n{}".format(out))
def show_errors_apache():
    """Show the site's apache error log"""
    require('environment', provided_by=[production, staging])
    out = sudo('cat %(project_path)s/logs/error.log' % env)
    print("\n ==> Apache errors: \n{}".format(out))
def show_config_apache():
    """Show info about apache"""
    require('environment', provided_by=[production, staging])
    out = sudo('apachectl -V')
    print("\n ==> Apache config: \n{}".format(out))
    out = sudo('apachectl -S 2>&1')
    print("\n ==> Apache virtualhosts listening on port 443: \n{}".format(out))
    # sudo('apachectl -D DUMP_MODULES')
def enable_site():
    """Enable the site"""
    require('environment', provided_by=[production, staging])
    with settings(hide('stdout', 'stderr')):
        _toggle_apache_site(True)
def disable_site():
    """Disable the site"""
    require('environment', provided_by=[production, staging])
    with settings(hide('stdout', 'stderr')):
        _toggle_apache_site(False)
def update_config(tag='master'):
    """Update server configuration files

    Warnings:
        - the CWD of the fabfile is used to specify paths
        - if you use the "%(var)s/ % env" syntax make *sure*
            that you provide the "var" in your fabric.py file
    """
    require('environment', provided_by=[production, staging])
    print('\n\nUpdating server configuration...')
    local_settings_file = os.path.abspath('%(environment)s/settings.conf' % env)
    # Stamp the deployed tag into the LOCAL settings file before upload
    # (sed -i'.bak' keeps a backup copy next to it)
    local("""sed -i'.bak' -e "s|^APP_VERSION.*|APP_VERSION = '{}'|" {}"""
          .format(tag, local_settings_file))
    with settings(hide('stdout', 'stderr')):
        # Create a map of files to upload
        # https://github.com/fabric/fabric/blob/master/fabric/operations.py#put
        files_map = {
            0: {
                'local': os.path.abspath('dropper.wsgi'),
                'remote': env.wsgi_file,
                'mode': '644',
            },
            1: {
                'local': os.path.abspath('%(environment)s/virtualhost.conf'
                                         % env),
                'remote': env.vhost_file,
                'mode': '644',
                'group': 'root'
            },
            2: {
                'local': os.path.abspath('%(environment)s/virtualhost-ssl.conf'
                                         % env),
                'remote': env.vhost_ssl_file,
                'mode': '644',
                'group': 'root'
            },
            3: {
                'local': local_settings_file,
                'remote': env.settings_file,
                'mode': '640'
            }
        }
        # print files_map
        # upload files but create a bakup with *.bak extension if the
        # remote file already exists
        # NOTE: iteritems() — this fabfile targets Python 2
        for key, file_data in files_map.iteritems():
            local_file = file_data['local']
            remote_file = file_data['remote']
            mode = file_data['mode']
            if not os.path.isfile(local_file):
                abort("Please create the file: {}".format(local_file))
            print('\nUploading {} \n to ==> {} with mode {}'
                  .format(local_file, remote_file, mode))
            upload_template(filename=local_file,
                            destination=remote_file,
                            context=env,
                            use_sudo=True,
                            mirror_local_mode=False,
                            mode=mode,
                            pty=None)
            # vhost files belong to root; everything else to the app group
            if 'group' in file_data:
                sudo('chgrp {} {}'.format(file_data['group'], remote_file))
                print("Changed group to {} for {}"
                      .format(file_data['group'], remote_file))
            else:
                sudo('chgrp {} {}'.format(env.server_group, remote_file))
def restart_wsgi_app():
    """Reload the daemon processes by touching the WSGI file"""
    require('environment', provided_by=[production, staging])
    with settings(hide('stdout', 'stderr')):
        sudo('touch %(wsgi_file)s' % env)
def check_app():
    """cURL the target server to check if the app is up"""
    require('environment', provided_by=[production, staging])
    # -k: skip cert verification; grepping for the version string
    local('curl -sk https://%(project_url)s | grep "Version " ' % env)
def print_project_repo():
    """Show the git repository path specified in the fabric.py file."""
    repo = env.project_repo
    print("\n Project repo: {}".format(repo))
def print_project_name():
    """Show the project name used as the name for deploying the code."""
    name = env.project_name
    print("Project name: {}".format(name))
def git_tags(url=None, last_only=False):
    """ Show repo tags

    :param url: remote repo url; defaults to env.project_repo
    :param last_only: when True return only the highest version tag
    """
    require('environment', provided_by=[production, staging])
    if url is None:
        url = '%(project_repo)s' % env
    # sort -t. -k… orders semantic-version tags numerically
    cmd = ('git ls-remote --tags {} '
           ' | cut -d / -f3 '
           ' | sort -t. -k 1,1n -k 2,2n -k 3,3n '.format(url))
    if last_only:
        cmd += ' | tail -1'
    result = local(cmd, capture=True)
    return result
def _git_clone_tag(tag=None):
    """ Clone a `slim` version of the code

    :param tag: tag/branch to clone; defaults to the repo's latest tag
    Note: if the tag was already deployed once we create a backup
    """
    require('environment', provided_by=[production, staging])
    url = env.project_repo
    if tag is None:
        print(colors.yellow(
            "No tag specified. Attempt to read the last tag from: {}"
            .format(url)))
        tag = git_tags(url=url, last_only=True)
        if not tag:
            abort(colors.red('\nPlease specify a valid tag.'))
    # Clone the code to src/v0.0.1`
    destination = ('%(project_path_src)s/v{}'.format(tag) % env)
    cmd = ('git clone -b {} --single-branch %(project_repo)s {}'
           .format(tag, destination) % env)
    if exists(destination):
        # A previous deploy of this tag exists — move it aside, dated
        with cd(env.project_path_src):
            cmd_mv = 'mv v{} backup_`date "+%Y-%m-%d"`_v{}'.format(tag, tag)
            sudo(cmd_mv, user=env.server_user)
    sudo(cmd, user=env.server_user)
    _fix_perms(destination)
    with cd(env.project_path_src):
        # Create symlink
        sudo('ln -nsf {} current'.format(destination), user=env.server_user)
def git_archive_tag():
    """ Create a vTAG_NUMBER.tar archive file of the code
    suitable for deployment (excludes .git folder)

    Note: does not work with --remote=https://github.com/...)
    """
    require('environment', provided_by=[production, staging])
    last_tag = git_tags(last_only=True)
    archive_name = "v{}.tar".format(last_tag)
    # Archives the ../app subtree of the LOCAL checkout at the last tag
    local('git archive --format=tar --remote=. {} ../app > {}'
          .format(last_tag, archive_name))
    print("Created archive file: {}".format(archive_name))
# -----------------------------------------------------------------------------
MOTD_PROD = """
____ __ ____ _
| _ \ _ __ ___ _ __ _ __ ___ _ __ ____\ \ | _ \ _ __ ___ __| |
| | | | '__/ _ \| '_ \| '_ \ / _ \ '__| |_____\ \ | |_) | '__/ _ \ / _` |
| |_| | | | (_) | |_) | |_) | __/ | |_____/ / | __/| | | (_) | (_| |
|____/|_| \___/| .__/| .__/ \___|_| /_/ |_| |_| \___/ \__,_|
|_| |_|
"""
MOTD_STAG = """
____ __ ____
| _ \ _ __ ___ _ __ _ __ ___ _ __ \ \ | _ \ _____ __
| | | | '__/ _ \| '_ \| '_ \ / _ \ '__| _____\ \ | | | |/ _ \ \ / /
| |_| | | | (_) | |_) | |_) | __/ | |_____/ / | |_| | __/\ V /
|____/|_| \___/| .__/| .__/ \___|_| /_/ |____/ \___| \_/
|_| |_|
"""
| indera/redi-dropper-client | app/deploy/fabfile.py | Python | bsd-3-clause | 19,299 |
<?php
// ZF2 application configuration: module list plus ModuleManager listener
// options (module paths, config glob paths, optional caches).
return array(
    // This should be an array of module namespaces used in the application.
    'modules' => array(
        'Contacto',
        'Application',
    ),
    // These are various options for the listeners attached to the ModuleManager
    'module_listener_options' => array(
        // This should be an array of paths in which modules reside.
        // If a string key is provided, the listener will consider that a module
        // namespace, the value of that key the specific path to that module's
        // Module class.
        'module_paths' => array(
            './module',
            './vendor',
        ),
        // An array of paths from which to glob configuration files after
        // modules are loaded. These effectively override configuration
        // provided by modules themselves. Paths may use GLOB_BRACE notation.
        'config_glob_paths' => array(
            'config/autoload/{,*.}{global,local}.php',
        ),
        // Whether or not to enable a configuration cache.
        // If enabled, the merged configuration will be cached and used in
        // subsequent requests.
        //'config_cache_enabled' => $booleanValue,
        // The key used to create the configuration cache file name.
        //'config_cache_key' => $stringKey,
        // Whether or not to enable a module class map cache.
        // If enabled, creates a module class map cache which will be used
        // by in future requests, to reduce the autoloading process.
        //'module_map_cache_enabled' => $booleanValue,
        // The key used to create the class map cache file name.
        //'module_map_cache_key' => $stringKey,
        // The path in which to cache merged configuration.
        //'cache_dir' => $stringPath,
        // Whether or not to enable modules dependency checking.
        // Enabled by default, prevents usage of modules that depend on other modules
        // that weren't loaded.
        // 'check_dependencies' => true,
    ),
    // Used to create an own service manager. May contain one or more child arrays.
    //'service_listener_options' => array(
    //     array(
    //         'service_manager' => $stringServiceManagerName,
    //         'config_key'      => $stringConfigKey,
    //         'interface'       => $stringOptionalInterface,
    //         'method'          => $stringRequiredMethodName,
    //     ),
    // )
    // Initial configuration with which to seed the ServiceManager.
    // Should be compatible with Zend\ServiceManager\Config.
    // 'service_manager' => array(),
);
/*
* Copyright (c) 2005-2010, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.bundle.event;
import org.osgi.service.event.Event;
/**
 * Worker thread that delivers a single {@link Event} to one EventHandler on
 * behalf of a waiting caller.  The caller blocks on its own monitor; when the
 * delivery completes this thread calls {@code notifyAll()} on the caller,
 * unless the caller has already given up via
 * {@link #stopDeliveryNotification()} (i.e. timed out).
 *
 * The thread idles (wait()s on itself) between jobs; {@link #deliver} hands
 * it a new job and {@link #close} terminates the loop.
 *
 * @author Magnus Klack, Johnny Baveras
 *
 */
public class TimeoutDeliver extends Thread {
  /** The object to notify when done */
  private Object caller;
  /** The service reference of the handler to call. */
  private TrackedEventHandler handler;
  // Event currently being delivered (valid while delivered == false).
  private Event event;
  // true when no delivery job is pending/in progress.
  private boolean delivered = true;
  // Set by stopDeliveryNotification(): caller no longer wants to be notified.
  private boolean timedOut = false;
  // Set by close(): makes run() exit its loop.
  private boolean closed = false;

  public synchronized boolean isDelivered()
  {
    return delivered;
  }

  private synchronized void setDelivered(boolean delivered)
  {
    this.delivered = delivered;
  }

  private synchronized boolean isTimedOut()
  {
    return timedOut;
  }

  /**
   * Called to indicate that the caller does not wait for the completion of
   * current deliver job, i.e. the caller will not be notified if delivery is
   * finished after this call.
   *
   * @return true if delivery job not finished yet
   */
  public synchronized boolean stopDeliveryNotification()
  {
    timedOut = true;
    return !delivered;
  }

  /**
   * Terminates the worker loop; wakes the thread if it is idling.
   */
  public synchronized void close()
  {
    closed = true;
    notifyAll();
  }

  /** @return true while a delivery job is pending or in progress. */
  public synchronized boolean isActive()
  {
    return !delivered;
  }

  /**
   * Hands a new delivery job to this worker and wakes it.
   *
   * @param caller  object whose monitor the requester waits on
   * @param event   event to dispatch
   * @param handler handler to receive the event
   * @throws IllegalStateException if a job is already in progress
   */
  public synchronized void deliver(final Object caller,
                                   final Event event,
                                   final TrackedEventHandler handler)
  {
    if (isActive()) {
      throw new IllegalStateException("Delivery already in progress");
    }
    timedOut = false;
    delivered = false;
    this.caller = caller;
    this.event = event;
    this.handler = handler;
    notifyAll();
  }

  /**
   * Inherited from Thread, starts the thread.
   */
  public void run()
  {
    while (!closed) {
      if (!isDelivered()) {
        synchronized (this) {
          // Synchronized to ensure that this thread sees the current values
          // of the instance fields
          handler.isBlacklisted();
        }
        try {
          handler.handleEventSubjectToFilter(event);
        } catch (Throwable e) {
          Activator.log
              .error("Handler threw exception in handleEvent: " + e, e);
        } finally {
          // NOTE(review): locks the caller's monitor here (setDelivered itself
          // is synchronized on `this`) so that "delivered" flips while holding
          // the same monitor the caller waits on -- confirm against the
          // caller's wait/timeout protocol.
          synchronized (caller) {
            setDelivered(true);
          }
        }
        /* tell the owner that notification is done */
        if (!isTimedOut()) {
          synchronized (caller) {
            caller.notifyAll();
          }
        }
      } else {
        // Idle until deliver() or close() wakes us.
        synchronized (this) {
          try {
            wait();
          } catch (InterruptedException e) {
            // Ignore
          }
        }
      }
    }
  }
}
| knopflerfish/knopflerfish.org | osgi/bundles/event/src/org/knopflerfish/bundle/event/TimeoutDeliver.java | Java | bsd-3-clause | 4,529 |
<?php
namespace Application\Controller;
use Zend\Mvc\Controller\AbstractActionController;
use Zend\View\Model\ViewModel;
use Zend\Session\Container;
/**
 * Demo controller: both actions render a default view after stamping the
 * shared page-head metadata (title + keyword/description/DC meta tags).
 */
class MytestController extends AbstractActionController {

    private $title = "";
    private $keyword = "";
    private $description = "";
    private $raw = null;
    private $data = null;
    private $get = null;

    public function __construct(){
        //you can now access the router used by the MVC application
    }

    /**
     * Applies the common head metadata (title, keywords, description and
     * their Dublin Core equivalents) to the PhpRenderer.
     *
     * Extracted because indexAction() and testAction() previously duplicated
     * these nine lines verbatim.
     */
    private function applyHeadMeta() {
        $r = $this->getServiceLocator()->get('Zend\View\Renderer\PhpRenderer');
        $r->headTitle($this->title);
        $r->headMeta()->appendName('keywords', $this->keyword)->setIndent(8);
        $r->headMeta()->appendName('description', $this->description)->setIndent(8);
        $r->headMeta()->appendName('Language', 'en')->setIndent(8);
        $r->headMeta()->appendName('dc.title', $this->title)->setIndent(8);
        $r->headMeta()->appendName('dc.keywords', $this->keyword)->setIndent(8);
        $r->headMeta()->appendName('dc.description', $this->description)->setIndent(8);
    }

    public function indexAction() {
        $this->applyHeadMeta();
        return new ViewModel();
    }

    public function testAction() {
        $this->applyHeadMeta();
        return new ViewModel();
    }
}
| cybersolutions/pubdr | module/Application/src/Application/Controller/MytestController.php | PHP | bsd-3-clause | 1,773 |
<?php
/**
* @link http://www.yiiframework.com/
* @copyright Copyright (c) 2008 Yii Software LLC
* @license http://www.yiiframework.com/license/
*/
namespace backend\assets;
use yii\web\AssetBundle;
/**
 * Asset bundle for the backend application: registers the site stylesheet
 * and depends on the core Yii and Bootstrap asset bundles.
 *
 * @author Qiang Xue <qiang.xue@gmail.com>
 * @since 2.0
 */
class AppAsset extends AssetBundle
{
    // Publish assets directly from the web root (no copying).
    public $basePath = '@webroot';
    public $baseUrl = '@web';
    public $css = [
        'css/site.less',
    ];
    public $js = [
    ];
    // Ensure Yii's core JS and Bootstrap CSS/JS are registered first.
    public $depends = [
        'yii\web\YiiAsset',
        'yii\bootstrap\BootstrapAsset',
    ];
}
| ASzczesna/LemiRestaurant | backend/assets/AppAsset.php | PHP | bsd-3-clause | 551 |
#ifndef SPREADSHEET_EXTENSION_HPP
#define SPREADSHEET_EXTENSION_HPP

#include "core_data_model/i_model_extension.hpp"
#include "core_data_model/i_item_role.hpp"
#include "core_data_model/abstract_item_model.hpp"
#include <unordered_map>
#include <ctime>
#include <vector>

namespace wgt
{
/**
 * Model extension that tracks per-cell edit times for a spreadsheet-style
 * grid.  Records the last edit timestamp of each changed index and exposes
 * it via data(); commitData() marks the commit time.
 */
class SpreadsheetExtension : public IModelExtension
{
	using ModelIndex = IModelExtension::ModelIndex;

public:
	SpreadsheetExtension();
	virtual ~SpreadsheetExtension();

	// Returns role data for the given cell (see implementation for roles).
	Variant data(const ModelIndex& index, ItemRole::Id roleId) const override;
	// Records edit timestamps for every cell in the changed range.
	void onDataChanged(const ModelIndex& topLeft, const ModelIndex& bottomRight,
	                   const std::vector<ItemRole::Id>& roles) override;
	// Marks the current time as the last commit point.
	void commitData();

private:
	// Last edit time per cell index.
	std::unordered_map<ModelIndex, time_t> lastEdits_;
	time_t commitTime_;
};
} // end namespace wgt
#endif // SPREADSHEET_EXTENSION_HPP
| wgsyd/wgtf | src/core/testing/plg_grid_editor_test/spreadsheet_extension.hpp | C++ | bsd-3-clause | 853 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Note that although this is not a "browser" test, it runs as part of
// browser_tests. This is because WebKit does not work properly if it is
// shutdown and re-initialized. Since browser_tests runs each test in a
// new process, this avoids the problem.
#include "chrome/renderer/safe_browsing/phishing_dom_feature_extractor.h"
#include "base/bind.h"
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/memory/weak_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/time/time.h"
#include "chrome/renderer/safe_browsing/features.h"
#include "chrome/renderer/safe_browsing/mock_feature_extractor_clock.h"
#include "chrome/renderer/safe_browsing/test_utils.h"
#include "content/public/test/render_view_fake_resources_test.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/WebKit/public/web/WebScriptSource.h"
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::Return;
namespace safe_browsing {
// Test fixture: loads canned HTML responses into a fake RenderView and runs
// PhishingDOMFeatureExtractor over it.  A mock clock lets individual tests
// drive the extractor's chunked, time-limited iteration deterministically.
class PhishingDOMFeatureExtractorTest
    : public content::RenderViewFakeResourcesTest {
 public:
  // Helper for the SubframeRemoval test that posts a message to remove
  // the iframe "frame1" from the document.
  void ScheduleRemoveIframe() {
    message_loop_.PostTask(
        FROM_HERE,
        base::Bind(&PhishingDOMFeatureExtractorTest::RemoveIframe,
                   weak_factory_.GetWeakPtr()));
  }

 protected:
  PhishingDOMFeatureExtractorTest()
      : content::RenderViewFakeResourcesTest(),
        weak_factory_(this) {}

  virtual ~PhishingDOMFeatureExtractorTest() {}

  virtual void SetUp() {
    // Set up WebKit and the RenderView.
    content::RenderViewFakeResourcesTest::SetUp();
    extractor_.reset(new PhishingDOMFeatureExtractor(view(), &clock_));
  }

  virtual void TearDown() {
    content::RenderViewFakeResourcesTest::TearDown();
  }

  // Runs the DOMFeatureExtractor on the RenderView, waiting for the
  // completion callback.  Returns the success boolean from the callback.
  bool ExtractFeatures(FeatureMap* features) {
    success_ = false;
    extractor_->ExtractFeatures(
        features,
        base::Bind(&PhishingDOMFeatureExtractorTest::ExtractionDone,
                   base::Unretained(this)));
    message_loop_.Run();
    return success_;
  }

  // Completion callback for feature extraction.
  void ExtractionDone(bool success) {
    success_ = success;
    message_loop_.Quit();
  }

  // Does the actual work of removing the iframe "frame1" from the document.
  void RemoveIframe() {
    blink::WebFrame* main_frame = GetMainFrame();
    ASSERT_TRUE(main_frame);
    main_frame->executeScript(
        blink::WebString(
            "document.body.removeChild(document.getElementById('frame1'));"));
  }

  MockFeatureExtractorClock clock_;
  scoped_ptr<PhishingDOMFeatureExtractor> extractor_;
  bool success_;  // holds the success value from ExtractFeatures
  base::WeakPtrFactory<PhishingDOMFeatureExtractorTest> weak_factory_;
};
// Verifies extraction of form/input features for several combinations of
// <form> actions and <input> types.
TEST_F(PhishingDOMFeatureExtractorTest, FormFeatures) {
  // This test doesn't exercise the extraction timing.
  EXPECT_CALL(clock_, Now()).WillRepeatedly(Return(base::TimeTicks::Now()));
  responses_["http://host.com/"] =
      "<html><head><body>"
      "<form action=\"query\"><input type=text><input type=checkbox></form>"
      "<form action=\"http://cgi.host.com/submit\"></form>"
      "<form action=\"http://other.com/\"></form>"
      "<form action=\"query\"></form>"
      "<form></form></body></html>";
  FeatureMap expected_features;
  expected_features.AddBooleanFeature(features::kPageHasForms);
  // Expects 1 in 4 form actions (http://other.com/) to be off-domain.
  expected_features.AddRealFeature(features::kPageActionOtherDomainFreq, 0.25);
  expected_features.AddBooleanFeature(features::kPageHasTextInputs);
  expected_features.AddBooleanFeature(features::kPageHasCheckInputs);

  FeatureMap features;
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);

  // Radio and password inputs map to their own dedicated features.
  responses_["http://host.com/"] =
      "<html><head><body>"
      "<input type=\"radio\"><input type=password></body></html>";
  expected_features.Clear();
  expected_features.AddBooleanFeature(features::kPageHasRadioInputs);
  expected_features.AddBooleanFeature(features::kPageHasPswdInputs);

  features.Clear();
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);

  // An <input> with no type attribute counts as a text input.
  responses_["http://host.com/"] =
      "<html><head><body><input></body></html>";
  expected_features.Clear();
  expected_features.AddBooleanFeature(features::kPageHasTextInputs);

  features.Clear();
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);

  // An unrecognized type attribute is also treated as a text input.
  responses_["http://host.com/"] =
      "<html><head><body><input type=\"invalid\"></body></html>";
  expected_features.Clear();
  expected_features.AddBooleanFeature(features::kPageHasTextInputs);

  features.Clear();
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
}
// Verifies extraction of link features: external-link frequency, secure-link
// frequency, and the per-domain link feature.
TEST_F(PhishingDOMFeatureExtractorTest, LinkFeatures) {
  // This test doesn't exercise the extraction timing.
  EXPECT_CALL(clock_, Now()).WillRepeatedly(Return(base::TimeTicks::Now()));
  responses_["http://www.host.com/"] =
      "<html><head><body>"
      "<a href=\"http://www2.host.com/abc\">link</a>"
      "<a name=page_anchor></a>"
      "<a href=\"http://www.chromium.org/\">chromium</a>"
      "</body></html";
  // NOTE(review): the truncated "</html" above appears intentional fixture
  // content -- confirm before "fixing" it.
  FeatureMap expected_features;
  // Of the 2 real links, 1 (chromium.org) is external; none are https.
  expected_features.AddRealFeature(features::kPageExternalLinksFreq, 0.5);
  expected_features.AddRealFeature(features::kPageSecureLinksFreq, 0.0);
  expected_features.AddBooleanFeature(features::kPageLinkDomain +
                                      std::string("chromium.org"));

  FeatureMap features;
  LoadURL("http://www.host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);

  responses_.clear();
  // From an https page: relative and https links are secure; plain http
  // links are not.
  responses_["https://www.host.com/"] =
      "<html><head><body>"
      "<a href=\"login\">this is secure</a>"
      "<a href=\"http://host.com\">not secure</a>"
      "<a href=\"https://www2.host.com/login\">also secure</a>"
      "<a href=\"http://chromium.org/\">also not secure</a>"
      "</body></html>";
  expected_features.Clear();
  expected_features.AddRealFeature(features::kPageExternalLinksFreq, 0.25);
  expected_features.AddRealFeature(features::kPageSecureLinksFreq, 0.5);
  expected_features.AddBooleanFeature(features::kPageLinkDomain +
                                      std::string("chromium.org"));

  features.Clear();
  LoadURL("https://www.host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
}
// Verifies the script-count threshold features (>1, >6) and the
// off-domain image frequency feature.
TEST_F(PhishingDOMFeatureExtractorTest, ScriptAndImageFeatures) {
  // This test doesn't exercise the extraction timing.
  EXPECT_CALL(clock_, Now()).WillRepeatedly(Return(base::TimeTicks::Now()));
  responses_["http://host.com/"] =
      "<html><head><script></script><script></script></head></html>";
  FeatureMap expected_features;
  expected_features.AddBooleanFeature(features::kPageNumScriptTagsGTOne);

  FeatureMap features;
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);

  // 7 scripts trips both thresholds; 1 of 2 images is on another domain.
  responses_["http://host.com/"] =
      "<html><head><script></script><script></script><script></script>"
      "<script></script><script></script><script></script><script></script>"
      "</head><body><img src=\"blah.gif\">"
      "<img src=\"http://host2.com/blah.gif\"></body></html>";
  expected_features.Clear();
  expected_features.AddBooleanFeature(features::kPageNumScriptTagsGTOne);
  expected_features.AddBooleanFeature(features::kPageNumScriptTagsGTSix);
  expected_features.AddRealFeature(features::kPageImgOtherDomainFreq, 0.5);

  features.Clear();
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
}
// Verifies that features are aggregated across the main frame and all
// (possibly nested) iframes.
TEST_F(PhishingDOMFeatureExtractorTest, SubFrames) {
  // This test doesn't exercise the extraction timing.
  EXPECT_CALL(clock_, Now()).WillRepeatedly(Return(base::TimeTicks::Now()));

  // Test that features are aggregated across all frames.
  responses_["http://host.com/"] =
      "<html><body><input type=text><a href=\"info.html\">link</a>"
      "<iframe src=\"http://host2.com/\"></iframe>"
      "<iframe src=\"http://host3.com/\"></iframe>"
      "</body></html>";

  responses_["http://host2.com/"] =
      "<html><head><script></script><body>"
      "<form action=\"http://host4.com/\"><input type=checkbox></form>"
      "<form action=\"http://host2.com/submit\"></form>"
      "<a href=\"http://www.host2.com/home\">link</a>"
      "<iframe src=\"nested.html\"></iframe>"
      "<body></html>";

  responses_["http://host2.com/nested.html"] =
      "<html><body><input type=password>"
      "<a href=\"https://host4.com/\">link</a>"
      "<a href=\"relative\">another</a>"
      "</body></html>";

  responses_["http://host3.com/"] =
      "<html><head><script></script><body>"
      "<img src=\"http://host.com/123.png\">"
      "</body></html>";

  FeatureMap expected_features;
  expected_features.AddBooleanFeature(features::kPageHasForms);
  // Form action domains are compared to the URL of the document they're in,
  // not the URL of the toplevel page.  So http://host2.com/ has two form
  // actions, one of which is external.
  expected_features.AddRealFeature(features::kPageActionOtherDomainFreq, 0.5);
  expected_features.AddBooleanFeature(features::kPageHasTextInputs);
  expected_features.AddBooleanFeature(features::kPageHasPswdInputs);
  expected_features.AddBooleanFeature(features::kPageHasCheckInputs);
  expected_features.AddRealFeature(features::kPageExternalLinksFreq, 0.25);
  expected_features.AddBooleanFeature(features::kPageLinkDomain +
                                      std::string("host4.com"));
  expected_features.AddRealFeature(features::kPageSecureLinksFreq, 0.25);
  expected_features.AddBooleanFeature(features::kPageNumScriptTagsGTOne);
  expected_features.AddRealFeature(features::kPageImgOtherDomainFreq, 1.0);

  FeatureMap features;
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
}
// Drives the extractor's chunked iteration with a scripted mock clock:
// first verifying that extraction resumes correctly across multiple chunks,
// then that it fails once the total time budget is exceeded.
TEST_F(PhishingDOMFeatureExtractorTest, Continuation) {
  // For this test, we'll cause the feature extraction to run multiple
  // iterations by incrementing the clock.

  // This page has a total of 50 elements.  For the external forms feature to
  // be computed correctly, the extractor has to examine the whole document.
  // Note: the empty HEAD is important -- WebKit will synthesize a HEAD if
  // there isn't one present, which can be confusing for the element counts.
  std::string response = "<html><head></head><body>"
      "<form action=\"ondomain\"></form>";
  for (int i = 0; i < 45; ++i) {
    response.append("<p>");
  }
  response.append("<form action=\"http://host2.com/\"></form></body></html>");
  responses_["http://host.com/"] = response;

  // Advance the clock 6 ms every 10 elements processed, 10 ms between chunks.
  // Note that this assumes kClockCheckGranularity = 10 and
  // kMaxTimePerChunkMs = 10.
  base::TimeTicks now = base::TimeTicks::Now();
  EXPECT_CALL(clock_, Now())
      // Time check at the start of extraction.
      .WillOnce(Return(now))
      // Time check at the start of the first chunk of work.
      .WillOnce(Return(now))
      // Time check after the first 10 elements.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(6)))
      // Time check after the next 10 elements.  This is over the chunk
      // time limit, so a continuation task will be posted.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(12)))
      // Time check at the start of the second chunk of work.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(22)))
      // Time check after resuming iteration for the second chunk.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(24)))
      // Time check after the next 10 elements.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(30)))
      // Time check after the next 10 elements.  This will trigger another
      // continuation task.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(36)))
      // Time check at the start of the third chunk of work.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(46)))
      // Time check after resuming iteration for the third chunk.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(48)))
      // Time check after the last 10 elements.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(54)))
      // A final time check for the histograms.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(56)));

  FeatureMap expected_features;
  expected_features.AddBooleanFeature(features::kPageHasForms);
  expected_features.AddRealFeature(features::kPageActionOtherDomainFreq, 0.5);

  FeatureMap features;
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
  // Make sure none of the mock expectations carry over to the next test.
  ::testing::Mock::VerifyAndClearExpectations(&clock_);

  // Now repeat the test with the same page, but advance the clock faster so
  // that the extraction time exceeds the maximum total time for the feature
  // extractor.  Extraction should fail.  Note that this assumes
  // kMaxTotalTimeMs = 500.
  EXPECT_CALL(clock_, Now())
      // Time check at the start of extraction.
      .WillOnce(Return(now))
      // Time check at the start of the first chunk of work.
      .WillOnce(Return(now))
      // Time check after the first 10 elements.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(300)))
      // Time check at the start of the second chunk of work.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(350)))
      // Time check after resuming iteration for the second chunk.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(360)))
      // Time check after the next 10 elements.  This is over the limit.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(600)))
      // A final time check for the histograms.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(620)));

  features.Clear();
  EXPECT_FALSE(ExtractFeatures(&features));
}
// Verifies behavior when an iframe is removed from the document between
// extraction chunks: extraction still completes on the detached frame.
TEST_F(PhishingDOMFeatureExtractorTest, SubframeRemoval) {
  // In this test, we'll advance the feature extractor so that it is positioned
  // inside an iframe, and have it pause due to exceeding the chunk time limit.
  // Then, prior to continuation, the iframe is removed from the document.
  // As currently implemented, this should finish extraction from the removed
  // iframe document.
  responses_["http://host.com/"] =
      "<html><head></head><body>"
      "<iframe src=\"frame.html\" id=\"frame1\"></iframe>"
      "<form></form></body></html>";
  responses_["http://host.com/frame.html"] =
      "<html><body><p><p><p><input type=password></body></html>";

  base::TimeTicks now = base::TimeTicks::Now();
  EXPECT_CALL(clock_, Now())
      // Time check at the start of extraction.
      .WillOnce(Return(now))
      // Time check at the start of the first chunk of work.
      .WillOnce(Return(now))
      // Time check after the first 10 elements.  Enough time has passed
      // to stop extraction.  Schedule the iframe removal to happen as soon as
      // the feature extractor returns control to the message loop.
      .WillOnce(DoAll(
          Invoke(this, &PhishingDOMFeatureExtractorTest::ScheduleRemoveIframe),
          Return(now + base::TimeDelta::FromMilliseconds(21))))
      // Time check at the start of the second chunk of work.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(25)))
      // Time check after resuming iteration for the second chunk.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(27)))
      // A final time check for the histograms.
      .WillOnce(Return(now + base::TimeDelta::FromMilliseconds(33)));

  FeatureMap expected_features;
  expected_features.AddBooleanFeature(features::kPageHasForms);
  expected_features.AddBooleanFeature(features::kPageHasPswdInputs);

  FeatureMap features;
  LoadURL("http://host.com/");
  ASSERT_TRUE(ExtractFeatures(&features));
  ExpectFeatureMapsAreEqual(features, expected_features);
}
} // namespace safe_browsing
| cvsuser-chromium/chromium | chrome/renderer/safe_browsing/phishing_dom_feature_extractor_browsertest.cc | C++ | bsd-3-clause | 16,907 |
<?php
// Connection settings for the ANN backend service.
// NOTE(review): credentials are hard-coded and PASSWORD is empty --
// consider loading these from an environment-specific, uncommitted file.
define("SERVER", "127.0.0.1");
define("PORT", 9090);
define("USERNAME", "root");
define("PASSWORD", "");
?>
| wfcreations/ANNClient | config.php | PHP | bsd-3-clause | 122 |
#pragma once
//=====================================================================//
/*! @file
    @brief  DS3231 RTC driver @n
            Maxim Integrated @n
            https://datasheets.maximintegrated.com/en/ds/DS3231.pdf @n
            Note on cheap imported breakout modules: @n
            - When using a lithium cell for battery backup, remove the
              series resistor.
    @author Kunihito Hiramatsu (hira@rvf-rc45.net)
    @copyright  Copyright (C) 2016, 2021 Kunihito Hiramatsu @n
                Released under the MIT license @n
                https://github.com/hirakuni45/RX/blob/master/LICENSE
*/
//=====================================================================//
#include <cstdint>
#include "common/time.h"

namespace chip {

	//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++//
	/*!
		@brief  DS3231 RTC template class
		@param[in]	I2C_IO	i2c class
	*/
	//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++//
	template <class I2C_IO>
	class DS3231 {
	public:
		// 7-bit device address (does not include the R/W bit)
		static constexpr uint8_t I2C_ADR = 0x68;

	private:
		I2C_IO&	i2c_io_;

		bool	start_;

		// Raw copy of the 7 timekeeping registers (sec..year, BCD encoded).
		struct reg_t
		{
			uint8_t	reg[7];
			bool operator != (const reg_t& t) {
				for(uint8_t i = 0; i < 7; ++i) {
					if(reg[i] != t.reg[i]) return true;
				}
				return false;
			}
		};

		// Reads the 7 timekeeping registers starting at address 0x00.
		bool get_time_(reg_t& t) const noexcept {
			uint8_t reg[1];
			reg[0] = 0x00;	// set address
			if(!i2c_io_.send(I2C_ADR, reg, 1)) {
				return false;
			}
			if(!i2c_io_.recv(I2C_ADR, &t.reg[0], 7)) {
				return false;
			}
			return true;
		}

	public:
		//-----------------------------------------------------------------//
		/*!
			@brief  Constructor
			@param[in]	i2c_io	reference to the I2C driver class
		*/
		//-----------------------------------------------------------------//
		DS3231(I2C_IO& i2c_io) noexcept : i2c_io_(i2c_io), start_(false) { }


		//-----------------------------------------------------------------//
		/*!
			@brief  Start (clears the control register at address 0x0E)
			@return "false" on error
		*/
		//-----------------------------------------------------------------//
		bool start() noexcept
		{
			uint8_t reg[2];
			reg[0] = 0x0e;	/// internal register address
			reg[1] = 0x00;
			start_ = i2c_io_.send(I2C_ADR, reg, 2);
			return start_;
		}


		//-----------------------------------------------------------------//
		/*!
			@brief  DS3231 set time (time_t is converted to BCD registers)
			@param[in]	t	time
			@return "true" on success
		*/
		//-----------------------------------------------------------------//
		bool set_time(time_t t) const noexcept
		{
			if(!start_) return false;
			tm tmt;
			gmtime_r(&t, &tmt);
			uint8_t reg[7];
			reg[0] = ((tmt.tm_sec  / 10) << 4) | (tmt.tm_sec  % 10);  // 0 to 59
			reg[1] = ((tmt.tm_min  / 10) << 4) | (tmt.tm_min  % 10);  // 0 to 59
			reg[2] = ((tmt.tm_hour / 10) << 4) | (tmt.tm_hour % 10);  // 0 to 23
			reg[3] = tmt.tm_wday + 1;  // 1 to 7
			reg[4] = ((tmt.tm_mday / 10) << 4) | (tmt.tm_mday % 10);  // 1 to 31
			uint8_t mon = tmt.tm_mon + 1;
			reg[5] = ((mon / 10) << 4) | (mon % 10);  // 1 to 12
			uint16_t y = tmt.tm_year % 100;
			reg[6] = ((y / 10) << 4) | (y % 10);  // 0 to 99
			return i2c_io_.send(I2C_ADR, 0x00, reg, 7);
		}


		//-----------------------------------------------------------------//
		/*!
			@brief  DS3231 get time
			@param[out]	t	retrieved time
			@return "true" on success
		*/
		//-----------------------------------------------------------------//
		bool get_time(time_t& t) const noexcept {
			if(!start_) return false;
			reg_t tt;
			reg_t tmp;
			tm ts;
			// Read twice; if both reads match, accept the time.
			// NOTE(review): on the first pass tmp copies an indeterminate tt,
			// so the loop effectively always performs at least two reads.
			uint8_t n = 5;  // give up with an error after 5 attempts
			do {
				tmp = tt;
				if(!get_time_(tt)) return false;
				--n;
				if(n == 0) {
					return false;
				}
			} while(tt != tmp) ;
			// Decode the BCD registers into a struct tm.
			ts.tm_sec  = ((tt.reg[0] >> 4) * 10) + (tt.reg[0] & 0xf);
			ts.tm_min  = ((tt.reg[1] >> 4) * 10) + (tt.reg[1] & 0xf);
			ts.tm_hour = ((tt.reg[2] >> 4) * 10) + (tt.reg[2] & 0xf);
			ts.tm_mday = ((tt.reg[4] >> 4) * 10) + (tt.reg[4] & 0xf);
			ts.tm_mon  = ((((tt.reg[5] & 0x10) >> 4) * 10) + (tt.reg[5] & 0xf)) - 1;
			ts.tm_year = ((tt.reg[6] >> 4) * 10) + (tt.reg[6] & 0xf);
			ts.tm_year += 100;  // chip stores 2-digit year; epoch base is 1900
			t = mktime_gmt(&ts);
			return true;
		}
	};
}
| hirakuni45/RX | chip/DS3231.hpp | C++ | bsd-3-clause | 4,347 |
package dev.kkorolyov.pancake.platform.event;
/**
 * {@link Event} signalling that a specific entity should be removed.
 */
public class DestroyEntity implements Event {
	private final int id;

	/**
	 * Constructs a new destroy entity event.
	 * @param targetId ID of entity to destroy
	 */
	public DestroyEntity(int targetId) {
		this.id = targetId;
	}

	/** @return ID of entity to destroy */
	public int getId() {
		return this.id;
	}
}
| kkorolyov/Pancake | pancake-platform/src/main/java/dev/kkorolyov/pancake/platform/event/DestroyEntity.java | Java | bsd-3-clause | 400 |
module Hippo::TransactionSets
  module HIPAA_277
    # HIPAA 277 Loop 2000A -- "Information Source Level".
    # Declarative definition of the HL segment plus the nested 2100A/2200A
    # loops that identify the information source (payer) in a 277 response.
    class L2000A < Hippo::TransactionSets::Base
      loop_name 'L2000A'        #Information Source Level

      #Information Source Level
      segment Hippo::Segments::HL,
              :name          => 'Information Source Level',
              :minimum       => 1,
              :maximum       => 1,
              :position      => 100,
              :identified_by => {
                'HL03' => '20',
                'HL04' => '1'
              }

      #Information Source Name
      loop    Hippo::TransactionSets::HIPAA_277::L2100A,
              :name          => 'Information Source Name',
              :minimum       => 1,
              :maximum       => 1,
              :position      => 500,
              :identified_by => {
                'NM1.NM101' => ["AY", "PR"],
                'NM1.NM102' => '2',
                'NM1.NM108' => ["46", "FI", "PI", "XV"]
              }

      #Transmission Receipt Control Identifier
      loop    Hippo::TransactionSets::HIPAA_277::L2200A,
              :name          => 'Transmission Receipt Control Identifier',
              :minimum       => 1,
              :maximum       => 1,
              :position      => 900,
              :identified_by => {
                'TRN.TRN01' => '1'
              }
    end
  end
end
| Aprexis/hippo | lib/hippo/transaction_sets/HIPAA_277/L2000A.rb | Ruby | bsd-3-clause | 1,392 |
/*******************************************************************************
* Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
*******************************************************************************/
package com.att.cadi.http;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.Principal;
import javax.net.ssl.HttpsURLConnection;
import com.att.cadi.CadiException;
import com.att.cadi.client.AbsTransferSS;
import com.att.cadi.config.Config;
import com.att.cadi.config.SecurityInfoC;
/**
 * HTTP implementation of a "transfer" security setter: forwards the caller's
 * identity as a CADI user-chain header (signed by the app's own credentials)
 * and installs the TLS socket factory for HTTPS connections.
 */
public class HTransferSS extends AbsTransferSS<HttpURLConnection> {
	public HTransferSS(Principal principal, String app) throws IOException {
		super(principal, app);
	}

	public HTransferSS(Principal principal, String app, SecurityInfoC<HttpURLConnection> si) {
		super(principal, app, si);
	}

	/**
	 * Applies security to an outgoing connection: the delegate credentials
	 * plus the user-chain header when a transferred identity is present, and
	 * the configured SSL socket factory for HTTPS connections.
	 *
	 * @throws CadiException if an identity is to be transferred but no app
	 *         credentials (defSS) are available to vouch for it
	 */
	@Override
	public void setSecurity(HttpURLConnection huc) throws CadiException {
		if(value!=null) {
			if(defSS==null) {
				throw new CadiException("Need App Credentials to send message");
			}
			defSS.setSecurity(huc);
			huc.addRequestProperty(Config.CADI_USER_CHAIN, value);
		}
		// Fix: the original cast unconditionally, which throws
		// ClassCastException for plain http:// connections.  A socket factory
		// only applies to HTTPS, so guard the downcast.
		if(securityInfo!=null && huc instanceof HttpsURLConnection) {
			securityInfo.setSocketFactoryOn((HttpsURLConnection)huc);
		}
	}

	/** No response-code bookkeeping for this implementation; always 0. */
	@Override
	public int setLastResponse(int respCode) {
		return 0;
	}
}
| att/AAF | cadi/client/src/main/java/com/att/cadi/http/HTransferSS.java | Java | bsd-3-clause | 1,314 |
<?php
/**
* \PEAR2\Pyrus\DER\Schema
*
* PHP version 5
*
* @category PEAR2
* @package PEAR2_Pyrus
* @author Greg Beaver <cellog@php.net>
* @copyright 2010 The PEAR Group
* @license http://www.opensource.org/licenses/bsd-license.php New BSD License
* @version SVN: $Id$
* @link http://svn.php.net/viewvc/pear2/Pyrus/
*/
/**
* Represents a Distinguished Encoding Rule IASN.1 schema
*
* This is used to name components and to retrieve context-specific types
*
* @category PEAR2
* @package PEAR2_Pyrus
* @author Greg Beaver <cellog@php.net>
* @copyright 2010 The PEAR Group
* @license http://www.opensource.org/licenses/bsd-license.php New BSD License
* @link http://svn.php.net/viewvc/pear2/Pyrus/
*/
namespace PEAR2\Pyrus\DER;
class Schema extends \PEAR2\Pyrus\DER
{
    // Registry of named, reusable schema fragments, keyed by lowercased name.
    static protected $types = array();
    // Element name within the parent schema.
    protected $name;
    // Enclosing schema element, or null for the root.
    protected $parent;
    // DER tag byte expected for this element.
    protected $tag;
    // Whether this element may be absent in a document.
    protected $optional = false;
    // Whether this element may repeat.
    protected $multiple = false;
    // Fully-qualified DER type class name (or 'any').
    protected $class;
    // Index of the child matched by the most recent find(), used to resume
    // sequential matching; false when no match is pending.
    protected $lastfind = false;
    /**
     * @param Schema $parent enclosing schema element, if any
     * @param int    $tag    expected DER tag
     * @param string $type   DER type class name (or 'any')
     */
    function __construct(Schema $parent = null, $tag = 0, $type = '')
    {
        $this->parent = $parent;
        $this->tag = $tag;
        $this->class = $type;
    }
    /**
     * Marks this element as optional. Fluent.
     */
    function setOptional()
    {
        $this->optional = true;
        return $this;
    }
    /**
     * Marks this element as repeatable. Fluent.
     */
    function setMultiple()
    {
        $this->multiple = true;
        return $this;
    }
    /** Sets the element's name within its parent. */
    function setName($name)
    {
        $this->name = $name;
    }
    /** Sets the expected DER tag. */
    function setTag($tag)
    {
        $this->tag = $tag;
    }
    /** Sets the DER type class name. */
    function setClass($class)
    {
        $this->class = $class;
    }
    /** @return bool whether this element may repeat */
    function multiple()
    {
        return $this->multiple;
    }
    /** @return bool whether this element may be absent */
    function optional()
    {
        return $this->optional;
    }
    /**
     * Schema-builder magic: each method call declares a child element.
     *
     * - choice([$name[, $tag]]) creates a SchemaChoice child.
     * - any($name[, $tag]) creates a wildcard element.
     * - A registered type name (see addType()) clones that type.
     * - Otherwise the call name maps to class PEAR2\Pyrus\DER\<Func>.
     *
     * Returns the child for container types (so building can descend), or
     * $this for leaf types (so building stays at the current level).
     * NOTE(review): $this->objs appears to be inherited from \PEAR2\Pyrus\DER —
     * confirm against the parent class.
     */
    function __call($func, $args)
    {
        if ($func == 'choice') {
            if (isset($args[0])) {
                if (isset($args[1])) {
                    $obj = new SchemaChoice($this, $args[0], $args[1]);
                } else {
                    $obj = new SchemaChoice($this, $args[0]);
                }
                $this->objs[$args[0]] = $obj;
                return $obj;
            }
            return new SchemaChoice($this);
        }
        if (!isset($args[0])) {
            throw new Exception('Invalid schema, element must be named');
        }
        $name = $args[0];
        if ($func == 'any') {
            if (isset($args[1])) {
                // 0x80 marks a context-specific tag class.
                $obj = new Schema($this, 0x80 | $args[1], 'any');
            } else {
                $obj = new Schema($this, 0, 'any');
            }
        } elseif (isset(self::$types[strtolower($func)])) {
            // Registered reusable type: clone so per-use tag/name changes
            // do not mutate the registry copy.
            $obj = clone self::$types[strtolower($func)];
            $obj->setParent($this);
            if (isset($args[1])) {
                if ($obj->parentSchema()) {
                    // 0x20 marks a constructed encoding.
                    $obj->setTag(0x80 | 0x20 | $args[1]);
                } else {
                    $obj->setTag(0x80 | $args[1]);
                }
            }
        } else {
            $class = 'PEAR2\Pyrus\DER\\' . ucfirst($func);
            if (!class_exists($class, 1)) {
                throw new Exception('Unknown type ' . $func . ' at ' . $this->path());
            }
            if (!isset($args[1])) {
                $tag = $class::TAG;
            } else {
                $tag = $args[1] | 0x80;
                if (strtolower($func) == 'set' || strtolower($func) == 'sequence') {
                    $tag |= 0x20;
                }
            }
            $obj = new Schema($this, $tag, $class);
        }
        $this->objs[$name] = $obj;
        $obj->setName($name);
        if ($obj->parentSchema() && !isset(self::$types[strtolower($func)])) {
            return $obj;
        } else {
            return $this;
        }
    }
    /**
     * Deep-copies children so a cloned schema is fully independent.
     */
    function __clone()
    {
        foreach ($this->objs as $i => $obj) {
            $this->objs[$i] = clone $obj;
            $obj->setParent($this);
        }
    }
    /**
     * @return bool true when this element is a container (choice, sequence
     *              or set) and schema building should descend into it
     */
    function parentSchema()
    {
        if ($this instanceof SchemaChoice) {
            return true;
        }
        if ($this->class === 'PEAR2\Pyrus\DER\Sequence') {
            return true;
        }
        if ($this->class === 'PEAR2\Pyrus\DER\Set') {
            return true;
        }
        return false;
    }
    /** Re-parents this element (used after cloning registered types). */
    function setParent(Schema $parent)
    {
        $this->parent = $parent;
    }
    /**
     * Ends the current container during fluent building.
     * @return Schema the parent element
     */
    function end()
    {
        return $this->parent;
    }
    /**
     * Registers a named, reusable schema fragment.
     */
    static function addType($name, Schema $schema)
    {
        self::$types[strtolower($name)] = $schema;
    }
    /** @return array all registered reusable types */
    static function types()
    {
        return self::$types;
    }
    /**
     * Read access to schema metadata ('types', 'name', 'tag', 'type') and to
     * named child elements.
     * @throws Exception for unknown element names
     */
    function __get($var)
    {
        if ($var === 'types') {
            return self::$types;
        }
        if ($var == 'name') {
            return $this->name;
        }
        if ($var == 'tag') {
            return $this->tag;
        }
        if ($var == 'type') {
            return $this->class;
        }
        if (isset($this->objs[$var])) {
            return $this->objs[$var];
        }
        throw new Exception('Unknown schema element ' . $var);
    }
    /**
     * @return Schema|false this element when its tag matches, else false
     */
    function findTag($tag)
    {
        if ($this->tag === $tag) {
            return $this;
        }
        return false;
    }
    /** Clears the resume point used by sequential find() matching. */
    function resetLastFind()
    {
        $this->lastfind = false;
    }
    /**
     * Matches the next parsed tag against this schema's children in order,
     * resuming from the last match ($lastfind) so repeated elements and
     * skipped optional elements are handled correctly.
     *
     * @return Schema the matching child schema
     * @throws Exception when a required element is missing or no child matches
     * NOTE(review): $this->tagMap appears to be inherited from the DER base
     * class (tag byte => type class name) — confirm there.
     */
    function find($tag)
    {
        foreach ($this->objs as $index => $obj) {
            if ($this->lastfind && $index != $this->lastfind) {
                continue;
            }
            if ($this->lastfind) {
                // Re-try the previously matched child only if it may repeat.
                if ($obj->multiple() && $this->lastfind == $index) {
                    if ($test = $obj->findTag($tag)) {
                        return $test;
                    }
                }
                $this->lastfind = false;
                continue;
            }
            if ($obj->type === 'any') {
                if (($tag & 0x80) === 0x80) {
                    // context-sensitive tag, do best guess
                    if (($tag & 0x20) == 0x20) {
                        $tag = Sequence::TAG;
                    } else {
                        $tag = OctetString::TAG;
                    }
                }
                if (!isset($this->tagMap[$tag])) {
                    throw new Exception('Unknown tag: ' . dechex($tag) . ' at ' . $this->path());
                }
                $type = $this->tagMap[$tag];
                $ret = new Schema($this->parent, $tag, $type);
                $ret->setName($obj->name);
                $this->lastfind = $index;
                return $ret;
            }
            if ($test = $obj->findTag($tag)) {
                $this->lastfind = $index;
                if ($test->name != $index) {
                    // Cloned so renaming does not affect the shared child.
                    $test = clone $test;
                    $test->setName($index);
                }
                return $test;
            }
            if (!$obj->optional()) {
                if (isset($this->tagMap[$tag])) {
                    $tag = '"' . str_replace('PEAR2\Pyrus\DER\\', '', $this->tagMap[$tag]) .
                        '" (0x' . dechex($tag) . ')';
                } else {
                    $tag = dechex($tag);
                }
                throw new Exception('Invalid DER document, required tag ' .
                                    $index . ' not found, instead requested ' .
                                    'tag value ' . $tag . ' at ' .
                                    $this->path());
            }
        }
        if (isset($this->tagMap[$tag])) {
            $tag = '"' . str_replace('PEAR2\Pyrus\DER\\', '', $this->tagMap[$tag]) . '" (0x' . dechex($tag) . ')';
        } else {
            $tag = dechex($tag);
        }
        throw new Exception('Invalid DER document, no matching elements for tag ' . $tag .
                            ' at ' . $this->path());
    }
    /**
     * @return string the '->'-joined path of names from the root, for errors
     */
    function path()
    {
        if ($this->parent && $this->parent->path()) {
            return $this->parent->path() . '->' . $this->name;
        }
        return $this->name;
    }
}
| pear2/pear2.php.net | vendor/php/PEAR2/Pyrus/DER/Schema.php | PHP | bsd-3-clause | 8,073 |
// Package entry point: expose BaseController as part of the public API.
module.exports.BaseController = require('./BaseController');
| olalonde/chungking | lib/index.js | JavaScript | bsd-3-clause | 61 |
// Copyright 2015 The Serulian Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package packageloader defines functions and types for loading and parsing source from disk or VCS.
package packageloader
import (
"fmt"
"log"
"path"
"strings"
"sync"
"github.com/serulian/compiler/compilercommon"
"github.com/serulian/compiler/compilerutil"
"github.com/serulian/compiler/vcs"
cmap "github.com/streamrail/concurrent-map"
)
// SerulianPackageDirectory is the directory under the root directory holding cached packages
// checked out from VCS.
const SerulianPackageDirectory = ".pkg"
// SerulianTestSuffix is the suffix for all testing modules (before the file extension).
// Testing modules will not be loaded when loading a package.
const SerulianTestSuffix = "_test"
// PackageLoader helps to fully and recursively load a Serulian package and its dependencies
// from a directory or set of directories. It is safe to use from the single Load call;
// internal state is guarded by concurrent maps and a lock map.
type PackageLoader struct {
	entrypoint                Entrypoint // The entrypoint for the package loader.
	libraries                 map[string]Library // The libraries being loaded, keyed by alias.
	vcsDevelopmentDirectories []string   // Directories to check for VCS packages before VCS checkout.
	pathLoader                PathLoader // The path loaders to use.
	alwaysValidate bool // Whether to always run validation, regardless of errors. Useful to IDE tooling.
	skipVCSRefresh bool // Whether to skip VCS refresh if cache exists. Useful to IDE tooling.
	errors   chan compilercommon.SourceError   // Errors are reported on this channel
	warnings chan compilercommon.SourceWarning // Warnings are reported on this channel
	handlers map[string]SourceHandler       // The handlers for each of the supported package kinds.
	parsers  map[string]SourceHandlerParser // The parsers for each of the supported package kinds.
	pathKindsEncountered cmap.ConcurrentMap    // The path+kinds processed by the loader goroutine
	vcsPathsLoaded       cmap.ConcurrentMap    // The VCS paths that have been loaded, mapping to their checkout dir
	vcsLockMap           compilerutil.LockMap  // LockMap for ensuring single loads of all VCS paths.
	packageMap    *mutablePackageMap    // The package map.
	sourceTracker *mutableSourceTracker // The source tracker.
	workTracker sync.WaitGroup // WaitGroup used to wait until all loading is complete
	finished    chan bool      // Channel used to tell background goroutines to quit
	// Handle used to observe cancelation requests during loading.
	cancelationHandle compilerutil.CancelationHandle
}
// Library contains a reference to an external library to load, in addition to those referenced
// by the root source file.
type Library struct {
	PathOrURL string // The file location or SCM URL of the library's package.
	IsSCM     bool   // If true, the PathOrURL is treated as a remote SCM package.
	Kind      string // The kind of the library. Leave empty for Serulian files.
	Alias     string // The import alias for this library.
}
// LoadResult contains the result of attempting to load all packages and source files for this
// project.
type LoadResult struct {
	Status        bool                           // True on success, false otherwise
	Errors        []compilercommon.SourceError   // The errors encountered, if any
	Warnings      []compilercommon.SourceWarning // The warnings encountered, if any
	PackageMap    LoadedPackageMap               // Map of packages loaded.
	SourceTracker SourceTracker                  // Tracker of all source loaded.
}
// NewPackageLoader creates and returns a new package loader for the given config.
func NewPackageLoader(config Config) *PackageLoader {
	handlersMap := map[string]SourceHandler{}
	for _, handler := range config.SourceHandlers {
		handlersMap[handler.Kind()] = handler
	}

	// Default to the local filesystem when no path loader was supplied.
	pathLoader := config.PathLoader
	if pathLoader == nil {
		pathLoader = LocalFilePathLoader{}
	}

	return &PackageLoader{
		libraries:                 map[string]Library{},
		entrypoint:                config.Entrypoint,
		vcsDevelopmentDirectories: config.VCSDevelopmentDirectories,
		pathLoader:                pathLoader,

		alwaysValidate: config.AlwaysValidate,
		skipVCSRefresh: config.SkipVCSRefresh,

		errors:   make(chan compilercommon.SourceError, 32),
		warnings: make(chan compilercommon.SourceWarning, 32),

		handlers: handlersMap,
		parsers:  nil,

		pathKindsEncountered: cmap.New(),
		packageMap:           newMutablePackageMap(),

		// BUGFIX: use the defaulted pathLoader here; previously this passed
		// config.PathLoader directly, handing the tracker a nil loader
		// whenever the config omitted one.
		sourceTracker: newMutableSourceTracker(pathLoader),

		vcsPathsLoaded: cmap.New(),
		vcsLockMap:     compilerutil.CreateLockMap(),

		finished: make(chan bool, 1),

		cancelationHandle: compilerutil.GetCancelationHandle(config.cancelationHandle),
	}
}
// Load performs the loading of a Serulian package found at the directory path.
// Any libraries specified will be loaded as well. Load blocks until all
// transitive packages and modules have been processed (or loading is canceled).
func (p *PackageLoader) Load(libraries ...Library) LoadResult {
	// Start the parsers for each of the handlers.
	parsersMap := map[string]SourceHandlerParser{}
	for _, handler := range p.handlers {
		parser := handler.NewParser()
		if parser == nil {
			panic(fmt.Sprintf("Got a nil parser from handler `%s`", handler.Kind()))
		}
		parsersMap[handler.Kind()] = parser
	}
	p.parsers = parsersMap
	// Populate the libraries map.
	for _, library := range libraries {
		p.libraries[library.Alias] = library
	}
	// Start the error/warning collection goroutine.
	// NOTE: collectIssues mutates result concurrently; workTracker pairs each
	// queued issue with a Done, so Wait() below also flushes the channels.
	result := &LoadResult{
		Status:   true,
		Errors:   make([]compilercommon.SourceError, 0),
		Warnings: make([]compilercommon.SourceWarning, 0),
	}
	go p.collectIssues(result)
	// Add the root source file(s) as the first items to be parsed.
	entrypointPaths, err := p.entrypoint.EntrypointPaths(p.pathLoader)
	if err != nil {
		sourceRange := compilercommon.InputSource(string(p.entrypoint)).RangeForRunePosition(0, p.sourceTracker)
		result.Status = false
		result.Errors = append(result.Errors, compilercommon.SourceErrorf(sourceRange, "Could not resolve entrypoint path: %v", err))
		return *result
	}
	// Dispatch each entrypoint file to the handler whose extension matches.
	for _, path := range entrypointPaths {
		sourceRange := compilercommon.InputSource(path).RangeForRunePosition(0, p.sourceTracker)
		for _, handler := range p.handlers {
			if strings.HasSuffix(path, handler.PackageFileExtension()) {
				p.pushPath(pathSourceFile, handler.Kind(), path, sourceRange)
				break
			}
		}
	}
	// Add the libraries to be parsed.
	for _, library := range libraries {
		sourceRange := compilercommon.InputSource(library.PathOrURL).RangeForRunePosition(0, p.sourceTracker)
		p.pushLibrary(library, library.Kind, sourceRange)
	}
	// Wait for all packages and source files to be completed.
	p.workTracker.Wait()
	// Tell the goroutines to quit.
	p.finished <- true
	// If canceled, return immediately.
	if p.cancelationHandle.WasCanceled() {
		for _, parser := range p.parsers {
			parser.Cancel()
		}
		return LoadResult{
			Status:   false,
			Errors:   make([]compilercommon.SourceError, 0),
			Warnings: make([]compilercommon.SourceWarning, 0),
		}
	}
	// Save the package map.
	result.PackageMap = p.packageMap.Build()
	result.SourceTracker = p.sourceTracker.Freeze()
	// Apply all parser changes.
	for _, parser := range p.parsers {
		parser.Apply(result.PackageMap, result.SourceTracker, p.cancelationHandle)
	}
	// Perform verification in all parsers.
	// alwaysValidate forces verification even with parse errors (IDE tooling).
	if p.alwaysValidate || len(result.Errors) == 0 {
		errorReporter := func(err compilercommon.SourceError) {
			result.Errors = append(result.Errors, err)
			result.Status = false
		}
		warningReporter := func(warning compilercommon.SourceWarning) {
			result.Warnings = append(result.Warnings, warning)
		}
		for _, parser := range p.parsers {
			parser.Verify(errorReporter, warningReporter, p.cancelationHandle)
		}
	}
	if p.cancelationHandle.WasCanceled() {
		return LoadResult{
			Status:   false,
			Errors:   make([]compilercommon.SourceError, 0),
			Warnings: make([]compilercommon.SourceWarning, 0),
		}
	}
	return *result
}
// PathLoader returns the path loader used by this package manager.
func (p *PackageLoader) PathLoader() PathLoader {
	return p.pathLoader
}
// ModuleOrPackage defines a reference to a module or package.
type ModuleOrPackage struct {
	// Name is the name of the module or package.
	Name string
	// Path is the on-disk path of the module or package.
	Path string
	// SourceKind is the kind source for the module or package. Packages will always be
	// empty.
	SourceKind string
}
// ListSubModulesAndPackages lists all modules or packages found *directly* under the given path.
// Subdirectories are reported as packages (empty SourceKind); files matching a handler's
// extension are reported as modules of that handler's kind. Test modules are skipped.
func (p *PackageLoader) ListSubModulesAndPackages(packagePath string) ([]ModuleOrPackage, error) {
	directoryContents, err := p.pathLoader.LoadDirectory(packagePath)
	if err != nil {
		return []ModuleOrPackage{}, err
	}
	var modulesOrPackages = make([]ModuleOrPackage, 0, len(directoryContents))
	for _, entry := range directoryContents {
		// Filter any test modules.
		if strings.Contains(entry.Name, SerulianTestSuffix+".") {
			continue
		}
		if entry.IsDirectory {
			modulesOrPackages = append(modulesOrPackages, ModuleOrPackage{entry.Name, path.Join(packagePath, entry.Name), ""})
			continue
		}
		for _, handler := range p.handlers {
			if strings.HasSuffix(entry.Name, handler.PackageFileExtension()) {
				// Module name is the file name with the handler's extension stripped.
				name := entry.Name[0 : len(entry.Name)-len(handler.PackageFileExtension())]
				modulesOrPackages = append(modulesOrPackages, ModuleOrPackage{name, path.Join(packagePath, entry.Name), handler.Kind()})
				break
			}
		}
	}
	return modulesOrPackages, nil
}
// LocalPackageInfoForPath returns the package information for the given path. Note that VCS paths will
// be converted into their local package equivalent. If the path refers to a source file instead of a
// directory, a package containing the single module will be returned.
func (p *PackageLoader) LocalPackageInfoForPath(path string, sourceKind string, isVCSPath bool) (PackageInfo, error) {
	if isVCSPath {
		// Translate the VCS path to its checkout directory on disk.
		localPath, err := p.getVCSDirectoryForPath(path)
		if err != nil {
			return PackageInfo{}, err
		}
		path = localPath
	}
	// Find the source handler matching the source kind.
	handler, ok := p.handlers[sourceKind]
	if !ok {
		return PackageInfo{}, fmt.Errorf("Unknown source kind %s", sourceKind)
	}
	// Check for a single module: `path` plus the handler's extension naming a file.
	filePath := path + handler.PackageFileExtension()
	if p.pathLoader.IsSourceFile(filePath) {
		return PackageInfo{
			kind:        sourceKind,
			referenceID: filePath,
			modulePaths: []compilercommon.InputSource{compilercommon.InputSource(filePath)},
		}, nil
	}
	// Otherwise, read the contents of the directory.
	return p.packageInfoForPackageDirectory(path, sourceKind)
}
// packageInfoForPackageDirectory returns a PackageInfo for the package found at the given
// directory path, collecting all non-test source files of the given kind as its modules.
func (p *PackageLoader) packageInfoForPackageDirectory(packagePath string, sourceKind string) (PackageInfo, error) {
	directoryContents, err := p.pathLoader.LoadDirectory(packagePath)
	if err != nil {
		return PackageInfo{}, err
	}
	handler, ok := p.handlers[sourceKind]
	if !ok {
		return PackageInfo{}, fmt.Errorf("Unknown source kind %s", sourceKind)
	}
	packageInfo := &PackageInfo{
		kind:        sourceKind,
		referenceID: packagePath,
		modulePaths: make([]compilercommon.InputSource, 0),
	}
	// Find all source files in the directory and add them to the paths list.
	for _, entry := range directoryContents {
		// Filter any test modules.
		if strings.Contains(entry.Name, SerulianTestSuffix+".") {
			continue
		}
		if !entry.IsDirectory && path.Ext(entry.Name) == handler.PackageFileExtension() {
			filePath := path.Join(packagePath, entry.Name)
			// Add the source file to the package information.
			packageInfo.modulePaths = append(packageInfo.modulePaths, compilercommon.InputSource(filePath))
		}
	}
	return *packageInfo, nil
}
// getVCSDirectoryForPath returns the directory on disk where the given VCS path will be placed,
// if any, honoring any configured VCS development directories.
func (p *PackageLoader) getVCSDirectoryForPath(vcsPath string) (string, error) {
	pkgDirectory := p.pathLoader.VCSPackageDirectory(p.entrypoint)
	return vcs.GetVCSCheckoutDirectory(vcsPath, pkgDirectory, p.vcsDevelopmentDirectories...)
}
// pushLibrary schedules a library for processing by the package loader. SCM
// libraries are queued as remote VCS packages; all others as local packages.
func (p *PackageLoader) pushLibrary(library Library, kind string, sourceRange compilercommon.SourceRange) string {
	libraryPathKind := pathLocalPackage
	if library.IsSCM {
		libraryPathKind = pathVCSPackage
	}
	return p.pushPath(libraryPathKind, kind, library.PathOrURL, sourceRange)
}
// pushPath adds a path to be processed by the package loader, using the path itself as its ID.
func (p *PackageLoader) pushPath(kind pathKind, sourceKind string, path string, sourceRange compilercommon.SourceRange) string {
	return p.pushPathWithId(path, sourceKind, kind, path, sourceRange)
}
// pushPathWithId adds a path to be processed by the package loader, with the specified ID.
// Processing happens on a new goroutine tracked by workTracker; the ID is returned so callers
// can reference the queued work.
func (p *PackageLoader) pushPathWithId(pathId string, sourceKind string, kind pathKind, path string, sourceRange compilercommon.SourceRange) string {
	if p.cancelationHandle.WasCanceled() {
		// Once canceled, no new work is scheduled.
		return pathId
	}
	p.workTracker.Add(1)
	go p.loadAndParsePath(pathInformation{pathId, kind, path, sourceKind, sourceRange})
	return pathId
}
// loadAndParsePath parses or loads a specific path, dispatching on the path's kind.
// Runs on its own goroutine; deduplicates work via pathKindsEncountered.
func (p *PackageLoader) loadAndParsePath(currentPath pathInformation) {
	defer p.workTracker.Done()
	if p.cancelationHandle.WasCanceled() {
		return
	}
	// Ensure we have not already seen this path and kind.
	pathKey := currentPath.String()
	if !p.pathKindsEncountered.SetIfAbsent(pathKey, true) {
		return
	}
	// Perform parsing/loading.
	switch currentPath.kind {
	case pathSourceFile:
		p.conductParsing(currentPath)
	case pathLocalPackage:
		p.loadLocalPackage(currentPath)
	case pathVCSPackage:
		p.loadVCSPackage(currentPath)
	}
}
// loadVCSPackage loads the package found at the given VCS path: checks it out (or reuses the
// cached checkout), then requeues the resulting local directory as a local package.
func (p *PackageLoader) loadVCSPackage(packagePath pathInformation) {
	if p.cancelationHandle.WasCanceled() {
		return
	}
	// Lock on the package path to ensure no other checkouts occur for this path.
	pathLock := p.vcsLockMap.GetLock(packagePath.path)
	pathLock.Lock()
	defer pathLock.Unlock()
	existingCheckoutDir, exists := p.vcsPathsLoaded.Get(packagePath.path)
	if exists {
		// Note: existingCheckoutDir will be empty if there was an error loading the VCS.
		if existingCheckoutDir != "" {
			// Push the now-local directory onto the package loading channel.
			p.pushPathWithId(packagePath.referenceID, packagePath.sourceKind, pathLocalPackage, existingCheckoutDir.(string), packagePath.sourceRange)
			return
		}
	}
	// Perform the checkout of the VCS package.
	var cacheOption = vcs.VCSFollowNormalCacheRules
	if p.skipVCSRefresh {
		cacheOption = vcs.VCSAlwaysUseCache
	}
	pkgDirectory := p.pathLoader.VCSPackageDirectory(p.entrypoint)
	result, err := vcs.PerformVCSCheckout(packagePath.path, pkgDirectory, cacheOption, p.vcsDevelopmentDirectories...)
	if err != nil {
		// Record the failure (empty dir) so later imports of the same path skip the checkout.
		p.vcsPathsLoaded.Set(packagePath.path, "")
		p.enqueueError(compilercommon.SourceErrorf(packagePath.sourceRange, "Error loading VCS package '%s': %v", packagePath.path, err))
		return
	}
	p.vcsPathsLoaded.Set(packagePath.path, result.PackageDirectory)
	if result.Warning != "" {
		p.enqueueWarning(compilercommon.NewSourceWarning(packagePath.sourceRange, result.Warning))
	}
	// Check for VCS version different than a library: warn when a source file imports a
	// different ref of a package than the one the library declares.
	packageVCSPath, _ := vcs.ParseVCSPath(packagePath.path)
	for _, library := range p.libraries {
		if library.IsSCM && library.Kind == packagePath.sourceKind {
			libraryVCSPath, err := vcs.ParseVCSPath(library.PathOrURL)
			if err != nil {
				continue
			}
			if libraryVCSPath.URL() == packageVCSPath.URL() {
				if libraryVCSPath.String() != packageVCSPath.String() {
					p.enqueueWarning(compilercommon.SourceWarningf(packagePath.sourceRange,
						"Library specifies VCS package `%s` but source file is loading `%s`, which could lead to incompatibilities. It is recommended to upgrade the package in the source file.",
						libraryVCSPath.String(), packageVCSPath.String()))
				}
				break
			}
		}
	}
	// Push the now-local directory onto the package loading channel.
	p.pushPathWithId(packagePath.referenceID, packagePath.sourceKind, pathLocalPackage, result.PackageDirectory, packagePath.sourceRange)
}
// loadLocalPackage loads the package found at the path relative to the package
// directory: queues each of its modules for parsing, registers the package in
// the package map, and warns when the package contains no source files at all.
func (p *PackageLoader) loadLocalPackage(packagePath pathInformation) {
	packageInfo, err := p.packageInfoForPackageDirectory(packagePath.path, packagePath.sourceKind)
	if err != nil {
		p.enqueueError(compilercommon.SourceErrorf(packagePath.sourceRange, "Could not load directory '%s'", packagePath.path))
		return
	}

	// Queue every module in the package for parsing.
	modulePaths := packageInfo.ModulePaths()
	for _, modulePath := range modulePaths {
		p.pushPath(pathSourceFile, packagePath.sourceKind, string(modulePath), packagePath.sourceRange)
	}

	// Register the package itself in the package map.
	p.packageMap.Add(packagePath.sourceKind, packagePath.referenceID, packageInfo)

	if len(modulePaths) == 0 {
		p.enqueueWarning(compilercommon.SourceWarningf(packagePath.sourceRange, "Package '%s' has no source files", packagePath.path))
	}
}
// conductParsing performs parsing of a source file found at the given path: registers it as a
// single-module package, loads its contents and revision, records it in the source tracker,
// and hands it to the parser for its kind.
func (p *PackageLoader) conductParsing(sourceFile pathInformation) {
	inputSource := compilercommon.InputSource(sourceFile.path)
	// Add the file to the package map as a package of one file.
	p.packageMap.Add(sourceFile.sourceKind, sourceFile.referenceID, PackageInfo{
		kind:        sourceFile.sourceKind,
		referenceID: sourceFile.referenceID,
		modulePaths: []compilercommon.InputSource{inputSource},
	})
	// Load the source file's contents.
	contents, err := p.pathLoader.LoadSourceFile(sourceFile.path)
	if err != nil {
		p.enqueueError(compilercommon.SourceErrorf(sourceFile.sourceRange, "Could not load source file '%s': %v", sourceFile.path, err))
		return
	}
	// Load the source file's revision ID.
	revisionID, err := p.pathLoader.GetRevisionID(sourceFile.path)
	if err != nil {
		p.enqueueError(compilercommon.SourceErrorf(sourceFile.sourceRange, "Could not load source file '%s': %v", sourceFile.path, err))
		return
	}
	// Add the source file to the tracker.
	p.sourceTracker.AddSourceFile(compilercommon.InputSource(sourceFile.path), sourceFile.sourceKind, contents, revisionID)
	// Parse the source file. Imports discovered during parsing flow back through handleImport.
	parser, hasParser := p.parsers[sourceFile.sourceKind]
	if !hasParser {
		log.Fatalf("Missing handler for source file of kind: [%v]", sourceFile.sourceKind)
	}
	parser.Parse(inputSource, string(contents), p.handleImport)
}
// verifyNoVCSBoundaryCross does a check to ensure that walking from the given start path
// up to the given end path does not cross a VCS boundary. If it does, an error is returned.
// startPath must be at or below endPath for the walk to terminate at endPath; otherwise the
// walk ends at the filesystem root.
func (p *PackageLoader) verifyNoVCSBoundaryCross(startPath string, endPath string, title string, importInformation PackageImport) *compilercommon.SourceError {
	var checkPath = startPath
	for {
		if checkPath == endPath {
			return nil
		}
		if vcs.IsVCSRootDirectory(checkPath) {
			err := compilercommon.SourceErrorf(importInformation.SourceRange,
				"Import of %s '%s' crosses VCS boundary at package '%s'", title,
				importInformation.Path, checkPath)
			return &err
		}
		// Walk one directory upward; stop once path.Dir no longer changes (root reached).
		nextPath := path.Dir(checkPath)
		if checkPath == nextPath {
			return nil
		}
		checkPath = nextPath
	}
}
// handleImport queues an import found in a source file, resolving it to a library alias,
// a VCS package, a single local module, or a local package directory. Returns the reference
// ID of the queued work, or "" on error.
func (p *PackageLoader) handleImport(sourceKind string, importPath string, importType PackageImportType, importSource compilercommon.InputSource, runePosition int) string {
	sourceRange := importSource.RangeForRunePosition(runePosition, p.sourceTracker)
	importInformation := PackageImport{sourceKind, importPath, importType, sourceRange}
	handler, hasHandler := p.handlers[importInformation.Kind]
	if !hasHandler {
		p.enqueueError(compilercommon.SourceErrorf(importInformation.SourceRange, "Unknown kind of import '%s'. Did you forgot to install a source plugin?", importInformation.Kind))
		return ""
	}
	// Check for a library alias.
	switch importInformation.ImportType {
	case ImportTypeAlias:
		// Aliases get pushed as their library.
		libraryName := importInformation.Path
		library, found := p.libraries[libraryName]
		if !found {
			p.enqueueError(compilercommon.SourceErrorf(importInformation.SourceRange, "Import alias `%s` not found", libraryName))
			return ""
		}
		return p.pushLibrary(library, importInformation.Kind, importInformation.SourceRange)
	case ImportTypeVCS:
		// VCS paths get added directly.
		return p.pushPath(pathVCSPackage, importInformation.Kind, importInformation.Path, importInformation.SourceRange)
	}
	// Check the path to see if it exists as a single source file. If so, we add it
	// as a source file instead of a local package.
	sourcePath := string(importInformation.SourceRange.Source())
	currentDirectory := path.Dir(sourcePath)
	dirPath := path.Join(currentDirectory, importInformation.Path)
	filePath := dirPath + handler.PackageFileExtension()
	var importedDirectoryPath = dirPath
	var title = "package"
	// Determine if path refers to a single source file. If so, it is imported rather than
	// the entire directory.
	isSourceFile := p.pathLoader.IsSourceFile(filePath)
	if isSourceFile {
		title = "module"
		importedDirectoryPath = path.Dir(filePath)
	}
	// Check to ensure we are not crossing a VCS boundary.
	if currentDirectory != importedDirectoryPath {
		// If the imported directory is underneath the current directory, we need to walk upward.
		if strings.HasPrefix(importedDirectoryPath, currentDirectory) {
			err := p.verifyNoVCSBoundaryCross(importedDirectoryPath, currentDirectory, title, importInformation)
			if err != nil {
				p.enqueueError(*err)
				return ""
			}
		} else {
			// Otherwise, we walk upward from the current directory to the imported directory.
			err := p.verifyNoVCSBoundaryCross(currentDirectory, importedDirectoryPath, title, importInformation)
			if err != nil {
				p.enqueueError(*err)
				return ""
			}
		}
	}
	// Push the imported path.
	if isSourceFile {
		return p.pushPath(pathSourceFile, handler.Kind(), filePath, importInformation.SourceRange)
	}
	return p.pushPath(pathLocalPackage, handler.Kind(), dirPath, importInformation.SourceRange)
}
// enqueueError queues an error to be added to the Errors slice in the Result.
// The workTracker increment is balanced by collectIssues when it drains the channel.
func (p *PackageLoader) enqueueError(err compilercommon.SourceError) {
	p.workTracker.Add(1)
	p.errors <- err
}
// enqueueWarning queues a warning to be added to the Warnings slice in the Result.
// The workTracker increment is balanced by collectIssues when it drains the channel.
func (p *PackageLoader) enqueueWarning(warning compilercommon.SourceWarning) {
	p.workTracker.Add(1)
	p.warnings <- warning
}
// collectIssues watches the errors and warnings channels to collect those issues as they
// are added, appending them to the shared result until the finished channel fires.
// Runs as a single background goroutine started by Load.
func (p *PackageLoader) collectIssues(result *LoadResult) {
	for {
		select {
		case newError := <-p.errors:
			result.Errors = append(result.Errors, newError)
			result.Status = false
			p.workTracker.Done()
		case newWarnings := <-p.warnings:
			result.Warnings = append(result.Warnings, newWarnings)
			p.workTracker.Done()
		case <-p.finished:
			return
		}
	}
}
| Serulian/compiler | packageloader/packageloader.go | GO | bsd-3-clause | 23,170 |
from .factory import factorize, ArgumentError, NonExistentTypeError, NonExistentModuleError | engine-cl/ng-factory | ng_factory/__init__.py | Python | bsd-3-clause | 91 |
# Import test modules at package level so the test runner discovers them.
from . import test_invoicing
| mycodeday/crm-platform | stock_dropshipping/tests/__init__.py | Python | gpl-3.0 | 29 |
package test;
import java.io.File;
import java.sql.*;
import org.junit.*;
import static org.junit.Assert.*;
/** These tests check whether access to files is working correctly and
 * some Connection.close() cases. */
public class ConnectionTest
{
    // Register the SQLite JDBC driver once for all tests.
    @BeforeClass public static void forName() throws Exception {
        Class.forName("org.sqlite.JDBC");
    }

    // An empty path opens a transient in-memory database.
    @Test public void openMemory() throws SQLException {
        Connection conn = DriverManager.getConnection("jdbc:sqlite:");
        conn.close();
    }

    // isClosed() must flip to true after close().
    @Test public void isClosed() throws SQLException {
        Connection conn = DriverManager.getConnection("jdbc:sqlite:");
        assertFalse(conn.isReadOnly());
        conn.close();
        assertTrue(conn.isClosed());
    }

    // Opening a file-backed database must create the file; reopening must keep it.
    @Test public void openFile() throws SQLException {
        File testdb = new File("test.db");
        if (testdb.exists()) testdb.delete();
        assertFalse(testdb.exists());
        Connection conn = DriverManager.getConnection("jdbc:sqlite:test.db");
        assertFalse(conn.isReadOnly());
        conn.close();
        assertTrue(testdb.exists());

        conn = DriverManager.getConnection("jdbc:sqlite:test.db");
        assertFalse(conn.isReadOnly());
        conn.close();
        assertTrue(testdb.exists());
        testdb.delete();
    }

    // Using a statement after its connection is closed must raise SQLException.
    @Test(expected= SQLException.class)
    public void closeTest() throws SQLException {
        Connection conn = DriverManager.getConnection("jdbc:sqlite:");
        PreparedStatement prep = conn.prepareStatement("select null;");
        ResultSet rs = prep.executeQuery();
        conn.close();
        prep.clearParameters();
    }
}
| gwenn/sqlitejdbc | src/test/ConnectionTest.java | Java | isc | 1,650 |
using System;
using System.Collections.Generic;
namespace Protobuild
{
    /// <summary>
    /// Resolves executables for known global tools, installing the tool's
    /// package on demand when it is known but not yet present.
    /// </summary>
    public class KnownToolProvider : IKnownToolProvider
    {
        private readonly IPackageGlobalTool _packageGlobalTool;

        private readonly IPackageManager _packageManager;

        private readonly IHostPlatformDetector _hostPlatformDetector;

        // Maps a lowercased tool name to the package URI that provides it.
        private readonly Dictionary<string, string> _knownTools = new Dictionary<string, string>
        {
            {"jsilc", "http://protobuild.org/hach-que/JSIL"},
            {"swig", "http://protobuild.org/hach-que/SWIG"},
        };

        public KnownToolProvider(
            IPackageGlobalTool packageGlobalTool,
            IPackageManager packageManager,
            IHostPlatformDetector hostPlatformDetector)
        {
            _packageGlobalTool = packageGlobalTool;
            _packageManager = packageManager;
            _hostPlatformDetector = hostPlatformDetector;
        }

        /// <summary>
        /// Returns the path to the tool's executable, installing the tool's
        /// package first when the tool is known but not yet installed.
        /// Returns null when the tool is unknown and not installed.
        /// </summary>
        public string GetToolExecutablePath(string toolName)
        {
            var executableFile = _packageGlobalTool.ResolveGlobalToolIfPresent(toolName);
            if (executableFile != null)
            {
                return executableFile;
            }

            // Lowercase once and use a single dictionary lookup instead of
            // ContainsKey followed by the indexer.
            var normalizedName = toolName.ToLowerInvariant();
            string packageUri;
            if (!_knownTools.TryGetValue(normalizedName, out packageUri))
            {
                // Unknown tool and not installed; nothing to resolve.
                return null;
            }

            var package = new PackageRef
            {
                Uri = packageUri,
                GitRef = "master",
                Folder = null
            };
            Console.WriteLine("Installing {0}...", packageUri);
            _packageManager.Resolve(null, package, _hostPlatformDetector.DetectPlatform(), null, false, true);

            // Re-resolve now that the package has been installed.
            return _packageGlobalTool.ResolveGlobalToolIfPresent(toolName);
        }
    }
}
| dellis1972/Protobuild | Protobuild.Internal/Packages/PackageGlobalTool/KnownToolProvider.cs | C# | mit | 1,836 |
// QUnit tests for lunr.tokenizer (requires the QUnit and lunr globals).
module('lunr.tokenizer')
// Basic whitespace splitting.
test("splitting simple strings into tokens", function () {
  var simpleString = "this is a simple string",
      tokens = lunr.tokenizer(simpleString)
  deepEqual(tokens, ['this', 'is', 'a', 'simple', 'string'])
})
// Tokens are lowercased whether input is a string or an array.
test('downcasing tokens', function () {
  var simpleString = 'FOO BAR',
      tags = ['Foo', 'BAR']
  deepEqual(lunr.tokenizer(simpleString), ['foo', 'bar'])
  deepEqual(lunr.tokenizer(tags), ['foo', 'bar'])
})
// Arrays pass through as token lists.
test('handling arrays', function () {
  var tags = ['foo', 'bar'],
      tokens = lunr.tokenizer(tags)
  deepEqual(tokens, tags)
})
// Runs of whitespace collapse and edges are trimmed.
test('handling multiple white spaces', function () {
  var testString = '   foo    bar  ',
      tokens = lunr.tokenizer(testString)
  deepEqual(tokens, ['foo', 'bar'])
})
// null/undefined/missing input yields an empty token list.
test('handling null-like arguments', function () {
  deepEqual(lunr.tokenizer(), [])
  deepEqual(lunr.tokenizer(null), [])
  deepEqual(lunr.tokenizer(undefined), [])
})
// Non-string values are converted via their toString().
test('calling to string on passed val', function () {
  var date = new Date (Date.UTC(2013, 0, 1)),
      obj = {
        toString: function () { return 'custom object' }
      }
  equal(lunr.tokenizer(41), '41')
  equal(lunr.tokenizer(false), 'false')
  deepEqual(lunr.tokenizer(obj), ['custom', 'object'])
  // slicing here to avoid asserting on the timezone part of the date
  // that will be different whereever the test is run.
  deepEqual(lunr.tokenizer(date).slice(0, 4), ['tue', 'jan', '01', '2013'])
})
// Hyphens act as token separators.
test("splitting strings with hyphens", function () {
  var simpleString = "take the New York-San Francisco flight",
      tokens = lunr.tokenizer(simpleString)
  deepEqual(tokens, ['take', 'the', 'new', 'york', 'san', 'francisco', 'flight'])
})
| nodoio/kb-pulseeditor | node_modules/lunr/test/tokenizer_test.js | JavaScript | mit | 1,694 |
/*
* Copyright (c) 2017. MIT-license for Jari Van Melckebeke
* Note that there was a lot of educational work in this project,
* this project was (or is) used for an assignment from Realdolmen in Belgium.
* Please just don't abuse my work
*/
//! moment.js locale configuration
//! locale : Klingon (tlh)
//! author : Dominika Kruk : https://github.com/amaranthrose
import moment from '../moment';
// Klingon number words for digits 0-9, used to spell out counts below.
var numbersNouns = 'pagh_wa’_cha’_wej_loS_vagh_jav_Soch_chorgh_Hut'.split('_');

// Rewrite a relative-time phrase into its future form: the unit suffixes
// jaj/jar/DIS are replaced in place, anything else gets ' pIq' appended.
function translateFuture(output) {
    if (output.indexOf('jaj') !== -1) {
        return output.slice(0, -3) + 'leS';
    }
    if (output.indexOf('jar') !== -1) {
        return output.slice(0, -3) + 'waQ';
    }
    if (output.indexOf('DIS') !== -1) {
        return output.slice(0, -3) + 'nem';
    }
    return output + ' pIq';
}

// Rewrite a relative-time phrase into its past form; mirrors translateFuture.
function translatePast(output) {
    if (output.indexOf('jaj') !== -1) {
        return output.slice(0, -3) + 'Hu’';
    }
    if (output.indexOf('jar') !== -1) {
        return output.slice(0, -3) + 'wen';
    }
    if (output.indexOf('DIS') !== -1) {
        return output.slice(0, -3) + 'ben';
    }
    return output + ' ret';
}

// Spell out a count followed by its Klingon time unit. Only the plural
// relativeTime keys (mm/hh/dd/MM/yy) are handled; other keys fall through
// and return undefined, exactly as the original switch did.
function translate(number, withoutSuffix, string, isFuture) {
    var units = {
        mm: 'tup',
        hh: 'rep',
        dd: 'jaj',
        MM: 'jar',
        yy: 'DIS'
    };
    if (units.hasOwnProperty(string)) {
        return numberAsNoun(number) + ' ' + units[string];
    }
}

// Convert an integer (0-999 meaningful) into its Klingon spelled-out form;
// hundreds/tens/ones parts are space-joined, zero is 'pagh'.
function numberAsNoun(number) {
    var parts = [],
        hundred = Math.floor((number % 1000) / 100),
        ten = Math.floor((number % 100) / 10),
        one = number % 10;
    if (hundred > 0) {
        parts.push(numbersNouns[hundred] + 'vatlh');
    }
    if (ten > 0) {
        parts.push(numbersNouns[ten] + 'maH');
    }
    if (one > 0) {
        parts.push(numbersNouns[one]);
    }
    return parts.length === 0 ? 'pagh' : parts.join(' ');
}
// Register the Klingon (tlh) locale with moment. Note all display strings
// use the right single quotation mark (’), not the ASCII apostrophe.
export default moment.defineLocale('tlh', {
    months : 'tera’ jar wa’_tera’ jar cha’_tera’ jar wej_tera’ jar loS_tera’ jar vagh_tera’ jar jav_tera’ jar Soch_tera’ jar chorgh_tera’ jar Hut_tera’ jar wa’maH_tera’ jar wa’maH wa’_tera’ jar wa’maH cha’'.split('_'),
    monthsShort : 'jar wa’_jar cha’_jar wej_jar loS_jar vagh_jar jav_jar Soch_jar chorgh_jar Hut_jar wa’maH_jar wa’maH wa’_jar wa’maH cha’'.split('_'),
    monthsParseExact : true,
    // Full, short and minimal weekday names are identical in this locale.
    weekdays : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'),
    weekdaysShort : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'),
    weekdaysMin : 'lojmItjaj_DaSjaj_povjaj_ghItlhjaj_loghjaj_buqjaj_ghInjaj'.split('_'),
    longDateFormat : {
        LT : 'HH:mm',
        LTS : 'HH:mm:ss',
        L : 'DD.MM.YYYY',
        LL : 'D MMMM YYYY',
        LLL : 'D MMMM YYYY HH:mm',
        LLLL : 'dddd, D MMMM YYYY HH:mm'
    },
    calendar : {
        sameDay: '[DaHjaj] LT',
        nextDay: '[wa’leS] LT',
        nextWeek: 'LLL',
        lastDay: '[wa’Hu’] LT',
        lastWeek: 'LLL',
        sameElse: 'L'
    },
    // Plural units delegate to the translate/numberAsNoun helpers above;
    // future/past suffixing is handled by translateFuture/translatePast.
    relativeTime : {
        future : translateFuture,
        past : translatePast,
        s : 'puS lup',
        m : 'wa’ tup',
        mm : translate,
        h : 'wa’ rep',
        hh : translate,
        d : 'wa’ jaj',
        dd : translate,
        M : 'wa’ jar',
        MM : translate,
        y : 'wa’ DIS',
        yy : translate
    },
    ordinalParse: /\d{1,2}\./,
    ordinal : '%d.',
    week : {
        dow : 1, // Monday is the first day of the week.
        doy : 4  // The week that contains Jan 4th is the first week of the year.
    }
});
| N00bface/Real-Dolmen-Stage-Opdrachten | stageopdracht/src/main/resources/static/vendors/moment/src/locale/tlh.js | JavaScript | mit | 3,713 |
#!/usr/bin/env php
<?php
// Input file names and target stack come from the environment, with
// sensible Heroku defaults when the variables are unset or empty.
$COMPOSER = getenv("COMPOSER")?:"composer.json";
$COMPOSER_LOCK = getenv("COMPOSER_LOCK")?:"composer.lock";
$STACK = getenv("STACK")?:"cedar-14";
// Prefix every requirement name with "heroku-sys/", keeping the original
// version constraints and key order intact.
function mkdep($require) {
    $prefixed = [];
    foreach ($require as $name => $constraint) {
        $prefixed["heroku-sys/$name"] = $constraint;
    }
    return $prefixed;
}
// Does this require section demand a runtime (php or hhvm)?
// Uses isset() deliberately, so a null constraint does not count.
function hasreq($require) {
    foreach (["php", "hhvm"] as $runtime) {
        if (isset($require[$runtime])) {
            return true;
        }
    }
    return false;
}
// filter platform reqs: keep only php, php-64bit, hhvm and ext-* keys
$platfilter = function($v) { return preg_match("#^(hhvm$|php(-64bit)?$|ext-)#", $v); };
// remove first arg (0)
array_shift($argv);
// base repos we need - no packagist, and the installer plugin path (first arg)
$repositories = [
    ["packagist" => false],
    ["type" => "path", "url" => array_shift($argv)],
];
// all other args are repo URLs; they get passed in ascending order of precedence, so we reverse
foreach(array_reverse($argv) as $repo) $repositories[] = ["type" => "composer", "url" => $repo];
// the source composer.json must exist and parse to an array, else abort
$json = json_decode(file_get_contents($COMPOSER), true);
if(!is_array($json)) exit(1);
$have_runtime_req = false;
$require = [];
if(file_exists($COMPOSER_LOCK)) {
    $lock = json_decode(file_get_contents($COMPOSER_LOCK), true);
    // basic lock file validity check
    if(!$lock || !isset($lock["platform"], $lock["packages"], $lock["hash"])) exit(1);
    $have_runtime_req |= hasreq($lock["platform"]);
    // for each package that has platform requirements we build a meta-package that we then depend on
    // we cannot simply join all those requirements together with " " or "," because of the precedence of the "|" operator: requirements "5.*," and "^5.3.9|^7.0", which should lead to a PHP 5 install, would combine into "5.*,^5.3.9|^7.0" (there is no way to group requirements), and that would give PHP 7
    $metapaks = [];
    // whatever is in the lock "platform" key will be turned into a meta-package too, named "composer.json/composer.lock"
    // this will result in an installer event for that meta-package, from which we can extract what extensions that are bundled (and hence "replace"d) with the runtime need to be enabled
    // if we do not do this, then a require for e.g. ext-curl or ext-mbstring in the main composer.json cannot be found by the installer plugin
    $root = [
        "name" => "$COMPOSER/$COMPOSER_LOCK",
        "version" => "dev-".$lock["hash"],
        "require" => $lock["platform"],
        "replace" => isset($json["replace"]) ? $json["replace"] : [],
        "provide" => isset($json["provide"]) ? $json["provide"] : [],
        "conflict" => isset($json["conflict"]) ? $json["conflict"] : [],
    ];
    $lock["packages"][] = $root;
    // the generated composer.json depends on the root meta-package by exact version
    $require = [
        $root["name"] => $root["version"],
    ];
    foreach($lock["packages"] as $package) {
        // extract only platform requires, replaces and provides
        $preq = array_filter(isset($package["require"]) ? $package["require"] : [], $platfilter, ARRAY_FILTER_USE_KEY);
        $prep = array_filter(isset($package["replace"]) ? $package["replace"] : [], $platfilter, ARRAY_FILTER_USE_KEY);
        $ppro = array_filter(isset($package["provide"]) ? $package["provide"] : [], $platfilter, ARRAY_FILTER_USE_KEY);
        $pcon = array_filter(isset($package["conflict"]) ? $package["conflict"] : [], $platfilter, ARRAY_FILTER_USE_KEY);
        $have_runtime_req |= hasreq($preq);
        $metapaks[] = [
            "type" => "metapackage",
            // we re-use the dep name and version, makes for nice error messages if dependencies cannot be fulfilled :)
            "name" => $package["name"],
            "version" => $package["version"],
            "require" => mkdep($preq),
            "replace" => mkdep($prep),
            "provide" => mkdep($ppro),
            "conflict" => mkdep($pcon),
        ];
        $require[$package["name"]] = $package["version"];
    }
    // add all meta-packages to one local package repo
    if($metapaks) $repositories[] = ["type" => "package", "package" => $metapaks];
}
// if no PHP or HHVM is required anywhere, we need to add something
if(!$have_runtime_req) {
    file_put_contents("php://stderr", "NOTICE: No runtime required in $COMPOSER_LOCK; using PHP ". ($require["heroku-sys/php"] = "^5.5.17") . "\n");
} elseif(!isset($root["require"]["php"]) && !isset($root["require"]["hhvm"])) {
    file_put_contents("php://stderr", "NOTICE: No runtime required in $COMPOSER; requirements\nfrom dependencies in $COMPOSER_LOCK will be used for selection\n");
}
// every build always gets a web server pair
$require["heroku-sys/apache"] = "^2.4.10";
$require["heroku-sys/nginx"] = "~1.8.0";
// split e.g. "cedar-14" into name and number; version encodes today's date
preg_match("#^([^-]+)(?:-([0-9]+))?\$#", $STACK, $stack);
$provide = ["heroku-sys/".$stack[1] => (isset($stack[2])?$stack[2]:"1").gmdate(".Y.m.d")]; # cedar: 14.2016.02.16 etc
// emit the generated platform composer.json on stdout
$json = [
    "config" => ["cache-files-ttl" => 0, "discard-changes" => true],
    "minimum-stability" => isset($lock["minimum-stability"]) ? $lock["minimum-stability"] : "stable",
    "prefer-stable" => isset($lock["prefer-stable"]) ? $lock["prefer-stable"] : false,
    "provide" => $provide,
    "repositories" => $repositories,
    "require" => $require,
];
echo json_encode($json, JSON_PRETTY_PRINT);
| Hyra/heroku-buildpack-enc | bin/util/platform.php | PHP | mit | 4,934 |
'use strict';
// Run the API against its test configuration/database.
process.env.NODE_ENV = 'run-tests';
var assert = require('assert');
var app = require('../server.js');
var Helper = require('./test-helper')(app);
var db = require('../server/models/models.js');
var request = require('supertest-as-promised');
var chai = require('chai');
var chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
var expect = chai.expect;
// Fixture credentials shared by every suite in this file.
var adminUserCreds = {username: 'adminuser@test.fi', password: 'adminPassword', admin: true};
var testUserCreds = {username: 'testuser@test.fi', password: 'testPassword'};
// Module-level fixture state; individual suites (re)populate these in before().
var adminUser, testUser;
var testUserId, adminUserId;
var testUserToken, adminUserToken;
var server;
// Authentication flow: login, logout, and token enforcement.
// Fix applied: the first test both returned a Promise AND used the done()
// callback (mocha rejects that as an overspecified resolution, and done()
// fired even after an error); async chains also gained .catch(done) so
// failures surface instead of timing out.
describe("Authentication", function () {
    // Suite-local fixture ids; the module-level adminUser/adminUserId/
    // adminUserToken/testUser variables are populated below as well.
    var organizationId, testUserId;
    var userRoles = ['user'];

    before(function () {
        // Fixture chain: organization -> admin user (+login for a token)
        // -> regular user -> link the regular user to the organization.
        return Helper.createOrganization()
            .then(function (org) {
                organizationId = org._id;
            })
            .then(function () {
                return Helper.createUser(adminUserCreds);
            })
            .then(function (adminUserData) {
                adminUser = adminUserData;
                adminUserId = adminUserData._id;
            })
            .then(function () {
                return Helper.login(adminUserCreds);
            })
            .then(function (userCreds) {
                adminUserToken = userCreds.token;
                return Helper.createUser(testUserCreds);
            })
            .then(function (testUserData) {
                testUser = testUserData;
                testUserId = testUserData._id;
            })
            .then(function () {
                return Helper.createUserOrganization(testUserId, organizationId, userRoles);
            });
    });

    after(function () {
        // Drop all fixture data so later suites start from a clean DB.
        return Helper.clearDB();
    });

    it("Non-autenticated request should be rejected by default", function (done) {
        // No Authorization header at all -> 401.
        request(app)
            .get('/users/' + testUser.id)
            .expect(401, done);
    });

    it("Authenticated request by admin should be accepted", function (done) {
        request(app)
            .get('/users/' + adminUser.id)
            .set('Authorization', 'Bearer ' + adminUserToken)
            .expect(200, done);
    });

    it("Login with wrong credentials should be rejected", function (done) {
        request(app)
            .post('/users/login')
            .send({username: 'testuser@test.fi', password: 'invalidPassword'})
            .type('application/json')
            .accept('json')
            .expect(401, done);
    });

    it("Login with valid credentials should be accepted", function (done) {
        request(app)
            .post('/users/login')
            .send(testUserCreds)
            .type('application/json')
            .accept('json')
            .expect(200, done);
    });

    it("Logout with valid credentials should be accepted", function (done) {
        // Log in to obtain a fresh token, then log out with it.
        Helper.login(testUserCreds)
            .then(function (credentials) {
                request(app)
                    .get('/users/logout')
                    .set('Authorization', 'Bearer ' + credentials.token)
                    .expect(200, done);
            })
            .catch(done);
    });

    it("Request using former access token after logout should fail", function (done) {
        // A token must be unusable once its session has been destroyed.
        var token;
        Helper.login(testUserCreds)
            .then(function (credentials) {
                token = credentials.token;
                return Helper.logout(credentials);
            })
            .then(function (res) {
                request(app)
                    .get('/users/current')
                    .set('Authorization', 'Bearer ' + token)
                    .expect(401, done);
            })
            .catch(done);
    });
});
// CRUD coverage for inventory items, exercised with both a plain user
// token and an admin token against the nested org/inventory routes.
describe("Items", function () {
    var token, adminToken, organizationId, inventoryId, itemId, adminUserId, userId;
    before(function () {
        // Fixture chain: regular user (+token) -> admin user (+token)
        // -> organization -> inventory -> item -> role links for both users.
        return Helper.createUser(testUserCreds)
            .then(function (user) {
                userId = user._id;
                return Helper.login(testUserCreds)
                    .then(function (tokenData) {
                        token = tokenData.token;
                    });
            })
            .then(function () {
                return Helper.createUser(adminUserCreds)
                    .then(function (adminUserData) {
                        adminUserId = adminUserData._id;
                        return Helper.login(adminUserCreds);
                    })
                    .then(function (adminCreds) {
                        adminToken = adminCreds.token;
                    })
            })
            .then(function () {
                return Helper.createOrganization()
                    .then(function (org) {
                        organizationId = org._id;
                    });
            })
            .then(function () {
                return Helper.createInventory(organizationId)
                    .then(function (inv) {
                        inventoryId = inv._id;
                    });
            })
            .then(function () {
                return Helper.createItem(inventoryId)
                    .then(function (item) {
                        itemId = item._id;
                    });
            })
            .then(function () {
                return Helper.createUserOrganization(userId, organizationId, ['user'])
                    .then(function (success) {
                    });
            })
            .then(function () {
                return Helper.createUserOrganization(adminUserId, organizationId, ['user', 'admin'])
                    .then(function (success) {
                        // nop
                    });
            })
    });
    after(function () {
        // Wipe fixtures so the next suite starts clean.
        return Helper.clearDB()
            .then(function () {
                Promise.resolve(true);
            });
    });
    // Plain user token is enough for read access.
    it("List items should return inventory items", function (done) {
        request(app)
            .get('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items')
            .set('Authorization', 'Bearer ' + token)
            .expect(200, done);
    });
    it("Read item should return item", function (done) {
        request(app)
            .get('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items/' + itemId)
            .set('Authorization', 'Bearer ' + token)
            .expect(200, done);
    });
    // Writes require the admin role; a plain user gets 403.
    it("Update item with insufficient access rights should return error", function (done) {
        request(app)
            .put('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items/' + itemId)
            .send({
                name: 'randomNamesss'
            })
            .set('Authorization', 'Bearer ' + token)
            .expect(403, done);
    });
    // The API expects the payload wrapped in an "item" envelope.
    it("Update item with valid item should update item", function (done) {
        request(app)
            .put('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items/' + itemId)
            .send({
                item: {
                    name: 'randomNamesss'
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(200, done);
    });
    // Missing the "item" envelope -> 400 even with admin rights.
    it("Update item with invalid item should return error", function (done) {
        request(app)
            .put('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items/' + itemId)
            .send({
                name: 'randomName'
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(400, done);
    });
    // "name" is required; an item with only a description is rejected.
    it("Create item with invalid item should return error", function (done) {
        request(app)
            .post('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items')
            .send({
                item: {
                    description: 'randomName'
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(400, done)
    });
    // Creation is verified against the database, not just the HTTP status.
    it("Create item with valid item should return success", function (done) {
        request(app)
            .post('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items')
            .send({
                item: {
                    inventoryId: inventoryId,
                    name: 'Item1',
                    description: 'randomName',
                    amount: 0
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(201)
            .then(function (res) {
                db.itemModel.count({_id: res.body._id}).then(function (countRes) {
                    assert.equal(countRes, 1);
                    done();
                });
            })
            .catch(function (err) {
                done(err);
            });
    });
    // Deletes a freshly created item and confirms it is gone from the DB.
    it("Delete item with valid item should return success", function (done) {
        return Helper.createItem(inventoryId).then(function (newItem) {
            request(app)
                .delete('/organizations/' + organizationId + '/inventories/' + inventoryId + '/items/' + newItem._id)
                .set('Authorization', 'Bearer ' + adminToken)
                .expect(200)
                .then(function (res) {
                    db.itemModel.count({_id: newItem._id}).then(function (countRes) {
                        assert.equal(countRes, 0);
                        done();
                    });
                })
                .catch(function (err) {
                    done(err);
                });
        });
    });
});
// Dive log endpoints: ownership rules (a user sees only their own dives)
// and admin override behaviour.
describe("Dives", function () {
    var token, adminToken, organizationId, diveId, siteId;
    before(function () {
        // Clear the DB first, then rebuild: regular user (+token) -> admin
        // user (+token) -> organization -> site -> one dive owned by admin.
        // Note: testUserId/adminUserId here are the module-level variables.
        return Helper.clearDB().then(function () {
            return Helper.createUser(testUserCreds);
        })
            .then(function (user) {
                testUserId = user._id;
                return Helper.login(testUserCreds)
                    .then(function (tokenData) {
                        token = tokenData.token;
                    });
            })
            .then(function () {
                return Helper.createUser(adminUserCreds)
                    .then(function (adminUserData) {
                        adminUserId = adminUserData._id;
                        return Helper.login(adminUserCreds);
                    })
                    .then(function (adminCreds) {
                        adminToken = adminCreds.token;
                    })
            })
            .then(function () {
                return Helper.createOrganization()
                    .then(function (org) {
                        organizationId = org._id;
                    });
            })
            .then(function () {
                return Helper.createSite()
                    .then(function (site) {
                        siteId = site._id;
                    });
            })
            .then(function () {
                return Helper.createDive(siteId, adminUserId, organizationId)
                    .then(function (dive) {
                        diveId = dive._id;
                    });
            })
    });
    after(function () {
        // Wipe fixtures so later suites start clean.
        return Helper.clearDB()
            .then(function () {
                Promise.resolve(true);
            });
    });
    // Creation is verified against the database, not just the HTTP status.
    it("Create dive with valid request should be accepted", function (done) {
        var newDive = Helper.getValidDive();
        request(app)
            .post('/users/' + testUserId + '/organizations/' + organizationId + '/dives')
            .set('Authorization', 'Bearer ' + token)
            .send({dive: newDive})
            .expect(201)
            .then(function (res) {
                db.diveModel.count({_id: res.body._id}).then(function (countRes) {
                    assert.equal(countRes, 1);
                    done();
                });
            })
            .catch(function (err) {
                done(err);
            })
    });
    it("Show current user's dive should return dive", function (done) {
        request(app)
            .get('/users/' + testUserId + '/organizations/' + organizationId + '/dives/' + diveId)
            .set('Authorization', 'Bearer ' + token)
            .expect(200, done)
    });
    // A plain user may not read dives under another user's path.
    it("Show another user's dive with no admin rights should return error", function (done) {
        request(app)
            .get('/users/' + adminUserId + '/organizations/' + organizationId + '/dives/' + diveId)
            .set('Authorization', 'Bearer ' + token)
            .expect(403, done)
    });
    // Admins can read any user's dive.
    it("Show another user's dive with admin rights should return dive", function (done) {
        Helper.createDive(siteId, testUserId, organizationId).then(function (dive) {
            request(app)
                .get('/users/' + testUserId + '/organizations/' + organizationId + '/dives/' + dive._id)
                .set('Authorization', 'Bearer ' + adminToken)
                .expect(200, done)
        });
    });
    it("List all dives with admin rights should return all dives", function (done) {
        request(app)
            .get('/users/' + adminUserId + '/organizations/' + organizationId + '/dives')
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(200, done)
    });
    it("List another user's dives with admin rights should return dives", function (done) {
        request(app)
            .get('/users/' + testUserId + '/organizations/' + organizationId + '/dives')
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(200, done)
    });
    it("List current users dives should return dives", function (done) {
        request(app)
            .get('/users/' + testUserId + '/organizations/' + organizationId + '/dives')
            .set('Authorization', 'Bearer ' + token)
            .expect(200, done)
    });
    it("List another user's dives should return error", function (done) {
        request(app)
            .get('/users/' + adminUserId + '/organizations/' + organizationId + '/dives')
            .set('Authorization', 'Bearer ' + token)
            .expect(403, done)
    });
    // Update is verified by re-reading the document from the database.
    it("Update dive should update dive", function (done) {
        request(app)
            .put('/users/' + adminUserId + '/organizations/' + organizationId + '/dives/' + diveId)
            .send({
                dive: {
                    description: 'New dive description'
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(200)
            .then(function (res) {
                db.diveModel.findOne({_id: diveId}).then(function (dive) {
                    assert.equal(dive.description, 'New dive description');
                    done();
                });
            })
            .catch(function (err) {
                done(err);
            })
    });
    it("Update dive with invalid dive id should return 404", function (done) {
        var randomDiveId = 'asdasde3434sdfds';
        request(app)
            .put('/users/' + adminUserId + '/organizations/' + organizationId + '/dives/' + randomDiveId)
            .send({
                dive: {
                    description: 'New dive description'
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(404, done)
    });
    // Unknown fields inside the "dive" envelope are rejected with 400.
    it("Update dive with invalid data should return 400", function (done) {
        request(app)
            .put('/users/' + adminUserId + '/organizations/' + organizationId + '/dives/' + diveId)
            .send({
                dive: {
                    asdas: 'Somtehrsdas'
                }
            })
            .set('Authorization', 'Bearer ' + adminToken)
            .expect(400, done)
    });
});
// Generated on 12/11/2014 19:02:10
using System;
using System.Collections.Generic;
using System.Linq;
using BlueSheep.Common.IO;
namespace BlueSheep.Common.Protocol.Types
{
    /// <summary>
    /// Auto-generated protocol type: an interactive-element skill that
    /// additionally carries a localized name identifier.
    /// </summary>
    public class InteractiveElementNamedSkill : InteractiveElementSkill
    {
        // Wire-format protocol type id for this class.
        public new const short ID = 220;
        public override short TypeId
        {
            get { return ID; }
        }
        // I18n id of the skill's display name; must be non-negative on the wire.
        public int nameId;
        public InteractiveElementNamedSkill()
        {
        }
        public InteractiveElementNamedSkill(int skillId, int skillInstanceUid, int nameId)
            : base(skillId, skillInstanceUid)
        {
            this.nameId = nameId;
        }
        /// <summary>Writes the base fields, then nameId as a var-int.</summary>
        public override void Serialize(BigEndianWriter writer)
        {
            base.Serialize(writer);
            writer.WriteVarInt(nameId);
        }
        /// <summary>Reads the base fields, then nameId; rejects negative values.</summary>
        public override void Deserialize(BigEndianReader reader)
        {
            base.Deserialize(reader);
            nameId = reader.ReadVarInt();
            // Negative name ids are forbidden by the protocol.
            if (nameId < 0)
                throw new Exception("Forbidden value on nameId = " + nameId + ", it doesn't respect the following condition : nameId < 0");
        }
    }
}
<?xml version="1.0" ?><!DOCTYPE TS><TS language="sr" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>judgecoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The judgecoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Кликните два пута да промените адресу и/или етикету</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Прави нову адресу</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Копира изабрану адресу на системски клипборд</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your judgecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Избриши</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Зарезом одвојене вредности (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Етикета</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(без етикете)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Унесите лозинку</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Нова лозинка</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Поновите нову лозинку</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Унесите нову лозинку за приступ новчанику.<br/>Молимо Вас да лозинка буде <b>10 или више насумице одабраних знакова</b>, или <b>осам или више речи</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Шифровање новчаника</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ова акција захтева лозинку Вашег новчаника да би га откључала.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Откључавање новчаника</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ова акција захтева да унесете лозинку да би дешифловала новчаник.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Дешифровање новчаника</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Промена лозинке</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Унесите стару и нову лозинку за шифровање новчаника.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Одобрите шифровање новчаника</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Да ли сте сигурни да желите да се новчаник шифује?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Новчаник је шифрован</translation>
</message>
<message>
<location line="-58"/>
<source>judgecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Неуспело шифровање новчаника</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Настала је унутрашња грешка током шифровања новчаника. Ваш новчаник није шифрован.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Лозинке које сте унели се не подударају.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Неуспело откључавање новчаника</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Лозинка коју сте унели за откључавање новчаника је нетачна.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Неуспело дешифровање новчаника</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Лозинка за приступ новчанику је успешно промењена.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Синхронизација са мрежом у току...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Општи преглед</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Погледајте општи преглед новчаника</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Трансакције</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Претражите историјат трансакција</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>I&zlaz</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Напустите програм</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>О &Qt-у</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Прегледајте информације о Qt-у</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>П&оставке...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>&Шифровање новчаника...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup новчаника</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>Промени &лозинку...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Мењање лозинке којом се шифрује новчаник</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>новчаник</translation>
</message>
<message>
<location line="+180"/>
<source>&About judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Фајл</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Подешавања</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>П&омоћ</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Трака са картицама</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>judgecoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to judgecoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About judgecoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about judgecoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Ажурно</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Ажурирање у току...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Послана трансакција</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Придошла трансакција</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
        <translation>Датум: %1
Износ: %2
Тип: %3
Адреса: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid judgecoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
        <translation>Новчаник је <b>шифрован</b> и тренутно <b>откључан</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
        <translation>Новчаник је <b>шифрован</b> и тренутно <b>закључан</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. judgecoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Iznos:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Potvrdjen</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>kopiraj adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>kopiraj naziv</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>kopiraj iznos</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(без етикете)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Измени адресу</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Етикета</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Адреса</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Унешена адреса "%1" се већ налази у адресару.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid judgecoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Немогуће откључати новчаник.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>judgecoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Поставке</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start judgecoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start judgecoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the judgecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the judgecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting judgecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Јединица за приказивање износа:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show judgecoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting judgecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the judgecoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>новчаник</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Недавне трансакције</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the judgecoin-Qt help message to get a list with possible judgecoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>judgecoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>judgecoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the judgecoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the judgecoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Слање новца</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Iznos:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>123.456 BOST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Потврди акцију слања</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Пошаљи</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a judgecoin address (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>kopiraj iznos</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(без етикете)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Етикета</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
        <translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+П</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a judgecoin address (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+П</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified judgecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a judgecoin address (e.g. Jjz75uKHzUQJnSdzvpiigEGxseKkDhQToX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter judgecoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
        <translation>Otvoreno do %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/nepotvrdjeno</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 potvrde</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>етикета</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, nije još uvek uspešno emitovan</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>nepoznato</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>detalji transakcije</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ovaj odeljak pokazuje detaljan opis transakcije</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>tip</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Otvoreno do %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Potvrdjena (%1 potvrdjenih)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Ovaj blok nije primljen od ostalih čvorova (nodova) i verovatno neće biti prihvaćen!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generisan ali nije prihvaćen</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Primljen sa</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Primljeno od</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Poslat ka</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Isplata samom sebi</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minirano</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status vaše transakcije. Predjite mišem preko ovog polja da bi ste videli broj konfirmacija</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum i vreme primljene transakcije.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tip transakcije</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Destinacija i adresa transakcije</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Iznos odbijen ili dodat balansu.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Sve</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Danas</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>ove nedelje</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ovog meseca</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Prošlog meseca</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ove godine</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Opseg...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Primljen sa</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Poslat ka</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Vama - samom sebi</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minirano</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Drugi</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Navedite adresu ili naziv koji bi ste potražili</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Min iznos</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>kopiraj adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>kopiraj naziv</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>kopiraj iznos</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>promeni naziv</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Зарезом одвојене вредности (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Potvrdjen</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>datum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>tip</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Етикета</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Адреса</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>iznos</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Opseg:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>do</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>judgecoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Korišćenje:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or judgecoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Listaj komande</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Zatraži pomoć za komande</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Opcije</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: judgecoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: judgecoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Gde je konkretni data direktorijum </translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Održavaj najviše <n> konekcija po priključku (default: 125)
</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Prihvati komandnu liniju i JSON-RPC komande</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Radi u pozadini kao daemon servis i prihvati komande</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Koristi testnu mrežu</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong judgecoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Korisničko ime za JSON-RPC konekcije</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Lozinka za JSON-RPC konekcije</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=judgecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "judgecoin Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Dozvoli JSON-RPC konekcije sa posebne IP adrese</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Pošalji komande to nodu koji radi na <ip> (default: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Odredi veličinu zaštićenih ključeva na <n> (default: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Ponovo skeniraj lanac blokova za nedostajuće transakcije iz novčanika</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Koristi OpenSSL (https) za JSON-RPC konekcije</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>privatni ključ za Server (podrazumevan: server.pem)</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Ova poruka Pomoći</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. judgecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>učitavam adrese....</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Грешка током учитавања wallet.dat: Новчаник је покварен </translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of judgecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart judgecoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Грешка током учитавања wallet.dat </translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Učitavam blok indeksa...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. judgecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Новчаник се учитава...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Ponovo skeniram...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Završeno učitavanje</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS> | judgecrypto/judgecoin | src/qt/locale/bitcoin_sr.ts | TypeScript | mit | 112,779 |
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./media/task.contribution';
import 'vs/workbench/parts/tasks/browser/taskQuickOpen';
import * as nls from 'vs/nls';
import { TPromise, Promise } from 'vs/base/common/winjs.base';
import Severity from 'vs/base/common/severity';
import * as Objects from 'vs/base/common/objects';
import { IStringDictionary } from 'vs/base/common/collections';
import { Action } from 'vs/base/common/actions';
import * as Dom from 'vs/base/browser/dom';
import { IDisposable, dispose } from 'vs/base/common/lifecycle';
import { EventEmitter, ListenerUnbind } from 'vs/base/common/eventEmitter';
import * as Builder from 'vs/base/browser/builder';
import * as Types from 'vs/base/common/types';
import { KeyMod, KeyCode } from 'vs/base/common/keyCodes';
import { match } from 'vs/base/common/glob';
import { setTimeout } from 'vs/base/common/platform';
import { TerminateResponse } from 'vs/base/common/processes';
import * as strings from 'vs/base/common/strings';
import { Registry } from 'vs/platform/platform';
import { ILifecycleService } from 'vs/platform/lifecycle/common/lifecycle';
import { SyncActionDescriptor } from 'vs/platform/actions/common/actions';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { IEventService } from 'vs/platform/event/common/event';
import { IEditor } from 'vs/platform/editor/common/editor';
import { IMessageService } from 'vs/platform/message/common/message';
import { IMarkerService, MarkerStatistics } from 'vs/platform/markers/common/markers';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IFileService, FileChangesEvent, FileChangeType, EventType as FileEventType } from 'vs/platform/files/common/files';
import { IExtensionService } from 'vs/platform/extensions/common/extensions';
import { IModeService } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
import jsonContributionRegistry = require('vs/platform/jsonschemas/common/jsonContributionRegistry');
import { IJSONSchema } from 'vs/base/common/jsonSchema';
import { IWorkbenchActionRegistry, Extensions as WorkbenchActionExtensions } from 'vs/workbench/common/actionRegistry';
import { IStatusbarItem, IStatusbarRegistry, Extensions as StatusbarExtensions, StatusbarItemDescriptor, StatusbarAlignment } from 'vs/workbench/browser/parts/statusbar/statusbar';
import { IQuickOpenRegistry, Extensions as QuickOpenExtensions, QuickOpenHandlerDescriptor } from 'vs/workbench/browser/quickopen';
import { IQuickOpenService } from 'vs/workbench/services/quickopen/common/quickOpenService';
import { IWorkbenchEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService';
import { SystemVariables } from 'vs/workbench/parts/lib/node/systemVariables';
import { ITextFileService, EventType } from 'vs/workbench/parts/files/common/files';
import { IOutputService, IOutputChannelRegistry, Extensions as OutputExt, IOutputChannel } from 'vs/workbench/parts/output/common/output';
import { ITaskSystem, ITaskSummary, ITaskRunResult, TaskError, TaskErrors, TaskConfiguration, TaskDescription, TaskSystemEvents } from 'vs/workbench/parts/tasks/common/taskSystem';
import { ITaskService, TaskServiceEvents } from 'vs/workbench/parts/tasks/common/taskService';
import { templates as taskTemplates } from 'vs/workbench/parts/tasks/common/taskTemplates';
import { LanguageServiceTaskSystem, LanguageServiceTaskConfiguration } from 'vs/workbench/parts/tasks/common/languageServiceTaskSystem';
import * as FileConfig from 'vs/workbench/parts/tasks/node/processRunnerConfiguration';
import { ProcessRunnerSystem } from 'vs/workbench/parts/tasks/node/processRunnerSystem';
import { ProcessRunnerDetector } from 'vs/workbench/parts/tasks/node/processRunnerDetector';
// Shorthand for the DOM builder factory used throughout this file.
let $ = Builder.$;
/**
 * Base class for all task related workbench actions. Exposes the task and
 * telemetry services to subclasses and provides a common workspace guard.
 */
class AbstractTaskAction extends Action {

	protected taskService: ITaskService;
	protected telemetryService: ITelemetryService;

	private messageService: IMessageService;
	private contextService: IWorkspaceContextService;

	constructor(id:string, label:string, @ITaskService taskService:ITaskService,
		@ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService,
		@IWorkspaceContextService contextService: IWorkspaceContextService) {

		super(id, label);
		this.contextService = contextService;
		this.messageService = messageService;
		this.telemetryService = telemetryService;
		this.taskService = taskService;
	}

	/**
	 * Guard used by subclasses before running: tasks require an opened
	 * workspace folder. Shows an info message and returns false otherwise.
	 */
	protected canRun(): boolean {
		if (this.contextService.getWorkspace()) {
			return true;
		}
		this.messageService.show(Severity.Info, nls.localize('AbstractTaskAction.noWorkspace', 'Tasks are only available on a workspace folder.'));
		return false;
	}
}
/**
 * Workbench action that runs the configured build task.
 */
class BuildAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.build';
	public static TEXT = nls.localize('BuildAction.label', "Run Build Task");

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
	}

	/**
	 * Starts the build task. Resolves to undefined when no workspace folder
	 * is open (the guard already showed a message).
	 */
	public run(): TPromise<ITaskSummary> {
		return this.canRun() ? this.taskService.build() : TPromise.as(undefined);
	}
}
/**
 * Workbench action that runs the configured test task.
 */
class TestAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.test';
	public static TEXT = nls.localize('TestAction.label', "Run Test Task");

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
	}

	/**
	 * Starts the test task. Resolves to undefined when no workspace folder
	 * is open (the guard already showed a message).
	 */
	public run(): TPromise<ITaskSummary> {
		return this.canRun() ? this.taskService.runTest() : TPromise.as(undefined);
	}
}
/**
 * Workbench action that runs the configured rebuild task.
 */
class RebuildAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.rebuild';
	public static TEXT = nls.localize('RebuildAction.label', 'Run Rebuild Task');

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
	}

	/**
	 * Starts the rebuild task. Resolves to undefined when no workspace
	 * folder is open (the guard already showed a message).
	 */
	public run(): TPromise<ITaskSummary> {
		return this.canRun() ? this.taskService.rebuild() : TPromise.as(undefined);
	}
}
/**
 * Workbench action that runs the configured clean task.
 */
class CleanAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.clean';
	public static TEXT = nls.localize('CleanAction.label', 'Run Clean Task');

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
	}

	/**
	 * Starts the clean task. Resolves to undefined when no workspace folder
	 * is open (the guard already showed a message).
	 */
	public run(): TPromise<ITaskSummary> {
		return this.canRun() ? this.taskService.clean() : TPromise.as(undefined);
	}
}
/**
 * Opens the workspace task configuration ('.vscode/tasks.json') in an editor.
 * When the file does not exist yet, the user picks a task runner template (or
 * auto detection) and the file is created before it is opened.
 *
 * Fix: removed a stray empty statement (`;`) that was dead code left in the
 * error branch of the resolveFile continuation.
 */
class ConfigureTaskRunnerAction extends Action {
	public static ID = 'workbench.action.tasks.configureTaskRunner';
	public static TEXT = nls.localize('ConfigureTaskRunnerAction.label', "Configure Task Runner");

	private configurationService: IConfigurationService;
	private fileService: IFileService;
	private editorService: IWorkbenchEditorService;
	private contextService: IWorkspaceContextService;
	private outputService: IOutputService;
	private messageService: IMessageService;
	private quickOpenService: IQuickOpenService;

	constructor(id: string, label: string, @IConfigurationService configurationService: IConfigurationService,
		@IWorkbenchEditorService editorService: IWorkbenchEditorService, @IFileService fileService: IFileService,
		@IWorkspaceContextService contextService: IWorkspaceContextService, @IOutputService outputService: IOutputService,
		@IMessageService messageService: IMessageService, @IQuickOpenService quickOpenService: IQuickOpenService) {

		super(id, label);
		this.configurationService = configurationService;
		this.editorService = editorService;
		this.fileService = fileService;
		this.contextService = contextService;
		this.outputService = outputService;
		this.messageService = messageService;
		this.quickOpenService = quickOpenService;
	}

	/**
	 * Ensures 'tasks.json' exists (creating it from a template or via auto
	 * detection when missing) and opens it in an editor. Holding Ctrl/Cmd
	 * while triggering the action opens the editor to the side.
	 *
	 * @param event optional UI event; modifier keys select side-by-side open.
	 * @returns a promise resolving to the opened editor, or undefined when
	 *   no workspace is open or the user cancelled the template pick.
	 */
	public run(event?:any): TPromise<IEditor> {
		if (!this.contextService.getWorkspace()) {
			this.messageService.show(Severity.Info, nls.localize('ConfigureTaskRunnerAction.noWorkspace', 'Tasks are only available on a workspace folder.'));
			return TPromise.as(undefined);
		}
		let sideBySide = !!(event && (event.ctrlKey || event.metaKey));
		// (1) Resolve the configuration file; on failure (file missing) fall
		// back to creating it from a user-selected template.
		return this.fileService.resolveFile(this.contextService.toResource('.vscode/tasks.json')).then((success) => {
			return success;
		}, (err:any) => {
			return this.quickOpenService.pick(taskTemplates, { placeHolder: nls.localize('ConfigureTaskRunnerAction.quickPick.template', 'Select a Task Runner')}).then(selection => {
				if (!selection) {
					// User dismissed the picker; nothing to create or open.
					return undefined;
				}
				let contentPromise: TPromise<string>;
				if (selection.autoDetect) {
					// Run the process runner detector and surface its output
					// in the tasks output channel.
					const outputChannel = this.outputService.getChannel(TaskService.OutputChannelId);
					outputChannel.show();
					outputChannel.append(nls.localize('ConfigureTaskRunnerAction.autoDetecting', 'Auto detecting tasks for {0}', selection.id) + '\n');
					let detector = new ProcessRunnerDetector(this.fileService, this.contextService, new SystemVariables(this.editorService, this.contextService));
					contentPromise = detector.detect(false, selection.id).then((value) => {
						let config = value.config;
						if (value.stderr && value.stderr.length > 0) {
							// Detection failed: log stderr and fall back to the
							// static template content.
							value.stderr.forEach((line) => {
								outputChannel.append(line + '\n');
							});
							this.messageService.show(Severity.Warning, nls.localize('ConfigureTaskRunnerAction.autoDetect', 'Auto detecting the task system failed. Using default template. Consult the task output for details.'));
							return selection.content;
						} else if (config) {
							if (value.stdout && value.stdout.length > 0) {
								value.stdout.forEach(line => outputChannel.append(line + '\n'));
							}
							// Serialize the detected configuration and prepend
							// the standard documentation header comment.
							let content = JSON.stringify(config, null, '\t');
							content = [
								'{',
								'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
								'\t// for the documentation about the tasks.json format',
							].join('\n') + content.substr(1);
							return content;
						} else {
							return selection.content;
						}
					});
				} else {
					contentPromise = TPromise.as(selection.content);
				}
				return contentPromise.then(content => {
					// Honor the user's indentation settings when writing the file.
					let editorConfig = this.configurationService.getConfiguration<any>();
					if (editorConfig.editor.insertSpaces) {
						content = content.replace(/(\n)(\t+)/g, (_, s1, s2) => s1 + strings.repeat(' ', s2.length * editorConfig.editor.tabSize));
					}
					return this.fileService.createFile(this.contextService.toResource('.vscode/tasks.json'), content);
				});
			});
		}).then((stat) => {
			if (!stat) {
				return undefined;
			}
			// (2) Open an editor on the (existing or freshly created) file.
			return this.editorService.openEditor({
				resource: stat.resource,
				options: {
					forceOpen: true
				}
			}, sideBySide);
		}, (error) => {
			throw new Error(nls.localize('ConfigureTaskRunnerAction.failed', "Unable to create the 'tasks.json' file inside the '.vscode' folder. Consult the task output for details."));
		});
	}
}
/**
 * Action shown on build messages to dismiss them. The actual dismiss
 * behavior is injected by the caller via `closeFunction`.
 */
class CloseMessageAction extends Action {
	public static ID = 'workbench.action.build.closeMessage';
	public static TEXT = nls.localize('CloseMessageAction.label', 'Close');

	// Set by the owner of the message handle; invoked on run when present.
	public closeFunction: () => void;

	constructor() {
		super(CloseMessageAction.ID, CloseMessageAction.TEXT);
	}

	public run(): TPromise<void> {
		// Invoke through `this` so any receiver the callback relies on is kept.
		this.closeFunction && this.closeFunction();
		return TPromise.as(null);
	}
}
/**
 * Workbench action that terminates the currently running task, if any.
 */
class TerminateAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.terminate';
	public static TEXT = nls.localize('TerminateAction.label', "Terminate Running Task");

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
	}

	/**
	 * Terminates the active task. Resolves silently when no task is active;
	 * rejects with a localized message when termination fails.
	 */
	public run(): TPromise<TerminateResponse> {
		if (!this.canRun()) {
			return TPromise.as(undefined);
		}
		return this.taskService.isActive().then((active) => {
			if (!active) {
				return undefined;
			}
			return this.taskService.terminate().then((response) => {
				if (!response.success) {
					return Promise.wrapError(nls.localize('TerminateAction.failed', 'Failed to terminate running task'));
				}
				return;
			});
		});
	}
}
/**
 * Workbench action that reveals the tasks output channel.
 */
class ShowLogAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.showLog';
	public static TEXT = nls.localize('ShowLogAction.label', "Show Task Log");

	private outputService: IOutputService;

	constructor(id: string, label: string, @ITaskService taskService:ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService,
		@IOutputService outputService:IOutputService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
		this.outputService = outputService;
	}

	public run(): TPromise<IEditor> {
		if (!this.canRun()) {
			return TPromise.as(undefined);
		}
		const channel = this.outputService.getChannel(TaskService.OutputChannelId);
		return channel.show();
	}
}
/**
 * Workbench action that opens quick open pre-filled with the 'task ' prefix
 * so the user can pick a task to run.
 */
class RunTaskAction extends AbstractTaskAction {
	public static ID = 'workbench.action.tasks.runTask';
	public static TEXT = nls.localize('RunTaskAction.label', "Run Task");

	private quickOpenService: IQuickOpenService;

	constructor(id: string, label: string, @IQuickOpenService quickOpenService:IQuickOpenService,
		@ITaskService taskService: ITaskService, @ITelemetryService telemetryService: ITelemetryService,
		@IMessageService messageService:IMessageService, @IWorkspaceContextService contextService: IWorkspaceContextService) {
		super(id, label, taskService, telemetryService, messageService, contextService);
		this.quickOpenService = quickOpenService;
	}

	public run(event?:any): Promise {
		if (!this.canRun()) {
			// Mirror the other task actions: resolve to undefined without a workspace.
			return TPromise.as(undefined);
		}
		this.quickOpenService.show('task ');
		return TPromise.as(null);
	}
}
/**
 * Status bar contribution that shows problem counts (errors/warnings/infos)
 * and animates a textual spinner while a task is running. Clicking the label
 * opens quick open with the '!' (problems) prefix.
 */
class StatusBarItem implements IStatusbarItem {

	private quickOpenService: IQuickOpenService;
	private markerService: IMarkerService;
	private taskService:ITaskService;
	private outputService: IOutputService;

	// Handle of the spinner animation interval; null when no task is active.
	private intervalToken: any;
	// Number of currently active (running) tasks.
	private activeCount: number;

	// Frames of the textual spinner animation.
	private static progressChars:string = '|/-\\';

	constructor(@IQuickOpenService quickOpenService:IQuickOpenService,
		@IMarkerService markerService:IMarkerService, @IOutputService outputService:IOutputService,
		@ITaskService taskService:ITaskService) {

		this.quickOpenService = quickOpenService;
		this.markerService = markerService;
		this.outputService = outputService;
		this.taskService = taskService;
		this.activeCount = 0;
	}

	/**
	 * Builds the status bar DOM, wires marker/task event listeners and
	 * returns a disposable that tears the listeners down again.
	 */
	public render(container: HTMLElement): IDisposable {
		let callOnDispose: IDisposable[] = [],
			element = document.createElement('div'),
			// icon = document.createElement('a'),
			progress = document.createElement('div'),
			label = document.createElement('a'),
			error = document.createElement('div'),
			warning = document.createElement('div'),
			info = document.createElement('div');

		Dom.addClass(element, 'task-statusbar-item');

		// dom.addClass(icon, 'task-statusbar-item-icon');
		// element.appendChild(icon);

		// Spinner element; hidden until a task becomes active.
		Dom.addClass(progress, 'task-statusbar-item-progress');
		element.appendChild(progress);
		progress.innerHTML = StatusBarItem.progressChars[0];
		$(progress).hide();

		Dom.addClass(label, 'task-statusbar-item-label');
		element.appendChild(label);

		Dom.addClass(error, 'task-statusbar-item-label-error');
		error.innerHTML = '0';
		label.appendChild(error);

		Dom.addClass(warning, 'task-statusbar-item-label-warning');
		warning.innerHTML = '0';
		label.appendChild(warning);

		// Info count is hidden until there is at least one info marker.
		Dom.addClass(info, 'task-statusbar-item-label-info');
		label.appendChild(info);
		$(info).hide();

		// callOnDispose.push(dom.addListener(icon, 'click', (e:MouseEvent) => {
		// 	this.outputService.showOutput(TaskService.OutputChannel, e.ctrlKey || e.metaKey, true);
		// }));

		// Clicking the counts opens the problems picker in quick open.
		callOnDispose.push(Dom.addDisposableListener(label, 'click', (e:MouseEvent) => {
			this.quickOpenService.show('!');
		}));

		// Shows `element` with the count when stats > 0, hides it otherwise.
		let updateStatus = (element:HTMLDivElement, stats:number): boolean => {
			if (stats > 0) {
				element.innerHTML = stats.toString();
				$(element).show();
				return true;
			} else {
				$(element).hide();
				return false;
			}
		};

		let manyMarkers = nls.localize('manyMarkers', "99+");
		let updateLabel = (stats: MarkerStatistics) => {
			error.innerHTML = stats.errors < 100 ? stats.errors.toString() : manyMarkers;
			warning.innerHTML = stats.warnings < 100 ? stats.warnings.toString() : manyMarkers;
			updateStatus(info, stats.infos);
		};

		// NOTE(review): this listener is not added to callOnDispose like the
		// others — confirm whether onMarkerChanged returns a disposable that
		// should be tracked here to avoid leaking the subscription.
		this.markerService.onMarkerChanged((changedResources) => {
			updateLabel(this.markerService.getStatistics());
		});

		// First task becoming active starts the spinner animation.
		callOnDispose.push(this.taskService.addListener2(TaskServiceEvents.Active, () => {
			this.activeCount++;
			if (this.activeCount === 1) {
				let index = 1;
				let chars = StatusBarItem.progressChars;
				progress.innerHTML = chars[0];
				this.intervalToken = setInterval(() => {
					progress.innerHTML = chars[index];
					index++;
					if (index >= chars.length) {
						index = 0;
					}
				}, 50);
				$(progress).show();
			}
		}));

		// Last task going inactive stops the spinner.
		callOnDispose.push(this.taskService.addListener2(TaskServiceEvents.Inactive, (data:TaskServiceEventData) => {
			this.activeCount--;
			if (this.activeCount === 0) {
				$(progress).hide();
				clearInterval(this.intervalToken);
				this.intervalToken = null;
			}
		}));

		// Termination resets the counter unconditionally (tasks may have been
		// killed without a matching Inactive event).
		callOnDispose.push(this.taskService.addListener2(TaskServiceEvents.Terminated, () => {
			if (this.activeCount !== 0) {
				$(progress).hide();
				if (this.intervalToken) {
					clearInterval(this.intervalToken);
					this.intervalToken = null;
				}
				this.activeCount = 0;
			}
		}));

		container.appendChild(element);

		return {
			dispose: () => {
				callOnDispose = dispose(callOnDispose);
			}
		};
	}
}
// Payload emitted with TaskServiceEvents (e.g. Inactive); carries an optional
// error raised by the underlying task system.
interface TaskServiceEventData {
	error?: any;
}
// No-op ITaskSystem used when there is no workspace to run tasks in.
// Every run-style operation resolves immediately with an empty summary;
// queries report an idle, auto-terminable system with no tasks.
class NullTaskSystem extends EventEmitter implements ITaskSystem {
	// Fresh empty result per call: a run that immediately resolves with {}.
	private static emptyRunResult(): ITaskRunResult {
		return {
			promise: TPromise.as<ITaskSummary>({})
		};
	}
	public build(): ITaskRunResult {
		return NullTaskSystem.emptyRunResult();
	}
	public rebuild(): ITaskRunResult {
		return NullTaskSystem.emptyRunResult();
	}
	public clean(): ITaskRunResult {
		return NullTaskSystem.emptyRunResult();
	}
	public runTest(): ITaskRunResult {
		return NullTaskSystem.emptyRunResult();
	}
	public run(taskIdentifier: string): ITaskRunResult {
		return NullTaskSystem.emptyRunResult();
	}
	public isActive(): TPromise<boolean> {
		return TPromise.as(false);
	}
	public isActiveSync(): boolean {
		return false;
	}
	public canAutoTerminate(): boolean {
		return true;
	}
	public terminate(): TPromise<TerminateResponse> {
		return TPromise.as<TerminateResponse>({ success: true });
	}
	public tasks(): TPromise<TaskDescription[]> {
		return TPromise.as<TaskDescription[]>([]);
	}
}
// Workbench task service. Lazily materializes an ITaskSystem from the
// workspace 'tasks' configuration (falling back to auto-detection via
// ProcessRunnerDetector) and mediates build / test / run / terminate
// requests against it.
class TaskService extends EventEmitter implements ITaskService {
	public serviceId = ITaskService;
	public static SERVICE_ID: string = 'taskService';
	public static OutputChannelId:string = 'tasks';
	public static OutputChannelLabel:string = nls.localize('tasks', "Tasks");
	private modeService: IModeService;
	private configurationService: IConfigurationService;
	private markerService: IMarkerService;
	private outputService: IOutputService;
	private messageService: IMessageService;
	private fileService: IFileService;
	private telemetryService: ITelemetryService;
	private editorService: IWorkbenchEditorService;
	private contextService: IWorkspaceContextService;
	private textFileService: ITextFileService;
	private eventService: IEventService;
	private modelService: IModelService;
	private extensionService: IExtensionService;
	private quickOpenService: IQuickOpenService;
	// Both the promise and the resolved system are cached. On configuration
	// changes the cache is dropped immediately when idle, or deferred via
	// clearTaskSystemPromise while a task is still running.
	private _taskSystemPromise: TPromise<ITaskSystem>;
	private _taskSystem: ITaskSystem;
	private taskSystemListeners: ListenerUnbind[];
	private clearTaskSystemPromise: boolean;
	private outputChannel: IOutputChannel;
	private fileChangesListener: ListenerUnbind;
	constructor(@IModeService modeService: IModeService, @IConfigurationService configurationService: IConfigurationService,
		@IMarkerService markerService: IMarkerService, @IOutputService outputService: IOutputService,
		@IMessageService messageService: IMessageService, @IWorkbenchEditorService editorService:IWorkbenchEditorService,
		@IFileService fileService:IFileService, @IWorkspaceContextService contextService: IWorkspaceContextService,
		@ITelemetryService telemetryService: ITelemetryService, @ITextFileService textFileService:ITextFileService,
		@ILifecycleService lifecycleService: ILifecycleService, @IEventService eventService: IEventService,
		@IModelService modelService: IModelService, @IExtensionService extensionService: IExtensionService,
		@IQuickOpenService quickOpenService: IQuickOpenService) {
		super();
		this.modeService = modeService;
		this.configurationService = configurationService;
		this.markerService = markerService;
		this.outputService = outputService;
		this.messageService = messageService;
		this.editorService = editorService;
		this.fileService = fileService;
		this.contextService = contextService;
		this.telemetryService = telemetryService;
		this.textFileService = textFileService;
		this.eventService = eventService;
		this.modelService = modelService;
		this.extensionService = extensionService;
		this.quickOpenService = quickOpenService;
		this.taskSystemListeners = [];
		this.clearTaskSystemPromise = false;
		this.outputChannel = this.outputService.getChannel(TaskService.OutputChannelId);
		// Invalidate the cached task system whenever the configuration changes;
		// if a task is active, defer the invalidation until it finishes.
		this.configurationService.onDidUpdateConfiguration(() => {
			this.emit(TaskServiceEvents.ConfigChanged);
			if (this._taskSystem && this._taskSystem.isActiveSync()) {
				this.clearTaskSystemPromise = true;
			} else {
				this._taskSystem = null;
				this._taskSystemPromise = null;
			}
			this.disposeTaskSystemListeners();
		});
		// Allow a running task to veto (or gracefully stop before) shutdown.
		lifecycleService.onWillShutdown(event => event.veto(this.beforeShutdown()));
	}
	// Unhooks all listeners registered on the current task system.
	private disposeTaskSystemListeners(): void {
		this.taskSystemListeners.forEach(unbind => unbind());
		this.taskSystemListeners = [];
	}
	private disposeFileChangesListener(): void {
		if (this.fileChangesListener) {
			this.fileChangesListener();
			this.fileChangesListener = null;
		}
	}
	// Lazily builds the task system:
	//  - no workspace -> NullTaskSystem
	//  - tasks.json present (optionally enriched by detection) or pure
	//    detection result -> LanguageServiceTaskSystem or ProcessRunnerSystem
	// Failures clear the cached promise so a later call can retry.
	private get taskSystemPromise(): TPromise<ITaskSystem> {
		if (!this._taskSystemPromise) {
			if (!this.contextService.getWorkspace()) {
				this._taskSystem = new NullTaskSystem();
				this._taskSystemPromise = TPromise.as(this._taskSystem);
			} else {
				let variables = new SystemVariables(this.editorService, this.contextService);
				let clearOutput = true;
				this._taskSystemPromise = TPromise.as(this.configurationService.getConfiguration<TaskConfiguration>('tasks')).then((config: TaskConfiguration) => {
					// Abort early when tasks.json itself has syntax errors.
					let parseErrors: string[] = config ? (<any>config).$parseErrors : null;
					if (parseErrors) {
						let isAffected = false;
						for (let i = 0; i < parseErrors.length; i++) {
							if (/tasks\.json$/.test(parseErrors[i])) {
								isAffected = true;
								break;
							}
						}
						if (isAffected) {
							this.outputChannel.append(nls.localize('TaskSystem.invalidTaskJson', 'Error: The content of the tasks.json file has syntax errors. Please correct them before executing a task.\n'));
							this.outputChannel.show(true);
							return TPromise.wrapError({});
						}
					}
					let configPromise: TPromise<TaskConfiguration>;
					if (config) {
						if (this.isRunnerConfig(config) && this.hasDetectorSupport(<FileConfig.ExternalTaskRunnerConfiguration>config)) {
							let fileConfig = <FileConfig.ExternalTaskRunnerConfiguration>config;
							configPromise = new ProcessRunnerDetector(this.fileService, this.contextService, variables, fileConfig).detect(true).then((value) => {
								clearOutput = this.printStderr(value.stderr);
								let detectedConfig = value.config;
								if (!detectedConfig) {
									return config;
								}
								// Merge: configured tasks win; detected tasks are appended
								// only when not already configured by name.
								let result: FileConfig.ExternalTaskRunnerConfiguration = Objects.clone(fileConfig);
								let configuredTasks: IStringDictionary<FileConfig.TaskDescription> = Object.create(null);
								if (!result.tasks) {
									if (detectedConfig.tasks) {
										result.tasks = detectedConfig.tasks;
									}
								} else {
									result.tasks.forEach(task => configuredTasks[task.taskName] = task);
									detectedConfig.tasks.forEach((task) => {
										if (!configuredTasks[task.taskName]) {
											result.tasks.push(task);
										}
									});
								}
								return result;
							});
						} else {
							configPromise = TPromise.as<TaskConfiguration>(config);
						}
					} else {
						configPromise = new ProcessRunnerDetector(this.fileService, this.contextService, variables).detect(true).then((value) => {
							clearOutput = this.printStderr(value.stderr);
							return value.config;
						});
					}
					return configPromise.then((config) => {
						if (!config) {
							this._taskSystemPromise = null;
							throw new TaskError(Severity.Info, nls.localize('TaskSystem.noConfiguration', 'No task runner configured.'), TaskErrors.NotConfigured);
						}
						let result: ITaskSystem = null;
						if (config.buildSystem === 'service') {
							result = new LanguageServiceTaskSystem(<LanguageServiceTaskConfiguration>config, this.telemetryService, this.modeService);
						} else if (this.isRunnerConfig(config)) {
							result = new ProcessRunnerSystem(<FileConfig.ExternalTaskRunnerConfiguration>config, variables, this.markerService, this.modelService, this.telemetryService, this.outputService, TaskService.OutputChannelId, clearOutput);
						}
						if (result === null) {
							this._taskSystemPromise = null;
							throw new TaskError(Severity.Info, nls.localize('TaskSystem.noBuildType', "No valid task runner configured. Supported task runners are 'service' and 'program'."), TaskErrors.NoValidTaskRunner);
						}
						// Forward the task system's activity events as service events.
						this.taskSystemListeners.push(result.addListener(TaskSystemEvents.Active, (event) => this.emit(TaskServiceEvents.Active, event)));
						this.taskSystemListeners.push(result.addListener(TaskSystemEvents.Inactive, (event) => this.emit(TaskServiceEvents.Inactive, event)));
						this._taskSystem = result;
						return result;
					}, (err: any) => {
						this.handleError(err);
						// NOTE(review): uses Promise.wrapError while the surrounding code
						// uses TPromise.wrapError — presumably equivalent aliases; confirm.
						return Promise.wrapError(err);
					});
				});
			}
		}
		return this._taskSystemPromise;
	}
	// Appends detector stderr lines to the task output channel. Returns true
	// when nothing was printed (callers use the result as a "clear output" flag).
	private printStderr(stderr: string[]): boolean {
		let result = true;
		if (stderr && stderr.length > 0) {
			stderr.forEach((line) => {
				result = false;
				this.outputChannel.append(line + '\n');
			});
			this.outputChannel.show(true);
		}
		return result;
	}
	// A config without an explicit buildSystem (or 'program') runs via the
	// external process runner.
	private isRunnerConfig(config: TaskConfiguration): boolean {
		return !config.buildSystem || config.buildSystem === 'program';
	}
	private hasDetectorSupport(config: FileConfig.ExternalTaskRunnerConfiguration): boolean {
		if (!config.command) {
			return false;
		}
		return ProcessRunnerDetector.supports(config.command);
	}
	public configureAction(): Action {
		return new ConfigureTaskRunnerAction(ConfigureTaskRunnerAction.ID, ConfigureTaskRunnerAction.TEXT,
			this.configurationService, this.editorService, this.fileService, this.contextService,
			this.outputService, this.messageService, this.quickOpenService);
	}
	public build(): TPromise<ITaskSummary> {
		return this.executeTarget(taskSystem => taskSystem.build());
	}
	public rebuild(): TPromise<ITaskSummary> {
		return this.executeTarget(taskSystem => taskSystem.rebuild());
	}
	public clean(): TPromise<ITaskSummary> {
		return this.executeTarget(taskSystem => taskSystem.clean());
	}
	public runTest(): TPromise<ITaskSummary> {
		return this.executeTarget(taskSystem => taskSystem.runTest());
	}
	public run(taskIdentifier: string): TPromise<ITaskSummary> {
		return this.executeTarget(taskSystem => taskSystem.run(taskIdentifier));
	}
	// Common execution path: save dirty files, refresh configuration, refuse
	// to start while another task is active, optionally restart the task when
	// watched files change, and honor a deferred task-system invalidation.
	private executeTarget(fn: (taskSystem: ITaskSystem) => ITaskRunResult): TPromise<ITaskSummary> {
		return this.textFileService.saveAll().then((value) => { // make sure all dirty files are saved
			return this.configurationService.loadConfiguration().then(() => { // make sure configuration is up to date
				return this.taskSystemPromise.
					then((taskSystem) => {
						return taskSystem.isActive().then((active) => {
							if (!active) {
								return fn(taskSystem);
							} else {
								throw new TaskError(Severity.Warning, nls.localize('TaskSystem.active', 'There is an active running task right now. Terminate it first before executing another task.'), TaskErrors.RunningTask);
							}
						});
					}).
					then((runResult: ITaskRunResult) => {
						if (runResult.restartOnFileChanges) {
							let pattern = runResult.restartOnFileChanges;
							this.fileChangesListener = this.eventService.addListener(FileEventType.FILE_CHANGES, (event: FileChangesEvent) => {
								let needsRestart = event.changes.some((change) => {
									return (change.type === FileChangeType.ADDED || change.type === FileChangeType.DELETED) && !!match(pattern, change.resource.fsPath);
								});
								if (needsRestart) {
									this.terminate().done(() => {
										// We need to give the child process a change to stop.
										setTimeout(() => {
											this.executeTarget(fn);
										}, 2000);
									});
								}
							});
						}
						return runResult.promise.then((value) => {
							if (this.clearTaskSystemPromise) {
								this._taskSystemPromise = null;
								this.clearTaskSystemPromise = false;
							}
							return value;
						});
					}, (err: any) => {
						this.handleError(err);
					});
			});
		});
	}
	public isActive(): TPromise<boolean> {
		if (this._taskSystemPromise) {
			return this.taskSystemPromise.then(taskSystem => taskSystem.isActive());
		}
		return TPromise.as(false);
	}
	public terminate(): TPromise<TerminateResponse> {
		if (this._taskSystemPromise) {
			return this.taskSystemPromise.then(taskSystem => {
				return taskSystem.terminate();
			}).then(response => {
				if (response.success) {
					if (this.clearTaskSystemPromise) {
						this._taskSystemPromise = null;
						this.clearTaskSystemPromise = false;
					}
					this.emit(TaskServiceEvents.Terminated, {});
					this.disposeFileChangesListener();
				}
				return response;
			});
		}
		return TPromise.as( { success: true} );
	}
	public tasks(): TPromise<TaskDescription[]> {
		return this.taskSystemPromise.then(taskSystem => taskSystem.tasks());
	}
	// Shutdown participant: if a task is running, terminate it (automatically
	// or after user confirmation). Returning true vetoes the shutdown.
	public beforeShutdown(): boolean | TPromise<boolean> {
		if (this._taskSystem && this._taskSystem.isActiveSync()) {
			if (this._taskSystem.canAutoTerminate() || this.messageService.confirm({
				message: nls.localize('TaskSystem.runningTask', 'There is a task running. Do you want to terminate it?'),
				primaryButton: nls.localize({ key: 'TaskSystem.terminateTask', comment: ['&& denotes a mnemonic'] }, "&&Terminate Task")
			})) {
				return this._taskSystem.terminate().then((response) => {
					if (response.success) {
						this.emit(TaskServiceEvents.Terminated, {});
						this._taskSystem = null;
						this.disposeFileChangesListener();
						this.disposeTaskSystemListeners();
						return false; // no veto
					}
					return true; // veto
				}, (err) => {
					return true; // veto
				});
			} else {
				return true; // veto
			}
		}
		return false; // Nothing to do here
	}
	// Surfaces an error to the user. TaskErrors that can be resolved by
	// configuring or terminating get an action button; everything else is a
	// plain message. The task output channel is revealed afterwards.
	private handleError(err:any):void {
		let showOutput = true;
		if (err instanceof TaskError) {
			let buildError = <TaskError>err;
			let needsConfig = buildError.code === TaskErrors.NotConfigured || buildError.code === TaskErrors.NoBuildTask || buildError.code === TaskErrors.NoTestTask;
			let needsTerminate = buildError.code === TaskErrors.RunningTask;
			if (needsConfig || needsTerminate) {
				let closeAction = new CloseMessageAction();
				let action = needsConfig
					? this.configureAction()
					: new TerminateAction(TerminateAction.ID, TerminateAction.TEXT, this, this.telemetryService, this.messageService, this.contextService);
				closeAction.closeFunction = this.messageService.show(buildError.severity, { message: buildError.message, actions: [closeAction, action ] });
			} else {
				this.messageService.show(buildError.severity, buildError.message);
			}
		} else if (err instanceof Error) {
			let error = <Error>err;
			this.messageService.show(Severity.Error, error.message);
		} else if (Types.isString(err)) {
			this.messageService.show(Severity.Error, <string>err);
		} else {
			this.messageService.show(Severity.Error, nls.localize('TaskSystem.unknownError', 'An error has occurred while running a task. See task log for details.'));
		}
		if (showOutput) {
			this.outputChannel.show(true);
		}
	}
}
// Workbench registrations: task actions, the task service singleton, the
// quick-open handler ("task " prefix), the status bar item and the output channel.
let tasksCategory = nls.localize('tasksCategory', "Tasks");
let workbenchActionsRegistry = <IWorkbenchActionRegistry>Registry.as(WorkbenchActionExtensions.WorkbenchActions);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(ConfigureTaskRunnerAction, ConfigureTaskRunnerAction.ID, ConfigureTaskRunnerAction.TEXT), 'Tasks: Configure Task Runner', tasksCategory);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(BuildAction, BuildAction.ID, BuildAction.TEXT, { primary: KeyMod.CtrlCmd | KeyMod.Shift | KeyCode.KEY_B }), 'Tasks: Run Build Task', tasksCategory);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(TestAction, TestAction.ID, TestAction.TEXT), 'Tasks: Run Test Task', tasksCategory);
// workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(RebuildAction, RebuildAction.ID, RebuildAction.TEXT), tasksCategory);
// workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(CleanAction, CleanAction.ID, CleanAction.TEXT), tasksCategory);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(TerminateAction, TerminateAction.ID, TerminateAction.TEXT), 'Tasks: Terminate Running Task', tasksCategory);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(ShowLogAction, ShowLogAction.ID, ShowLogAction.TEXT), 'Tasks: Show Task Log', tasksCategory);
workbenchActionsRegistry.registerWorkbenchAction(new SyncActionDescriptor(RunTaskAction, RunTaskAction.ID, RunTaskAction.TEXT), 'Tasks: Run Task', tasksCategory);
// Task Service
registerSingleton(ITaskService, TaskService);
// Register Quick Open
(<IQuickOpenRegistry>Registry.as(QuickOpenExtensions.Quickopen)).registerQuickOpenHandler(
	new QuickOpenHandlerDescriptor(
		'vs/workbench/parts/tasks/browser/taskQuickOpen',
		'QuickOpenHandler',
		'task ',
		nls.localize('taskCommands', "Run Task")
	)
);
// Status bar
let statusbarRegistry = <IStatusbarRegistry>Registry.as(StatusbarExtensions.Statusbar);
statusbarRegistry.registerStatusbarItem(new StatusbarItemDescriptor(StatusBarItem, StatusbarAlignment.LEFT, 50 /* Medium Priority */));
// Output channel
let outputChannelRegistry = <IOutputChannelRegistry>Registry.as(OutputExt.OutputChannels);
outputChannelRegistry.registerChannel(TaskService.OutputChannelId, TaskService.OutputChannelLabel);
// (<IWorkbenchContributionsRegistry>Registry.as(WorkbenchExtensions.Workbench)).registerWorkbenchContribution(TaskServiceParticipant);
// tasks.json validation
// JSON schema contributed for tasks.json. Fix: the watching begins/ends
// pattern alternatives used 'type': '#/definitions/watchingPattern', which is
// not a valid JSON-Schema type — schema references must use '$ref'.
let schemaId = 'vscode://schemas/tasks';
let schema : IJSONSchema =
	{
		'id': schemaId,
		'description': 'Task definition file',
		'type': 'object',
		'default': {
			'version': '0.1.0',
			'command': 'myCommand',
			'isShellCommand': false,
			'args': [],
			'showOutput': 'always',
			'tasks': [
				{
					'taskName': 'build',
					'showOutput': 'silent',
					'isBuildCommand': true,
					'problemMatcher': ['$tsc', '$lessCompile']
				}
			]
		},
		'definitions': {
			'showOutputType': {
				'type': 'string',
				'enum': ['always', 'silent', 'never'],
				'default': 'silent'
			},
			'patternType': {
				'anyOf': [
					{
						'type': 'string',
						'enum': ['$tsc', '$tsc-watch' ,'$msCompile', '$lessCompile', '$gulp-tsc', '$cpp', '$csc', '$vb', '$jshint', '$jshint-stylish', '$eslint-compact', '$eslint-stylish', '$go']
					},
					{
						'$ref': '#/definitions/pattern'
					},
					{
						'type': 'array',
						'items': {
							'$ref': '#/definitions/pattern'
						}
					}
				]
			},
			'pattern': {
				'default': {
					'regexp': '^([^\\\\s].*)\\\\((\\\\d+,\\\\d+)\\\\):\\\\s*(.*)$',
					'file': 1,
					'location': 2,
					'message': 3
				},
				'additionalProperties': false,
				'properties': {
					'regexp': {
						'type': 'string',
						'description': nls.localize('JsonSchema.pattern.regexp', 'The regular expression to find an error, warning or info in the output.')
					},
					'file': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.file', 'The match group index of the filename. If omitted 1 is used.')
					},
					'location': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.location', 'The match group index of the problem\'s location. Valid location patterns are: (line), (line,column) and (startLine,startColumn,endLine,endColumn). If omitted line and column is assumed.')
					},
					'line': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.line', 'The match group index of the problem\'s line. Defaults to 2')
					},
					'column': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.column', 'The match group index of the problem\'s column. Defaults to 3')
					},
					'endLine': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.endLine', 'The match group index of the problem\'s end line. Defaults to undefined')
					},
					'endColumn': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.endColumn', 'The match group index of the problem\'s end column. Defaults to undefined')
					},
					'severity': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.severity', 'The match group index of the problem\'s severity. Defaults to undefined')
					},
					'code': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.code', 'The match group index of the problem\'s code. Defaults to undefined')
					},
					'message': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.pattern.message', 'The match group index of the message. If omitted it defaults to 4 if location is specified. Otherwise it defaults to 5.')
					},
					'loop': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.pattern.loop', 'In a multi line matcher loop indicated whether this pattern is executed in a loop as long as it matches. Can only specified on a last pattern in a multi line pattern.')
					}
				}
			},
			'problemMatcherType': {
				'oneOf': [
					{
						'type': 'string',
						'enum': ['$tsc', '$tsc-watch', '$msCompile', '$lessCompile', '$gulp-tsc', '$jshint', '$jshint-stylish', '$eslint-compact', '$eslint-stylish', '$go']
					},
					{
						'$ref': '#/definitions/problemMatcher'
					},
					{
						'type': 'array',
						'items': {
							'anyOf': [
								{
									'$ref': '#/definitions/problemMatcher'
								},
								{
									'type': 'string',
									'enum': ['$tsc', '$tsc-watch', '$msCompile', '$lessCompile', '$gulp-tsc', '$jshint', '$jshint-stylish', '$eslint-compact', '$eslint-stylish', '$go']
								}
							]
						}
					}
				]
			},
			'watchingPattern': {
				'type': 'object',
				'additionalProperties': false,
				'properties': {
					'regexp': {
						'type': 'string',
						'description': nls.localize('JsonSchema.watchingPattern.regexp', 'The regular expression to detect the begin or end of a watching task.')
					},
					'file': {
						'type': 'integer',
						'description': nls.localize('JsonSchema.watchingPattern.file', 'The match group index of the filename. Can be omitted.')
					},
				}
			},
			'problemMatcher': {
				'type': 'object',
				'additionalProperties': false,
				'properties': {
					'base': {
						'type': 'string',
						'enum': ['$tsc', '$tsc-watch', '$msCompile', '$lessCompile', '$gulp-tsc', '$jshint', '$jshint-stylish', '$eslint-compact', '$eslint-stylish', '$go'],
						'description': nls.localize('JsonSchema.problemMatcher.base', 'The name of a base problem matcher to use.')
					},
					'owner': {
						'type': 'string',
						'description': nls.localize('JsonSchema.problemMatcher.owner', 'The owner of the problem inside Code. Can be omitted if base is specified. Defaults to \'external\' if omitted and base is not specified.')
					},
					'severity': {
						'type': 'string',
						'enum': ['error', 'warning', 'info'],
						'description': nls.localize('JsonSchema.problemMatcher.severity', 'The default severity for captures problems. Is used if the pattern doesn\'t define a match group for severity.')
					},
					'applyTo': {
						'type': 'string',
						'enum': ['allDocuments', 'openDocuments', 'closedDocuments'],
						'description': nls.localize('JsonSchema.problemMatcher.applyTo', 'Controls if a problem reported on a text document is applied only to open, closed or all documents.')
					},
					'pattern': {
						'$ref': '#/definitions/patternType',
						'description': nls.localize('JsonSchema.problemMatcher.pattern', 'A problem pattern or the name of a predefined problem pattern. Can be omitted if base is specified.')
					},
					'fileLocation': {
						'oneOf': [
							{
								'type': 'string',
								'enum': ['absolute', 'relative']
							},
							{
								'type': 'array',
								'items': {
									'type': 'string'
								}
							}
						],
						'description': nls.localize('JsonSchema.problemMatcher.fileLocation', 'Defines how file names reported in a problem pattern should be interpreted.')
					},
					'watching': {
						'type': 'object',
						'additionalProperties': false,
						'properties': {
							'activeOnStart': {
								'type': 'boolean',
								'description': nls.localize('JsonSchema.problemMatcher.watching.activeOnStart', 'If set to true the watcher is in active mode when the task starts. This is equals of issuing a line that matches the beginPattern')
							},
							'beginsPattern': {
								'oneOf': [
									{
										'type': 'string'
									},
									{
										// was 'type': '#/definitions/watchingPattern' — a schema
										// reference, not a type, so it must use '$ref'.
										'$ref': '#/definitions/watchingPattern'
									}
								],
								'description': nls.localize('JsonSchema.problemMatcher.watching.beginsPattern', 'If matched in the output the start of a watching task is signaled.')
							},
							'endsPattern': {
								'oneOf': [
									{
										'type': 'string'
									},
									{
										// was 'type': '#/definitions/watchingPattern' — same fix.
										'$ref': '#/definitions/watchingPattern'
									}
								],
								'description': nls.localize('JsonSchema.problemMatcher.watching.endsPattern', 'If matched in the output the end of a watching task is signaled.')
							}
						}
					},
					'watchedTaskBeginsRegExp': {
						'type': 'string',
						'description': nls.localize('JsonSchema.problemMatcher.watchedBegin', 'A regular expression signaling that a watched tasks begins executing triggered through file watching.')
					},
					'watchedTaskEndsRegExp': {
						'type': 'string',
						'description': nls.localize('JsonSchema.problemMatcher.watchedEnd', 'A regular expression signaling that a watched tasks ends executing.')
					}
				}
			},
			'baseTaskRunnerConfiguration': {
				'type': 'object',
				'properties': {
					'command': {
						'type': 'string',
						'description': nls.localize('JsonSchema.command', 'The command to be executed. Can be an external program or a shell command.')
					},
					'isShellCommand': {
						'type': 'boolean',
						'default': true,
						'description': nls.localize('JsonSchema.shell', 'Specifies whether the command is a shell command or an external program. Defaults to false if omitted.')
					},
					'args': {
						'type': 'array',
						'description': nls.localize('JsonSchema.args', 'Additional arguments passed to the command.'),
						'items': {
							'type': 'string'
						}
					},
					'options': {
						'type': 'object',
						'description': nls.localize('JsonSchema.options', 'Additional command options'),
						'properties': {
							'cwd': {
								'type': 'string',
								'description': nls.localize('JsonSchema.options.cwd', 'The current working directory of the executed program or script. If omitted Code\'s current workspace root is used.')
							},
							'env': {
								'type': 'object',
								'additionalProperties': {
									'type': 'string'
								},
								'description': nls.localize('JsonSchema.options.env', 'The environment of the executed program or shell. If omitted the parent process\' environment is used.')
							}
						},
						'additionalProperties': {
							'type': ['string', 'array', 'object']
						}
					},
					'showOutput': {
						'$ref': '#/definitions/showOutputType',
						'description': nls.localize('JsonSchema.showOutput', 'Controls whether the output of the running task is shown or not. If omitted \'always\' is used.')
					},
					'isWatching': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.watching', 'Whether the executed task is kept alive and is watching the file system.'),
						'default': true
					},
					'promptOnClose': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.promptOnClose', 'Whether the user is prompted when VS Code closes with a running background task.'),
						'default': false
					},
					'echoCommand': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.echoCommand', 'Controls whether the executed command is echoed to the output. Default is false.'),
						'default': true
					},
					'suppressTaskName': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.suppressTaskName', 'Controls whether the task name is added as an argument to the command. Default is false.'),
						'default': true
					},
					'taskSelector': {
						'type': 'string',
						'description': nls.localize('JsonSchema.taskSelector', 'Prefix to indicate that an argument is task.')
					},
					'problemMatcher': {
						'$ref': '#/definitions/problemMatcherType',
						'description': nls.localize('JsonSchema.matchers', 'The problem matcher(s) to use. Can either be a string or a problem matcher definition or an array of strings and problem matchers.')
					},
					'tasks': {
						'type': 'array',
						'description': nls.localize('JsonSchema.tasks', 'The task configurations. Usually these are enrichments of task already defined in the external task runner.'),
						'items': {
							'type': 'object',
							'$ref': '#/definitions/taskDescription'
						}
					}
				}
			},
			'taskDescription': {
				'type': 'object',
				'required': ['taskName'],
				'additionalProperties': false,
				'properties': {
					'taskName': {
						'type': 'string',
						'description': nls.localize('JsonSchema.tasks.taskName', "The task's name")
					},
					'args': {
						'type': 'array',
						'description': nls.localize('JsonSchema.tasks.args', 'Additional arguments passed to the command when this task is invoked.'),
						'items': {
							'type': 'string'
						}
					},
					'suppressTaskName': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.tasks.suppressTaskName', 'Controls whether the task name is added as an argument to the command. If omitted the globally defined value is used.'),
						'default': true
					},
					'showOutput': {
						'$ref': '#/definitions/showOutputType',
						'description': nls.localize('JsonSchema.tasks.showOutput', 'Controls whether the output of the running task is shown or not. If omitted the globally defined value is used.')
					},
					'echoCommand': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.echoCommand', 'Controls whether the executed command is echoed to the output. Default is false.'),
						'default': true
					},
					'isWatching': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.tasks.watching', 'Whether the executed task is kept alive and is watching the file system.'),
						'default': true
					},
					'isBuildCommand': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.tasks.build', 'Maps this task to Code\'s default build command.'),
						'default': true
					},
					'isTestCommand': {
						'type': 'boolean',
						'description': nls.localize('JsonSchema.tasks.test', 'Maps this task to Code\'s default test command.'),
						'default': true
					},
					'problemMatcher': {
						'$ref': '#/definitions/problemMatcherType',
						'description': nls.localize('JsonSchema.tasks.matchers', 'The problem matcher(s) to use. Can either be a string or a problem matcher definition or an array of strings and problem matchers.')
					}
				},
				'defaultSnippets': [
					{
						'label': 'Empty task',
						'body': {
							'taskName': '{{taskName}}'
						}
					}
				]
			}
		},
		'allOf': [
			{
				'type': 'object',
				'required': ['version'],
				'properties': {
					'version': {
						'type': 'string',
						'enum': ['0.1.0'],
						'description': nls.localize('JsonSchema.version', 'The config\'s version number')
					},
					'windows': {
						'$ref': '#/definitions/baseTaskRunnerConfiguration',
						'description': nls.localize('JsonSchema.windows', 'Windows specific build configuration')
					},
					'osx': {
						'$ref': '#/definitions/baseTaskRunnerConfiguration',
						'description': nls.localize('JsonSchema.mac', 'Mac specific build configuration')
					},
					'linux': {
						'$ref': '#/definitions/baseTaskRunnerConfiguration',
						'description': nls.localize('JsonSchema.linux', 'Linux specific build configuration')
					}
				}
			},
			{
				'$ref': '#/definitions/baseTaskRunnerConfiguration'
			}
		]
	};
let jsonRegistry = <jsonContributionRegistry.IJSONContributionRegistry>Registry.as(jsonContributionRegistry.Extensions.JSONContribution);
jsonRegistry.registerSchema(schemaId, schema);
| f111fei/vscode | src/vs/workbench/parts/tasks/electron-browser/task.contribution.ts | TypeScript | mit | 51,630 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-12-21 01:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the optional ``alternative_url`` CharField to the ``page`` model
    # of the ``simplesite`` app. Auto-generated by Django; do not edit by hand.

    dependencies = [
        ('simplesite', '0003_auto_20161006_2321'),
    ]
    operations = [
        migrations.AddField(
            model_name='page',
            name='alternative_url',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Alternative URL'),
        ),
    ]
| marsxn/simple-site | simplesite/migrations/0004_page_alternative_url.py | Python | mit | 514 |
<?php
/**
* TOP API: taobao.promotionmisc.activity.range.list.get request
*
* @author auto create
* @since 1.0, 2013-09-13 16:51:03
*/
class Taobao_Request_PromotionmiscActivityRangeListGetRequest {
	/**
	 * Activity id
	 **/
	private $activityId;
	/** @var array API parameters keyed by wire name, sent with the request. */
	private $apiParas = array();
	public function setActivityId($activityId) {
		$this->activityId = $activityId;
		$this->apiParas["activity_id"] = $activityId;
	}
	public function getActivityId() {
		return $this->activityId;
	}
	/** Wire name of the TOP API method this request maps to. */
	public function getApiMethodName() {
		return "taobao.promotionmisc.activity.range.list.get";
	}
	public function getApiParas() {
		return $this->apiParas;
	}
	/**
	 * Validates required parameters before sending; throws via
	 * Taobao_RequestCheckUtil when activityId is missing or negative.
	 */
	public function check() {
		Taobao_RequestCheckUtil::checkNotNull($this->activityId, "activityId");
		Taobao_RequestCheckUtil::checkMinValue($this->activityId, 0, "activityId");
	}
	/** Adds an arbitrary extra text parameter (also mirrored onto a property). */
	public function putOtherTextParam($key, $value) {
		$this->apiParas[$key] = $value;
		$this->$key = $value;
	}
}
| musicsnap/LearnCode | php/code/yaf/application/library/Taobao/Request/PromotionmiscActivityRangeListGetRequest.php | PHP | mit | 944 |
package volume
import (
"context"
"testing"
resourcetypes "github.com/projecteru2/core/resources/types"
"github.com/projecteru2/core/scheduler"
schedulerMocks "github.com/projecteru2/core/scheduler/mocks"
"github.com/projecteru2/core/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// TestMakeRequest exercises MakeRequest validation of volume request/limit
// pairs: matching bindings succeed; mismatched source, destination, flags or
// binding sets are rejected.
func TestMakeRequest(t *testing.T) {
	_, err := MakeRequest(types.ResourceOptions{
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
		},
	})
	assert.Nil(t, err)
	// Source not match
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data2",
				Destination: "/data1",
			},
		},
	})
	assert.NotNil(t, err)
	// Dest not match
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data2",
			},
		},
	})
	assert.NotNil(t, err)
	// Flag not match
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
				Flags:       "r",
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
				Flags:       "rw",
			},
		},
	})
	assert.NotNil(t, err)
	// Request SizeInBytes larger than limit
	// NOTE(review): this case expects success (NoError) even though the
	// request size exceeds the limit — presumably MakeRequest tolerates
	// request > limit; confirm this is the intended contract.
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
				SizeInBytes: 10240,
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
				SizeInBytes: 5120,
			},
		},
	})
	assert.NoError(t, err)
	// Limit has an extra binding the request does not mention.
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
			{
				Source:      "/data2",
				Destination: "/data2",
			},
		},
	})
	assert.NotNil(t, err)
	// Request has bindings (in arbitrary order) that the limit does not cover.
	_, err = MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
			{
				Source:      "/data3",
				Destination: "/data3",
			},
			{
				Source:      "/data2",
				Destination: "/data2",
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "/data1",
				Destination: "/data1",
			},
			{
				Source:      "/data4",
				Destination: "/data4",
			},
		},
	})
	assert.NotNil(t, err)
}
// TestType checks that a request built from "AUTO" volume bindings reports,
// via its type bitmask, both the plain volume resource bit and the scheduled
// (auto-allocated) volume resource bit.
func TestType(t *testing.T) {
	resourceRequest, err := MakeRequest(types.ResourceOptions{
		VolumeRequest: []*types.VolumeBinding{
			{
				Source:      "AUTO",
				Destination: "/data1",
				Flags:       "rw",
				SizeInBytes: 128,
			},
		},
		VolumeLimit: []*types.VolumeBinding{
			{
				Source:      "AUTO",
				Destination: "/data1",
				Flags:       "rw",
				SizeInBytes: 128,
			},
		},
	})
	assert.Nil(t, err)
	assert.True(t, resourceRequest.Type()&(types.ResourceVolume|types.ResourceScheduledVolume) > 0)
}
func TestStoragePlans(t *testing.T) {
testStoragePlans(t, types.ResourceOptions{
VolumeRequest: []*types.VolumeBinding{
{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
},
},
VolumeLimit: []*types.VolumeBinding{
{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 256,
},
},
})
testStoragePlans(t, types.ResourceOptions{
VolumeRequest: []*types.VolumeBinding{
{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
},
},
VolumeLimit: []*types.VolumeBinding{
{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
},
},
})
}
func testStoragePlans(t *testing.T, reqOpts types.ResourceOptions) {
mockScheduler := &schedulerMocks.Scheduler{}
var (
volumePlans = []types.VolumePlan{
{
types.VolumeBinding{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
}: types.VolumeMap{
"/dev0": 512,
},
},
{
types.VolumeBinding{
Source: "AUTO",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
}: types.VolumeMap{
"/dev1": 512,
},
},
}
scheduleInfos []resourcetypes.ScheduleInfo = []resourcetypes.ScheduleInfo{
{
NodeMeta: types.NodeMeta{
Name: "TestNode",
CPU: map[string]int64{"0": 10000, "1": 10000},
NUMA: map[string]string{"0": "0", "1": "1"},
NUMAMemory: map[string]int64{"0": 1024, "1": 1204},
MemCap: 10240,
StorageCap: 10240,
Volume: types.VolumeMap{
"/data1": 1024,
"/data2": 1024,
},
InitVolume: types.VolumeMap{
"/data0": 1024,
},
},
VolumePlans: volumePlans,
CPUPlan: []types.CPUMap{{"0": 10000, "1": 10000}},
Capacity: 100,
},
}
volumePlan = map[string][]types.VolumePlan{
"TestNode": volumePlans,
}
)
resourceRequest, err := MakeRequest(reqOpts)
assert.Nil(t, err)
assert.True(t, resourceRequest.Type()&types.ResourceVolume > 0)
sche := resourceRequest.MakeScheduler()
mockScheduler.On(
"SelectVolumeNodes", mock.Anything, mock.Anything, mock.Anything,
).Return(scheduleInfos, volumePlan, 1, nil)
prevSche, _ := scheduler.GetSchedulerV1()
scheduler.InitSchedulerV1(nil)
plans, _, err := sche(context.TODO(), scheduleInfos)
assert.Error(t, err)
scheduler.InitSchedulerV1(mockScheduler)
defer func() {
scheduler.InitSchedulerV1(prevSche)
}()
plans, _, err = sche(context.TODO(), scheduleInfos)
assert.Nil(t, err)
assert.True(t, plans.Type()&types.ResourceVolume > 0)
const storage = int64(10240)
var node = types.Node{
NodeMeta: types.NodeMeta{
Name: "TestNode",
CPU: map[string]int64{"0": 10000, "1": 10000},
NUMA: map[string]string{"0": "0", "1": "1"},
NUMAMemory: map[string]int64{"0": 1024, "1": 1204},
MemCap: 10240,
StorageCap: storage,
Volume: types.VolumeMap{"/dev0": 10240, "/dev1": 5120},
},
}
assert.NotNil(t, plans.Capacity())
plans.ApplyChangesOnNode(&node, 0, 1)
assert.Less(t, node.Volume["/dev0"], int64(10240))
assert.Less(t, node.Volume["/dev1"], int64(5120))
plans.RollbackChangesOnNode(&node, 0, 1)
assert.Equal(t, node.Volume["/dev0"], int64(10240))
assert.Equal(t, node.Volume["/dev1"], int64(5120))
opts := resourcetypes.DispenseOptions{
Node: &node,
Index: 0,
}
r := &types.ResourceMeta{}
_, err = plans.Dispense(opts, r)
assert.Nil(t, err)
assert.Nil(t, plans.(ResourcePlans).GetPlan(""))
opts.Name = "not_exist"
_, err = plans.Dispense(opts, r)
assert.EqualError(t, err, "cannot alloc a each node plan, not enough capacity")
if reqOpts.VolumeRequest[0].SizeInBytes != reqOpts.VolumeLimit[0].SizeInBytes {
diff := reqOpts.VolumeLimit[0].SizeInBytes - reqOpts.VolumeRequest[0].SizeInBytes
assert.Equal(t, int64(512)+diff, r.VolumePlanLimit[*reqOpts.VolumeLimit[0]]["/dev0"])
return
}
assert.Equal(t, int64(512), r.VolumePlanLimit[*reqOpts.VolumeRequest[0]]["/dev0"])
}
func TestStorage(t *testing.T) {
mockScheduler := &schedulerMocks.Scheduler{}
var (
volumePlans = []types.VolumePlan{
{
types.VolumeBinding{
Source: "/data1",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
}: types.VolumeMap{
"/dev0": 512,
},
},
}
scheduleInfos []resourcetypes.ScheduleInfo = []resourcetypes.ScheduleInfo{
{
NodeMeta: types.NodeMeta{
Name: "TestNode",
},
Capacity: 100,
},
}
volumePlan = map[string][]types.VolumePlan{
"TestNode": volumePlans,
}
)
resourceRequest, err := MakeRequest(types.ResourceOptions{
VolumeRequest: []*types.VolumeBinding{
{
Source: "/data1",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
},
},
VolumeLimit: []*types.VolumeBinding{
{
Source: "/data1",
Destination: "/data1",
Flags: "rw",
SizeInBytes: 128,
},
},
})
assert.Nil(t, err)
assert.True(t, resourceRequest.Type()&types.ResourceVolume > 0)
sche := resourceRequest.MakeScheduler()
mockScheduler.On(
"SelectVolumeNodes", mock.Anything, mock.Anything, mock.Anything,
).Return(scheduleInfos, volumePlan, 1, nil)
prevSche, _ := scheduler.GetSchedulerV1()
scheduler.InitSchedulerV1(nil)
plans, _, err := sche(context.TODO(), scheduleInfos)
assert.Error(t, err)
scheduler.InitSchedulerV1(mockScheduler)
defer func() {
scheduler.InitSchedulerV1(prevSche)
}()
plans, _, err = sche(context.TODO(), scheduleInfos)
assert.Nil(t, err)
assert.True(t, plans.Type()&types.ResourceVolume > 0)
const storage = int64(10240)
var node = types.Node{
NodeMeta: types.NodeMeta{
Name: "TestNode",
CPU: map[string]int64{"0": 10000, "1": 10000},
NUMA: map[string]string{"0": "0", "1": "1"},
NUMAMemory: map[string]int64{"0": 1024, "1": 1204},
MemCap: 10240,
StorageCap: storage,
Volume: types.VolumeMap{"/dev0": 10240, "/dev1": 5120},
},
VolumeUsed: 0,
}
assert.NotNil(t, plans.Capacity())
plans.ApplyChangesOnNode(&node, 0)
assert.Less(t, node.Volume["/dev0"], int64(10240))
assert.Equal(t, node.Volume["/dev1"], int64(5120))
plans.RollbackChangesOnNode(&node, 0)
assert.Equal(t, node.Volume["/dev0"], int64(10240))
assert.Equal(t, node.Volume["/dev1"], int64(5120))
opts := resourcetypes.DispenseOptions{
Node: &node,
Index: 0,
}
r := &types.ResourceMeta{}
_, err = plans.Dispense(opts, r)
assert.Nil(t, err)
}
// TestRate exercises Rate on a node whose total volume capacity equals the
// requested size; the expected result of 1.0 indicates Rate is the ratio of
// requested bytes to the node's volume capacity.
func TestRate(t *testing.T) {
	req, err := MakeRequest(types.ResourceOptions{
		VolumeRequest: types.VolumeBindings{&types.VolumeBinding{SizeInBytes: 1024}},
		VolumeLimit:   types.VolumeBindings{&types.VolumeBinding{SizeInBytes: 1024}},
	})
	assert.Nil(t, err)
	node := types.Node{
		NodeMeta: types.NodeMeta{
			Volume: types.VolumeMap{"1": 1024},
		},
	}
	assert.Equal(t, req.Rate(node), 1.0)
}
| CMGS/core | resources/volume/volume_test.go | GO | mit | 10,261 |
/************************************************************************************
Filename : CAPI_DistortionRenderer.cpp
Content : Combines all of the rendering state associated with the HMD
Created : February 2, 2014
Authors : Michael Antonov
Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
http://www.oculusvr.com/licenses/LICENSE-3.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
#include "CAPI_DistortionRenderer.h"
#if defined (OVR_OS_WIN32)
// TBD: Move to separate config file that handles back-ends.
#define OVR_D3D_VERSION 11
#include "D3D1X/CAPI_D3D1X_DistortionRenderer.h"
#undef OVR_D3D_VERSION
#define OVR_D3D_VERSION 10
#include "D3D1X/CAPI_D3D1X_DistortionRenderer.h"
#undef OVR_D3D_VERSION
#define OVR_D3D_VERSION 9
#include "D3D1X/CAPI_D3D9_DistortionRenderer.h"
#undef OVR_D3D_VERSION
#endif
#include "GL/CAPI_GL_DistortionRenderer.h"
namespace OVR { namespace CAPI {
//-------------------------------------------------------------------------------------
// ***** DistortionRenderer
// TBD: Move to separate config file that handles back-ends.
DistortionRenderer::CreateFunc DistortionRenderer::APICreateRegistry[ovrRenderAPI_Count] =
{
0, // None
&GL::DistortionRenderer::Create,
0, // Android_GLES
#if defined (OVR_OS_WIN32)
&D3D9::DistortionRenderer::Create,
&D3D10::DistortionRenderer::Create,
&D3D11::DistortionRenderer::Create
#else
0,
0,
0
#endif
};
// Sets the colour used when rendering the latency-test quad. A non-NULL
// 'color' supplies the three RGB bytes to draw; passing NULL deactivates
// latency-test drawing (the stored colour is left untouched).
void DistortionRenderer::SetLatencyTestColor(unsigned char* color)
{
    const bool enable = (color != NULL);

    if (enable)
    {
        for (int channel = 0; channel < 3; channel++)
        {
            LatencyTestDrawColor[channel] = color[channel];
        }
    }

    LatencyTestActive = enable;
}
// Sets the colour used by the second latency-test mechanism. A non-NULL
// 'color' supplies the three RGB bytes to draw; passing NULL deactivates
// latency-test-2 drawing (the stored colour is left untouched).
void DistortionRenderer::SetLatencyTest2Color(unsigned char* color)
{
    const bool enable = (color != NULL);

    if (enable)
    {
        for (int channel = 0; channel < 3; channel++)
        {
            LatencyTest2DrawColor[channel] = color[channel];
        }
    }

    LatencyTest2Active = enable;
}
// Spin-waits until the absolute time 'absTime' (seconds, on the
// ovr_GetTimeInSeconds timebase) is reached, and returns how long was
// actually waited. Returns 0.0 immediately if absTime has already passed.
// NOTE(review): the Windows waitable-timer path below is compiled out with
// '#if 0' pending further testing, so all platforms currently pure-spin.
double DistortionRenderer::WaitTillTime(double absTime)
{
    double initialTime = ovr_GetTimeInSeconds();
    if (initialTime >= absTime)
        return 0.0;

    double newTime = initialTime;
    // volatile so the busy-wait stores below cannot be optimized away
    volatile int i;

    while (newTime < absTime)
    {
        // TODO: Needs further testing before enabling it on all Windows configs
#if 0 //def OVR_OS_WIN32
        double remainingWaitTime = absTime - newTime;

        // don't yield if <2ms
        if(remainingWaitTime > 0.002)
        {
            // round down wait time to closest 1 ms
            int roundedWaitTime = (remainingWaitTime * 1000);
            waitableTimerInterval.QuadPart = -10000LL; // 10000 * 100 ns = 1 ms
            waitableTimerInterval.QuadPart *= roundedWaitTime;

            SetWaitableTimer(timer, &waitableTimerInterval, 0, NULL, NULL, TRUE);
            DWORD waitResult = WaitForSingleObject(timer, roundedWaitTime + 3); // give 3 ms extra time
            double sleptTime = ovr_GetTimeInSeconds() - newTime;

            // Make sure this is reliable (should be waiting for 1ms, make sure it's <2ms)
            if (sleptTime > remainingWaitTime)
            {
                OVR_DEBUG_LOG_TEXT(
                    ("[DistortionRenderer::FlushGpuAndWaitTillTime] Sleep interval too long: %f\n", sleptTime));
            }
            else
            {
                OVR_ASSERT(WAIT_OBJECT_0 == waitResult);
                OVR_UNUSED(waitResult);
            }
        }
        else
#endif
        {
            // burn a short, fixed amount of CPU before re-sampling the clock
            for (int j = 0; j < 50; j++)
                i = 0;
        }
        newTime = ovr_GetTimeInSeconds();
    }

    // How long we waited
    return newTime - initialTime;
}
}} // namespace OVR::CAPI
| mplanck/ShaderToyVR | third/LibOVR_043/Src/CAPI/CAPI_DistortionRenderer.cpp | C++ | mit | 4,430 |
/******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2016-2018 Baldur Karlsson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
******************************************************************************/
#include <unistd.h>
#include "os/os_specific.h"
extern char **environ;
#define INITIAL_WAIT_TIME 1
#define MAX_WAIT_TIME 128000
// Returns this process's environment as the global 'environ' array - a
// NULL-terminated list of "KEY=value" strings, suitable for passing to exec.
char **GetCurrentEnvironment()
{
  return environ;
}
// Polls /proc/<pid>/net/tcp until the child process opens a listening socket
// on 0.0.0.0 within the renderdoc target-control port range, and returns that
// port (0 if it never appears). The child needs time to initialise, so we
// retry with exponential back-off from INITIAL_WAIT_TIME up to MAX_WAIT_TIME
// microseconds.
int GetIdentPort(pid_t childPid)
{
  int ret = 0;

  string procfile = StringFormat::Fmt("/proc/%d/net/tcp", (int)childPid);

  int waitTime = INITIAL_WAIT_TIME;

  // try for a little while for the /proc entry to appear
  while(ret == 0 && waitTime <= MAX_WAIT_TIME)
  {
    // back-off for each retry
    usleep(waitTime);
    waitTime *= 2;

    FILE *f = FileIO::fopen(procfile.c_str(), "r");

    if(f == NULL)
    {
      // try again in a bit
      continue;
    }

    // read through the proc file to check for an open listen socket.
    // Loop on fgets() itself rather than !feof(): the feof()-driven loop
    // re-parsed the final line a second time at EOF, and on an empty file
    // scanned an uninitialised buffer.
    const size_t sz = 512;
    char line[sz] = {};

    while(ret == 0 && fgets(line, sz, f) != NULL)
    {
      int socketnum = 0, hexip = 0, hexport = 0;
      int num = sscanf(line, " %d: %x:%x", &socketnum, &hexip, &hexport);

      // find open listen socket on 0.0.0.0:port
      if(num == 3 && hexip == 0 && hexport >= RenderDoc_FirstTargetControlPort &&
         hexport <= RenderDoc_LastTargetControlPort)
      {
        ret = hexport;
      }
    }

    FileIO::fclose(f);
  }

  if(ret == 0)
  {
    RDCWARN("Couldn't locate renderdoc target control listening port between %u and %u in %s",
            (uint32_t)RenderDoc_FirstTargetControlPort, (uint32_t)RenderDoc_LastTargetControlPort,
            procfile.c_str());
  }

  return ret;
}
// because OSUtility::DebuggerPresent is called often we want it to be
// cheap. Opening and parsing a file would cause high overhead on each
// call, so instead we just cache it at startup. This fails in the case
// of attaching to processes
bool debuggerPresent = false;
// Reads the TracerPid field from /proc/self/status once and caches whether a
// ptrace tracer (debugger) is attached, so OSUtility::DebuggerPresent() stays
// cheap. As noted above, this misses debuggers that attach after startup.
void CacheDebuggerPresent()
{
  FILE *f = FileIO::fopen("/proc/self/status", "r");

  if(f == NULL)
  {
    RDCWARN("Couldn't open /proc/self/status");
    return;
  }

  // read through the proc file to check for TracerPid.
  // Loop on fgets() itself rather than !feof(): the feof()-driven loop could
  // re-parse the final line at EOF and scanned an uninitialised buffer if the
  // first read failed.
  const size_t sz = 512;
  char line[sz] = {};

  while(fgets(line, sz, f) != NULL)
  {
    int tracerpid = 0;
    int num = sscanf(line, "TracerPid: %d", &tracerpid);

    // found TracerPid line; a non-zero pid means a tracer is attached
    if(num == 1)
    {
      debuggerPresent = (tracerpid != 0);
      break;
    }
  }

  FileIO::fclose(f);
}
// Returns the debugger-attached state cached at startup by
// CacheDebuggerPresent(); a debugger attaching later is not detected.
bool OSUtility::DebuggerPresent()
{
  return debuggerPresent;
}
// Thin wrapper over getenv(); returns NULL when 'name' is unset. The returned
// pointer is owned by the C runtime and must not be freed by the caller.
const char *Process::GetEnvVariable(const char *name)
{
  return getenv(name);
}
package launcher
import (
"fmt"
"io"
"github.com/influxdata/influxdb/v2/kit/cli"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"gopkg.in/yaml.v3"
)
// NewInfluxdPrintConfigCommand builds the deprecated `influxd print-config`
// sub-command. Every influxd option is cloned as a hidden flag (plus a
// visible `key-name` selector) so that flag/env/config-file resolution
// matches a real server invocation; the resolved values are printed as YAML.
func NewInfluxdPrintConfigCommand(v *viper.Viper, influxdOpts []cli.Opt) (*cobra.Command, error) {
	// Destination for the optional --key-name flag.
	var keyToPrint string
	// printOpts[0] is the key selector; the rest mirror the server's options.
	printOpts := make([]cli.Opt, len(influxdOpts)+1)
	printOpts[0] = cli.Opt{
		DestP: &keyToPrint,
		Flag:  "key-name",
		Desc:  "config key name; if set, only the resolved value of that key will be printed",
	}
	for i, opt := range influxdOpts {
		printOpts[i+1] = cli.Opt{
			DestP:  opt.DestP,
			Flag:   opt.Flag,
			Hidden: true,
		}
	}
	cmd := &cobra.Command{
		Use:        "print-config",
		Short:      "Print the full influxd config resolved from the current environment",
		Deprecated: "use the influx-cli command server-config to display the configuration values from the running server",
		Long: `
Print config (in YAML) that the influxd server would use if run with the current flags/env vars/config file.

The order of precedence for config options are as follows (1 highest, 3 lowest):
	1. flags
	2. env vars
	3. config file

A config file can be provided via the INFLUXD_CONFIG_PATH env var. If a file is
not provided via an env var, influxd will look in the current directory for a
config.{json|toml|yaml|yml} file. If one does not exist, then it will continue unchanged.

See 'influxd -h' for the full list of config options supported by the server.
`,
		RunE: func(cmd *cobra.Command, _ []string) error {
			// With no key-name, dump every option; otherwise print just one.
			var err error
			if keyToPrint == "" {
				err = printAllConfigRunE(printOpts, cmd.OutOrStdout())
			} else {
				err = printOneConfigRunE(printOpts, keyToPrint, cmd.OutOrStdout())
			}
			if err != nil {
				return fmt.Errorf("failed to print config: %w", err)
			}
			return nil
		},
		Args: cobra.NoArgs,
	}
	// Register the flags with viper/cobra so resolution actually happens.
	if err := cli.BindOptions(v, cmd, printOpts); err != nil {
		return nil, err
	}
	return cmd, nil
}
// printAllConfigRunE renders every resolved config value as a single YAML
// mapping keyed by flag name, written to out.
func printAllConfigRunE(configOpts []cli.Opt, out io.Writer) error {
	settings := make(map[string]interface{}, len(configOpts))
	for i := range configOpts {
		settings[configOpts[i].Flag] = configOpts[i].DestP
	}
	encoder := yaml.NewEncoder(out)
	return encoder.Encode(settings)
}
// printOneConfigRunE renders the resolved value of the single config option
// whose flag name matches key, as YAML written to out. It returns an error
// when key does not correspond to any known flag.
func printOneConfigRunE(configOpts []cli.Opt, key string, out io.Writer) error {
	for i := range configOpts {
		if opt := configOpts[i]; opt.Flag == key {
			return yaml.NewEncoder(out).Encode(opt.DestP)
		}
	}
	return fmt.Errorf("key %q not found in config", key)
}
| influxdata/influxdb | cmd/influxd/launcher/print_config.go | GO | mit | 2,427 |
# Minimal model: a plain Ruby object whose +created_at+ attribute is
# localized/parsed as a date through the I18n::Alchemy mixin.
class User
  include I18n::Alchemy
  attr_accessor :created_at
  localize :created_at, :using => :date
end
| sho-wtag/catarse-2.0 | vendor/bundle/ruby/2.2.0/gems/i18n_alchemy-0.2.1/test/models/user.rb | Ruby | mit | 108 |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="GetCaasServerAntiAffinityRuleCmdlet.cs" company="">
//
// </copyright>
// <summary>
// The get caas server anti affinity rule cmdlet.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
using DD.CBU.Compute.Api.Client;
using DD.CBU.Compute.Api.Contracts.Network;
using DD.CBU.Compute.Api.Contracts.Server;
using ServerType = DD.CBU.Compute.Api.Contracts.Network20.ServerType;
namespace DD.CBU.Compute.Powershell
{
using Api.Contracts.Server10;
/// <summary>
/// The get caas server anti affinity rule cmdlet.
/// </summary>
[Cmdlet(VerbsCommon.Get, "CaasServerAntiAffinityRule")]
[OutputType(typeof (AntiAffinityRuleType))]
public class GetCaasServerAntiAffinityRuleCmdlet : PSCmdletCaasWithConnectionBase
{
/// <summary>
/// The network to show the anti affinity rules from
/// </summary>
[Parameter(Mandatory = false, HelpMessage = "filter the network to show the the rules")]
public NetworkWithLocationsNetwork Network { get; set; }
/// <summary>
/// The ruleid to show the anti affinity rules from
/// </summary>
[Parameter(Mandatory = false, Position = 0, HelpMessage = "filter the Antiaffinity rule id")]
public string RuleId { get; set; }
/// <summary>
/// The location to show the anti affinity rules from
/// </summary>
[Parameter(Mandatory = false, HelpMessage = "filter the location to show the rules")]
public string Location { get; set; }
/// <summary>
/// A server to find to show the anti affinity rules from
/// </summary>
[Parameter(Mandatory = false, HelpMessage = "find a rule base in a server")]
public ServerType Server { get; set; }
/// <summary>
/// The process record.
/// </summary>
protected override void ProcessRecord()
{
base.ProcessRecord();
try
{
string networkid = null;
if (Network != null)
{
networkid = Network.id;
}
IEnumerable<AntiAffinityRuleType> resultlist =
Connection.ApiClient.ServerManagementLegacy.Server.GetServerAntiAffinityRules(RuleId, Location, networkid).Result;
if (Server != null)
{
resultlist = resultlist.Where(rule => rule.serverSummary.Any(server => server.id == Server.id));
}
if (resultlist != null && resultlist.Any())
{
switch (resultlist.Count())
{
case 0:
WriteError(
new ErrorRecord(
new ItemNotFoundException(
"This command cannot find a matching object with the given parameters."
), "ItemNotFoundException", ErrorCategory.ObjectNotFound, resultlist));
break;
default:
WriteObject(resultlist, true);
break;
}
}
}
catch (AggregateException ae)
{
ae.Handle(
e =>
{
if (e is ComputeApiException)
{
WriteError(new ErrorRecord(e, "-2", ErrorCategory.InvalidOperation, Connection));
}
else
{
// if (e is HttpRequestException)
ThrowTerminatingError(new ErrorRecord(e, "-1", ErrorCategory.ConnectionError, Connection));
}
return true;
});
}
}
}
} | DimensionDataCBUSydney/DimensionData.ComputeClient | PowershellModule/CloudCompute.Powershell/Networks/GetCaasServerAntiAffinityRuleCmdlet.cs | C# | mit | 3,395 |
# A Nested Array to Model a Bingo Board SOLO CHALLENGE
# I spent [1] hours on this challenge.
# Release 0: Pseudocode
# Outline:
# Create a method to generate a letter ( b, i, n, g, o) and a number (1-100)
# create an instance variable of the list of letters in initialize
# generate a random number between 0 to 4 to represent column
# generate a random number between 1 to 100 to represent target number
# use the column number as index to get the actual letter from the instance array.
# return the column letter and the target number in an array.
# Check the called column for the number called.
# transform the column letter into column number.
# retrieve the target number
# If the number is in the column, replace with an 'x'
#fill in the outline here
# use a while loop to go through flattened array and search for target number.
# if target number is found, divide current index by 5 to get first index; use column number as the second index.
# go to the game board to change the value at the target indexes with an "X".
# Display a column to the console
# print out the nested arrays.
# Display the board to the console (prettily)
# print out the nested arrays.
# Refactored Solution
# Models a bingo game board: produces random letter/number calls and marks
# called numbers on the board.
class BingoBoard
  # Column letters, in column order (index 0 => column "B", etc.).
  LETTERS = %w[B I N G O].freeze

  # board - a 5x5 nested array of numbers (rows of columns B..O).
  def initialize(board)
    @bingo_board = board
    @letter_array = LETTERS.dup
    @stop = false
  end

  # Calls out a random column letter (B/I/N/G/O) and a number in 1..100.
  # Prints the call (same "letter: X, number: N" format as before) and
  # returns it as a [letter, number] pair.
  def call
    letter = @letter_array.sample
    number = rand(1..100)
    p "letter: #{letter}, number: #{number}"
    [letter, number]
  end

  # Marks every cell in the called column matching the called number with an
  # "X" and latches the stop flag. input_array is the [letter, number] pair
  # produced by #call. Prints and returns the board. An unknown letter
  # (previously a crash) is now ignored.
  def check(input_array)
    column = @letter_array.index(input_array[0])
    target = input_array[1]
    unless column.nil?
      @bingo_board.each do |row|
        if row[column] == target
          row[column] = "X"
          @stop = true
        end
      end
    end
    p @bingo_board
  end

  # True once at least one called number has been marked on the board.
  def stop
    @stop
  end
end
#DRIVER CODE (I.E. METHOD CALLS) GO BELOW THIS LINE
board = [[47, 44, 71, 8, 88],
[22, 69, 75, 65, 73],
[83, 85, 97, 89, 57],
[25, 31, 96, 68, 51],
[75, 70, 54, 80, 83]]
new_game = BingoBoard.new(board)
counter=0
while (counter<100)
temp=new_game.call
new_game.check(temp)
counter+=1
end
#Reflection
# How difficult was pseudocoding this challenge? What do you think of your pseudocoding style?
# It wasn't difficult. I think it's pretty concise and accurate.
# What are the benefits of using a class for this challenge?
# It allows the use of instance variables.
# How can you access coordinates in a nested array?
# By using double brackets.
# What methods did you use to access and modify the array?
# flatten and index
# How did you determine what should be an instance variable versus a local variable?
# An instance variable represents information needed by different variables in the class.
# What do you feel is most improved in your refactored solution?
# The use of flatten and index. | zinosama/dbc-phase-0 | week-6/bingo_solution.rb | Ruby | mit | 2,975 |
<?php
namespace PSR2R\Sniffs\WhiteSpace;
use PHP_CodeSniffer_File;
use PHP_CodeSniffer_Tokens;
/**
* No whitespace should be at the beginning and end of an array.
*
* @author Mark Scherer
* @license MIT
*/
class ArraySpacingSniff implements \PHP_CodeSniffer_Sniff {

	/**
	 * @inheritDoc
	 *
	 * Fires on short-array open brackets: `[`.
	 */
	public function register() {
		return [T_OPEN_SHORT_ARRAY];
	}

	/**
	 * @inheritDoc
	 *
	 * Checks both the space just inside the opening bracket and the space
	 * just before the matching closing bracket.
	 */
	public function process(PHP_CodeSniffer_File $phpcsFile, $stackPtr) {
		$tokens = $phpcsFile->getTokens();
		// The tokenizer records the matching `]` position for us.
		$endIndex = $tokens[$stackPtr]['bracket_closer'];

		$this->checkBeginning($phpcsFile, $stackPtr);
		$this->checkEnding($phpcsFile, $endIndex);
	}

	/**
	 * Reports (and auto-fixes) whitespace directly after the opening bracket,
	 * e.g. `[ 1, 2]`. Multi-line arrays (content on the next line) are fine.
	 *
	 * @param \PHP_CodeSniffer_File $phpcsFile
	 * @param int $stackPtr
	 * @return void
	 */
	protected function checkBeginning(PHP_CodeSniffer_File $phpcsFile, $stackPtr) {
		$tokens = $phpcsFile->getTokens();

		// First non-whitespace/comment token after the bracket.
		$nextIndex = $phpcsFile->findNext(PHP_CodeSniffer_Tokens::$emptyTokens, ($stackPtr + 1), null, true);
		if ($nextIndex - $stackPtr === 1) {
			// Content is directly adjacent - nothing to do.
			return;
		}
		if ($tokens[$nextIndex]['line'] !== $tokens[$stackPtr]['line']) {
			// Content starts on a later line (multi-line array) - allowed.
			return;
		}

		$fix = $phpcsFile->addFixableError('No whitespace after opening bracket', $stackPtr, 'InvalidAfter');
		if ($fix) {
			// Clear the whitespace token preceding the content; same-line
			// whitespace is tokenized as a single T_WHITESPACE token.
			$phpcsFile->fixer->replaceToken($nextIndex - 1, '');
		}
	}

	/**
	 * Reports (and auto-fixes) whitespace directly before the closing bracket,
	 * e.g. `[1, 2 ]`. Multi-line arrays are fine; a trailing comma is left to
	 * another sniff.
	 *
	 * @param \PHP_CodeSniffer_File $phpcsFile
	 * @param int $stackPtr
	 * @return void
	 */
	protected function checkEnding(PHP_CodeSniffer_File $phpcsFile, $stackPtr) {
		$tokens = $phpcsFile->getTokens();

		// Last non-whitespace token before the closing bracket.
		$previousIndex = $phpcsFile->findPrevious(T_WHITESPACE, ($stackPtr - 1), null, true);
		if ($stackPtr - $previousIndex === 1) {
			// Content is directly adjacent - nothing to do.
			return;
		}
		if ($tokens[$previousIndex]['line'] !== $tokens[$stackPtr]['line']) {
			// Closing bracket is on its own line (multi-line array) - allowed.
			return;
		}

		// Let another sniffer take care of invalid commas
		if ($tokens[$previousIndex]['code'] === T_COMMA) {
			return;
		}

		$fix = $phpcsFile->addFixableError('No whitespace before closing bracket', $stackPtr, 'InvalidBefore');
		if ($fix) {
			$phpcsFile->fixer->replaceToken($previousIndex + 1, '');
		}
	}

}
| drulm/user-form-robot | vendor/fig-r/psr2r-sniffer/PSR2R/Sniffs/WhiteSpace/ArraySpacingSniff.php | PHP | mit | 2,045 |
/**
* @fileoverview Get data value from data-attribute
* @author NHN FE Development Lab <dl_javascript@nhn.com>
*/
'use strict';
var convertToKebabCase = require('./_convertToKebabCase');
/**
* Get data value from data-attribute
* @param {HTMLElement} element - target element
* @param {string} key - key
* @returns {string} value
* @memberof module:domUtil
*/
function getData(element, key) {
if (element.dataset) {
return element.dataset[key];
}
return element.getAttribute('data-' + convertToKebabCase(key));
}
module.exports = getData;
| nhnent/fe.code-snippet | domUtil/getData.js | JavaScript | mit | 564 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Bermuda.AdminLibrary.Interfaces;
namespace Bermuda.AdminLibrary.Models
{
    /// <summary>
    /// Lightweight record of a cloud compute instance, pairing its identifier
    /// with a display name; implements <see cref="ICloudEntity"/>.
    /// </summary>
    public class CloudInstance : ICloudEntity
    {
        /// <summary>Unique identifier of the instance.</summary>
        public string Id { get; set; }
        /// <summary>Human-readable name of the instance.</summary>
        public string Name { get; set; }
    }
}
| melnx/Bermuda | BermudaAdmin/Bermuda.AdminLibrary/Models/CloudInstance.cs | C# | mit | 325 |
<div class="uk-grid">
<div class="uk-width-1-1 uk-width-medium-1-2 uk-text-center">
<img src="{{ url('images/original/' . $image->filename) }}" alt="{{ $image->alt }}">
</div>
<div class="uk-width-1-1 uk-width-medium-1-2">
<div class="url uk-alert uk-alert-success uk-text-break">
{{ url('images/original/' . $image->filename) }}
</div>
<form class="uk-form uk-form-stacked" id="update-image" method="POST" action="{{ url('admin/images/' . $image->id) }}">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<input type="hidden" name="_method" value="PUT">
@include('laramanager::partials.elements.form.textarea', ['field' => ['name' => 'filename', 'value' => $image->filename]])
@include('laramanager::partials.elements.form.text', ['field' => ['name' => 'title', 'value' => $image->title]])
@include('laramanager::partials.elements.form.textarea', ['field' => ['name' => 'description', 'value' => $image->description]])
@include('laramanager::partials.elements.form.text', ['field' => ['name' => 'original_filename', 'value' => $image->original_filename]])
@include('laramanager::partials.elements.form.text', ['field' => ['name' => 'alt', 'value' => $image->alt]])
<div class="uk-form-row">
@include('laramanager::partials.elements.form.submit', ['class' => 'uk-width-1-1 uk-width-medium-1-4 uk-text-contrast'])
</div>
</form>
</div>
</div>
| philsquare/LaraManager | src/views/images/edit.blade.php | PHP | mit | 1,549 |
jQuery(document).ready(function() {
var makemap = function() {
$('#oabpositioner').html('<div style="position:relative;top:0;left:0;z-index:1000;"> \
<p style="text-align:center;"> \
<a href="https://openaccessbutton.org" style="font-weight:bold;color:#212f3f;"> \
openaccessbutton.org \
<span class="oabmapcount"></span> \
people need access to data. Can you help? \
</a> \
</p> \
</div> \
<div id="mapspace" style="width:100%;height:100%;position:relative;top:-43px;left:0;z-index:1;"></div>');
var topo,projection,path,svg,g,draw;
var updatemap = function(data) {
$('.oabmapcount').html(data.hits.total);
draw(topo,data);
}
var getdata = function() {
var qry = {
"size":100000,
"query": {
filtered: {
query: {
bool: {
must: []
}
},
filter: {
bool: {
must:[]
}
}
}
},
"fields": ["location.geo.lat","location.geo.lon"]
}
$.ajax({
type: 'GET',
url: '//api.openaccessbutton.org/requests?source=' + JSON.stringify(qry),
dataType: 'JSON',
success: updatemap
});
}
var width = document.getElementById('mapspace').offsetWidth;
var height = document.getElementById('mapspace').offsetHeight;
var tooltip = d3.select("#mapspace").append("div").attr("class", "tooltip hidden");
function setup(width,height) {
projection = d3.geo.mercator()
.translate([(width/2), (height/1.55)])
.scale( width / 2 / Math.PI)
.center([0, 0 ]);
path = d3.geo.path().projection(projection);
svg = d3.select("#mapspace").append("svg")
.attr("width", width)
.attr("height", height)
.append("g");
g = svg.append("g");
}
setup(width,height);
d3.json("//static.cottagelabs.com/maps/world-topo.json", function(error, world) {
topo = topojson.feature(world, world.objects.countries).features;
draw(topo);
getdata();
});
function addpoint(lon,lat) {
var gpoint = g.append("g").attr("class", "gpoint");
var x = projection([lon,lat])[0];
var y = projection([lon,lat])[1];
gpoint.append("svg:circle")
.attr("cx", x)
.attr("cy", y)
.attr("class","point")
.attr("r", 2);
}
draw = function(topo,data) {
var country = g.selectAll(".country").data(topo);
country.enter().insert("path")
.attr("class", "country")
.attr("d", path)
.attr("id", function(d,i) { return d.id; });
//add points and repo suggestions
if ( data ) {
data.hits.hits.forEach(function(i){
if ( i.fields && i.fields['location.geo.lat'] && i.fields['location.geo.lon'] ) {
addpoint(
i.fields['location.geo.lon'][0],
i.fields['location.geo.lat'][0]
);
}
});
}
}
}
if ( $('#mapspace').length && $('#mapspace').is(':visible') ) makemap();
});
| OAButton/oab_static | static/map.js | JavaScript | mit | 3,167 |
'use strict';
var Stream = require('stream');
var expect = require('chai').expect;
var Excel = require('../../../excel');
// Streaming workbook writer: looking up a sheet name that was never added
// must yield undefined rather than throwing.
describe('Workbook Writer', function() {
  it('returns undefined for non-existant sheet', function() {
    var sink = new Stream.Writable({
      write: function ignoreChunk() {}
    });
    var workbook = new Excel.stream.xlsx.WorkbookWriter({stream: sink});
    workbook.addWorksheet('first');
    var missing = workbook.getWorksheet('w00t');
    expect(missing).to.equal(undefined);
  });
});
| peakon/exceljs | spec/unit/doc/workbook-writer.spec.js | JavaScript | mit | 474 |
module.exports = [ [ 'zero', 'one', 'two', 'three' ] ]
| bigeasy/packet | test/generated/packed/nested.lookup.js | JavaScript | mit | 55 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace PerplexMail
{
    /// <summary>
    /// This class is used as a model class to specify which tag (in the email) should be replaced with which value
    /// </summary>
    public class EmailTag
    {
        // Backing fields for the Tag and Value properties.
        string _tag;
        string _value;
        /// <summary>
        /// The tag name without the surrounding delimiters. The setter strips a
        /// leading "[#" and/or trailing "#]" if present, and throws
        /// ArgumentNullException for a null or empty value (see sanitizeTag).
        /// </summary>
        public string Tag { get { return _tag; } set { _tag = sanitizeTag(value); } }
        /// <summary>The replacement text (or "true"/"false" for conditional tags).</summary>
        public string Value { get { return _value; } set { _value = value; } }
        /// <summary>The tag name wrapped in the configured prefix/suffix delimiters.</summary>
        public string FullTag { get { return Constants.TAG_PREFIX + Tag + Constants.TAG_SUFFIX; } }
        /// <summary>
        /// Creates a basic email tag. This tag can be used when sending a PerplexMail email to replace tags with text values.
        /// </summary>
        public EmailTag(){ }
        /// <summary>
        /// Creates a basic email tag. This tag can be used when sending a PerplexMail email to replace tags with text values.
        /// </summary>
        /// <param name="tag">The tagname, without any prefix [# or suffix #]</param>
        /// <param name="value">The value to replace the tags with (in the email)</param>
        public EmailTag(string tag, string value)
        {
            // NOTE(review): sanitizeTag runs twice here — once explicitly and
            // once inside the Tag setter. For a normal "[#x#]" input this is
            // idempotent, but a nested "[#[#x#]#]" is stripped twice. Confirm
            // whether double-stripping is intended before simplifying.
            Tag = sanitizeTag(tag);
            Value = value;
        }
        /// <summary>
        /// Creates a basic if tag. This tag can be used to show or hide specific content in emails sent by the email package.
        /// For example the text with tags: [#IsVehicle#]Text about vehicle details[#/IsVehicle#]
        /// is VISIBLE if the tag "IsVehicle" is TRUE and HIDDEN when FALSE.
        /// </summary>
        /// <param name="tag">The tagname, without any prefix [# or suffix #]</param>
        /// <param name="state">True (to show the content) or False (to hide the content)</param>
        public EmailTag(string tag, bool state)
        {
            // Same double-sanitize note as the (string, string) constructor above.
            Tag = sanitizeTag(tag);
            Value = state ? "true" : "false";
        }
        /// <summary>
        /// This function strips all opening [# and closing #] characters for a tag. The email pacakge will already handle this for us.
        /// </summary>
        /// <param name="tag">The tagname to sanitize</param>
        /// <returns>A sanitized tagname</returns>
        static string sanitizeTag(string tag)
        {
            if (String.IsNullOrEmpty(tag))
                throw new ArgumentNullException("tag");
            // Strip one leading "[#" and, if present, one trailing "#]".
            if (tag.StartsWith("[#"))
                if (tag.EndsWith("#]"))
                    return tag.Substring(2, tag.Length - 4);
                else
                    return tag.Substring(2);
            // Only a trailing delimiter: strip just that.
            else if (tag.EndsWith("#]"))
                return tag.Substring(0, tag.Length - 2);
            // Default
            return tag;
        }
    }
}
package com.snobot.xlib;
import edu.wpi.first.wpilibj.Preferences;
/**
 * Typed, self-registering configuration properties backed by the WPILib
 * {@link Preferences} store. Each property writes its default into
 * Preferences the first time it is constructed, so the full set of keys
 * appears in the preferences file in construction order.
 */
public class PropertyManager
{
    // Set whenever a property had to write its default into Preferences;
    // read and cleared by saveIfUpdated().
    private static boolean sPropertyAdded = false;
    /**
     * Base class for a single keyed property with a default value.
     * Constructing a property immediately performs a get-or-save so the
     * key is persisted even if never read again.
     */
    public static abstract class IProperty<Type>
    {
        protected String mKey;
        protected Type mDefault;
        public IProperty(String aKey, Type aDefault)
        {
            mKey = aKey;
            mDefault = aDefault;
            // Force a get-or-save operation. This will guarantee that
            // all the properties are added in the order they get constructed,
            // and that they will all immediately be written into the file
            // rather than have a lazy-instantiation thing going on
            getValue();
        }
        /** Returns the stored value, writing the default first if the key is absent. */
        public abstract Type getValue();
        /** Returns the Preferences key this property is stored under. */
        public String getKey()
        {
            return mKey;
        }
    }
    /** A double-valued property. */
    public static class DoubleProperty extends IProperty<Double>
    {
        public DoubleProperty(String aKey, double aDefault)
        {
            super(aKey, aDefault);
        }
        @Override
        public Double getValue()
        {
            if (Preferences.getInstance().containsKey(mKey))
            {
                return Preferences.getInstance().getDouble(mKey, mDefault);
            }
            // Key missing: persist the default and flag that the file changed.
            sPropertyAdded = true;
            Preferences.getInstance().putDouble(mKey, mDefault);
            return mDefault;
        }
    }
    /** An int-valued property. */
    public static class IntegerProperty extends IProperty<Integer>
    {
        public IntegerProperty(String aKey, int aDefault)
        {
            super(aKey, aDefault);
        }
        @Override
        public Integer getValue()
        {
            if (Preferences.getInstance().containsKey(mKey))
            {
                return Preferences.getInstance().getInt(mKey, mDefault);
            }
            sPropertyAdded = true;
            Preferences.getInstance().putInt(mKey, mDefault);
            return mDefault;
        }
    }
    /** A String-valued property (default defaults to the empty string). */
    public static class StringProperty extends IProperty<String>
    {
        public StringProperty(String aKey)
        {
            this(aKey, "");
        }
        public StringProperty(String aKey, String aDefault)
        {
            super(aKey, aDefault);
        }
        @Override
        public String getValue()
        {
            if (Preferences.getInstance().containsKey(mKey))
            {
                return Preferences.getInstance().getString(mKey, mDefault);
            }
            sPropertyAdded = true;
            Preferences.getInstance().putString(mKey, mDefault);
            return mDefault;
        }
    }
    /** A boolean-valued property (default defaults to false). */
    public static class BooleanProperty extends IProperty<Boolean>
    {
        public BooleanProperty(String aKey)
        {
            this(aKey, false);
        }
        public BooleanProperty(String aKey, boolean aDefault)
        {
            super(aKey, aDefault);
        }
        @Override
        public Boolean getValue()
        {
            if (Preferences.getInstance().containsKey(mKey))
            {
                return Preferences.getInstance().getBoolean(mKey, mDefault);
            }
            sPropertyAdded = true;
            Preferences.getInstance().putBoolean(mKey, mDefault);
            return mDefault;
        }
    }
    /**
     * Logs when any property wrote a default since the last call, then
     * clears the flag.
     * NOTE(review): despite the name, no explicit save is performed here —
     * presumably Preferences persists each put* immediately; confirm.
     */
    public static void saveIfUpdated()
    {
        if (sPropertyAdded)
        {
            System.out.println("-------------------------------------------");
            System.out.println("Config file updated, saving it");
            System.out.println("-------------------------------------------");
            sPropertyAdded = false;
        }
    }
}
| ArcticWarriors/snobot-2017 | RobotCode/snobot2016/src/com/snobot/xlib/PropertyManager.java | Java | mit | 3,643 |
#!/usr/bin/env python
""" A unittest script for the WgsDnaPrep module. """
import unittest
import json
from cutlass import WgsDnaPrep
from cutlass import MIMS, MimsException
from CutlassTestConfig import CutlassTestConfig
from CutlassTestUtil import CutlassTestUtil
# pylint: disable=W0703, C1801
class WgsDnaPrepTest(unittest.TestCase):
    """ A unit test class for the WgsDnaPrep class. """

    # Shared OSDF session and helper utilities, created once in setUpClass.
    session = None
    util = None

    @classmethod
    def setUpClass(cls):
        """ Setup for the unittest. """
        # Establish the session for each test method
        cls.session = CutlassTestConfig.get_session()
        cls.util = CutlassTestUtil()

    def testImport(self):
        """ Test the importation of the WgsDnaPrep module. """
        success = False
        try:
            from cutlass import WgsDnaPrep
            success = True
        except Exception:
            pass

        # failUnless()/failIf() were deprecated aliases and are removed in
        # Python 3.12; use the modern assert* spellings instead.
        self.assertTrue(success)
        self.assertIsNotNone(WgsDnaPrep)

    def testSessionCreate(self):
        """ Test the creation of a WgsDnaPrep via the session. """
        success = False
        wgsDnaPrep = None
        try:
            wgsDnaPrep = self.session.create_object("wgs_dna_prep")
            success = True
        except Exception:
            pass

        self.assertTrue(success)
        self.assertIsNotNone(wgsDnaPrep)

    def testToJson(self):
        """ Test the generation of JSON from a WgsDnaPrep instance. """
        wgsDnaPrep = self.session.create_object("wgs_dna_prep")
        success = False
        comment = "Test comment"
        wgsDnaPrep.comment = comment
        wgsDnaPrep_json = None

        try:
            wgsDnaPrep_json = wgsDnaPrep.to_json()
            success = True
        except Exception:
            pass

        self.assertTrue(success, "Able to use 'to_json'.")
        self.assertTrue(wgsDnaPrep_json is not None, "to_json() returned data.")

        parse_success = False
        # Initialize before the try block so a json.loads() failure cannot
        # leave the name unbound (previously a NameError, rather than a clean
        # assertion failure, would have been raised below).
        wgsDnaPrep_data = None

        try:
            wgsDnaPrep_data = json.loads(wgsDnaPrep_json)
            parse_success = True
        except Exception:
            pass

        self.assertTrue(parse_success, "to_json() did not throw an exception.")
        self.assertTrue(wgsDnaPrep_data is not None, "to_json() returned parsable JSON.")

        self.assertTrue('meta' in wgsDnaPrep_data, "JSON has 'meta' key in it.")

        self.assertEqual(wgsDnaPrep_data['meta']['comment'],
                         comment, "'comment' in JSON had expected value.")

    def testId(self):
        """ Test the id property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.assertTrue(prep.id is None,
                        "New template WgsDnaPrep has no ID.")

        # The ID is assigned by OSDF and must not be settable by callers.
        with self.assertRaises(AttributeError):
            prep.id = "test"

    def testVersion(self):
        """ Test the version property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.assertTrue(prep.version is None,
                        "New template WgsDnaPrep has no version.")

        with self.assertRaises(ValueError):
            prep.version = "test"

    def testComment(self):
        """ Test the comment property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "comment")

        self.util.stringPropertyTest(self, prep, "comment")

    def testFragSize(self):
        """ Test the frag_size property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.intTypeTest(self, prep, "frag_size")

        self.util.intPropertyTest(self, prep, "frag_size")

    def testFragSizeNegative(self):
        """ Test the frag_size property with an illegal negative value. """
        prep = self.session.create_object("wgs_dna_prep")

        with self.assertRaises(Exception):
            prep.frag_size = -1

    def testLibLayout(self):
        """ Test the lib_layout property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "lib_layout")

        self.util.stringPropertyTest(self, prep, "lib_layout")

    def testLibSelection(self):
        """ Test the lib_selection property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "lib_selection")

        self.util.stringPropertyTest(self, prep, "lib_selection")

    def testNCBITaxonID(self):
        """ Test the ncbi_taxon_id property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "ncbi_taxon_id")

        self.util.stringPropertyTest(self, prep, "ncbi_taxon_id")

    def testPrepID(self):
        """ Test the prep_id property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "prep_id")

        self.util.stringPropertyTest(self, prep, "prep_id")

    def testSequencingCenter(self):
        """ Test the sequencing_center property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "sequencing_center")

        self.util.stringPropertyTest(self, prep, "sequencing_center")

    def testSequencingContact(self):
        """ Test the sequencing_contact property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "sequencing_contact")

        self.util.stringPropertyTest(self, prep, "sequencing_contact")

    def testSRSID(self):
        """ Test the srs_id property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.stringTypeTest(self, prep, "srs_id")

        self.util.stringPropertyTest(self, prep, "srs_id")

    def testStorageDuration(self):
        """ Test the storage_duration property. """
        prep = self.session.create_object("wgs_dna_prep")

        self.util.intTypeTest(self, prep, "storage_duration")

        self.util.intPropertyTest(self, prep, "storage_duration")

    def testStorageDurationNegative(self):
        """ Test the storage_duration property with an illegal negative value. """
        prep = self.session.create_object("wgs_dna_prep")

        with self.assertRaises(Exception):
            prep.storage_duration = -1

    def testTags(self):
        """ Test the tags property. """
        prep = self.session.create_object("wgs_dna_prep")

        tags = prep.tags
        # isinstance() is the idiomatic type check (was type(tags) == list).
        self.assertTrue(isinstance(tags, list), "WgsDnaPrep tags() method returns a list.")
        self.assertEqual(len(tags), 0, "Template wgsDnaPrep tags list is empty.")

        new_tags = ["tagA", "tagB"]

        prep.tags = new_tags
        self.assertEqual(prep.tags, new_tags, "Can set tags on a WgsDnaPrep.")

        json_str = prep.to_json()
        doc = json.loads(json_str)

        self.assertTrue('tags' in doc['meta'],
                        "JSON representation has 'tags' field in 'meta'.")

        self.assertEqual(doc['meta']['tags'], new_tags,
                         "JSON representation had correct tags after setter.")

    def testAddTag(self):
        """ Test the add_tag() method. """
        prep = self.session.create_object("wgs_dna_prep")

        prep.add_tag("test")
        self.assertEqual(prep.tags, ["test"], "Can add a tag to a wgsDnaPrep.")

        json_str = prep.to_json()
        doc = json.loads(json_str)

        self.assertEqual(doc['meta']['tags'], ["test"],
                         "JSON representation had correct tags after add_tag().")

        # Try adding the same tag yet again, shouldn't get a duplicate
        with self.assertRaises(ValueError):
            prep.add_tag("test")

        json_str = prep.to_json()
        doc2 = json.loads(json_str)

        self.assertEqual(doc2['meta']['tags'], ["test"],
                         "JSON document did not end up with duplicate tags.")

    def testMims(self):
        """ Test the mims property. """
        wgsDnaPrep = self.session.create_object("wgs_dna_prep")

        self.assertTrue(wgsDnaPrep.mims is None,
                        "New template wgsDnaPrep has no MIMS data.")

        invalid_test_mims = {
            "a": 1,
            "b": 2
        }

        with self.assertRaises(MimsException):
            wgsDnaPrep.mims = invalid_test_mims

        self.assertTrue(wgsDnaPrep.mims is None,
                        "Template wgsDnaPrep has no MIMS after invalid set attempt.")

        valid_mims = {
            "adapters": "test_adapters",
            "annot_source": "test_annot_source",
            "assembly": "test_assembly",
            "assembly_name": "test_assembly_name",
            "biome": "test_biome",
            "collection_date": "test_collection_date",
            "env_package": "test_env_package",
            "extrachrom_elements": "test_extrachrom_elements",
            "encoded_traits": "test_encoded_traits",
            "experimental_factor": "test_experimental_factor",
            "feature": "test_feature",
            "findex": "test_findex",
            "finishing_strategy": "test_finishing_strategy",
            "geo_loc_name": "test_geo_loc_name",
            "investigation_type": "test_investigation_type",
            "lat_lon": "test_lat_long",
            "lib_const_meth": "test_lib_const_meth",
            "lib_reads_seqd": "test_lib_reads_seqd",
            "lib_screen": "test_lib_screen",
            "lib_size": 2000,
            "lib_vector": "test_lib_vector",
            "material": "test_material",
            "nucl_acid_amp": "test_nucl_acid_amp",
            "nucl_acid_ext": "test_nucl_acid_ext",
            "project_name": "test_project_name",
            "rel_to_oxygen": "test_rel_to_oxygen",
            "rindex": "test_rindex",
            "samp_collect_device": "test_samp_collect_device",
            "samp_mat_process": "test_samp_map_process",
            "samp_size": "test_samp_size",
            "seq_meth": "test_seq_meth",
            "sop": ["a", "b", "c"],
            "source_mat_id": ["a", "b", "c"],
            "submitted_to_insdc": True,
            "url": ["a", "b", "c"]
        }

        # Assume failure
        success = False

        try:
            wgsDnaPrep.mims = valid_mims
            success = True
        except Exception:
            pass

        self.assertTrue(success, "Valid MIMS data does not raise exception.")

        self.assertTrue(wgsDnaPrep.mims is not None, "mims getter retrieves data.")

        biome = wgsDnaPrep.mims['biome']
        self.assertEqual(biome, valid_mims["biome"],
                         "Retrieved MIMS data appears to be okay.")

    def testRequiredFields(self):
        """ Test the required_fields() static method. """
        required = WgsDnaPrep.required_fields()

        self.assertEqual(type(required), tuple,
                         "required_fields() returns a tuple.")

        self.assertTrue(len(required) > 0,
                        "required_field() did not return empty value.")

    def testLoadSaveDeleteWgsDnaPrep(self):
        """ Extensive test for the load, edit, save and delete functions. """
        # attempt to save the prep at all points before and after adding
        # the required fields
        prep = self.session.create_object("wgs_dna_prep")

        test_comment = "Test comment"
        frag_size = 10
        lib_layout = "asdfads"
        lib_selection = "asdfhewofue"
        mims = {
            "adapters": "test_adapters",
            "annot_source": "test_annot_source",
            "assembly": "test_assembly",
            "assembly_name": "test_assembly_name",
            "biome": "test_biome",
            "collection_date": "test_collection_date",
            "env_package": "test_env_package",
            "extrachrom_elements": "test_extrachrom_elements",
            "encoded_traits": "test_encoded_traits",
            "experimental_factor": "test_experimental_factor",
            "feature": "test_feature",
            "findex": "test_findex",
            "finishing_strategy": "test_finishing_strategy",
            "geo_loc_name": "test_geo_loc_name",
            "investigation_type": "test_investigation_type",
            "lat_lon": "test_lat_long",
            "lib_const_meth": "test_lib_const_meth",
            "lib_reads_seqd": "test_lib_reads_seqd",
            "lib_screen": "test_lib_screen",
            "lib_size": 2000,
            "lib_vector": "test_lib_vector",
            "material": "test_material",
            "nucl_acid_amp": "test_nucl_acid_amp",
            "nucl_acid_ext": "test_nucl_acid_ext",
            "project_name": "test_project_name",
            "rel_to_oxygen": "test_rel_to_oxygen",
            "rindex": "test_rindex",
            "samp_collect_device": "test_samp_collect_device",
            "samp_mat_process": "test_samp_map_process",
            "samp_size": "test_samp_size",
            "seq_meth": "test_seq_meth",
            "sop": ["a", "b", "c"],
            "source_mat_id": ["a", "b", "c"],
            "submitted_to_insdc": True,
            "url": ["a", "b", "c"]
        }
        ncbi_taxon_id = "sadfadsfawefw"
        prep_id = "asdsadewqrewq"
        sequencing_center = "center for sequencing"
        sequencing_contact = "me right now"
        srs_id = "the id for the srs"
        storage_duration = 10
        test_links = {"prepared_from": []}

        self.assertFalse(prep.save(), "WgsDnaPrep not saved successfully, no required fields")

        prep.comment = test_comment

        self.assertFalse(prep.save(), "WgsDnaPrep not saved successfully")

        prep.frag_size = frag_size

        self.assertFalse(prep.save(), "WgsDnaPrep not saved successfully")

        prep.links = test_links

        self.assertFalse(prep.save(), "WgsDnaPrep not saved successfully")

        prep.lib_layout = lib_layout
        prep.lib_selection = lib_selection
        prep.mims = mims
        prep.ncbi_taxon_id = ncbi_taxon_id
        prep.prep_id = prep_id
        prep.sequencing_center = sequencing_center
        prep.sequencing_contact = sequencing_contact
        prep.srs_id = srs_id
        prep.storage_duration = storage_duration

        # make sure prep does not delete if it does not exist
        with self.assertRaises(Exception):
            prep.delete()

        self.assertTrue(prep.save() is True, "WgsDnaPrep was not saved successfully")

        # load the prep that was just saved from the OSDF instance
        prep_loaded = self.session.create_object("wgs_dna_prep")
        prep_loaded = prep_loaded.load(prep.id)

        # check all fields were saved and loaded successfully
        self.assertEqual(prep.comment,
                         prep_loaded.comment,
                         "WgsDnaPrep comment not saved & loaded successfully")
        self.assertEqual(prep.mims["biome"],
                         prep_loaded.mims["biome"],
                         "WgsDnaPrep mims not saved & loaded successfully")

        # prep is deleted successfully
        self.assertTrue(prep.delete(), "WgsDnaPrep was not deleted successfully")

        # the prep of the initial ID should not load successfully
        load_test = self.session.create_object("wgs_dna_prep")
        with self.assertRaises(Exception):
            load_test = load_test.load(prep.id)
# Allow running this test module directly: python test_wgs_dna_prep.py
if __name__ == '__main__':
    unittest.main()
| ihmpdcc/cutlass | tests/test_wgs_dna_prep.py | Python | mit | 15,332 |
# -*- coding: utf-8 -*-
'''
# Copyright (c) 2015 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This file was generated and any changes will be overwritten.
'''
from __future__ import unicode_literals
from ..model.item_delta_collection_page import ItemDeltaCollectionPage
from ..request.items_collection import ItemsCollectionResponse
class ItemDeltaCollectionResponse(ItemsCollectionResponse):
    # NOTE: this file is generated (see header) — any manual change will be
    # overwritten. Comments here describe behavior only.
    @property
    def collection_page(self):
        """The collection page stored in the response JSON

        Returns:
            :class:`ItemDeltaCollectionPage<onedrivesdk.request.item_delta_collection.ItemDeltaCollectionPage>`:
                The collection page
        """
        # Pull the delta-sync cursors out of the raw response; each is None
        # when the service did not include it.
        token = self._prop_dict["@delta.token"] if "@delta.token" in self._prop_dict else None
        delta_link = self._prop_dict["@odata.deltaLink"] if "@odata.deltaLink" in self._prop_dict else None
        next_page_link = self._prop_dict["@odata.nextLink"] if "@odata.nextLink" in self._prop_dict else None
        # Reuse a previously built page object (updating it in place) rather
        # than allocating a new one on every access.
        if self._collection_page:
            self._collection_page._prop_list = self._prop_dict["value"]
            self._collection_page._token = token
            self._collection_page._delta_link = delta_link
            self._collection_page._next_page_link = next_page_link
        else:
            self._collection_page = ItemDeltaCollectionPage(self._prop_dict["value"],
                                                            token,
                                                            delta_link,
                                                            next_page_link)
        return self._collection_page

# Import placed after the class definition, presumably to avoid a circular
# import with the request module — TODO confirm before moving to the top.
from ..request.item_delta import ItemDeltaRequest
require "pathname"
require "vagrant/action/builder"
module VagrantPlugins
  module CommandPlugin
    module Action
      # This middleware sequence will expunge (remove) all installed plugins.
      # (The previous comment incorrectly said "install a plugin".)
      def self.action_expunge
        Vagrant::Action::Builder.new.tap do |b|
          b.use ExpungePlugins
        end
      end

      # This middleware sequence will install a plugin gem.
      def self.action_install
        Vagrant::Action::Builder.new.tap do |b|
          b.use InstallGem
        end
      end

      # This middleware sequence licenses paid addons.
      def self.action_license
        Vagrant::Action::Builder.new.tap do |b|
          b.use PluginExistsCheck
          b.use LicensePlugin
        end
      end

      # This middleware sequence will list all installed plugins.
      def self.action_list
        Vagrant::Action::Builder.new.tap do |b|
          b.use ListPlugins
        end
      end

      # This middleware sequence will repair installed plugins.
      def self.action_repair
        Vagrant::Action::Builder.new.tap do |b|
          b.use RepairPlugins
        end
      end

      # This middleware sequence will uninstall a plugin.
      def self.action_uninstall
        Vagrant::Action::Builder.new.tap do |b|
          b.use PluginExistsCheck
          b.use UninstallPlugin
        end
      end

      # This middleware sequence will update a plugin.
      def self.action_update
        Vagrant::Action::Builder.new.tap do |b|
          b.use UpdateGems
        end
      end

      # The autoload farm
      # Each middleware class is loaded lazily from the action/ directory.
      action_root = Pathname.new(File.expand_path("../action", __FILE__))
      autoload :ExpungePlugins, action_root.join("expunge_plugins")
      autoload :InstallGem, action_root.join("install_gem")
      autoload :LicensePlugin, action_root.join("license_plugin")
      autoload :ListPlugins, action_root.join("list_plugins")
      autoload :PluginExistsCheck, action_root.join("plugin_exists_check")
      autoload :RepairPlugins, action_root.join("repair_plugins")
      autoload :UninstallPlugin, action_root.join("uninstall_plugin")
      autoload :UpdateGems, action_root.join("update_gems")
    end
  end
end
# View helpers for rendering diffs (inline / side-by-side), diff option
# handling, and whitespace-toggle links.
module DiffHelper
  # Highlight the intra-line changes between an old and new line pair.
  # Returns [marked_old_line, marked_new_line] with deletion/addition spans.
  def mark_inline_diffs(old_line, new_line)
    old_diffs, new_diffs = Gitlab::Diff::InlineDiff.new(old_line, new_line).inline_diffs

    marked_old_line = Gitlab::Diff::InlineDiffMarker.new(old_line).mark(old_diffs, mode: :deletion)
    marked_new_line = Gitlab::Diff::InlineDiffMarker.new(new_line).mark(new_diffs, mode: :addition)

    [marked_old_line, marked_new_line]
  end

  def expand_all_diffs?
    params[:expand_all_diffs].present?
  end

  # The user's preferred diff view (:inline or :parallel), read from a
  # cookie and falling back to :inline for unknown values.
  def diff_view
    @diff_view ||= begin
      diff_views = %w(inline parallel)
      diff_view = cookies[:diff_view]
      diff_view = diff_views.first unless diff_views.include?(diff_view)
      diff_view.to_sym
    end
  end

  # Options passed to the diff generator; diff_for_path requests always
  # expand and restrict to the requested file pair.
  def diff_options
    options = { ignore_whitespace_change: hide_whitespace?, no_collapse: expand_all_diffs? }

    if action_name == 'diff_for_path'
      options[:no_collapse] = true
      options[:paths] = params.values_at(:old_path, :new_path)
    end

    options
  end

  # Build the "..." match row used at fold boundaries; emits one or two
  # line-number cells depending on which positions are known.
  def diff_match_line(old_pos, new_pos, text: '', view: :inline, bottom: false)
    content = content_tag :td, text, class: "line_content match #{view == :inline ? '' : view}"
    cls = ['diff-line-num', 'unfold', 'js-unfold']
    cls << 'js-unfold-bottom' if bottom

    html = ''
    if old_pos
      html << content_tag(:td, '...', class: cls + ['old_line'], data: { linenumber: old_pos })
      html << content unless view == :inline
    end

    if new_pos
      html << content_tag(:td, '...', class: cls + ['new_line'], data: { linenumber: new_pos })
      html << content
    end

    html.html_safe
  end

  # Strip the leading +/-/space diff marker from a line.
  def diff_line_content(line)
    if line.blank?
      " ".html_safe
    else
      # We can't use `sub` because the HTML-safeness of `line` will not survive.
      # NOTE(review): in-place index assignment mutates `line`; relies on the
      # string (a SafeBuffer) being mutable here.
      line[0] = '' if line.start_with?('+', '-', ' ')
      line
    end
  end

  # For a parallel-view line pair, look up any discussions attached to the
  # left (unchanged/removed) and right (added) sides.
  def parallel_diff_discussions(left, right, diff_file)
    discussion_left = discussion_right = nil

    if left && (left.unchanged? || left.removed?)
      line_code = diff_file.line_code(left)
      discussion_left = @grouped_diff_discussions[line_code]
    end

    if right && right.added?
      line_code = diff_file.line_code(right)
      discussion_right = @grouped_diff_discussions[line_code]
    end

    [discussion_left, discussion_right]
  end

  def inline_diff_btn
    diff_btn('Inline', 'inline', diff_view == :inline)
  end

  def parallel_diff_btn
    diff_btn('Side-by-side', 'parallel', diff_view == :parallel)
  end

  # Render "name @ sha" for a submodule; the sha links to the submodule
  # commit when it can be resolved.
  def submodule_link(blob, ref, repository = @repository)
    tree, commit = submodule_links(blob, ref, repository)
    commit_id = if commit.nil?
                  Commit.truncate_sha(blob.id)
                else
                  link_to Commit.truncate_sha(blob.id), commit
                end

    [
      content_tag(:span, link_to(truncate(blob.name, length: 40), tree)),
      '@',
      content_tag(:span, commit_id, class: 'monospace'),
    ].join(' ').html_safe
  end

  # Commit to link a diff file to: deleted files link to the parent (the
  # last commit in which the file existed).
  def commit_for_diff(diff_file)
    return diff_file.content_commit if diff_file.content_commit

    if diff_file.deleted_file
      @base_commit || @commit.parent || @commit
    else
      @commit
    end
  end

  def diff_file_html_data(project, diff_file_path, diff_commit_id)
    {
      blob_diff_path: namespace_project_blob_diff_path(project.namespace, project,
                                                       tree_join(diff_commit_id, diff_file_path)),
      view: diff_view
    }
  end

  def editable_diff?(diff)
    !diff.deleted_file && @merge_request && @merge_request.source_project
  end

  private

  # One of the inline/side-by-side toggle buttons, preserving the current
  # query parameters apart from :format.
  def diff_btn(title, name, selected)
    params_copy = params.dup
    params_copy[:view] = name

    # Always use HTML to handle case where JSON diff rendered this button
    params_copy.delete(:format)

    link_to url_for(params_copy), id: "#{name}-diff-btn", class: (selected ? 'btn active' : 'btn'), data: { view_type: name } do
      title
    end
  end

  def commit_diff_whitespace_link(project, commit, options)
    url = namespace_project_commit_path(project.namespace, project, commit.id, params_with_whitespace)
    toggle_whitespace_link(url, options)
  end

  def diff_merge_request_whitespace_link(project, merge_request, options)
    url = diffs_namespace_project_merge_request_path(project.namespace, project, merge_request, params_with_whitespace)
    toggle_whitespace_link(url, options)
  end

  def diff_compare_whitespace_link(project, from, to, options)
    url = namespace_project_compare_path(project.namespace, project, from, to, params_with_whitespace)
    toggle_whitespace_link(url, options)
  end

  # Whitespace changes are hidden when the ?w=1 query parameter is set.
  def hide_whitespace?
    params[:w] == '1'
  end

  # Current query parameters with the whitespace flag toggled.
  def params_with_whitespace
    hide_whitespace? ? request.query_parameters.except(:w) : request.query_parameters.merge(w: 1)
  end

  def toggle_whitespace_link(url, options)
    options[:class] ||= ''
    options[:class] << ' btn btn-default'

    link_to "#{hide_whitespace? ? 'Show' : 'Hide'} whitespace changes", url, class: options[:class]
  end

  def render_overflow_warning?(diff_files)
    diffs = @merge_request_diff.presence || diff_files
    diffs.overflow?
  end
end
package April2021Leetcode;
public class _0832FlippingAnImage {

	public static void main(String[] args) {
		// Print the matrix contents (deepToString) rather than the array
		// object references that println(int[][]) would otherwise produce.
		System.out.println(java.util.Arrays.deepToString(flipAndInvertImage(
				new int[][] { new int[] { 1, 1, 0 }, new int[] { 1, 0, 1 }, new int[] { 0, 0, 0 } })));
		System.out.println(java.util.Arrays.deepToString(flipAndInvertImage(new int[][] { new int[] { 1, 1, 0, 0 },
				new int[] { 1, 0, 0, 1 }, new int[] { 0, 1, 1, 1 }, new int[] { 1, 0, 1, 0 } })));
	}

	/**
	 * LeetCode 832: reverse each row of the binary matrix horizontally, then
	 * invert every bit. (The previous version had an empty method body and did
	 * not compile.)
	 *
	 * Works in place with a two-pointer sweep per row; XOR with 1 flips a bit,
	 * and swapping while flipping does both steps in a single pass.
	 *
	 * @param A binary matrix (entries 0/1); mutated in place
	 * @return the same array instance, flipped and inverted
	 */
	public static int[][] flipAndInvertImage(int[][] A) {
		for (int[] row : A) {
			for (int i = 0, j = row.length - 1; i <= j; i++, j--) {
				int tmp = row[i] ^ 1;
				row[i] = row[j] ^ 1;
				row[j] = tmp;
			}
		}
		return A;
	}
}
/**
* istanbul ignore next
*/
define(function(require, exports, module) {
    'use strict';
    var Observable = require('./Observable').Class;
    /**
     * Tracks whether the page/tab is currently visible to the user and
     * emits a 'change' event when that state flips. Falls back through
     * vendor-prefixed Page Visibility APIs, then IE focus events, then
     * plain window focus/blur handlers.
     * @constructor
     * @extends Observable
     * @alias RCSDK.core.PageVisibility
     */
    function PageVisibility() {
        Observable.call(this);
        // `hidden` is reassigned below to whichever vendor-prefixed
        // property name the current browser actually supports; the
        // onchange closure reads document[hidden] through it.
        var hidden = "hidden",
            onchange = function(evt) {
                evt = evt || window.event;
                var v = 'visible',
                    h = 'hidden',
                    // Map focus-style events directly to a visibility state;
                    // for visibilitychange events fall through to the
                    // document property instead.
                    evtMap = {
                        focus: v, focusin: v, pageshow: v, blur: h, focusout: h, pagehide: h
                    };
                this.visible = (evt.type in evtMap) ? evtMap[evt.type] == v : !document[hidden];
                this.emit(this.events.change, this.visible);
            }.bind(this);
        // Assume visible until an event says otherwise.
        this.visible = true;
        // Non-browser environment (e.g. Node): nothing to listen to.
        if (typeof document == 'undefined' || typeof window == 'undefined') return;
        // Standards:
        if (hidden in document)
            document.addEventListener("visibilitychange", onchange);
        else if ((hidden = "mozHidden") in document)
            document.addEventListener("mozvisibilitychange", onchange);
        else if ((hidden = "webkitHidden") in document)
            document.addEventListener("webkitvisibilitychange", onchange);
        else if ((hidden = "msHidden") in document)
            document.addEventListener("msvisibilitychange", onchange);
        // IE 9 and lower:
        else if ('onfocusin' in document)
            document.onfocusin = document.onfocusout = onchange;
        // All others:
        else
            window.onpageshow = window.onpagehide = window.onfocus = window.onblur = onchange;
    }
    PageVisibility.prototype = Object.create(Observable.prototype);
    Object.defineProperty(PageVisibility.prototype, 'constructor', {value: PageVisibility, enumerable: false});
    // Event names emitted by this class.
    PageVisibility.prototype.events = {
        change: 'change'
    };
    // Returns the last observed visibility state.
    PageVisibility.prototype.isVisible = function() {
        return this.visible;
    };
    module.exports = {
        Class: PageVisibility,
        /**
         * Factory used by the SDK's dependency-injection context.
         * @param {Context} context
         * @returns {PageVisibility}
         */
        $get: function(context) {
            return new PageVisibility();
        }
    };
});
require File.join(File.dirname(File.expand_path(__FILE__)), "spec_helper")
describe "Model#values" do
  before do
    @c = Class.new(Sequel::Model(:items))
  end

  # `equal` asserts object identity: values/to_hash must return the very
  # hash the model was loaded with, not a copy.
  it "should return the hash of model values" do
    hash = {:x=>1}
    @c.load(hash).values.should equal(hash)
  end

  it "should be aliased as to_hash" do
    hash = {:x=>1}
    @c.load(hash).to_hash.should equal(hash)
  end
end
describe "Model#get_column_value and set_column_value" do
  before do
    @c = Class.new(Sequel::Model(:items))
    @c.columns :x
    @o = @c.load(:x=>1)
  end

  # set_column_value takes the setter-method name (:x=), get_column_value
  # the getter name (:x); both should round-trip through the column value.
  it "should get and set column values" do
    @o.get_column_value(:x).should == 1
    @o.set_column_value(:x=, 2)
    @o.get_column_value(:x).should == 2
    @o.x.should == 2
  end
end
describe "Model#save server use" do
  before do
    # Mock DB with a :blah shard; @db.sqls is drained so each example only
    # sees the SQL it generates itself.
    @db = Sequel.mock(:autoid=>proc{|sql| 10}, :fetch=>{:x=>1, :id=>10}, :servers=>{:blah=>{}, :read_only=>{}})
    @c = Class.new(Sequel::Model(@db[:items]))
    @c.columns :id, :x, :y
    @c.dataset.columns(:id, :x, :y)
    @db.sqls
  end

  it "should use the :default server if the model doesn't have one already specified" do
    @c.new(:x=>1).save.should == @c.load(:x=>1, :id=>10)
    @db.sqls.should == ["INSERT INTO items (x) VALUES (1)", 'SELECT * FROM items WHERE (id = 10) LIMIT 1']
  end

  it "should use the model's server if the model has one already specified" do
    @c.dataset = @c.dataset.server(:blah)
    @c.new(:x=>1).save.should == @c.load(:x=>1, :id=>10)
    @db.sqls.should == ["INSERT INTO items (x) VALUES (1) -- blah", 'SELECT * FROM items WHERE (id = 10) LIMIT 1 -- blah']
  end
end
describe "Model#save" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      columns :id, :x, :y
    end
    # Next generated primary key from the mock DB is 13.
    @c.instance_dataset.autoid = @c.dataset.autoid = 13
    DB.reset
  end

  it "should insert a record for a new model instance" do
    o = @c.new(:x => 1)
    o.save
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 13) LIMIT 1"]
  end

  # When the dataset supports INSERT ... RETURNING, save should use it and
  # skip the separate refresh SELECT.
  it "should use dataset's insert_select method if present" do
    ds = @c.instance_dataset
    ds._fetch = {:y=>2}
    def ds.supports_insert_select?() true end
    def ds.insert_select(hash)
      with_sql_first("INSERT INTO items (y) VALUES (2) RETURNING *")
    end
    o = @c.new(:x => 1)
    o.save
    o.values.should == {:y=>2}
    DB.sqls.should == ["INSERT INTO items (y) VALUES (2) RETURNING *"]
  end

  it "should not use dataset's insert_select method if specific columns are selected" do
    ds = @c.dataset = @c.dataset.select(:y)
    ds.should_not_receive(:insert_select)
    @c.new(:x => 1).save
  end

  it "should use dataset's insert_select method if the dataset uses returning, even if specific columns are selected" do
    def (@c.dataset).supports_returning?(_) true end
    ds = @c.dataset = @c.dataset.select(:y).returning(:y)
    DB.reset
    ds = @c.instance_dataset
    ds._fetch = {:y=>2}
    def ds.supports_insert_select?() true end
    def ds.insert_select(hash)
      with_sql_first("INSERT INTO items (y) VALUES (2) RETURNING y")
    end
    o = @c.new(:x => 1)
    o.save
    o.values.should == {:y=>2}
    DB.sqls.should == ["INSERT INTO items (y) VALUES (2) RETURNING y"]
  end

  it "should use value returned by insert as the primary key and refresh the object" do
    o = @c.new(:x => 11)
    o.save
    DB.sqls.should == ["INSERT INTO items (x) VALUES (11)",
      "SELECT * FROM items WHERE (id = 13) LIMIT 1"]
  end

  it "should allow you to skip refreshing by overridding _save_refresh" do
    @c.send(:define_method, :_save_refresh){}
    @c.create(:x => 11)
    DB.sqls.should == ["INSERT INTO items (x) VALUES (11)"]
  end

  it "should work correctly for inserting a record without a primary key" do
    @c.no_primary_key
    o = @c.new(:x => 11)
    o.save
    DB.sqls.should == ["INSERT INTO items (x) VALUES (11)"]
  end

  # With a composite key, whichever column autoincrementing_primary_key
  # names should receive the DB-generated id; the regexp tolerates either
  # ordering of the refresh WHERE clause.
  it "should set the autoincrementing_primary_key value to the value returned by insert" do
    @c.unrestrict_primary_key
    @c.set_primary_key [:x, :y]
    o = @c.new(:x => 11)
    def o.autoincrementing_primary_key() :y end
    o.save
    sqls = DB.sqls
    sqls.length.should == 2
    sqls.first.should == "INSERT INTO items (x) VALUES (11)"
    sqls.last.should =~ %r{SELECT \* FROM items WHERE \(\([xy] = 1[13]\) AND \([xy] = 1[13]\)\) LIMIT 1}
  end

  it "should update a record for an existing model instance" do
    o = @c.load(:id => 3, :x => 1)
    o.save
    DB.sqls.should == ["UPDATE items SET x = 1 WHERE (id = 3)"]
  end

  # require_modification makes save raise unless exactly one row changed.
  it "should raise a NoExistingObject exception if the dataset update call doesn't return 1, unless require_modification is false" do
    o = @c.load(:id => 3, :x => 1)
    t = o.this
    t.numrows = 0
    proc{o.save}.should raise_error(Sequel::NoExistingObject)
    t.numrows = 2
    proc{o.save}.should raise_error(Sequel::NoExistingObject)
    t.numrows = 1
    proc{o.save}.should_not raise_error

    o.require_modification = false
    t.numrows = 0
    proc{o.save}.should_not raise_error
    t.numrows = 2
    proc{o.save}.should_not raise_error
  end

  it "should respect the :columns option to specify the columns to save" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    o.save(:columns=>:y)
    DB.sqls.first.should == "UPDATE items SET y = NULL WHERE (id = 3)"
  end

  it "should mark saved columns as not changed" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    o[:y] = 4
    o.changed_columns.should == [:y]
    o.save(:columns=>:x)
    o.changed_columns.should == [:y]
    o.save(:columns=>:y)
    o.changed_columns.should == []
  end

  it "should mark all columns as not changed if this is a new record" do
    o = @c.new(:x => 1, :y => nil)
    o.x = 4
    o.changed_columns.should == [:x]
    o.save
    o.changed_columns.should == []
  end
end
it "should mark all columns as not changed if this is a new record and insert_select was used" do
def (@c.dataset).insert_select(h) h.merge(:id=>1) end
o = @c.new(:x => 1, :y => nil)
o.x = 4
o.changed_columns.should == [:x]
o.save
o.changed_columns.should == []
end
it "should store previous value of @new in @was_new and as well as the hash used for updating in @columns_updated until after hooks finish running" do
res = nil
@c.send(:define_method, :after_save){ res = [@columns_updated, @was_new]}
o = @c.new(:x => 1, :y => nil)
o[:x] = 2
o.save
res.should == [nil, true]
o.after_save
res.should == [nil, nil]
res = nil
o = @c.load(:id => 23,:x => 1, :y => nil)
o[:x] = 2
o.save
res.should == [{:x => 2, :y => nil}, nil]
o.after_save
res.should == [nil, nil]
res = nil
o = @c.load(:id => 23,:x => 2, :y => nil)
o[:x] = 2
o[:y] = 22
o.save(:columns=>:x)
res.should == [{:x=>2},nil]
o.after_save
res.should == [nil, nil]
end
it "should use Model's use_transactions setting by default" do
@c.use_transactions = true
@c.load(:id => 3, :x => 1, :y => nil).save(:columns=>:y)
DB.sqls.should == ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"]
@c.use_transactions = false
@c.load(:id => 3, :x => 1, :y => nil).save(:columns=>:y)
DB.sqls.should == ["UPDATE items SET y = NULL WHERE (id = 3)"]
end
it "should inherit Model's use_transactions setting" do
@c.use_transactions = true
Class.new(@c).load(:id => 3, :x => 1, :y => nil).save(:columns=>:y)
DB.sqls.should == ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"]
@c.use_transactions = false
Class.new(@c).load(:id => 3, :x => 1, :y => nil).save(:columns=>:y)
DB.sqls.should == ["UPDATE items SET y = NULL WHERE (id = 3)"]
end
it "should use object's use_transactions setting" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = false
@c.use_transactions = true
o.save(:columns=>:y)
DB.sqls.should == ["UPDATE items SET y = NULL WHERE (id = 3)"]
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
@c.use_transactions = false
o.save(:columns=>:y)
DB.sqls.should == ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"]
end
it "should use :transaction option if given" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.save(:columns=>:y, :transaction=>false)
DB.sqls.should == ["UPDATE items SET y = NULL WHERE (id = 3)"]
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = false
o.save(:columns=>:y, :transaction=>true)
DB.sqls.should == ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"]
end
it "should rollback if before_save returns false and raise_on_save_failure = true" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.raise_on_save_failure = true
def o.before_save
false
end
proc { o.save(:columns=>:y) }.should raise_error(Sequel::HookFailed)
DB.sqls.should == ["BEGIN", "ROLLBACK"]
end
it "should rollback if before_save calls cancel_action and raise_on_save_failure = true" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.raise_on_save_failure = true
def o.before_save
cancel_action
end
proc { o.save(:columns=>:y) }.should raise_error(Sequel::HookFailed)
DB.sqls.should == ["BEGIN", "ROLLBACK"]
end
it "should rollback if before_save returns false and :raise_on_failure option is true" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.raise_on_save_failure = false
def o.before_save
false
end
proc { o.save(:columns=>:y, :raise_on_failure => true) }.should raise_error(Sequel::HookFailed)
DB.sqls.should == ["BEGIN", "ROLLBACK"]
end
it "should not rollback outer transactions if before_save returns false and raise_on_save_failure = false" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.raise_on_save_failure = false
def o.before_save
false
end
DB.transaction do
o.save(:columns=>:y).should == nil
DB.run "BLAH"
end
DB.sqls.should == ["BEGIN", "BLAH", "COMMIT"]
end
it "should rollback if before_save returns false and raise_on_save_failure = false" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = true
o.raise_on_save_failure = false
def o.before_save
false
end
o.save(:columns=>:y).should == nil
DB.sqls.should == ["BEGIN", "ROLLBACK"]
end
it "should not rollback if before_save throws Rollback and use_transactions = false" do
o = @c.load(:id => 3, :x => 1, :y => nil)
o.use_transactions = false
def o.before_save
raise Sequel::Rollback
end
proc { o.save(:columns=>:y) }.should raise_error(Sequel::Rollback)
DB.sqls.should == []
end
it "should support a :server option to set the server/shard to use" do
db = Sequel.mock(:fetch=>{:id=>13, :x=>1}, :autoid=>proc{13}, :numrows=>1, :servers=>{:s1=>{}})
c = Class.new(Sequel::Model(db[:items]))
c.columns :id, :x
db.sqls
o = c.new(:x => 1)
o.save(:server=>:s1)
db.sqls.should == ["INSERT INTO items (x) VALUES (1) -- s1", "SELECT * FROM items WHERE (id = 13) LIMIT 1 -- s1"]
o.save(:server=>:s1, :transaction=>true)
db.sqls.should == ["BEGIN -- s1", "UPDATE items SET x = 1 WHERE (id = 13) -- s1", 'COMMIT -- s1']
end
end
# Model#set_server: verifies that set_server routes INSERT/UPDATE/DELETE and
# transaction statements to the given shard, and that it updates the cached
# `this` dataset. The mock adapter appends "-- s1" to sharded statements.
describe "Model#set_server" do
  before do
    @db = Sequel.mock(:fetch=>{:id=>13, :x=>1}, :autoid=>proc{13}, :numrows=>1, :servers=>{:s1=>{}})
    @c = Class.new(Sequel::Model(@db[:items])) do
      columns :id, :x
    end
    # Drain the SQL recorded during setup so each example starts clean.
    @db.sqls
  end
  it "should set the server to use when inserting" do
    @c.new(:x => 1).set_server(:s1).save
    @db.sqls.should == ["INSERT INTO items (x) VALUES (1) -- s1", "SELECT * FROM items WHERE (id = 13) LIMIT 1 -- s1"]
  end
  it "should set the server to use when updating" do
    @c.load(:id=>13, :x => 1).set_server(:s1).save
    @db.sqls.should == ["UPDATE items SET x = 1 WHERE (id = 13) -- s1"]
  end
  it "should set the server to use for transactions when saving" do
    @c.load(:id=>13, :x => 1).set_server(:s1).save(:transaction=>true)
    @db.sqls.should == ["BEGIN -- s1", "UPDATE items SET x = 1 WHERE (id = 13) -- s1", 'COMMIT -- s1']
  end
  it "should set the server to use when deleting" do
    @c.load(:id=>13).set_server(:s1).delete
    @db.sqls.should == ["DELETE FROM items WHERE (id = 13) -- s1"]
  end
  it "should set the server to use when deleting when using optimized delete" do
    # With an explicit scalar primary key the WHERE clause is unparenthesized.
    @c.set_primary_key :id
    @c.load(:id=>13).set_server(:s1).delete
    @db.sqls.should == ["DELETE FROM items WHERE id = 13 -- s1"]
  end
  it "should set the server to use for transactions when destroying" do
    o = @c.load(:id=>13).set_server(:s1)
    o.use_transactions = true
    o.destroy
    @db.sqls.should == ["BEGIN -- s1", "DELETE FROM items WHERE (id = 13) -- s1", 'COMMIT -- s1']
  end
  it "should set the server on this if this is already loaded" do
    o = @c.load(:id=>13, :x => 1)
    # Force the memoized `this` dataset to exist before switching servers.
    o.this
    o.set_server(:s1)
    o.this.opts[:server].should == :s1
  end
  it "should set the server on this if this is not already loaded" do
    @c.load(:id=>13, :x => 1).set_server(:s1).this.opts[:server].should == :s1
  end
end
# Model#freeze: freezing an instance freezes its internal structures
# (values, changed_columns, errors, this) while keeping read-only methods
# (new?, valid?) working, and makes mutating methods raise.
describe "Model#freeze" do
  before do
    class ::Album < Sequel::Model
      columns :id
      # Nested model class defined alongside Album; presumably exercises
      # anonymous-class/constant handling — TODO confirm against other specs.
      class B < Sequel::Model
        columns :id, :album_id
      end
    end
    @o = Album.load(:id=>1).freeze
    DB.sqls
  end
  after do
    # Remove the top-level constant so examples stay isolated.
    Object.send(:remove_const, :Album)
  end
  it "should freeze the object" do
    @o.frozen?.should == true
  end
  it "should freeze the object if the model doesn't have a primary key" do
    Album.no_primary_key
    @o = Album.load(:id=>1).freeze
    @o.frozen?.should == true
  end
  it "should freeze the object's values, associations, changed_columns, errors, and this" do
    @o.values.frozen?.should == true
    @o.changed_columns.frozen?.should == true
    @o.errors.frozen?.should == true
    @o.this.frozen?.should == true
  end
  it "should still have working class attr overriddable methods" do
    Sequel::Model::BOOLEAN_SETTINGS.each{|m| @o.send(m) == Album.send(m)}
  end
  it "should have working new? method" do
    @o.new?.should == false
    Album.new.freeze.new?.should == true
  end
  it "should have working valid? method" do
    @o.valid?.should == true
    o = Album.new
    def o.validate() errors.add(:foo, '') end
    o.freeze
    o.valid?.should == false
  end
  it "should raise an Error if trying to save/destroy/delete/refresh" do
    proc{@o.save}.should raise_error(Sequel::Error)
    proc{@o.destroy}.should raise_error(Sequel::Error)
    proc{@o.delete}.should raise_error(Sequel::Error)
    proc{@o.refresh}.should raise_error(Sequel::Error)
    # None of the failed calls should have reached the database.
    @o.db.sqls.should == []
  end
end
# Model#dup: the duplicate is equal to the original but shares no internal
# structures, keeps new/existing status, and (unlike clone) drops frozen state.
describe "Model#dup" do
  before do
    @Album = Class.new(Sequel::Model(:albums))
    @o = @Album.load(:id=>1)
    DB.sqls
  end
  it "should be equal to existing object" do
    @o.dup.should == @o
    @o.dup.values.should == @o.values
    @o.dup.changed_columns.should == @o.changed_columns
    @o.dup.errors.should == @o.errors
    @o.dup.this.should == @o.this
  end
  it "should not use identical structures" do
    # equal() checks object identity: contents match, objects differ.
    @o.dup.should_not equal(@o)
    @o.dup.values.should_not equal(@o.values)
    @o.dup.changed_columns.should_not equal(@o.changed_columns)
    @o.dup.errors.should_not equal(@o.errors)
    @o.dup.this.should_not equal(@o.this)
  end
  it "should keep new status" do
    @o.dup.new?.should == false
    @Album.new.dup.new?.should == true
  end
  it "should not copy frozen status" do
    @o.freeze.dup.should_not be_frozen
    @o.freeze.dup.values.should_not be_frozen
    @o.freeze.dup.changed_columns.should_not be_frozen
    @o.freeze.dup.errors.should_not be_frozen
    @o.freeze.dup.this.should_not be_frozen
  end
end
# Model#clone: mirrors the Model#dup examples, except clone DOES carry the
# frozen status over to the copy and its internal structures.
describe "Model#clone" do
  before do
    @Album = Class.new(Sequel::Model(:albums))
    @o = @Album.load(:id=>1)
    DB.sqls
  end
  it "should be equal to existing object" do
    @o.clone.should == @o
    @o.clone.values.should == @o.values
    @o.clone.changed_columns.should == @o.changed_columns
    @o.clone.errors.should == @o.errors
    @o.clone.this.should == @o.this
  end
  it "should not use identical structures" do
    # equal() checks object identity: contents match, objects differ.
    @o.clone.should_not equal(@o)
    @o.clone.values.should_not equal(@o.values)
    @o.clone.changed_columns.should_not equal(@o.changed_columns)
    @o.clone.errors.should_not equal(@o.errors)
    @o.clone.this.should_not equal(@o.this)
  end
  it "should keep new status" do
    @o.clone.new?.should == false
    @Album.new.clone.new?.should == true
  end
  it "should copy frozen status" do
    @o.freeze.clone.should be_frozen
    @o.freeze.clone.values.should be_frozen
    @o.freeze.clone.changed_columns.should be_frozen
    @o.freeze.clone.errors.should be_frozen
    @o.freeze.clone.this.should be_frozen
  end
end
# Model#marshallable!: after calling it, an instance can round-trip through
# Marshal.dump/Marshal.load both before and after being saved.
describe "Model#marshallable" do
  before do
    class ::Album < Sequel::Model
      columns :id, :x
    end
  end
  after do
    Object.send(:remove_const, :Album)
  end
  it "should make an object marshallable" do
    i = Album.new(:x=>2)
    s = nil
    i2 = nil
    # Round-trip 1: unsaved instance.
    i.marshallable!
    proc{s = Marshal.dump(i)}.should_not raise_error
    proc{i2 = Marshal.load(s)}.should_not raise_error
    i2.should == i
    # Round-trip 2: after the first save.
    i.save
    i.marshallable!
    proc{s = Marshal.dump(i)}.should_not raise_error
    proc{i2 = Marshal.load(s)}.should_not raise_error
    i2.should == i
    # Round-trip 3: after a subsequent save.
    i.save
    i.marshallable!
    proc{s = Marshal.dump(i)}.should_not raise_error
    proc{i2 = Marshal.load(s)}.should_not raise_error
    i2.should == i
  end
end
# Model#modified?: true for new instances, after real column changes, or
# after modified!; false for clean loaded instances and after saving.
# Also covers typecast-aware change detection and the single-column form.
describe "Model#modified?" do
  before do
    @c = Class.new(Sequel::Model(:items))
    @c.class_eval do
      columns :id, :x
      # Integer db_schema enables typecasting of assigned string values.
      @db_schema = {:x => {:type => :integer}}
    end
    DB.reset
  end
  it "should be true if the object is new" do
    @c.new.modified?.should == true
  end
  it "should be false if the object has not been modified" do
    @c.load(:id=>1).modified?.should == false
  end
  it "should be true if the object has been modified" do
    o = @c.load(:id=>1, :x=>2)
    o.x = 3
    o.modified?.should == true
  end
  it "should be true if the object is marked modified!" do
    o = @c.load(:id=>1, :x=>2)
    o.modified!
    o.modified?.should == true
  end
  it "should be false if the object is marked modified! after saving until modified! again" do
    o = @c.load(:id=>1, :x=>2)
    o.modified!
    o.save
    o.modified?.should == false
    o.modified!
    o.modified?.should == true
  end
  it "should be false if a column value is set that is the same as the current value after typecasting" do
    o = @c.load(:id=>1, :x=>2)
    # '2' typecasts to 2, equal to the current value, so no change recorded.
    o.x = '2'
    o.modified?.should == false
  end
  it "should be true if a column value is set that is the different as the current value after typecasting" do
    # Stored value is the string '2'; assigning '2' typecasts to integer 2,
    # which differs from the stored string, so the column counts as changed.
    o = @c.load(:id=>1, :x=>'2')
    o.x = '2'
    o.modified?.should == true
  end
  it "should be true if given a column argument and the column has been changed" do
    o = @c.new
    o.modified?(:id).should == false
    o.id = 1
    o.modified?(:id).should == true
  end
end
# Model#modified!: forces save_changes to run its full save path (including
# hooks) even without column changes; the one-argument form marks a specific
# column as changed so it is included in the UPDATE.
describe "Model#modified!" do
  before do
    @c = Class.new(Sequel::Model(:items))
    @c.class_eval do
      columns :id, :x
    end
    DB.reset
  end
  it "should mark the object as modified so that save_changes still runs the callbacks" do
    o = @c.load(:id=>1, :x=>2)
    # The after_save hook mutates :x, so we can tell whether save ran.
    def o.after_save
      values[:x] = 3
    end
    o.update({})
    o.x.should == 2
    o.modified!
    o.update({})
    o.x.should == 3
    # No columns changed, so no UPDATE statement was issued either time.
    o.db.sqls.should == []
  end
  it "should mark given column argument as modified" do
    o = @c.load(:id=>1, :x=>2)
    o.modified!(:x)
    o.changed_columns.should == [:x]
    o.save
    o.db.sqls.should == ["UPDATE items SET x = 2 WHERE (id = 1)"]
  end
end
# Model#save_changes: only issues an UPDATE when the instance reports itself
# modified, restricts the UPDATE to changed columns, and picks up columns
# mutated inside before_save/before_update hooks.
describe "Model#save_changes" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      unrestrict_primary_key
      columns :id, :x, :y
    end
    DB.reset
  end
  it "should always save if the object is new" do
    o = @c.new(:x => 1)
    o.save_changes
    DB.sqls.first.should == "INSERT INTO items (x) VALUES (1)"
  end
  it "should take options passed to save" do
    o = @c.new(:x => 1)
    def o.before_validation; false; end
    proc{o.save_changes}.should raise_error(Sequel::Error)
    DB.sqls.should == []
    # :validate=>false bypasses the failing before_validation hook.
    o.save_changes(:validate=>false)
    DB.sqls.first.should == "INSERT INTO items (x) VALUES (1)"
  end
  it "should do nothing if no changed columns" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    o.save_changes
    DB.sqls.should == []
  end
  it "should do nothing if modified? is false" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    def o.modified?; false; end
    o.save_changes
    DB.sqls.should == []
  end
  it "should update only changed columns" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    o.x = 2
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 2 WHERE (id = 3)"]
    # Repeated calls with no further changes are no-ops.
    o.save_changes
    o.save_changes
    DB.sqls.should == []
    o.y = 4
    o.save_changes
    DB.sqls.should == ["UPDATE items SET y = 4 WHERE (id = 3)"]
    o.save_changes
    o.save_changes
    DB.sqls.should == []
  end
  it "should not consider columns changed if the values did not change" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    # Assigning the current value must not mark the column as changed.
    o.x = 1
    o.save_changes
    DB.sqls.should == []
    o.x = 3
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 3 WHERE (id = 3)"]
    o[:y] = nil
    o.save_changes
    DB.sqls.should == []
    o[:y] = 4
    o.save_changes
    DB.sqls.should == ["UPDATE items SET y = 4 WHERE (id = 3)"]
  end
  it "should clear changed_columns" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    o.x = 4
    o.changed_columns.should == [:x]
    o.save_changes
    o.changed_columns.should == []
  end
  it "should update columns changed in a before_update hook" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    @c.send(:define_method, :before_update){self.x += 1}
    o.save_changes
    DB.sqls.should == []
    o.x = 2
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 3 WHERE (id = 3)"]
    o.save_changes
    DB.sqls.should == []
    o.x = 4
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 5 WHERE (id = 3)"]
  end
  it "should update columns changed in a before_save hook" do
    o = @c.load(:id => 3, :x => 1, :y => nil)
    # Fixed: this example previously defined :before_update (a copy/paste of
    # the example above), so the before_save hook path was never exercised.
    @c.send(:define_method, :before_save){self.x += 1}
    o.save_changes
    DB.sqls.should == []
    o.x = 2
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 3 WHERE (id = 3)"]
    o.save_changes
    DB.sqls.should == []
    o.x = 4
    o.save_changes
    DB.sqls.should == ["UPDATE items SET x = 5 WHERE (id = 3)"]
  end
end
# Model#new?: true for freshly constructed instances, false once saved.
describe "Model#new?" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      unrestrict_primary_key
      columns :x
    end
    DB.reset
  end
  it "should be true for a new instance" do
    n = @c.new(:x => 1)
    n.should be_new
  end
  it "should be false after saving" do
    n = @c.new(:x => 1)
    n.save
    n.should_not be_new
  end
end
# Primary key configuration: defaults to :id, can be changed via
# set_primary_key, and accepts an array for composite keys.
describe Sequel::Model, "with a primary key" do
  it "should default to :id" do
    model_a = Class.new Sequel::Model
    model_a.primary_key.should == :id
  end
  it "should be changed through 'set_primary_key'" do
    model_a = Class.new(Sequel::Model){ set_primary_key :a }
    model_a.primary_key.should == :a
  end
  it "should accept single argument composite keys" do
    model_a = Class.new(Sequel::Model){ set_primary_key [:a, :b] }
    model_a.primary_key.should == [:a, :b]
  end
end
# no_primary_key: primary_key becomes nil and #this (which needs a key to
# identify the row) raises.
describe Sequel::Model, "without a primary key" do
  it "should return nil for primary key" do
    Class.new(Sequel::Model){no_primary_key}.primary_key.should be_nil
  end
  it "should raise a Sequel::Error on 'this'" do
    instance = Class.new(Sequel::Model){no_primary_key}.new
    proc{instance.this}.should raise_error(Sequel::Error)
  end
end
# Model#this: returns a LIMIT 1 dataset identifying the instance's row,
# honoring custom scalar keys, composite keys, and qualification when the
# model dataset is joined.
describe Sequel::Model, "#this" do
  before do
    @example = Class.new(Sequel::Model(:examples))
    @example.columns :id, :a, :x, :y
  end
  it "should return a dataset identifying the record" do
    instance = @example.load(:id => 3)
    instance.this.sql.should == "SELECT * FROM examples WHERE (id = 3) LIMIT 1"
  end
  # Fixed description typo: "arbitary" -> "arbitrary".
  it "should support arbitrary primary keys" do
    @example.set_primary_key :a
    instance = @example.load(:a => 3)
    instance.this.sql.should == "SELECT * FROM examples WHERE (a = 3) LIMIT 1"
  end
  it "should use a qualified primary key if the dataset is joined" do
    @example.dataset = @example.dataset.cross_join(:a)
    instance = @example.load(:id => 3)
    instance.this.sql.should == "SELECT * FROM examples CROSS JOIN a WHERE (examples.id = 3) LIMIT 1"
  end
  it "should support composite primary keys" do
    @example.set_primary_key [:x, :y]
    instance = @example.load(:x => 4, :y => 5)
    # Regexp allows either ordering of the composite-key conditions.
    instance.this.sql.should =~ /SELECT \* FROM examples WHERE \(\([xy] = [45]\) AND \([xy] = [45]\)\) LIMIT 1/
  end
end
# Model#pk: returns the primary key value — :id by default, the custom column
# value, or an array for composite keys; raises without a primary key.
describe "Model#pk" do
  before do
    @m = Class.new(Sequel::Model)
    @m.columns :id, :x, :y
  end
  it "should by default return the value of the :id column" do
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk.should == 111
  end
  it "should return the primary key value for custom primary key" do
    @m.set_primary_key :x
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk.should == 2
  end
  it "should return the primary key value for composite primary key" do
    # Values come back in key-declaration order (:y before :x).
    @m.set_primary_key [:y, :x]
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk.should == [3, 2]
  end
  it "should raise if no primary key" do
    @m.set_primary_key nil
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc {m.pk}.should raise_error(Sequel::Error)
    @m.no_primary_key
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc {m.pk}.should raise_error(Sequel::Error)
  end
end
# Model#pk_hash: like #pk but returns a column=>value hash suitable for use
# as a filter; raises without a primary key.
describe "Model#pk_hash" do
  before do
    @m = Class.new(Sequel::Model)
    @m.columns :id, :x, :y
  end
  it "should by default return a hash with the value of the :id column" do
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk_hash.should == {:id => 111}
  end
  it "should return a hash with the primary key value for custom primary key" do
    @m.set_primary_key :x
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk_hash.should == {:x => 2}
  end
  it "should return a hash with the primary key values for composite primary key" do
    @m.set_primary_key [:y, :x]
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.pk_hash.should == {:y => 3, :x => 2}
  end
  it "should raise if no primary key" do
    @m.set_primary_key nil
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc{m.pk_hash}.should raise_error(Sequel::Error)
    @m.no_primary_key
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc{m.pk_hash}.should raise_error(Sequel::Error)
  end
end
# Model#qualified_pk_hash: like #pk_hash but with columns qualified by the
# model's table (or an explicit qualifier argument); raises without a key.
describe "Model#qualified_pk_hash" do
  before do
    @m = Class.new(Sequel::Model(:items))
    @m.columns :id, :x, :y
  end
  it "should by default return a hash with the value of the :id column" do
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.qualified_pk_hash.should == {Sequel.qualify(:items, :id) => 111}
  end
  it "should accept a custom qualifier" do
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.qualified_pk_hash(:foo).should == {Sequel.qualify(:foo, :id) => 111}
  end
  it "should return a hash with the primary key value for custom primary key" do
    @m.set_primary_key :x
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.qualified_pk_hash.should == {Sequel.qualify(:items, :x) => 2}
  end
  it "should return a hash with the primary key values for composite primary key" do
    @m.set_primary_key [:y, :x]
    m = @m.load(:id => 111, :x => 2, :y => 3)
    m.qualified_pk_hash.should == {Sequel.qualify(:items, :y) => 3, Sequel.qualify(:items, :x) => 2}
  end
  it "should raise if no primary key" do
    @m.set_primary_key nil
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc{m.qualified_pk_hash}.should raise_error(Sequel::Error)
    @m.no_primary_key
    m = @m.new(:id => 111, :x => 2, :y => 3)
    proc{m.qualified_pk_hash}.should raise_error(Sequel::Error)
  end
end
# Model#set: mass-assigns attributes without saving. Covers column filtering,
# string/symbol keys, virtual attributes, primary-key protection,
# strict_param_setting errors, and late-added setter methods (instance
# methods, singleton methods, included/extended modules).
describe Sequel::Model, "#set" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :id
    end
    # Non-strict mode silently drops unknown/restricted keys instead of raising.
    @c.strict_param_setting = false
    @o1 = @c.new
    @o2 = @c.load(:id => 5)
    DB.reset
  end
  it "should filter the given params using the model columns" do
    @o1.set(:x => 1, :z => 2)
    @o1.values.should == {:x => 1}
    # set never touches the database.
    DB.sqls.should == []
    @o2.set(:y => 1, :abc => 2)
    @o2.values.should == {:y => 1, :id=> 5}
    DB.sqls.should == []
  end
  it "should work with both strings and symbols" do
    @o1.set('x'=> 1, 'z'=> 2)
    @o1.values.should == {:x => 1}
    DB.sqls.should == []
    @o2.set('y'=> 1, 'abc'=> 2)
    @o2.values.should == {:y => 1, :id=> 5}
    DB.sqls.should == []
  end
  it "should support virtual attributes" do
    # A plain setter method counts as a settable attribute even without a column.
    @c.send(:define_method, :blah=){|v| self.x = v}
    @o1.set(:blah => 333)
    @o1.values.should == {:x => 333}
    DB.sqls.should == []
    @o1.set('blah'=> 334)
    @o1.values.should == {:x => 334}
    DB.sqls.should == []
  end
  it "should not modify the primary key" do
    @o1.set(:x => 1, :id => 2)
    @o1.values.should == {:x => 1}
    DB.sqls.should == []
    @o2.set('y'=> 1, 'id'=> 2)
    @o2.values.should == {:y => 1, :id=> 5}
    DB.sqls.should == []
  end
  it "should return self" do
    returned_value = @o1.set(:x => 1, :z => 2)
    returned_value.should == @o1
    DB.sqls.should == []
  end
  it "should raise error if strict_param_setting is true and method does not exist" do
    @o1.strict_param_setting = true
    proc{@o1.set('foo' => 1)}.should raise_error(Sequel::Error)
  end
  it "should raise error if strict_param_setting is true and column is a primary key" do
    @o1.strict_param_setting = true
    proc{@o1.set('id' => 1)}.should raise_error(Sequel::Error)
  end
  it "should raise error if strict_param_setting is true and column is restricted" do
    @o1.strict_param_setting = true
    # Empty allowed_columns list makes every column restricted.
    @c.set_allowed_columns
    proc{@o1.set('x' => 1)}.should raise_error(Sequel::Error)
  end
  it "should not create a symbol if strict_param_setting is true and string is given" do
    # Guards against symbol-table growth from untrusted string keys
    # (symbols were not GC'd on older Rubies).
    @o1.strict_param_setting = true
    l = Symbol.all_symbols.length
    proc{@o1.set('sadojafdso' => 1)}.should raise_error(Sequel::Error)
    Symbol.all_symbols.length.should == l
  end
  it "#set should correctly handle cases where an instance method is added to the class" do
    @o1.set(:x => 1)
    @o1.values.should == {:x => 1}
    @c.class_eval do
      def z=(v)
        self[:z] = v
      end
    end
    @o1.set(:x => 2, :z => 3)
    @o1.values.should == {:x => 2, :z=>3}
  end
  it "#set should correctly handle cases where a singleton method is added to the object" do
    @o1.set(:x => 1)
    @o1.values.should == {:x => 1}
    def @o1.z=(v)
      self[:z] = v
    end
    @o1.set(:x => 2, :z => 3)
    @o1.values.should == {:x => 2, :z=>3}
  end
  it "#set should correctly handle cases where a module with a setter method is included in the class" do
    @o1.set(:x => 1)
    @o1.values.should == {:x => 1}
    @c.send(:include, Module.new do
      def z=(v)
        self[:z] = v
      end
    end)
    @o1.set(:x => 2, :z => 3)
    @o1.values.should == {:x => 2, :z=>3}
  end
  it "#set should correctly handle cases where the object extends a module with a setter method " do
    @o1.set(:x => 1)
    @o1.values.should == {:x => 1}
    @o1.extend(Module.new do
      def z=(v)
        self[:z] = v
      end
    end)
    @o1.set(:x => 2, :z => 3)
    @o1.values.should == {:x => 2, :z=>3}
  end
end
# Model#update: behaves like #set but also saves — new instances INSERT
# (then refresh), loaded instances UPDATE only the changed columns.
describe Sequel::Model, "#update" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :id
    end
    @c.strict_param_setting = false
    @o1 = @c.new
    @o2 = @c.load(:id => 5)
    DB.reset
  end
  it "should filter the given params using the model columns" do
    @o1.update(:x => 1, :z => 2)
    # :z is not a column, so only :x is inserted; id = 10 is the mock autoid.
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
    DB.reset
    @o2.update(:y => 1, :abc => 2)
    DB.sqls.should == ["UPDATE items SET y = 1 WHERE (id = 5)"]
  end
  it "should support virtual attributes" do
    @c.send(:define_method, :blah=){|v| self.x = v}
    @o1.update(:blah => 333)
    DB.sqls.should == ["INSERT INTO items (x) VALUES (333)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
  it "should not modify the primary key" do
    @o1.update(:x => 1, :id => 2)
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
    DB.reset
    @o2.update('y'=> 1, 'id'=> 2)
    @o2.values.should == {:y => 1, :id=> 5}
    DB.sqls.should == ["UPDATE items SET y = 1 WHERE (id = 5)"]
  end
end
# Model#set_fields: assigns only the listed fields from the hash, with
# :missing=>:skip / :missing=>:raise handling, per-model default options
# (default_set_fields_options, inherited by subclasses), and delegation
# through set_column_value.
describe Sequel::Model, "#set_fields" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :z, :id
    end
    @o1 = @c.new
    DB.reset
  end
  it "should set only the given fields" do
    @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y])
    @o1.values.should == {:x => 1, :y => 2}
    @o1.set_fields({:x => 9, :y => 8, :z=>6, :id=>7}, [:x, :y, :id])
    @o1.values.should == {:x => 9, :y => 8, :id=>7}
    # set_fields never touches the database.
    DB.sqls.should == []
  end
  it "should lookup into the hash without checking if the entry exists" do
    # Missing keys produce nil by default; a Hash default value is honored.
    @o1.set_fields({:x => 1}, [:x, :y])
    @o1.values.should == {:x => 1, :y => nil}
    @o1.set_fields(Hash.new(2), [:x, :y])
    @o1.values.should == {:x => 2, :y => 2}
  end
  it "should skip missing fields if :missing=>:skip option is used" do
    # :skip uses has_key?-style membership, so string keys and Hash defaults
    # do not count as present.
    @o1.set_fields({:x => 3}, [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 3}
    @o1.set_fields({"x" => 4}, [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 4}
    @o1.set_fields(Hash.new(2).merge(:x=>2), [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 2}
    @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 1, :y => 2}
  end
  it "should raise for missing fields if :missing=>:raise option is used" do
    proc{@o1.set_fields({:x => 1}, [:x, :y], :missing=>:raise)}.should raise_error(Sequel::Error)
    proc{@o1.set_fields(Hash.new(2).merge(:x=>2), [:x, :y], :missing=>:raise)}.should raise_error(Sequel::Error)
    proc{@o1.set_fields({"x" => 1}, [:x, :y], :missing=>:raise)}.should raise_error(Sequel::Error)
    @o1.set_fields({:x => 5, "y"=>2}, [:x, :y], :missing=>:raise)
    @o1.values.should == {:x => 5, :y => 2}
    @o1.set_fields({:x => 1, :y => 3, :z=>3, :id=>4}, [:x, :y], :missing=>:raise)
    @o1.values.should == {:x => 1, :y => 3}
  end
  it "should use default behavior for an unrecognized :missing option" do
    @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y], :missing=>:foo)
    @o1.values.should == {:x => 1, :y => 2}
    @o1.set_fields({:x => 9, :y => 8, :z=>6, :id=>7}, [:x, :y, :id], :missing=>:foo)
    @o1.values.should == {:x => 9, :y => 8, :id=>7}
    DB.sqls.should == []
  end
  it "should respect model's default_set_fields_options" do
    @c.default_set_fields_options = {:missing=>:skip}
    @o1.set_fields({:x => 3}, [:x, :y])
    @o1.values.should == {:x => 3}
    @o1.set_fields({:x => 4}, [:x, :y], {})
    @o1.values.should == {:x => 4}
    # Per-call options override the model default in either direction.
    proc{@o1.set_fields({:x => 3}, [:x, :y], :missing=>:raise)}.should raise_error(Sequel::Error)
    @c.default_set_fields_options = {:missing=>:raise}
    proc{@o1.set_fields({:x => 3}, [:x, :y])}.should raise_error(Sequel::Error)
    proc{@o1.set_fields({:x => 3}, [:x, :y], {})}.should raise_error(Sequel::Error)
    @o1.set_fields({:x => 5}, [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 5}
    @o1.set_fields({:x => 5}, [:x, :y], :missing=>nil)
    @o1.values.should == {:x => 5, :y=>nil}
    DB.sqls.should == []
  end
  it "should respect model's default_set_fields_options in a subclass" do
    @c.default_set_fields_options = {:missing=>:skip}
    o = Class.new(@c).new
    o.set_fields({:x => 3}, [:x, :y])
    o.values.should == {:x => 3}
  end
  it "should respect set_column_value" do
    @c.class_eval do
      # Custom dispatch: intercept the :model= setter, delegate the rest.
      def set_column_value(c, v)
        if c.to_s == 'model='
          self[:model] = v
        else
          send(c, v)
        end
      end
    end
    @o1.set_fields({:model=>2, :x=>3}, [:model, :x])
    @o1[:model].should == 2
    @o1.x.should == 3
  end
end
# Model#update_fields: set_fields followed by save_changes — only changed
# columns reach the UPDATE; supports the same :missing options and
# default_set_fields_options as set_fields.
describe Sequel::Model, "#update_fields" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :z, :id
    end
    @c.strict_param_setting = true
    @o1 = @c.load(:id=>1)
    DB.reset
  end
  it "should set only the given fields, and then save the changes to the record" do
    @o1.update_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y])
    @o1.values.should == {:x => 1, :y => 2, :id=>1}
    sqls = DB.sqls
    # Column order within the UPDATE is not guaranteed, hence the regexp.
    sqls.pop.should =~ /UPDATE items SET [xy] = [12], [xy] = [12] WHERE \(id = 1\)/
    sqls.should == []
    # Second call: only :y actually changed, so only :y is updated.
    @o1.update_fields({:x => 1, :y => 5, :z=>6, :id=>7}, [:x, :y])
    @o1.values.should == {:x => 1, :y => 5, :id=>1}
    DB.sqls.should == ["UPDATE items SET y = 5 WHERE (id = 1)"]
  end
  it "should support :missing=>:skip option" do
    @o1.update_fields({:x => 1, :z=>3, :id=>4}, [:x, :y], :missing=>:skip)
    @o1.values.should == {:x => 1, :id=>1}
    DB.sqls.should == ["UPDATE items SET x = 1 WHERE (id = 1)"]
  end
  it "should support :missing=>:raise option" do
    proc{@o1.update_fields({:x => 1}, [:x, :y], :missing=>:raise)}.should raise_error(Sequel::Error)
  end
  it "should respect model's default_set_fields_options" do
    @c.default_set_fields_options = {:missing=>:skip}
    @o1.update_fields({:x => 3}, [:x, :y])
    @o1.values.should == {:x => 3, :id=>1}
    DB.sqls.should == ["UPDATE items SET x = 3 WHERE (id = 1)"]
    @c.default_set_fields_options = {:missing=>:raise}
    proc{@o1.update_fields({:x => 3}, [:x, :y])}.should raise_error(Sequel::Error)
    DB.sqls.should == []
  end
end
# Model #set_all/#set_only/#update_all/#update_only: the _all variants ignore
# set_allowed_columns (but still skip restricted internal settings), the _only
# variants restrict assignment to the listed columns; update_* also saves.
describe Sequel::Model, "#(set|update)_(all|only)" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :z, :id
      set_allowed_columns :x
    end
    @c.strict_param_setting = false
    @o1 = @c.new
    DB.reset
  end
  it "should raise errors if not all hash fields can be set and strict_param_setting is true" do
    @c.strict_param_setting = true
    proc{@c.new.set_all(:x => 1, :y => 2, :z=>3, :use_after_commit_rollback => false)}.should raise_error(Sequel::Error)
    (o = @c.new).set_all(:x => 1, :y => 2, :z=>3)
    o.values.should == {:x => 1, :y => 2, :z=>3}
    proc{@c.new.set_only({:x => 1, :y => 2, :z=>3, :id=>4}, :x, :y)}.should raise_error(Sequel::Error)
    proc{@c.new.set_only({:x => 1, :y => 2, :z=>3}, :x, :y)}.should raise_error(Sequel::Error)
    (o = @c.new).set_only({:x => 1, :y => 2}, :x, :y)
    o.values.should == {:x => 1, :y => 2}
  end
  it "#set_all should set all attributes including the primary key" do
    @o1.set_all(:x => 1, :y => 2, :z=>3, :id=>4)
    @o1.values.should == {:id =>4, :x => 1, :y => 2, :z=>3}
  end
  # Fixed garbled description: was "should set not set restricted fields".
  it "#set_all should not set restricted fields" do
    @o1.set_all(:x => 1, :use_after_commit_rollback => false)
    @o1.use_after_commit_rollback.should == true
    @o1.values.should == {:x => 1}
  end
  it "#set_only should only set given attributes" do
    # Accepts the column list as an array or as splatted arguments.
    @o1.set_only({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y])
    @o1.values.should == {:x => 1, :y => 2}
    @o1.set_only({:x => 4, :y => 5, :z=>6, :id=>7}, :x, :y)
    @o1.values.should == {:x => 4, :y => 5}
    @o1.set_only({:x => 9, :y => 8, :z=>6, :id=>7}, :x, :y, :id)
    @o1.values.should == {:x => 9, :y => 8, :id=>7}
  end
  it "#update_all should update all attributes" do
    @c.new.update_all(:x => 1)
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
    @c.new.update_all(:y => 1)
    DB.sqls.should == ["INSERT INTO items (y) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
    @c.new.update_all(:z => 1)
    DB.sqls.should == ["INSERT INTO items (z) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
  it "#update_only should only update given attributes" do
    @o1.update_only({:x => 1, :y => 2, :z=>3, :id=>4}, [:x])
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
    @c.new.update_only({:x => 1, :y => 2, :z=>3, :id=>4}, :x)
    DB.sqls.should == ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
end
# #destroy on a model whose dataset carries a filter: the filter must be
# ANDed into the DELETE's WHERE clause.
describe Sequel::Model, "#destroy with filtered dataset" do
  before do
    @model = Class.new(Sequel::Model(DB[:items].where(:a=>1)))
    @model.columns :id, :a
    @instance = @model.load(:id => 1234)
    DB.reset
  end
  it "should raise a NoExistingObject exception if the dataset delete call doesn't return 1" do
    # execute_dui is stubbed per-instance to fake the DELETE's affected-row count.
    def (@instance.this).execute_dui(*a) 0 end
    proc{@instance.delete}.should raise_error(Sequel::NoExistingObject)
    def (@instance.this).execute_dui(*a) 2 end
    proc{@instance.delete}.should raise_error(Sequel::NoExistingObject)
    def (@instance.this).execute_dui(*a) 1 end
    proc{@instance.delete}.should_not raise_error
    # With require_modification off, any affected-row count is accepted.
    @instance.require_modification = false
    def (@instance.this).execute_dui(*a) 0 end
    proc{@instance.delete}.should_not raise_error
    def (@instance.this).execute_dui(*a) 2 end
    proc{@instance.delete}.should_not raise_error
  end
  it "should include WHERE clause when deleting" do
    @instance.destroy
    DB.sqls.should == ["DELETE FROM items WHERE ((a = 1) AND (id = 1234))"]
  end
end
# #destroy on an unfiltered model: return value, modification checking,
# transaction handling, and before/after hooks.
describe Sequel::Model, "#destroy" do
  before do
    @model = Class.new(Sequel::Model(:items))
    @model.columns :id
    @instance = @model.load(:id => 1234)
    DB.reset
  end
  it "should return self" do
    @model.send(:define_method, :after_destroy){3}
    @instance.destroy.should == @instance
  end
  it "should raise a NoExistingObject exception if the dataset delete call doesn't return 1" do
    # Same affected-row-count stubbing as above, but on the class's dataset.
    def (@model.dataset).execute_dui(*a) 0 end
    proc{@instance.delete}.should raise_error(Sequel::NoExistingObject)
    def (@model.dataset).execute_dui(*a) 2 end
    proc{@instance.delete}.should raise_error(Sequel::NoExistingObject)
    def (@model.dataset).execute_dui(*a) 1 end
    proc{@instance.delete}.should_not raise_error
    @instance.require_modification = false
    def (@model.dataset).execute_dui(*a) 0 end
    proc{@instance.delete}.should_not raise_error
    def (@model.dataset).execute_dui(*a) 2 end
    proc{@instance.delete}.should_not raise_error
  end
  it "should run within a transaction if use_transactions is true" do
    @instance.use_transactions = true
    @instance.destroy
    DB.sqls.should == ["BEGIN", "DELETE FROM items WHERE id = 1234", "COMMIT"]
  end
  it "should not run within a transaction if use_transactions is false" do
    @instance.use_transactions = false
    @instance.destroy
    DB.sqls.should == ["DELETE FROM items WHERE id = 1234"]
  end
  it "should run within a transaction if :transaction option is true" do
    # The per-call :transaction option overrides the instance setting.
    @instance.use_transactions = false
    @instance.destroy(:transaction => true)
    DB.sqls.should == ["BEGIN", "DELETE FROM items WHERE id = 1234", "COMMIT"]
  end
  it "should not run within a transaction if :transaction option is false" do
    @instance.use_transactions = true
    @instance.destroy(:transaction => false)
    DB.sqls.should == ["DELETE FROM items WHERE id = 1234"]
  end
  it "should run before_destroy and after_destroy hooks" do
    @model.send(:define_method, :before_destroy){DB.execute('before blah')}
    @model.send(:define_method, :after_destroy){DB.execute('after blah')}
    @instance.destroy
    DB.sqls.should == ["before blah", "DELETE FROM items WHERE id = 1234", "after blah"]
  end
end
# #exists? issues a SELECT 1 probe for saved records and short-circuits
# to false for new (unsaved) instances.
describe Sequel::Model, "#exists?" do
  before do
    @model = Class.new(Sequel::Model(:items))
    # The stubbed fetch only returns a row when the query filters on id = 1.
    @model.instance_dataset._fetch = @model.dataset._fetch = proc{|sql| {:x=>1} if sql =~ /id = 1/}
    DB.reset
  end
  it "should do a query to check if the record exists" do
    @model.load(:id=>1).exists?.should == true
    DB.sqls.should == ['SELECT 1 AS one FROM items WHERE (id = 1) LIMIT 1']
  end
  it "should return false when #this.count == 0" do
    @model.load(:id=>2).exists?.should == false
    DB.sqls.should == ['SELECT 1 AS one FROM items WHERE (id = 2) LIMIT 1']
  end
  it "should return false without issuing a query if the model object is new" do
    @model.new.exists?.should == false
    DB.sqls.should == []
  end
end
# #each yields every column/value pair of the instance's values hash.
describe Sequel::Model, "#each" do
  before do
    @model = Class.new(Sequel::Model(:items))
    @model.columns :a, :b, :id
    @m = @model.load(:a => 1, :b => 2, :id => 4444)
  end
  specify "should iterate over the values" do
    h = {}
    @m.each{|k, v| h[k] = v}
    h.should == {:a => 1, :b => 2, :id => 4444}
  end
end
# #keys returns only the keys currently present in the values hash,
# so a brand-new instance has none.
describe Sequel::Model, "#keys" do
  before do
    @model = Class.new(Sequel::Model(:items))
    @model.columns :a, :b, :id
    @m = @model.load(:a => 1, :b => 2, :id => 4444)
  end
  specify "should return the value keys" do
    @m.keys.sort_by{|k| k.to_s}.should == [:a, :b, :id]
    @model.new.keys.should == []
  end
end
# #== compares the full values hash, so two loads of the same pk with
# different column values are not equal.
describe Sequel::Model, "#==" do
  specify "should compare instances by values" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    a = z.load(:id => 1, :x => 3)
    b = z.load(:id => 1, :x => 4)
    c = z.load(:id => 1, :x => 3)
    a.should_not == b
    a.should == c
    b.should_not == c
  end
  specify "should be aliased to #eql?" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    a = z.load(:id => 1, :x => 3)
    b = z.load(:id => 1, :x => 4)
    c = z.load(:id => 1, :x => 3)
    a.eql?(b).should == false
    a.eql?(c).should == true
    b.eql?(c).should == false
  end
end
# #=== is identity-by-primary-key: same class and same non-nil pk.
describe Sequel::Model, "#===" do
  specify "should compare instances by class and pk if pk is not nil" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    y = Class.new(Sequel::Model)
    y.columns :id, :x
    a = z.load(:id => 1, :x => 3)
    b = z.load(:id => 1, :x => 4)
    c = z.load(:id => 2, :x => 3)
    d = y.load(:id => 1, :x => 3)
    a.should === b
    a.should_not === c
    a.should_not === d
  end
  specify "should always be false if the primary key is nil" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    y = Class.new(Sequel::Model)
    y.columns :id, :x
    a = z.new(:x => 3)
    b = z.new(:x => 4)
    c = z.new(:x => 3)
    d = y.new(:x => 3)
    a.should_not === b
    a.should_not === c
    a.should_not === d
  end
end
# #hash mirrors the #===/#== rules: keyed on class+pk when the pk is fully
# known, otherwise on class+values (nil pk, partial composite pk, or no pk).
describe Sequel::Model, "#hash" do
  specify "should be the same only for objects with the same class and pk if the pk is not nil" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    y = Class.new(Sequel::Model)
    y.columns :id, :x
    a = z.load(:id => 1, :x => 3)
    a.hash.should == z.load(:id => 1, :x => 4).hash
    a.hash.should_not == z.load(:id => 2, :x => 3).hash
    a.hash.should_not == y.load(:id => 1, :x => 3).hash
  end
  specify "should be the same only for objects with the same class and values if the pk is nil" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    y = Class.new(Sequel::Model)
    y.columns :id, :x
    a = z.new(:x => 3)
    a.hash.should_not == z.new(:x => 4).hash
    a.hash.should == z.new(:x => 3).hash
    a.hash.should_not == y.new(:x => 3).hash
  end
  specify "should be the same only for objects with the same class and pk if pk is composite and all values are non-NULL" do
    z = Class.new(Sequel::Model)
    z.columns :id, :id2, :x
    z.set_primary_key([:id, :id2])
    y = Class.new(Sequel::Model)
    y.columns :id, :id2, :x
    y.set_primary_key([:id, :id2])
    a = z.load(:id => 1, :id2=>2, :x => 3)
    a.hash.should == z.load(:id => 1, :id2=>2, :x => 4).hash
    a.hash.should_not == z.load(:id => 2, :id2=>1, :x => 3).hash
    a.hash.should_not == y.load(:id => 1, :id2=>1, :x => 3).hash
  end
  specify "should be the same only for objects with the same class and value if pk is composite and one values is NULL" do
    # Any NULL component in a composite pk falls back to value-based hashing.
    z = Class.new(Sequel::Model)
    z.columns :id, :id2, :x
    z.set_primary_key([:id, :id2])
    y = Class.new(Sequel::Model)
    y.columns :id, :id2, :x
    y.set_primary_key([:id, :id2])
    a = z.load(:id => 1, :id2 => nil, :x => 3)
    a.hash.should == z.load(:id => 1, :id2=>nil, :x => 3).hash
    a.hash.should_not == z.load(:id => 1, :id2=>nil, :x => 4).hash
    a.hash.should_not == y.load(:id => 1, :id2=>nil, :x => 3).hash
    a = z.load(:id =>nil, :id2 => nil, :x => 3)
    a.hash.should == z.load(:id => nil, :id2=>nil, :x => 3).hash
    a.hash.should_not == z.load(:id => nil, :id2=>nil, :x => 4).hash
    a.hash.should_not == y.load(:id => nil, :id2=>nil, :x => 3).hash
    a = z.load(:id => 1, :x => 3)
    a.hash.should == z.load(:id => 1, :x => 3).hash
    a.hash.should_not == z.load(:id => 1, :id2=>nil, :x => 3).hash
    a.hash.should_not == z.load(:id => 1, :x => 4).hash
    a.hash.should_not == y.load(:id => 1, :x => 3).hash
    a = z.load(:x => 3)
    a.hash.should == z.load(:x => 3).hash
    a.hash.should_not == z.load(:id => nil, :id2=>nil, :x => 3).hash
    a.hash.should_not == z.load(:x => 4).hash
    a.hash.should_not == y.load(:x => 3).hash
  end
  specify "should be the same only for objects with the same class and values if the no primary key" do
    z = Class.new(Sequel::Model)
    z.columns :id, :x
    z.no_primary_key
    y = Class.new(Sequel::Model)
    y.columns :id, :x
    y.no_primary_key
    a = z.new(:x => 3)
    a.hash.should_not == z.new(:x => 4).hash
    a.hash.should == z.new(:x => 3).hash
    a.hash.should_not == y.new(:x => 3).hash
  end
end
# #initialize: mass assignment via new, pk protection, block form, virtual
# setters, and string-key conversion.
describe Sequel::Model, "#initialize" do
  before do
    @c = Class.new(Sequel::Model) do
      columns :id, :x
    end
    @c.strict_param_setting = false
  end
  specify "should accept values" do
    m = @c.new(:x => 2)
    m.values.should == {:x => 2}
  end
  specify "should not modify the primary key" do
    # :id is silently dropped because pk assignment is restricted by default.
    m = @c.new(:id => 1, :x => 2)
    m.values.should == {:x => 2}
  end
  specify "should accept no values" do
    m = @c.new
    m.values.should == {}
  end
  specify "should accept a block to execute" do
    # The block receives the new instance and may set values directly.
    m = @c.new {|o| o[:id] = 1234}
    m.id.should == 1234
  end
  specify "should accept virtual attributes" do
    @c.send(:define_method, :blah=){|x| @blah = x}
    @c.send(:define_method, :blah){@blah}
    m = @c.new(:x => 2, :blah => 3)
    m.values.should == {:x => 2}
    m.blah.should == 3
  end
  specify "should convert string keys into symbol keys" do
    m = @c.new('x' => 2)
    m.values.should == {:x => 2}
  end
end
# #initialize_set is the overridable hook that #initialize uses to apply
# the attribute hash; changes made there don't count as modifications.
describe Sequel::Model, "#initialize_set" do
  before do
    @c = Class.new(Sequel::Model){columns :id, :x, :y}
  end
  specify "should be called by initialize to set the column values" do
    @c.send(:define_method, :initialize_set){|h| set(:y => 3)}
    @c.new(:x => 2).values.should == {:y => 3}
  end
  specify "should be called with the hash given to initialize " do
    x = nil
    @c.send(:define_method, :initialize_set){|y| x = y}
    @c.new(:x => 2)
    x.should == {:x => 2}
  end
  specify "should not cause columns modified by the method to be considered as changed" do
    @c.send(:define_method, :initialize_set){|h| set(:y => 3)}
    @c.new(:x => 2).changed_columns.should == []
  end
end
# .create: INSERT followed by a refreshing SELECT (the mock DB assigns
# id=10 to inserted rows).
describe Sequel::Model, ".create" do
  before do
    DB.reset
    @c = Class.new(Sequel::Model(:items)) do
      unrestrict_primary_key
      columns :x
    end
  end
  it "should be able to create rows in the associated table" do
    o = @c.create(:x => 1)
    o.class.should == @c
    DB.sqls.should == ['INSERT INTO items (x) VALUES (1)', "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
  it "should be able to create rows without any values specified" do
    o = @c.create
    o.class.should == @c
    DB.sqls.should == ["INSERT INTO items DEFAULT VALUES", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
  it "should accept a block and call it" do
    # The block runs before the INSERT, so values set inside it are persisted.
    o1, o2, o3 = nil, nil, nil
    o = @c.create {|o4| o1 = o4; o3 = o4; o2 = :blah; o3.x = 333}
    o.class.should == @c
    o1.should === o
    o3.should === o
    o2.should == :blah
    DB.sqls.should == ["INSERT INTO items (x) VALUES (333)", "SELECT * FROM items WHERE (id = 10) LIMIT 1"]
  end
  it "should create a row for a model with custom primary key" do
    @c.set_primary_key :x
    o = @c.create(:x => 30)
    o.class.should == @c
    DB.sqls.should == ["INSERT INTO items (x) VALUES (30)", "SELECT * FROM items WHERE (x = 30) LIMIT 1"]
  end
end
# #refresh reloads the values hash from the database; #reload is an alias.
describe Sequel::Model, "#refresh" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      unrestrict_primary_key
      columns :id, :x
    end
    DB.reset
  end
  specify "should reload the instance values from the database" do
    @m = @c.new(:id => 555)
    @m[:x] = 'blah'
    @c.instance_dataset._fetch = @c.dataset._fetch = {:x => 'kaboom', :id => 555}
    @m.refresh
    @m[:x].should == 'kaboom'
    DB.sqls.should == ["SELECT * FROM items WHERE (id = 555) LIMIT 1"]
  end
  specify "should raise if the instance is not found" do
    @m = @c.new(:id => 555)
    # Empty fetch result simulates the row no longer existing.
    @c.instance_dataset._fetch =@c.dataset._fetch = []
    proc {@m.refresh}.should raise_error(Sequel::Error)
    DB.sqls.should == ["SELECT * FROM items WHERE (id = 555) LIMIT 1"]
  end
  specify "should be aliased by #reload" do
    @m = @c.new(:id => 555)
    @c.instance_dataset._fetch =@c.dataset._fetch = {:x => 'kaboom', :id => 555}
    @m.reload
    @m[:x].should == 'kaboom'
    DB.sqls.should == ["SELECT * FROM items WHERE (id = 555) LIMIT 1"]
  end
end
# Typecasting on attribute assignment, driven by the column's db_schema
# :type. Each group swaps in a schema for :x and checks the coercions,
# the raise_on_typecast_failure fallbacks, and hash-based date/time input.
describe Sequel::Model, "typecasting" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      columns :x
    end
    @c.db_schema = {:x=>{:type=>:integer}}
    @c.raise_on_typecast_failure = true
    DB.reset
  end
  after do
    # Several examples switch the global datetime class; restore it.
    Sequel.datetime_class = Time
  end
  specify "should not convert if typecasting is turned off" do
    @c.typecast_on_assignment = false
    m = @c.new
    m.x = '1'
    m.x.should == '1'
  end
  specify "should convert to integer for an integer field" do
    @c.db_schema = {:x=>{:type=>:integer}}
    m = @c.new
    m.x = '1'
    m.x.should == 1
    m.x = 1
    m.x.should == 1
    m.x = 1.3
    m.x.should == 1
  end
  specify "should typecast '' to nil unless type is string or blob" do
    [:integer, :float, :decimal, :boolean, :date, :time, :datetime].each do |x|
      @c.db_schema = {:x=>{:type=>x}}
      m = @c.new
      m.x = ''
      m.x.should == nil
    end
    [:string, :blob].each do |x|
      @c.db_schema = {:x=>{:type=>x}}
      m = @c.new
      m.x = ''
      m.x.should == ''
    end
  end
  specify "should not typecast '' to nil if typecast_empty_string_to_nil is false" do
    # Both the per-instance and the class-level switch are exercised.
    m = @c.new
    m.typecast_empty_string_to_nil = false
    proc{m.x = ''}.should raise_error
    @c.typecast_empty_string_to_nil = false
    proc{@c.new.x = ''}.should raise_error
  end
  specify "should handle typecasting where == raises an error on the object" do
    m = @c.new
    o = Object.new
    def o.==(v) raise ArgumentError end
    def o.to_i() 4 end
    m.x = o
    m.x.should == 4
  end
  specify "should not typecast nil if NULLs are allowed" do
    @c.db_schema[:x][:allow_null] = true
    m = @c.new
    m.x = nil
    m.x.should == nil
  end
  specify "should raise an error if attempting to typecast nil and NULLs are not allowed" do
    @c.db_schema[:x][:allow_null] = false
    proc{@c.new.x = nil}.should raise_error(Sequel::Error)
    proc{@c.new.x = ''}.should raise_error(Sequel::Error)
  end
  specify "should not raise an error if NULLs are not allowed and typecasting is turned off" do
    @c.typecast_on_assignment = false
    @c.db_schema[:x][:allow_null] = false
    m = @c.new
    m.x = nil
    m.x.should == nil
  end
  specify "should not raise when typecasting nil to NOT NULL column but raise_on_typecast_failure is off" do
    @c.raise_on_typecast_failure = false
    @c.typecast_on_assignment = true
    m = @c.new
    m.x = ''
    m.x.should == nil
    m.x = nil
    m.x.should == nil
  end
  specify "should raise an error if invalid data is used in an integer field" do
    proc{@c.new.x = 'a'}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid integer" do
    # Failure fallback: the raw, un-coerced value is kept.
    @c.raise_on_typecast_failure = false
    model = @c.new
    model.x = '1d'
    model.x.should == '1d'
  end
  specify "should convert to float for a float field" do
    @c.db_schema = {:x=>{:type=>:float}}
    m = @c.new
    m.x = '1.3'
    m.x.should == 1.3
    m.x = 1
    m.x.should == 1.0
    m.x = 1.3
    m.x.should == 1.3
  end
  specify "should raise an error if invalid data is used in an float field" do
    @c.db_schema = {:x=>{:type=>:float}}
    proc{@c.new.x = 'a'}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid float" do
    @c.raise_on_typecast_failure = false
    @c.db_schema = {:x=>{:type=>:float}}
    model = @c.new
    model.x = '1d'
    model.x.should == '1d'
  end
  specify "should convert to BigDecimal for a decimal field" do
    @c.db_schema = {:x=>{:type=>:decimal}}
    m = @c.new
    # NOTE(review): BigDecimal.new was removed in Ruby 2.7; on modern Rubies
    # this would need Kernel#BigDecimal instead.
    bd = BigDecimal.new('1.0')
    m.x = '1.0'
    m.x.should == bd
    m.x = 1.0
    m.x.should == bd
    m.x = 1
    m.x.should == bd
    m.x = bd
    m.x.should == bd
    m.x = '0'
    m.x.should == 0
  end
  specify "should raise an error if invalid data is used in an decimal field" do
    @c.db_schema = {:x=>{:type=>:decimal}}
    proc{@c.new.x = Date.today}.should raise_error(Sequel::InvalidValue)
    proc{@c.new.x = 'foo'}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid decimal" do
    @c.raise_on_typecast_failure = false
    @c.db_schema = {:x=>{:type=>:decimal}}
    model = @c.new
    time = Time.now
    model.x = time
    model.x.should == time
  end
  specify "should convert to string for a string field" do
    @c.db_schema = {:x=>{:type=>:string}}
    m = @c.new
    m.x = '1.3'
    m.x.should == '1.3'
    m.x = 1
    m.x.should == '1'
    m.x = 1.3
    m.x.should == '1.3'
  end
  specify "should convert to boolean for a boolean field" do
    # Truthy: any non-empty non-"false-like" value; falsy: f/n/no/0 variants;
    # nil/''/[] typecast to nil.
    @c.db_schema = {:x=>{:type=>:boolean}}
    m = @c.new
    m.x = '1.3'
    m.x.should == true
    m.x = 1
    m.x.should == true
    m.x = 1.3
    m.x.should == true
    m.x = 't'
    m.x.should == true
    m.x = 'T'
    m.x.should == true
    m.x = 'y'
    m.x.should == true
    m.x = 'Y'
    m.x.should == true
    m.x = true
    m.x.should == true
    m.x = nil
    m.x.should == nil
    m.x = ''
    m.x.should == nil
    m.x = []
    m.x.should == nil
    m.x = 'f'
    m.x.should == false
    m.x = 'F'
    m.x.should == false
    m.x = 'false'
    m.x.should == false
    m.x = 'FALSE'
    m.x.should == false
    m.x = 'n'
    m.x.should == false
    m.x = 'N'
    m.x.should == false
    m.x = 'no'
    m.x.should == false
    m.x = 'NO'
    m.x.should == false
    m.x = '0'
    m.x.should == false
    m.x = 0
    m.x.should == false
    m.x = false
    m.x.should == false
  end
  specify "should convert to date for a date field" do
    @c.db_schema = {:x=>{:type=>:date}}
    m = @c.new
    y = Date.new(2007,10,21)
    m.x = '2007-10-21'
    m.x.should == y
    m.x = Date.parse('2007-10-21')
    m.x.should == y
    m.x = Time.parse('2007-10-21')
    m.x.should == y
    m.x = DateTime.parse('2007-10-21')
    m.x.should == y
  end
  specify "should accept a hash with symbol or string keys for a date field" do
    @c.db_schema = {:x=>{:type=>:date}}
    m = @c.new
    y = Date.new(2007,10,21)
    m.x = {:year=>2007, :month=>10, :day=>21}
    m.x.should == y
    m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21'}
    m.x.should == y
  end
  specify "should raise an error if invalid data is used in a date field" do
    @c.db_schema = {:x=>{:type=>:date}}
    proc{@c.new.x = 'a'}.should raise_error(Sequel::InvalidValue)
    proc{@c.new.x = 100}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid date" do
    @c.raise_on_typecast_failure = false
    @c.db_schema = {:x=>{:type=>:date}}
    model = @c.new
    model.x = 4
    model.x.should == 4
  end
  specify "should convert to Sequel::SQLTime for a time field" do
    @c.db_schema = {:x=>{:type=>:time}}
    m = @c.new
    x = '10:20:30'
    y = Sequel::SQLTime.parse(x)
    m.x = x
    m.x.should == y
    m.x = y
    m.x.should == y
    m.x.should be_a_kind_of(Sequel::SQLTime)
  end
  specify "should accept a hash with symbol or string keys for a time field" do
    @c.db_schema = {:x=>{:type=>:time}}
    m = @c.new
    y = Time.parse('10:20:30')
    m.x = {:hour=>10, :minute=>20, :second=>30}
    m.x.should == y
    m.x = {'hour'=>'10', 'minute'=>'20', 'second'=>'30'}
    m.x.should == y
  end
  specify "should raise an error if invalid data is used in a time field" do
    @c.db_schema = {:x=>{:type=>:time}}
    proc{@c.new.x = '0000'}.should raise_error
    proc{@c.new.x = Date.parse('2008-10-21')}.should raise_error(Sequel::InvalidValue)
    proc{@c.new.x = DateTime.parse('2008-10-21')}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid time" do
    @c.raise_on_typecast_failure = false
    @c.db_schema = {:x=>{:type=>:time}}
    model = @c.new
    model.x = '0000'
    model.x.should == '0000'
  end
  specify "should convert to the Sequel.datetime_class for a datetime field" do
    # Checked twice: once with Time (the default) and once with DateTime.
    @c.db_schema = {:x=>{:type=>:datetime}}
    m = @c.new
    x = '2007-10-21T10:20:30-07:00'
    y = Time.parse(x)
    m.x = x
    m.x.should == y
    m.x = DateTime.parse(x)
    m.x.should == y
    m.x = Time.parse(x)
    m.x.should == y
    m.x = Date.parse('2007-10-21')
    m.x.should == Time.parse('2007-10-21')
    Sequel.datetime_class = DateTime
    y = DateTime.parse(x)
    m.x = x
    m.x.should == y
    m.x = DateTime.parse(x)
    m.x.should == y
    m.x = Time.parse(x)
    m.x.should == y
    m.x = Date.parse('2007-10-21')
    m.x.should == DateTime.parse('2007-10-21')
  end
  specify "should accept a hash with symbol or string keys for a datetime field" do
    @c.db_schema = {:x=>{:type=>:datetime}}
    m = @c.new
    y = Time.parse('2007-10-21 10:20:30')
    m.x = {:year=>2007, :month=>10, :day=>21, :hour=>10, :minute=>20, :second=>30}
    m.x.should == y
    m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21', 'hour'=>'10', 'minute'=>'20', 'second'=>'30'}
    m.x.should == y
    Sequel.datetime_class = DateTime
    y = DateTime.parse('2007-10-21 10:20:30')
    m.x = {:year=>2007, :month=>10, :day=>21, :hour=>10, :minute=>20, :second=>30}
    m.x.should == y
    m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21', 'hour'=>'10', 'minute'=>'20', 'second'=>'30'}
    m.x.should == y
  end
  specify "should raise an error if invalid data is used in a datetime field" do
    @c.db_schema = {:x=>{:type=>:datetime}}
    proc{@c.new.x = '0000'}.should raise_error(Sequel::InvalidValue)
    Sequel.datetime_class = DateTime
    proc{@c.new.x = '0000'}.should raise_error(Sequel::InvalidValue)
    proc{@c.new.x = 'a'}.should raise_error(Sequel::InvalidValue)
  end
  specify "should assign value if raise_on_typecast_failure is off and assigning invalid datetime" do
    @c.raise_on_typecast_failure = false
    @c.db_schema = {:x=>{:type=>:datetime}}
    model = @c.new
    model.x = '0000'
    model.x.should == '0000'
    Sequel.datetime_class = DateTime
    model = @c.new
    model.x = '0000'
    model.x.should == '0000'
    model.x = 'a'
    model.x.should == 'a'
  end
end
# #lock!: no-op for new records; re-SELECTs the row FOR UPDATE otherwise.
describe "Model#lock!" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      columns :id
    end
    @c.dataset._fetch = {:id=>1}
    DB.reset
  end
  it "should do nothing if the record is a new record" do
    o = @c.new
    # _refresh is stubbed to raise, proving it is never reached for new records.
    def o._refresh(x) raise Sequel::Error; super(x) end
    x = o.lock!
    x.should == o
    DB.sqls.should == []
  end
  it "should refresh the record using for_update if it is not a new record" do
    o = @c.load(:id => 1)
    # The stub marks @a so we can verify _refresh actually ran.
    def o._refresh(x) instance_variable_set(:@a, 1); super(x) end
    x = o.lock!
    x.should == o
    o.instance_variable_get(:@a).should == 1
    DB.sqls.should == ["SELECT * FROM items WHERE (id = 1) LIMIT 1 FOR UPDATE"]
  end
end
# #schema_type_class maps a column's schema :type symbol to a Ruby class.
describe "Model#schema_type_class" do
  specify "should return the class or array of classes for the given type symbol" do
    @c = Class.new(Sequel::Model(:items))
    @c.class_eval{@db_schema = {:id=>{:type=>:integer}}}
    @c.new.send(:schema_type_class, :id).should == Integer
  end
end
| Marketcircle/sequel | spec/model/record_spec.rb | Ruby | mit | 65,212 |
<?php
class f_form_decor_formBody extends f_form_decor_default
{
    /** @var string|null Glue placed between the rendered child elements. */
    protected $_separator;

    /**
     * Static factory for fluent construction.
     *
     * @return f_form_decor_formBody
     */
    public static function _(array $config = array())
    {
        return new self($config);
    }

    /**
     * Combined getter/setter for the separator.
     *
     * Called without arguments it returns the current separator; called with
     * one argument (including null) it stores the value and returns $this
     * for chaining.
     */
    public function separator($sSeparator = null)
    {
        if (func_num_args() === 0) {
            return $this->_separator;
        }
        $this->_separator = $sSeparator;
        return $this;
    }

    /**
     * Renders each child element that is not flagged to be skipped, joins
     * the fragments with the separator into $_decoration, then delegates to
     * the parent rendering routine.
     */
    public function render()
    {
        $parts = array();
        foreach ($this->object->_ as $child) {
            /* @var $child f_form_element */
            if (!$child->ignoreRender()) {
                $parts[] = $child->render();
            }
        }
        $this->_decoration = implode($this->_separator, $parts);
        return $this->_render();
    }
} | serafin/fine | src/lib/f/form/decor/formBody.php | PHP | mit | 885
# frozen_string_literal: true
# Schedules the UpdateVulnerabilitiesFromDismissalFeedback background
# migration once per project that has vulnerabilities, staggering the jobs
# so they don't all hit the queue at once.
class MigrateVulnerabilityDismissalFeedback < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  MIGRATION = 'UpdateVulnerabilitiesFromDismissalFeedback'
  BATCH_SIZE = 500
  DELAY_INTERVAL = 2.minutes.to_i

  # Minimal model so the migration doesn't depend on the app's
  # Vulnerability class (which may change after this migration ships).
  class Vulnerability < ActiveRecord::Base
    self.table_name = 'vulnerabilities'
    self.inheritance_column = :_type_disabled

    include ::EachBatch
  end

  def up
    # The background migration class only exists in EE.
    return unless Gitlab.ee?

    # Distinct project_ids in batches of BATCH_SIZE; each batch starts
    # BATCH_SIZE * DELAY_INTERVAL after the previous one, and within a batch
    # each project's job is offset by a further DELAY_INTERVAL.
    Vulnerability.select('project_id').group(:project_id).each_batch(of: BATCH_SIZE, column: "project_id") do |project_batch, index|
      batch_delay = (index - 1) * BATCH_SIZE * DELAY_INTERVAL
      project_batch.each_with_index do |project, project_batch_index|
        project_delay = project_batch_index * DELAY_INTERVAL
        migrate_in(batch_delay + project_delay, MIGRATION, project[:project_id])
      end
    end
  end

  def down
    # Data-only migration; nothing to reverse.
  end
end
| mmkassem/gitlabhq | db/post_migrate/20200519201128_migrate_vulnerability_dismissal_feedback.rb | Ruby | mit | 986 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Console\Formatter;
/**
 * A no-op output formatter: it discards every message and exposes a single
 * no-op style, making it a safe stand-in wherever a formatter is required.
 *
 * @author Tien Xuan Vo <tien.xuan.vo@gmail.com>
 */
final class NullOutputFormatter implements OutputFormatterInterface
{
    private NullOutputFormatterStyle $style;

    /**
     * {@inheritdoc}
     */
    public function format(?string $message): ?string
    {
        // Nothing is ever formatted.
        return null;
    }

    /**
     * {@inheritdoc}
     */
    public function getStyle(string $name): OutputFormatterStyleInterface
    {
        // The interface obliges us to hand back a style object, so a single
        // no-op style is created lazily and reused for every name.
        if (!isset($this->style)) {
            $this->style = new NullOutputFormatterStyle();
        }

        return $this->style;
    }

    /**
     * {@inheritdoc}
     */
    public function hasStyle(string $name): bool
    {
        return false;
    }

    /**
     * {@inheritdoc}
     */
    public function isDecorated(): bool
    {
        return false;
    }

    /**
     * {@inheritdoc}
     */
    public function setDecorated(bool $decorated): void
    {
        // Intentionally a no-op.
    }

    /**
     * {@inheritdoc}
     */
    public function setStyle(string $name, OutputFormatterStyleInterface $style): void
    {
        // Intentionally a no-op.
    }
}
| OskarStark/symfony | src/Symfony/Component/Console/Formatter/NullOutputFormatter.php | PHP | mit | 1,394 |
/*
Copyright (c) 2013-2014, Maik Schreiber
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using UnityEngine;
namespace NavHud {
/**********************************************************\
* --- DO NOT EDIT BELOW THIS COMMENT --- *
* *
* This file contains classes and interfaces to use the *
* Toolbar Plugin without creating a hard dependency on it. *
* *
* There is nothing in this file that needs to be edited *
* by hand. *
* *
* --- DO NOT EDIT BELOW THIS COMMENT --- *
\**********************************************************/
/// <summary>
/// The global tool bar manager. This is the soft-dependency wrapper: it
/// locates the real Toolbar plugin at runtime via reflection, so this mod
/// compiles and runs whether or not the plugin is installed.
/// </summary>
public partial class ToolbarManager : IToolbarManager {
    /// <summary>
    /// Whether the Toolbar Plugin is available.
    /// </summary>
    public static bool ToolbarAvailable {
        get {
            // toolbarAvailable is presumably a static bool? declared in the
            // other part of this partial class — TODO confirm. Tri-state:
            // null = not yet checked; the first access caches the result.
            if (toolbarAvailable == null) {
                toolbarAvailable = Instance != null;
            }
            return (bool) toolbarAvailable;
        }
    }

    /// <summary>
    /// The global tool bar manager instance.
    /// </summary>
    public static IToolbarManager Instance {
        get {
            // Only attempt the reflection lookup while availability is
            // unknown (null) or known-true, and no wrapper exists yet.
            if ((toolbarAvailable != false) && (instance_ == null)) {
                // Resolve the plugin's ToolbarManager type by name; null if
                // the Toolbar plugin is not installed.
                Type type = ToolbarTypes.getType("Toolbar.ToolbarManager");
                if (type != null) {
                    // Read the plugin's static Instance property and wrap it.
                    object realToolbarManager = ToolbarTypes.getStaticProperty(type, "Instance").GetValue(null, null);
                    instance_ = new ToolbarManager(realToolbarManager);
                }
            }
            return instance_;
        }
    }
}
#region interfaces
/// <summary>
/// A toolbar manager.
/// </summary>
/// <seealso cref="ToolbarManager.Instance"/>
public interface IToolbarManager {
    /// <summary>
    /// Adds a new button.
    /// </summary>
    /// <remarks>
    /// To replace an existing button, just add a new button using the old button's namespace and ID.
    /// Note that the new button will inherit the screen position of the old button.
    /// </remarks>
    /// <param name="ns">The new button's namespace. This is usually the plugin's name. Must not include special characters like '.'</param>
    /// <param name="id">The new button's ID. This ID must be unique across all buttons in the namespace. Must not include special characters like '.'</param>
    /// <returns>The button created.</returns>
    IButton add(string ns, string id);
}
/// <summary>
/// Represents a clickable button.
/// </summary>
public interface IButton {
/// <summary>
/// The text displayed on the button. Set to null to hide text.
/// </summary>
/// <remarks>
/// The text can be changed at any time to modify the button's appearance. Note that since this will also
/// modify the button's size, this feature should be used sparingly, if at all.
/// </remarks>
/// <seealso cref="TexturePath"/>
string Text {
set;
get;
}
/// <summary>
/// The color the button text is displayed with. Defaults to Color.white.
/// </summary>
/// <remarks>
/// The text color can be changed at any time to modify the button's appearance.
/// </remarks>
Color TextColor {
set;
get;
}
/// <summary>
/// The path of a texture file to display an icon on the button. Set to null to hide icon.
/// </summary>
/// <remarks>
/// <para>
/// A texture path on a button will have precedence over text. That is, if both text and texture path
/// have been set on a button, the button will show the texture, not the text.
/// </para>
/// <para>
/// The texture size must not exceed 24x24 pixels.
/// </para>
/// <para>
/// The texture path must be relative to the "GameData" directory, and must not specify a file name suffix.
/// Valid example: MyAddon/Textures/icon_mybutton
/// </para>
/// <para>
/// The texture path can be changed at any time to modify the button's appearance.
/// </para>
/// </remarks>
/// <seealso cref="Text"/>
string TexturePath {
    set;
    get;
}

/// <summary>
/// The button's tool tip text. Set to null if no tool tip is desired.
/// </summary>
/// <remarks>
/// Tool Tip Text Should Always Use Headline Style Like This.
/// </remarks>
string ToolTip {
    set;
    get;
}

/// <summary>
/// Whether this button is currently visible or not. Can be used in addition to or as a replacement for <see cref="Visibility"/>.
/// </summary>
/// <remarks>
/// Setting this property to true does not affect the player's ability to hide the button using the configuration.
/// Conversely, setting this property to false does not enable the player to show the button using the configuration.
/// </remarks>
bool Visible {
    set;
    get;
}

/// <summary>
/// Determines this button's visibility. Can be used in addition to or as a replacement for <see cref="Visible"/>.
/// </summary>
/// <remarks>
/// The return value from IVisibility.Visible is subject to the same rules as outlined for
/// <see cref="Visible"/>.
/// </remarks>
IVisibility Visibility {
    set;
    get;
}

/// <summary>
/// Whether this button is currently effectively visible or not. This is a combination of
/// <see cref="Visible"/> and <see cref="Visibility"/>.
/// </summary>
/// <remarks>
/// Note that the toolbar is not visible in certain game scenes, for example the loading screens. This property
/// does not reflect button invisibility in those scenes. In addition, this property does not reflect the
/// player's configuration of the button's visibility.
/// </remarks>
bool EffectivelyVisible {
    get;
}

/// <summary>
/// Whether this button is currently enabled (clickable) or not. This does not affect the player's ability to
/// position the button on their toolbar.
/// </summary>
bool Enabled {
    set;
    get;
}

/// <summary>
/// Whether this button is currently "important." Set to false to return to normal button behaviour.
/// </summary>
/// <remarks>
/// <para>
/// This can be used to temporarily force the button to be shown on screen regardless of the toolbar being
/// currently in auto-hidden mode. For example, a button that signals the arrival of a private message in
/// a chat room could mark itself as "important" as long as the message has not been read.
/// </para>
/// <para>
/// Setting this property does not change the appearance of the button. Use <see cref="TexturePath"/> to
/// change the button's icon.
/// </para>
/// <para>
/// Setting this property to true does not affect the player's ability to hide the button using the
/// configuration.
/// </para>
/// <para>
/// This feature should be used only sparingly, if at all, since it forces the button to be displayed on
/// screen even when it normally wouldn't.
/// </para>
/// </remarks>
bool Important {
    set;
    get;
}

/// <summary>
/// A drawable that is tied to the current button. This can be anything from a popup menu to
/// an informational window. Set to null to hide the drawable.
/// </summary>
/// <seealso cref="IDrawable"/>
IDrawable Drawable {
    set;
    get;
}

/// <summary>
/// Event handler that can be registered with to receive "on click" events.
/// </summary>
/// <example>
/// <code>
/// IButton button = ...
/// button.OnClick += (e) => {
///     Debug.Log("button clicked, mouseButton: " + e.MouseButton);
/// };
/// </code>
/// </example>
event ClickHandler OnClick;

/// <summary>
/// Event handler that can be registered with to receive "on mouse enter" events.
/// </summary>
/// <example>
/// <code>
/// IButton button = ...
/// button.OnMouseEnter += (e) => {
///     Debug.Log("mouse entered button");
/// };
/// </code>
/// </example>
event MouseEnterHandler OnMouseEnter;

/// <summary>
/// Event handler that can be registered with to receive "on mouse leave" events.
/// </summary>
/// <example>
/// <code>
/// IButton button = ...
/// button.OnMouseLeave += (e) => {
///     Debug.Log("mouse left button");
/// };
/// </code>
/// </example>
event MouseLeaveHandler OnMouseLeave;

/// <summary>
/// Permanently destroys this button so that it is no longer displayed.
/// Should be used when a plugin is stopped to remove leftover buttons.
/// </summary>
void Destroy();
}
/// <summary>
/// A drawable that is tied to a particular button. This can be anything from a popup menu
/// to an informational window.
/// </summary>
/// <remarks>
/// Implementations are driven by the toolbar: <see cref="Update"/> and <see cref="Draw"/> are
/// called while the drawable is attached to a button via <see cref="IButton.Drawable"/>.
/// </remarks>
public interface IDrawable {
    /// <summary>
    /// Update any information. This is called once per frame.
    /// </summary>
    void Update();

    /// <summary>
    /// Draws GUI widgets for this drawable. This is the equivalent to the OnGUI() message in
    /// <see cref="MonoBehaviour"/>.
    /// </summary>
    /// <remarks>
    /// The drawable will be positioned near its parent toolbar according to the drawable's current
    /// width/height.
    /// </remarks>
    /// <param name="position">The left/top position of where to draw this drawable.</param>
    /// <returns>The current width/height of this drawable.</returns>
    Vector2 Draw(Vector2 position);
}
#endregion
#region events
/// <summary>
/// Event describing a click on a button.
/// </summary>
/// <remarks>
/// Declared partial: the internal constructor that populates these fields from the real
/// Toolbar plugin's event object lives in the private-implementation region of this file.
/// </remarks>
public partial class ClickEvent : EventArgs {
    /// <summary>
    /// The button that has been clicked.
    /// </summary>
    public readonly IButton Button;

    /// <summary>
    /// The mouse button which the button was clicked with.
    /// </summary>
    /// <remarks>
    /// Is 0 for left mouse button, 1 for right mouse button, and 2 for middle mouse button.
    /// </remarks>
    public readonly int MouseButton;
}
/// <summary>
/// An event handler that is invoked whenever a button has been clicked.
/// </summary>
/// <param name="e">An event describing the button click.</param>
public delegate void ClickHandler(ClickEvent e);
/// <summary>
/// Event describing the mouse pointer moving about a button.
/// </summary>
public abstract partial class MouseMoveEvent {
    /// <summary>
    /// The button in question.
    /// </summary>
    /// <remarks>
    /// NOTE(review): this public field is lowercase, unlike <see cref="ClickEvent.Button"/> —
    /// presumably kept as-is for source compatibility with existing consumers; do not rename.
    /// </remarks>
    public readonly IButton button;
}
/// <summary>
/// Event describing the mouse pointer entering a button's area.
/// </summary>
public partial class MouseEnterEvent : MouseMoveEvent {
}

/// <summary>
/// Event describing the mouse pointer leaving a button's area.
/// </summary>
public partial class MouseLeaveEvent : MouseMoveEvent {
}

/// <summary>
/// An event handler that is invoked whenever the mouse pointer enters a button's area.
/// </summary>
/// <param name="e">An event describing the mouse pointer entering.</param>
public delegate void MouseEnterHandler(MouseEnterEvent e);

/// <summary>
/// An event handler that is invoked whenever the mouse pointer leaves a button's area.
/// </summary>
/// <param name="e">An event describing the mouse pointer leaving.</param>
public delegate void MouseLeaveHandler(MouseLeaveEvent e);
#endregion
#region visibility
/// <summary>
/// Determines visibility of a button.
/// </summary>
/// <seealso cref="IButton.Visibility"/>
public interface IVisibility {
    /// <summary>
    /// Whether a button is currently visible or not.
    /// </summary>
    /// <seealso cref="IButton.Visible"/>
    bool Visible {
        get;
    }
}
/// <summary>
/// Determines visibility of a button in relation to the currently running game scene.
/// </summary>
/// <example>
/// <code>
/// IButton button = ...
/// button.Visibility = new GameScenesVisibility(GameScenes.EDITOR, GameScenes.SPH);
/// </code>
/// </example>
/// <seealso cref="IButton.Visibility"/>
public class GameScenesVisibility : IVisibility {
    /// <summary>
    /// Whether the button is visible in the current game scene. Delegates to the real
    /// Toolbar plugin's GameScenesVisibility instance via reflection.
    /// </summary>
    public bool Visible {
        get {
            return (bool) visibleProperty.GetValue(realGameScenesVisibility, null);
        }
    }

    // The Toolbar.GameScenesVisibility instance from the actual Toolbar assembly,
    // held as object because that assembly is resolved at runtime via reflection.
    private object realGameScenesVisibility;
    // Cached PropertyInfo for the real instance's "Visible" property, looked up once
    // in the constructor to avoid repeated reflection on every Visible read.
    private PropertyInfo visibleProperty;

    /// <summary>
    /// Creates a visibility that makes a button visible only in the given game scenes.
    /// </summary>
    /// <param name="gameScenes">The scenes in which the button should be visible.</param>
    public GameScenesVisibility(params GameScenes[] gameScenes) {
        Type gameScenesVisibilityType = ToolbarTypes.getType("Toolbar.GameScenesVisibility");
        realGameScenesVisibility = Activator.CreateInstance(gameScenesVisibilityType, new object[] { gameScenes });
        visibleProperty = ToolbarTypes.getProperty(gameScenesVisibilityType, "Visible");
    }
}
#endregion
#region drawable
/// <summary>
/// A drawable that draws a popup menu.
/// </summary>
/// <remarks>
/// Reflection proxy: all calls are forwarded to a Toolbar.PopupMenuDrawable instance created
/// from the actual Toolbar assembly at runtime.
/// </remarks>
public partial class PopupMenuDrawable : IDrawable {
    /// <summary>
    /// Event handler that can be registered with to receive "any menu option clicked" events.
    /// </summary>
    public event Action OnAnyOptionClicked {
        add {
            onAnyOptionClickedEvent.AddEventHandler(realPopupMenuDrawable, value);
        }
        remove {
            onAnyOptionClickedEvent.RemoveEventHandler(realPopupMenuDrawable, value);
        }
    }

    // The real Toolbar.PopupMenuDrawable instance; typed as object because the Toolbar
    // assembly is only resolved at runtime.
    private object realPopupMenuDrawable;
    // Reflection members cached once in the constructor so per-frame calls (Update/Draw)
    // do not pay repeated lookup costs.
    private MethodInfo updateMethod;
    private MethodInfo drawMethod;
    private MethodInfo addOptionMethod;
    private MethodInfo addSeparatorMethod;
    private MethodInfo destroyMethod;
    private EventInfo onAnyOptionClickedEvent;

    /// <summary>
    /// Creates a new popup menu drawable by instantiating the real Toolbar type and caching
    /// the reflection handles for its members.
    /// </summary>
    public PopupMenuDrawable() {
        Type popupMenuDrawableType = ToolbarTypes.getType("Toolbar.PopupMenuDrawable");
        realPopupMenuDrawable = Activator.CreateInstance(popupMenuDrawableType, null);
        updateMethod = ToolbarTypes.getMethod(popupMenuDrawableType, "Update");
        drawMethod = ToolbarTypes.getMethod(popupMenuDrawableType, "Draw");
        addOptionMethod = ToolbarTypes.getMethod(popupMenuDrawableType, "AddOption");
        addSeparatorMethod = ToolbarTypes.getMethod(popupMenuDrawableType, "AddSeparator");
        destroyMethod = ToolbarTypes.getMethod(popupMenuDrawableType, "Destroy");
        onAnyOptionClickedEvent = ToolbarTypes.getEvent(popupMenuDrawableType, "OnAnyOptionClicked");
    }

    /// <summary>
    /// Forwards the per-frame update to the real drawable. See <see cref="IDrawable.Update"/>.
    /// </summary>
    public void Update() {
        updateMethod.Invoke(realPopupMenuDrawable, null);
    }

    /// <summary>
    /// Forwards drawing to the real drawable. See <see cref="IDrawable.Draw"/>.
    /// </summary>
    /// <param name="position">The left/top position of where to draw.</param>
    /// <returns>The current width/height of this drawable.</returns>
    public Vector2 Draw(Vector2 position) {
        return (Vector2) drawMethod.Invoke(realPopupMenuDrawable, new object[] { position });
    }

    /// <summary>
    /// Adds a new option to the popup menu.
    /// </summary>
    /// <param name="text">The text of the option.</param>
    /// <returns>A button that can be used to register clicks on the menu option.</returns>
    public IButton AddOption(string text) {
        object realButton = addOptionMethod.Invoke(realPopupMenuDrawable, new object[] { text });
        return new Button(realButton, new ToolbarTypes());
    }

    /// <summary>
    /// Adds a separator to the popup menu.
    /// </summary>
    public void AddSeparator() {
        addSeparatorMethod.Invoke(realPopupMenuDrawable, null);
    }

    /// <summary>
    /// Destroys this drawable. This must always be called before disposing of this drawable.
    /// </summary>
    public void Destroy() {
        destroyMethod.Invoke(realPopupMenuDrawable, null);
    }
}
#endregion
#region private implementations
/// <summary>
/// Reflection proxy for the real Toolbar.ToolbarManager. Declared partial: the
/// availability check and singleton accessor that use <c>toolbarAvailable</c> and
/// <c>instance_</c> live in the other part of this class — do not remove those fields
/// even though they are unreferenced here.
/// </summary>
public partial class ToolbarManager : IToolbarManager {
    // Tri-state cache for "is the Toolbar plugin installed?"; null = not yet checked.
    // Read/written by the other partial part of this class.
    private static bool? toolbarAvailable = null;
    // Lazily created singleton, managed by the other partial part of this class.
    private static IToolbarManager instance_;

    // The real Toolbar.ToolbarManager instance from the actual Toolbar assembly.
    private object realToolbarManager;
    // Cached reflection handle for IToolbarManager.add(ns, id).
    private MethodInfo addMethod;
    // Maps real (reflected) button objects to their wrapper, so each real button
    // gets exactly one wrapper instance.
    private Dictionary<object, IButton> buttons = new Dictionary<object, IButton>();
    // Reflection type/member cache shared by all buttons created through this manager.
    private ToolbarTypes types = new ToolbarTypes();

    private ToolbarManager(object realToolbarManager) {
        this.realToolbarManager = realToolbarManager;
        addMethod = ToolbarTypes.getMethod(types.iToolbarManagerType, "add");
    }

    /// <summary>
    /// Adds a new button to the toolbar.
    /// </summary>
    /// <param name="ns">The namespace of the button (usually the plugin name).</param>
    /// <param name="id">The button's identifier, unique within the namespace.</param>
    /// <returns>A wrapper around the newly created real button.</returns>
    public IButton add(string ns, string id) {
        object realButton = addMethod.Invoke(realToolbarManager, new object[] { ns, id });
        IButton button = new Button(realButton, types);
        buttons.Add(realButton, button);
        return button;
    }
}
/// <summary>
/// Reflection proxy implementing <see cref="IButton"/> by forwarding every property access
/// and event to a real Toolbar.IButton instance from the actual Toolbar assembly.
/// </summary>
/// <remarks>
/// Event bridging: the constructor attaches reflection-created delegates to the real button's
/// events; <see cref="Destroy"/> must detach them again to avoid the real button keeping this
/// wrapper alive after destruction.
/// </remarks>
internal class Button : IButton {
    // The real Toolbar.IButton instance; typed as object because the Toolbar assembly
    // is resolved at runtime.
    private object realButton;
    // Cached reflection handles for the real button's members.
    private ToolbarTypes types;
    // Delegates attached to the real button's events; kept so Destroy() can detach them.
    private Delegate realClickHandler;
    private Delegate realMouseEnterHandler;
    private Delegate realMouseLeaveHandler;

    internal Button(object realButton, ToolbarTypes types) {
        this.realButton = realButton;
        this.types = types;

        // Bridge the real button's events to this wrapper's private handler methods
        // (clicked/mouseEntered/mouseLeft below), which in turn raise the public events.
        realClickHandler = attachEventHandler(types.button.onClickEvent, "clicked", realButton);
        realMouseEnterHandler = attachEventHandler(types.button.onMouseEnterEvent, "mouseEntered", realButton);
        realMouseLeaveHandler = attachEventHandler(types.button.onMouseLeaveEvent, "mouseLeft", realButton);
    }

    // Creates a delegate of the real event's handler type bound to one of this class's
    // private instance methods, attaches it to the real button, and returns it for later
    // detachment in Destroy().
    private Delegate attachEventHandler(EventInfo @event, string methodName, object realButton) {
        MethodInfo method = GetType().GetMethod(methodName, BindingFlags.NonPublic | BindingFlags.Instance);
        Delegate d = Delegate.CreateDelegate(@event.EventHandlerType, this, method);
        @event.AddEventHandler(realButton, d);
        return d;
    }

    public string Text {
        set {
            types.button.textProperty.SetValue(realButton, value, null);
        }
        get {
            return (string) types.button.textProperty.GetValue(realButton, null);
        }
    }

    public Color TextColor {
        set {
            types.button.textColorProperty.SetValue(realButton, value, null);
        }
        get {
            return (Color) types.button.textColorProperty.GetValue(realButton, null);
        }
    }

    public string TexturePath {
        set {
            types.button.texturePathProperty.SetValue(realButton, value, null);
        }
        get {
            return (string) types.button.texturePathProperty.GetValue(realButton, null);
        }
    }

    public string ToolTip {
        set {
            types.button.toolTipProperty.SetValue(realButton, value, null);
        }
        get {
            return (string) types.button.toolTipProperty.GetValue(realButton, null);
        }
    }

    public bool Visible {
        set {
            types.button.visibleProperty.SetValue(realButton, value, null);
        }
        get {
            return (bool) types.button.visibleProperty.GetValue(realButton, null);
        }
    }

    public IVisibility Visibility {
        set {
            // The real Toolbar cannot hold our IVisibility directly; wrap it in the real
            // assembly's FunctionVisibility, which evaluates the given Func<bool> on demand.
            object functionVisibility = null;
            if (value != null) {
                functionVisibility = Activator.CreateInstance(types.functionVisibilityType, new object[] { new Func<bool>(() => value.Visible) });
            }
            types.button.visibilityProperty.SetValue(realButton, functionVisibility, null);
            // Remember the wrapper-side value so the getter can return the exact object
            // the caller assigned (the real button only stores the FunctionVisibility).
            visibility_ = value;
        }
        get {
            return visibility_;
        }
    }
    // Wrapper-side copy of the last assigned Visibility (see setter comment above).
    private IVisibility visibility_;

    public bool EffectivelyVisible {
        get {
            return (bool) types.button.effectivelyVisibleProperty.GetValue(realButton, null);
        }
    }

    public bool Enabled {
        set {
            types.button.enabledProperty.SetValue(realButton, value, null);
        }
        get {
            return (bool) types.button.enabledProperty.GetValue(realButton, null);
        }
    }

    public bool Important {
        set {
            types.button.importantProperty.SetValue(realButton, value, null);
        }
        get {
            return (bool) types.button.importantProperty.GetValue(realButton, null);
        }
    }

    public IDrawable Drawable {
        set {
            // Same pattern as Visibility: wrap our IDrawable in the real assembly's
            // FunctionDrawable so the real button can call Update/Draw through delegates.
            object functionDrawable = null;
            if (value != null) {
                functionDrawable = Activator.CreateInstance(types.functionDrawableType, new object[] {
                    new Action(() => value.Update()),
                    new Func<Vector2, Vector2>((pos) => value.Draw(pos))
                });
            }
            types.button.drawableProperty.SetValue(realButton, functionDrawable, null);
            // Remember the wrapper-side value so the getter returns the assigned object.
            drawable_ = value;
        }
        get {
            return drawable_;
        }
    }
    // Wrapper-side copy of the last assigned Drawable (see setter comment above).
    private IDrawable drawable_;

    public event ClickHandler OnClick;

    // Invoked (via reflection-created delegate) when the real button is clicked;
    // translates the real event object into our ClickEvent and raises OnClick.
    private void clicked(object realEvent) {
        if (OnClick != null) {
            OnClick(new ClickEvent(realEvent, this));
        }
    }

    public event MouseEnterHandler OnMouseEnter;

    // Invoked when the mouse enters the real button's area; raises OnMouseEnter.
    private void mouseEntered(object realEvent) {
        if (OnMouseEnter != null) {
            OnMouseEnter(new MouseEnterEvent(this));
        }
    }

    public event MouseLeaveHandler OnMouseLeave;

    // Invoked when the mouse leaves the real button's area; raises OnMouseLeave.
    private void mouseLeft(object realEvent) {
        if (OnMouseLeave != null) {
            OnMouseLeave(new MouseLeaveEvent(this));
        }
    }

    public void Destroy() {
        // Detach our bridging delegates before destroying the real button so it does not
        // keep references to this wrapper.
        detachEventHandler(types.button.onClickEvent, realClickHandler, realButton);
        detachEventHandler(types.button.onMouseEnterEvent, realMouseEnterHandler, realButton);
        detachEventHandler(types.button.onMouseLeaveEvent, realMouseLeaveHandler, realButton);

        types.button.destroyMethod.Invoke(realButton, null);
    }

    private void detachEventHandler(EventInfo @event, Delegate d, object realButton) {
        @event.RemoveEventHandler(realButton, d);
    }
}
public partial class ClickEvent : EventArgs {
    // Populates the public readonly fields from the real Toolbar event object: the
    // MouseButton value is read via reflection because the real event's type is only
    // known at runtime.
    internal ClickEvent(object realEvent, IButton button) {
        Type type = realEvent.GetType();
        Button = button;
        MouseButton = (int) type.GetField("MouseButton", BindingFlags.Public | BindingFlags.Instance).GetValue(realEvent);
    }
}
public abstract partial class MouseMoveEvent : EventArgs {
    // Base constructor for mouse move events; stores the wrapper button the event concerns.
    internal MouseMoveEvent(IButton button) {
        this.button = button;
    }
}

public partial class MouseEnterEvent : MouseMoveEvent {
    internal MouseEnterEvent(IButton button)
        : base(button) {
    }
}

public partial class MouseLeaveEvent : MouseMoveEvent {
    internal MouseLeaveEvent(IButton button)
        : base(button) {
    }
}
/// <summary>
/// Resolves and caches the runtime types of the real Toolbar assembly, plus small
/// reflection-lookup helpers used throughout this wrapper.
/// </summary>
internal class ToolbarTypes {
    // Runtime-resolved types from the real Toolbar assembly.
    internal readonly Type iToolbarManagerType;
    internal readonly Type functionVisibilityType;
    internal readonly Type functionDrawableType;
    // Cached member handles of the real Toolbar.IButton type.
    internal readonly ButtonTypes button;

    internal ToolbarTypes() {
        iToolbarManagerType = getType("Toolbar.IToolbarManager");
        functionVisibilityType = getType("Toolbar.FunctionVisibility");
        functionDrawableType = getType("Toolbar.FunctionDrawable");
        button = new ButtonTypes(getType("Toolbar.IButton"));
    }

    /// <summary>
    /// Finds the single exported type with the given full name across all loaded
    /// assemblies, or null when the Toolbar plugin is not installed.
    /// </summary>
    internal static Type getType(string name) {
        var candidates =
            from loaded in AssemblyLoader.loadedAssemblies
            from exported in loaded.assembly.GetExportedTypes()
            where exported.FullName == name
            select exported;
        return candidates.SingleOrDefault();
    }

    /// <summary>
    /// Looks up a public instance property on the given type.
    /// </summary>
    internal static PropertyInfo getProperty(Type type, string name) {
        return type.GetProperty(name, BindingFlags.Public | BindingFlags.Instance);
    }

    /// <summary>
    /// Looks up a public static property on the given type.
    /// </summary>
    internal static PropertyInfo getStaticProperty(Type type, string name) {
        return type.GetProperty(name, BindingFlags.Public | BindingFlags.Static);
    }

    /// <summary>
    /// Looks up a public instance event on the given type.
    /// </summary>
    internal static EventInfo getEvent(Type type, string name) {
        return type.GetEvent(name, BindingFlags.Public | BindingFlags.Instance);
    }

    /// <summary>
    /// Looks up a public instance method on the given type.
    /// </summary>
    internal static MethodInfo getMethod(Type type, string name) {
        return type.GetMethod(name, BindingFlags.Public | BindingFlags.Instance);
    }
}
/// <summary>
/// Caches reflection handles for every member of the real Toolbar.IButton type,
/// so <see cref="Button"/> does not repeat member lookups on each property access.
/// </summary>
internal class ButtonTypes {
    internal readonly Type iButtonType;
    // One cached handle per Toolbar.IButton member; names mirror the member names.
    internal readonly PropertyInfo textProperty;
    internal readonly PropertyInfo textColorProperty;
    internal readonly PropertyInfo texturePathProperty;
    internal readonly PropertyInfo toolTipProperty;
    internal readonly PropertyInfo visibleProperty;
    internal readonly PropertyInfo visibilityProperty;
    internal readonly PropertyInfo effectivelyVisibleProperty;
    internal readonly PropertyInfo enabledProperty;
    internal readonly PropertyInfo importantProperty;
    internal readonly PropertyInfo drawableProperty;
    internal readonly EventInfo onClickEvent;
    internal readonly EventInfo onMouseEnterEvent;
    internal readonly EventInfo onMouseLeaveEvent;
    internal readonly MethodInfo destroyMethod;

    internal ButtonTypes(Type iButtonType) {
        this.iButtonType = iButtonType;
        textProperty = ToolbarTypes.getProperty(iButtonType, "Text");
        textColorProperty = ToolbarTypes.getProperty(iButtonType, "TextColor");
        texturePathProperty = ToolbarTypes.getProperty(iButtonType, "TexturePath");
        toolTipProperty = ToolbarTypes.getProperty(iButtonType, "ToolTip");
        visibleProperty = ToolbarTypes.getProperty(iButtonType, "Visible");
        visibilityProperty = ToolbarTypes.getProperty(iButtonType, "Visibility");
        effectivelyVisibleProperty = ToolbarTypes.getProperty(iButtonType, "EffectivelyVisible");
        enabledProperty = ToolbarTypes.getProperty(iButtonType, "Enabled");
        importantProperty = ToolbarTypes.getProperty(iButtonType, "Important");
        drawableProperty = ToolbarTypes.getProperty(iButtonType, "Drawable");
        onClickEvent = ToolbarTypes.getEvent(iButtonType, "OnClick");
        onMouseEnterEvent = ToolbarTypes.getEvent(iButtonType, "OnMouseEnter");
        onMouseLeaveEvent = ToolbarTypes.getEvent(iButtonType, "OnMouseLeave");
        destroyMethod = ToolbarTypes.getMethod(iButtonType, "Destroy");
    }
}
#endregion
}
| RealGrep/NavHud | Source/ToolbarWrapper.cs | C# | mit | 25,318 |