entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "# Copyright (c) 2013 JeongHoon Byun aka \"Outsider\", <http://blog.outsider.ne.kr/>\n# L",
"end": 35,
"score": 0.9998809695243835,
"start": 21,
"tag": "NAME",
"value": "JeongHoon Byun"
},
{
"context": "# Copyright (c) 2013 JeongHoon Byun aka \"Outsider\", <http://blog.outsider.ne.kr/>\n# Licensed under ",
"end": 49,
"score": 0.990684986114502,
"start": 41,
"tag": "USERNAME",
"value": "Outsider"
}
] | test/parser/js-parser.test.coffee | uppalapatisujitha/CodingConventionofCommitHistory | 421 | # Copyright (c) 2013 JeongHoon Byun aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
should = require 'should'
parser = require '../../src/parser/js-parser'
describe 'js-parser >', ->
describe 'comma >', ->
it 'check first comma #1', ->
convention = parser.comma ',fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #2', ->
convention = parser.comma ' ,fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #3', ->
convention = parser.comma ' fs = , require(\'fs\'),', {}
convention.comma.first.should.equal 0
it 'check first comma #4', ->
convention = parser.comma ' , fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check last comma #1', ->
convention = parser.comma 'fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #2', ->
convention = parser.comma ' fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #3', ->
convention = parser.comma ' fs = require(\'fs\'), ', {}
convention.comma.last.should.equal 1
it 'check last comma #4', ->
convention = parser.comma ' ,fs = ,require(\'fs\'),', {}
convention.comma.last.should.equal 1
describe 'indent >', ->
it 'check space indent #1', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.space.should.equal 0
it 'check space indent #2', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #3', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #4', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check tab indent #1', ->
convention = parser.indent '\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #2', ->
convention = parser.indent '\t\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #3', ->
convention = parser.indent '\t\t var a = 1; ', {}
convention.indent.tab.should.equal 1
it 'check tab indent #4', ->
convention = parser.indent ' \tvar a = 1;', {}
convention.indent.tab.should.equal 0
it 'check tab indent #5', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.tab.should.equal 0
describe 'functiondef >', ->
it 'check function definition followed by no space #1', ->
convention = parser.functiondef 'var a = function() {', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #2', ->
convention = parser.functiondef 'var a = function() { return 1; };', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #3', ->
convention = parser.functiondef 'function a() {}', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #5', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.nospace.should.equal 0
it 'check function definition followed by one space #1', ->
convention = parser.functiondef 'var a = function () { return 1; };', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #2', ->
convention = parser.functiondef 'function a () {}', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #3', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.onespace.should.equal 0
describe 'argumentdef >', ->
it 'check argument definition with one space #1', ->
convention = parser.argumentdef 'function a( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #2', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #3', ->
convention = parser.argumentdef 'a.fn(function( arg1, arg2 ) {})', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.onespace.should.equal 0
it 'check argument definition with no space #1', ->
convention = parser.argumentdef 'var a = function(arg1, arg2) {', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #2', ->
convention = parser.argumentdef 'var a = function (arg1, arg2) { return 1; };', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #3', ->
convention = parser.argumentdef 'function a(arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #5', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 0
it 'check argument definition with no space #6', ->
convention = parser.argumentdef ' }//if -', {}
convention.argumentdef.nospace.should.equal 0
convention.argumentdef.onespace.should.equal 0
describe 'literaldef >', ->
it 'check object literal definition with trace space #1', ->
convention = parser.literaldef ' init: "value",', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #2', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #3', ->
convention = parser.literaldef '{ key: value, key: value }', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #4', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.tracespace.should.equal 0
it 'check object literal definition with both space #1', ->
convention = parser.literaldef ' init : "value",', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #2', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #3', ->
convention = parser.literaldef '{ key : value, key: value }', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #4', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.bothspace.should.equal 0
it 'check object literal definition with no space #1', ->
convention = parser.literaldef ' init:"value",', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #2', ->
convention = parser.literaldef ' init:function() { ', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #3', ->
convention = parser.literaldef '{ key:value, key: value }', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #4', ->
convention = parser.literaldef ' init :function() { ', {}
convention.literaldef.nospace.should.equal 0
describe 'conditionstatement >', ->
it 'check condition statement with one space #1', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #2', ->
convention = parser.conditionstatement 'while ( ture ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #3', ->
convention = parser.conditionstatement 'switch ( a ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #4', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 0
it 'check condition statement with no space #1', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #2', ->
convention = parser.conditionstatement 'while( ture ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #3', ->
convention = parser.conditionstatement 'switch( a ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #4', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 0
describe 'blockstatement >', ->
it 'check block statement with one space #1', ->
convention = parser.blockstatement 'if (true) { return; }', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #2', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #3', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #4', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #5', ->
convention = parser.blockstatement 'if (true){ return; }', {}
convention.blockstatement.onespace.should.equal 0
it 'check block statement with no space #1', ->
convention = parser.blockstatement 'if (true){ return (); }', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #2', ->
convention = parser.blockstatement '}else if (true){', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #3', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement with no space #4', ->
convention = parser.blockstatement '} else if(true) {', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement at new line #1', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #2', ->
convention = parser.blockstatement 'if (true) // comment', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #3', ->
convention = parser.blockstatement 'if (true)/* */', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #4', ->
convention = parser.blockstatement 'else if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #5', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #6', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.newline.should.equal 0
describe 'linelength >', ->
it 'line length is 80 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #3', ->
convention = parser.linelength '\t\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 0
it 'line length is 120 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #3', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char120.should.equal 0
it 'line length is 150 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; } //afijfjeovjfiejffjeifjidjvosjfiejfioejovfjeifjiejfosjfioejfoiejfoi', {}
convention.linelength.char150.should.equal 1
describe 'quotes >', ->
it 'single quote #1', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.single.should.equal 1
it 'single quote #2', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #3', ->
convention = parser.quotes """ var foo = '<div id=\'bar\'>baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #4', ->
convention = parser.quotes """ 'key': 'value' """
convention.quotes.single.should.equal 1
it 'single quote #5', ->
convention = parser.quotes """ 'key': true """
convention.quotes.single.should.equal 1
it 'single quote #6', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.single.should.equal 0
it 'single quote #7', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.single.should.equal 0
it 'single quote #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.single.should.equal 0
it 'double quotes #1', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.double.should.equal 1
it 'double quotes #2', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #3', ->
convention = parser.quotes """ var foo = "<div id=\"bar\">baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #4', ->
convention = parser.quotes """ "key": "value" """
convention.quotes.double.should.equal 1
it 'double quotes #5', ->
convention = parser.quotes """ "key": true """
convention.quotes.double.should.equal 1
it 'double quotes #6', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.double.should.equal 0
it 'double quotes #7', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.double.should.equal 0
it 'double quotes #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.double.should.equal 0 | 98104 | # Copyright (c) 2013 <NAME> aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
should = require 'should'
parser = require '../../src/parser/js-parser'
describe 'js-parser >', ->
describe 'comma >', ->
it 'check first comma #1', ->
convention = parser.comma ',fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #2', ->
convention = parser.comma ' ,fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #3', ->
convention = parser.comma ' fs = , require(\'fs\'),', {}
convention.comma.first.should.equal 0
it 'check first comma #4', ->
convention = parser.comma ' , fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check last comma #1', ->
convention = parser.comma 'fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #2', ->
convention = parser.comma ' fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #3', ->
convention = parser.comma ' fs = require(\'fs\'), ', {}
convention.comma.last.should.equal 1
it 'check last comma #4', ->
convention = parser.comma ' ,fs = ,require(\'fs\'),', {}
convention.comma.last.should.equal 1
describe 'indent >', ->
it 'check space indent #1', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.space.should.equal 0
it 'check space indent #2', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #3', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #4', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check tab indent #1', ->
convention = parser.indent '\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #2', ->
convention = parser.indent '\t\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #3', ->
convention = parser.indent '\t\t var a = 1; ', {}
convention.indent.tab.should.equal 1
it 'check tab indent #4', ->
convention = parser.indent ' \tvar a = 1;', {}
convention.indent.tab.should.equal 0
it 'check tab indent #5', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.tab.should.equal 0
describe 'functiondef >', ->
it 'check function definition followed by no space #1', ->
convention = parser.functiondef 'var a = function() {', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #2', ->
convention = parser.functiondef 'var a = function() { return 1; };', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #3', ->
convention = parser.functiondef 'function a() {}', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #5', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.nospace.should.equal 0
it 'check function definition followed by one space #1', ->
convention = parser.functiondef 'var a = function () { return 1; };', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #2', ->
convention = parser.functiondef 'function a () {}', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #3', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.onespace.should.equal 0
describe 'argumentdef >', ->
it 'check argument definition with one space #1', ->
convention = parser.argumentdef 'function a( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #2', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #3', ->
convention = parser.argumentdef 'a.fn(function( arg1, arg2 ) {})', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.onespace.should.equal 0
it 'check argument definition with no space #1', ->
convention = parser.argumentdef 'var a = function(arg1, arg2) {', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #2', ->
convention = parser.argumentdef 'var a = function (arg1, arg2) { return 1; };', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #3', ->
convention = parser.argumentdef 'function a(arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #5', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 0
it 'check argument definition with no space #6', ->
convention = parser.argumentdef ' }//if -', {}
convention.argumentdef.nospace.should.equal 0
convention.argumentdef.onespace.should.equal 0
describe 'literaldef >', ->
it 'check object literal definition with trace space #1', ->
convention = parser.literaldef ' init: "value",', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #2', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #3', ->
convention = parser.literaldef '{ key: value, key: value }', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #4', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.tracespace.should.equal 0
it 'check object literal definition with both space #1', ->
convention = parser.literaldef ' init : "value",', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #2', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #3', ->
convention = parser.literaldef '{ key : value, key: value }', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #4', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.bothspace.should.equal 0
it 'check object literal definition with no space #1', ->
convention = parser.literaldef ' init:"value",', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #2', ->
convention = parser.literaldef ' init:function() { ', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #3', ->
convention = parser.literaldef '{ key:value, key: value }', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #4', ->
convention = parser.literaldef ' init :function() { ', {}
convention.literaldef.nospace.should.equal 0
describe 'conditionstatement >', ->
it 'check condition statement with one space #1', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #2', ->
convention = parser.conditionstatement 'while ( ture ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #3', ->
convention = parser.conditionstatement 'switch ( a ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #4', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 0
it 'check condition statement with no space #1', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #2', ->
convention = parser.conditionstatement 'while( ture ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #3', ->
convention = parser.conditionstatement 'switch( a ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #4', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 0
describe 'blockstatement >', ->
it 'check block statement with one space #1', ->
convention = parser.blockstatement 'if (true) { return; }', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #2', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #3', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #4', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #5', ->
convention = parser.blockstatement 'if (true){ return; }', {}
convention.blockstatement.onespace.should.equal 0
it 'check block statement with no space #1', ->
convention = parser.blockstatement 'if (true){ return (); }', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #2', ->
convention = parser.blockstatement '}else if (true){', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #3', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement with no space #4', ->
convention = parser.blockstatement '} else if(true) {', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement at new line #1', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #2', ->
convention = parser.blockstatement 'if (true) // comment', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #3', ->
convention = parser.blockstatement 'if (true)/* */', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #4', ->
convention = parser.blockstatement 'else if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #5', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #6', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.newline.should.equal 0
describe 'linelength >', ->
it 'line length is 80 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #3', ->
convention = parser.linelength '\t\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 0
it 'line length is 120 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #3', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char120.should.equal 0
it 'line length is 150 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; } //afijfjeovjfiejffjeifjidjvosjfiejfioejovfjeifjiejfosjfioejfoiejfoi', {}
convention.linelength.char150.should.equal 1
describe 'quotes >', ->
it 'single quote #1', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.single.should.equal 1
it 'single quote #2', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #3', ->
convention = parser.quotes """ var foo = '<div id=\'bar\'>baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #4', ->
convention = parser.quotes """ 'key': 'value' """
convention.quotes.single.should.equal 1
it 'single quote #5', ->
convention = parser.quotes """ 'key': true """
convention.quotes.single.should.equal 1
it 'single quote #6', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.single.should.equal 0
it 'single quote #7', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.single.should.equal 0
it 'single quote #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.single.should.equal 0
it 'double quotes #1', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.double.should.equal 1
it 'double quotes #2', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #3', ->
convention = parser.quotes """ var foo = "<div id=\"bar\">baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #4', ->
convention = parser.quotes """ "key": "value" """
convention.quotes.double.should.equal 1
it 'double quotes #5', ->
convention = parser.quotes """ "key": true """
convention.quotes.double.should.equal 1
it 'double quotes #6', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.double.should.equal 0
it 'double quotes #7', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.double.should.equal 0
it 'double quotes #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.double.should.equal 0 | true | # Copyright (c) 2013 PI:NAME:<NAME>END_PI aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
should = require 'should'
parser = require '../../src/parser/js-parser'
describe 'js-parser >', ->
describe 'comma >', ->
it 'check first comma #1', ->
convention = parser.comma ',fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #2', ->
convention = parser.comma ' ,fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check first comma #3', ->
convention = parser.comma ' fs = , require(\'fs\'),', {}
convention.comma.first.should.equal 0
it 'check first comma #4', ->
convention = parser.comma ' , fs = require(\'fs\')', {}
convention.comma.first.should.equal 1
it 'check last comma #1', ->
convention = parser.comma 'fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #2', ->
convention = parser.comma ' fs = require(\'fs\'),', {}
convention.comma.last.should.equal 1
it 'check last comma #3', ->
convention = parser.comma ' fs = require(\'fs\'), ', {}
convention.comma.last.should.equal 1
it 'check last comma #4', ->
convention = parser.comma ' ,fs = ,require(\'fs\'),', {}
convention.comma.last.should.equal 1
describe 'indent >', ->
it 'check space indent #1', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.space.should.equal 0
it 'check space indent #2', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #3', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check space indent #4', ->
convention = parser.indent ' var a = 1;', {}
convention.indent.space.should.equal 1
it 'check tab indent #1', ->
convention = parser.indent '\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #2', ->
convention = parser.indent '\t\tvar a = 1;', {}
convention.indent.tab.should.equal 1
it 'check tab indent #3', ->
convention = parser.indent '\t\t var a = 1; ', {}
convention.indent.tab.should.equal 1
it 'check tab indent #4', ->
convention = parser.indent ' \tvar a = 1;', {}
convention.indent.tab.should.equal 0
it 'check tab indent #5', ->
convention = parser.indent 'var a = 1;', {}
convention.indent.tab.should.equal 0
describe 'functiondef >', ->
it 'check function definition followed by no space #1', ->
convention = parser.functiondef 'var a = function() {', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #2', ->
convention = parser.functiondef 'var a = function() { return 1; };', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #3', ->
convention = parser.functiondef 'function a() {}', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.nospace.should.equal 1
it 'check function definition followed by no space #5', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.nospace.should.equal 0
it 'check function definition followed by one space #1', ->
convention = parser.functiondef 'var a = function () { return 1; };', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #2', ->
convention = parser.functiondef 'function a () {}', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #3', ->
convention = parser.functiondef 'a.fn(function () {})', {}
convention.functiondef.onespace.should.equal 1
it 'check function definition followed by one space #4', ->
convention = parser.functiondef 'a.fn(function() {})', {}
convention.functiondef.onespace.should.equal 0
describe 'argumentdef >', ->
it 'check argument definition with one space #1', ->
convention = parser.argumentdef 'function a( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #2', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #3', ->
convention = parser.argumentdef 'a.fn(function( arg1, arg2 ) {})', {}
convention.argumentdef.onespace.should.equal 1
it 'check argument definition with one space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.onespace.should.equal 0
it 'check argument definition with no space #1', ->
convention = parser.argumentdef 'var a = function(arg1, arg2) {', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #2', ->
convention = parser.argumentdef 'var a = function (arg1, arg2) { return 1; };', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #3', ->
convention = parser.argumentdef 'function a(arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #4', ->
convention = parser.argumentdef 'a.fn(function (arg1, arg2) {})', {}
convention.argumentdef.nospace.should.equal 1
it 'check argument definition with no space #5', ->
convention = parser.argumentdef 'function a ( arg1, arg2 ) {}', {}
convention.argumentdef.nospace.should.equal 0
it 'check argument definition with no space #6', ->
convention = parser.argumentdef ' }//if -', {}
convention.argumentdef.nospace.should.equal 0
convention.argumentdef.onespace.should.equal 0
describe 'literaldef >', ->
it 'check object literal definition with trace space #1', ->
convention = parser.literaldef ' init: "value",', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #2', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #3', ->
convention = parser.literaldef '{ key: value, key: value }', {}
convention.literaldef.tracespace.should.equal 1
it 'check object literal definition with trace space #4', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.tracespace.should.equal 0
it 'check object literal definition with both space #1', ->
convention = parser.literaldef ' init : "value",', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #2', ->
convention = parser.literaldef ' init : function() { ', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #3', ->
convention = parser.literaldef '{ key : value, key: value }', {}
convention.literaldef.bothspace.should.equal 1
it 'check object literal definition with both space #4', ->
convention = parser.literaldef ' init: function() { ', {}
convention.literaldef.bothspace.should.equal 0
it 'check object literal definition with no space #1', ->
convention = parser.literaldef ' init:"value",', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #2', ->
convention = parser.literaldef ' init:function() { ', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #3', ->
convention = parser.literaldef '{ key:value, key: value }', {}
convention.literaldef.nospace.should.equal 1
it 'check object literal definition with no space #4', ->
convention = parser.literaldef ' init :function() { ', {}
convention.literaldef.nospace.should.equal 0
describe 'conditionstatement >', ->
it 'check condition statement with one space #1', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #2', ->
convention = parser.conditionstatement 'while ( ture ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #3', ->
convention = parser.conditionstatement 'switch ( a ) {', {}
convention.conditionstatement.onespace.should.equal 1
it 'check condition statement with one space #4', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.onespace.should.equal 0
it 'check condition statement with no space #1', ->
convention = parser.conditionstatement 'if( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #2', ->
convention = parser.conditionstatement 'while( ture ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #3', ->
convention = parser.conditionstatement 'switch( a ) {', {}
convention.conditionstatement.nospace.should.equal 1
it 'check condition statement with no space #4', ->
convention = parser.conditionstatement 'if ( a = 1) {', {}
convention.conditionstatement.nospace.should.equal 0
describe 'blockstatement >', ->
it 'check block statement with one space #1', ->
convention = parser.blockstatement 'if (true) { return; }', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #2', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #3', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #4', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.onespace.should.equal 1
it 'check block statement with one space #5', ->
convention = parser.blockstatement 'if (true){ return; }', {}
convention.blockstatement.onespace.should.equal 0
it 'check block statement with no space #1', ->
convention = parser.blockstatement 'if (true){ return (); }', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #2', ->
convention = parser.blockstatement '}else if (true){', {}
convention.blockstatement.nospace.should.equal 1
it 'check block statement with no space #3', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement with no space #4', ->
convention = parser.blockstatement '} else if(true) {', {}
convention.blockstatement.nospace.should.equal 0
it 'check block statement at new line #1', ->
convention = parser.blockstatement 'if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #2', ->
convention = parser.blockstatement 'if (true) // comment', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #3', ->
convention = parser.blockstatement 'if (true)/* */', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #4', ->
convention = parser.blockstatement 'else if (true)', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #5', ->
convention = parser.blockstatement 'else if (true) {', {}
convention.blockstatement.newline.should.equal 1
it 'check block statement at new line #6', ->
convention = parser.blockstatement '} else if ( true ) {', {}
convention.blockstatement.newline.should.equal 0
describe 'linelength >', ->
it 'line length is 80 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 1
it 'line length is 80 characters #3', ->
convention = parser.linelength '\t\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char80.should.equal 0
it 'line length is 120 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #2', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age, String job) { return \"a\"; }', {}
convention.linelength.char120.should.equal 1
it 'line length is 120 characters #3', ->
convention = parser.linelength '\t\tpublic String findFirstName( String name, String age) { return \"a\"; }', {}
convention.linelength.char120.should.equal 0
it 'line length is 150 characters #1', ->
convention = parser.linelength ' public String findFirstName( String name, String age, String job) { return \"a\"; } //afijfjeovjfiejffjeifjidjvosjfiejfioejovfjeifjiejfosjfioejfoiejfoi', {}
convention.linelength.char150.should.equal 1
describe 'quotes >', ->
it 'single quote #1', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.single.should.equal 1
it 'single quote #2', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #3', ->
convention = parser.quotes """ var foo = '<div id=\'bar\'>baz</div>';"""
convention.quotes.single.should.equal 1
it 'single quote #4', ->
convention = parser.quotes """ 'key': 'value' """
convention.quotes.single.should.equal 1
it 'single quote #5', ->
convention = parser.quotes """ 'key': true """
convention.quotes.single.should.equal 1
it 'single quote #6', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.single.should.equal 0
it 'single quote #7', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.single.should.equal 0
it 'single quote #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.single.should.equal 0
it 'double quotes #1', ->
convention = parser.quotes """ var foo = "bar";"""
convention.quotes.double.should.equal 1
it 'double quotes #2', ->
convention = parser.quotes """ var foo = "<div id='bar'>baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #3', ->
convention = parser.quotes """ var foo = "<div id=\"bar\">baz</div>";"""
convention.quotes.double.should.equal 1
it 'double quotes #4', ->
convention = parser.quotes """ "key": "value" """
convention.quotes.double.should.equal 1
it 'double quotes #5', ->
convention = parser.quotes """ "key": true """
convention.quotes.double.should.equal 1
it 'double quotes #6', ->
convention = parser.quotes """ var foo = 'bar';"""
convention.quotes.double.should.equal 0
it 'double quotes #7', ->
convention = parser.quotes """ var foo = '<div id="bar">baz</div>';"""
convention.quotes.double.should.equal 0
it 'double quotes #8', ->
convention = parser.quotes """ 'key': "value" """
convention.quotes.double.should.equal 0 |
[
{
"context": "b}.{b}.{c}.{c}.{d}\"\nglobalContext =\n my_name: \"Wojciech Fraczak\"\n n: 172\n\nfn = buildTranslatorFn dict, globalC",
"end": 608,
"score": 0.9998134970664978,
"start": 592,
"tag": "NAME",
"value": "Wojciech Fraczak"
}
] | test.coffee | fraczak/re-write-js | 1 | {buildTranslatorFn} = require "./index.coffee"
build_dict = require "./build-dict.coffee"
dict =
"{}": (x) -> x
"1" : -> "one"
"I am {}": (x) ->
"I am #{x.toLocaleString()}"
"Success" : -> "Success"
"Today is {}" : ->
"Today is #{new Date().toLocaleString()}"
"{} apple(s)": (n) ->
return "#{n} apples" if n > 1
return "one apple" if n is 1
return "one apple" if n is "1"
return "no apple"
"{a} plus {b} equals {c}" : "{c} = {a} + {b}"
"{a}{b}{c}{d}":"{a}.{a}.{b}.{b}.{c}.{c}.{d}"
globalContext =
my_name: "Wojciech Fraczak"
n: 172
fn = buildTranslatorFn dict, globalContext
[
'fn("{=as is}")'
'fn("I am {my_name}")'
'fn("I am {name}",{name:"myself"})'
'fn("Today is {}")'
'fn("{x} apple(s)",{x:1})'
'fn("{n} apple(s)")'
'fn("{=1} apple(s)")'
'fn("{=2} plus {=3} equals {=5}")'
'fn("{=2}{=3}{n}{=5}")'
'fn("{=2}{=3}{nn}{=5}")'
'fn("{=2}{=3}{nn}{=5}",{nn:0})'
]. map (x) ->
console.log " > #{x} => #{eval x}"
| 129404 | {buildTranslatorFn} = require "./index.coffee"
build_dict = require "./build-dict.coffee"
dict =
"{}": (x) -> x
"1" : -> "one"
"I am {}": (x) ->
"I am #{x.toLocaleString()}"
"Success" : -> "Success"
"Today is {}" : ->
"Today is #{new Date().toLocaleString()}"
"{} apple(s)": (n) ->
return "#{n} apples" if n > 1
return "one apple" if n is 1
return "one apple" if n is "1"
return "no apple"
"{a} plus {b} equals {c}" : "{c} = {a} + {b}"
"{a}{b}{c}{d}":"{a}.{a}.{b}.{b}.{c}.{c}.{d}"
globalContext =
my_name: "<NAME>"
n: 172
fn = buildTranslatorFn dict, globalContext
[
'fn("{=as is}")'
'fn("I am {my_name}")'
'fn("I am {name}",{name:"myself"})'
'fn("Today is {}")'
'fn("{x} apple(s)",{x:1})'
'fn("{n} apple(s)")'
'fn("{=1} apple(s)")'
'fn("{=2} plus {=3} equals {=5}")'
'fn("{=2}{=3}{n}{=5}")'
'fn("{=2}{=3}{nn}{=5}")'
'fn("{=2}{=3}{nn}{=5}",{nn:0})'
]. map (x) ->
console.log " > #{x} => #{eval x}"
| true | {buildTranslatorFn} = require "./index.coffee"
build_dict = require "./build-dict.coffee"
dict =
"{}": (x) -> x
"1" : -> "one"
"I am {}": (x) ->
"I am #{x.toLocaleString()}"
"Success" : -> "Success"
"Today is {}" : ->
"Today is #{new Date().toLocaleString()}"
"{} apple(s)": (n) ->
return "#{n} apples" if n > 1
return "one apple" if n is 1
return "one apple" if n is "1"
return "no apple"
"{a} plus {b} equals {c}" : "{c} = {a} + {b}"
"{a}{b}{c}{d}":"{a}.{a}.{b}.{b}.{c}.{c}.{d}"
globalContext =
my_name: "PI:NAME:<NAME>END_PI"
n: 172
fn = buildTranslatorFn dict, globalContext
[
'fn("{=as is}")'
'fn("I am {my_name}")'
'fn("I am {name}",{name:"myself"})'
'fn("Today is {}")'
'fn("{x} apple(s)",{x:1})'
'fn("{n} apple(s)")'
'fn("{=1} apple(s)")'
'fn("{=2} plus {=3} equals {=5}")'
'fn("{=2}{=3}{n}{=5}")'
'fn("{=2}{=3}{nn}{=5}")'
'fn("{=2}{=3}{nn}{=5}",{nn:0})'
]. map (x) ->
console.log " > #{x} => #{eval x}"
|
[
{
"context": "###\nCopyright 2016 Balena\n\nLicensed under the Apache License, Version 2.0 (",
"end": 25,
"score": 0.9804084300994873,
"start": 19,
"tag": "NAME",
"value": "Balena"
}
] | lib/utils.coffee | resin-io-modules/resin-device-init | 3 | ###
Copyright 2016 Balena
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
Promise = require('bluebird')
_ = require('lodash')
rindle = Promise.promisifyAll(require('rindle'))
path = require('path')
stringToStream = require('string-to-stream')
imagefs = require('balena-image-fs')
###*
# @summary Get device type manifest of an image
# @function
#
# @param {String} image - path to image
# @returns {Promise<Object | null>} device type manifest or null
#
# @example
# utils.getImageManifest('path/to/image.img', 'raspberry-pi').then (manifest) ->
# console.log(manifest)
###
exports.getImageManifest = (image) ->
# Attempt to read manifest from the first
# partition, but fallback to the API if
# we encounter any errors along the way.
Promise.resolve imagefs.interact(
image
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/device-type.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.catchReturn(null)
###*
# @summary Convert a device type file definition to resin-image-fs v4 format
# @function
# @protected
#
# @param {Object} definition - write definition
#
# @returns {Object} a converted write definition
#
# @example
# utils.convertFileDefinition
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.convertFilePathDefinition = convertFilePathDefinition = (inputDefinition) ->
definition = _.cloneDeep(inputDefinition)
if _.isObject(definition.partition)
# Partition numbering is now numerical, following the linux
# conventions in 5.95 of the TLDP's system admin guide:
# http://www.tldp.org/LDP/sag/html/partitions.html#DEV-FILES-PARTS
if definition.partition.logical?
definition.partition = definition.partition.logical + 4
else
definition.partition = definition.partition.primary
return definition
###*
# @summary Add image info to a device type config definition
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} definition - write definition
#
# @returns {Object} a write definition
#
# @example
# utils.definitionForImage 'my/rpi.img',
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.definitionForImage = definitionForImage = (image, configDefinition) ->
configDefinition = _.cloneDeep(configDefinition)
if configDefinition.image?
# Sometimes (e.g. edison) our 'image' is a folder of images, and the
# config specifies which one within that we should be using
configDefinition.image = path.join(image, configDefinition.image)
else
configDefinition.image = image
return configDefinition
###*
# @summary Get image OS version
# @function
#
# @param {String} image - path to image
# @param {Object} manifest - device type manifest
# @returns {Promise<string|null>} ResinOS version, or null if it could not be determined
#
# @example
# utils.getImageOsVersion('path/to/image.img', manifest).then (version) ->
# console.log(version)
###
exports.getImageOsVersion = (image, manifest) ->
# Try to determine the location where os-release is stored. This is always
# stored alongside "config.json" so look into the manifest if given, and
# fallback to a sensible default if not. This should be able to handle a
# wide range of regular images with several partitions as well as cases like
# with Edison where "image" points to a folder structure.
definition = manifest?.configuration.config ? { partition: 1 }
definition = definitionForImage(image, definition)
definition = convertFilePathDefinition(definition)
definition.path = '/os-release'
return Promise.resolve(
imagefs.interact(
definition.image
definition.partition
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync(definition.path, { encoding: 'utf8' })
)
)
.then (osReleaseString) ->
parsedOsRelease = _(osReleaseString)
.split('\n')
.map (line) ->
match = line.match(/(.*)=(.*)/)
if match
return [
match[1],
match[2].replace(/^"(.*)"$/, '$1').replace(/^'(.*)'$/, '$1')
]
else
return false
.filter()
.fromPairs()
.value()
if parsedOsRelease.NAME != 'Resin OS' and parsedOsRelease.NAME != 'balenaOS'
return null
else
return parsedOsRelease.VERSION || null
.catchReturn(null)
###*
# @summary Write config.json to image
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} config - config.json object
# @param {Object} definition - write definition
#
# @returns {Promise}
#
# @example
# utils.writeConfigJSON 'my/rpi.img',
# hello: 'world'
# ,
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.writeConfigJSON = (image, config, definition) ->
config = JSON.stringify(config)
definition = exports.definitionForImage(image, definition)
return imagefs.interact(
definition.image
definition.partition
(_fs) ->
writeFileAsync = Promise.promisify(_fs.writeFile)
return writeFileAsync(definition.path, config)
)
| 212003 | ###
Copyright 2016 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
Promise = require('bluebird')
_ = require('lodash')
rindle = Promise.promisifyAll(require('rindle'))
path = require('path')
stringToStream = require('string-to-stream')
imagefs = require('balena-image-fs')
###*
# @summary Get device type manifest of an image
# @function
#
# @param {String} image - path to image
# @returns {Promise<Object | null>} device type manifest or null
#
# @example
# utils.getImageManifest('path/to/image.img', 'raspberry-pi').then (manifest) ->
# console.log(manifest)
###
exports.getImageManifest = (image) ->
# Attempt to read manifest from the first
# partition, but fallback to the API if
# we encounter any errors along the way.
Promise.resolve imagefs.interact(
image
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/device-type.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.catchReturn(null)
###*
# @summary Convert a device type file definition to resin-image-fs v4 format
# @function
# @protected
#
# @param {Object} definition - write definition
#
# @returns {Object} a converted write definition
#
# @example
# utils.convertFileDefinition
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.convertFilePathDefinition = convertFilePathDefinition = (inputDefinition) ->
definition = _.cloneDeep(inputDefinition)
if _.isObject(definition.partition)
# Partition numbering is now numerical, following the linux
# conventions in 5.95 of the TLDP's system admin guide:
# http://www.tldp.org/LDP/sag/html/partitions.html#DEV-FILES-PARTS
if definition.partition.logical?
definition.partition = definition.partition.logical + 4
else
definition.partition = definition.partition.primary
return definition
###*
# @summary Add image info to a device type config definition
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} definition - write definition
#
# @returns {Object} a write definition
#
# @example
# utils.definitionForImage 'my/rpi.img',
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.definitionForImage = definitionForImage = (image, configDefinition) ->
configDefinition = _.cloneDeep(configDefinition)
if configDefinition.image?
# Sometimes (e.g. edison) our 'image' is a folder of images, and the
# config specifies which one within that we should be using
configDefinition.image = path.join(image, configDefinition.image)
else
configDefinition.image = image
return configDefinition
###*
# @summary Get image OS version
# @function
#
# @param {String} image - path to image
# @param {Object} manifest - device type manifest
# @returns {Promise<string|null>} ResinOS version, or null if it could not be determined
#
# @example
# utils.getImageOsVersion('path/to/image.img', manifest).then (version) ->
# console.log(version)
###
exports.getImageOsVersion = (image, manifest) ->
# Try to determine the location where os-release is stored. This is always
# stored alongside "config.json" so look into the manifest if given, and
# fallback to a sensible default if not. This should be able to handle a
# wide range of regular images with several partitions as well as cases like
# with Edison where "image" points to a folder structure.
definition = manifest?.configuration.config ? { partition: 1 }
definition = definitionForImage(image, definition)
definition = convertFilePathDefinition(definition)
definition.path = '/os-release'
return Promise.resolve(
imagefs.interact(
definition.image
definition.partition
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync(definition.path, { encoding: 'utf8' })
)
)
.then (osReleaseString) ->
parsedOsRelease = _(osReleaseString)
.split('\n')
.map (line) ->
match = line.match(/(.*)=(.*)/)
if match
return [
match[1],
match[2].replace(/^"(.*)"$/, '$1').replace(/^'(.*)'$/, '$1')
]
else
return false
.filter()
.fromPairs()
.value()
if parsedOsRelease.NAME != 'Resin OS' and parsedOsRelease.NAME != 'balenaOS'
return null
else
return parsedOsRelease.VERSION || null
.catchReturn(null)
###*
# @summary Write config.json to image
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} config - config.json object
# @param {Object} definition - write definition
#
# @returns {Promise}
#
# @example
# utils.writeConfigJSON 'my/rpi.img',
# hello: 'world'
# ,
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.writeConfigJSON = (image, config, definition) ->
config = JSON.stringify(config)
definition = exports.definitionForImage(image, definition)
return imagefs.interact(
definition.image
definition.partition
(_fs) ->
writeFileAsync = Promise.promisify(_fs.writeFile)
return writeFileAsync(definition.path, config)
)
| true | ###
Copyright 2016 PI:NAME:<NAME>END_PI
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
Promise = require('bluebird')
_ = require('lodash')
rindle = Promise.promisifyAll(require('rindle'))
path = require('path')
stringToStream = require('string-to-stream')
imagefs = require('balena-image-fs')
###*
# @summary Get device type manifest of an image
# @function
#
# @param {String} image - path to image
# @returns {Promise<Object | null>} device type manifest or null
#
# @example
# utils.getImageManifest('path/to/image.img', 'raspberry-pi').then (manifest) ->
# console.log(manifest)
###
exports.getImageManifest = (image) ->
# Attempt to read manifest from the first
# partition, but fallback to the API if
# we encounter any errors along the way.
Promise.resolve imagefs.interact(
image
1
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync('/device-type.json', { encoding: 'utf8' })
)
.then(JSON.parse)
.catchReturn(null)
###*
# @summary Convert a device type file definition to resin-image-fs v4 format
# @function
# @protected
#
# @param {Object} definition - write definition
#
# @returns {Object} a converted write definition
#
# @example
# utils.convertFileDefinition
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.convertFilePathDefinition = convertFilePathDefinition = (inputDefinition) ->
definition = _.cloneDeep(inputDefinition)
if _.isObject(definition.partition)
# Partition numbering is now numerical, following the linux
# conventions in 5.95 of the TLDP's system admin guide:
# http://www.tldp.org/LDP/sag/html/partitions.html#DEV-FILES-PARTS
if definition.partition.logical?
definition.partition = definition.partition.logical + 4
else
definition.partition = definition.partition.primary
return definition
###*
# @summary Add image info to a device type config definition
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} definition - write definition
#
# @returns {Object} a write definition
#
# @example
# utils.definitionForImage 'my/rpi.img',
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.definitionForImage = definitionForImage = (image, configDefinition) ->
configDefinition = _.cloneDeep(configDefinition)
if configDefinition.image?
# Sometimes (e.g. edison) our 'image' is a folder of images, and the
# config specifies which one within that we should be using
configDefinition.image = path.join(image, configDefinition.image)
else
configDefinition.image = image
return configDefinition
###*
# @summary Get image OS version
# @function
#
# @param {String} image - path to image
# @param {Object} manifest - device type manifest
# @returns {Promise<string|null>} ResinOS version, or null if it could not be determined
#
# @example
# utils.getImageOsVersion('path/to/image.img', manifest).then (version) ->
# console.log(version)
###
exports.getImageOsVersion = (image, manifest) ->
# Try to determine the location where os-release is stored. This is always
# stored alongside "config.json" so look into the manifest if given, and
# fallback to a sensible default if not. This should be able to handle a
# wide range of regular images with several partitions as well as cases like
# with Edison where "image" points to a folder structure.
definition = manifest?.configuration.config ? { partition: 1 }
definition = definitionForImage(image, definition)
definition = convertFilePathDefinition(definition)
definition.path = '/os-release'
return Promise.resolve(
imagefs.interact(
definition.image
definition.partition
(_fs) ->
readFileAsync = Promise.promisify(_fs.readFile)
return readFileAsync(definition.path, { encoding: 'utf8' })
)
)
.then (osReleaseString) ->
parsedOsRelease = _(osReleaseString)
.split('\n')
.map (line) ->
match = line.match(/(.*)=(.*)/)
if match
return [
match[1],
match[2].replace(/^"(.*)"$/, '$1').replace(/^'(.*)'$/, '$1')
]
else
return false
.filter()
.fromPairs()
.value()
if parsedOsRelease.NAME != 'Resin OS' and parsedOsRelease.NAME != 'balenaOS'
return null
else
return parsedOsRelease.VERSION || null
.catchReturn(null)
###*
# @summary Write config.json to image
# @function
# @protected
#
# @param {String} image - image path
# @param {Object} config - config.json object
# @param {Object} definition - write definition
#
# @returns {Promise}
#
# @example
# utils.writeConfigJSON 'my/rpi.img',
# hello: 'world'
# ,
# partition:
# primary: 4
# logical: 1
# path: '/config.json'
###
exports.writeConfigJSON = (image, config, definition) ->
config = JSON.stringify(config)
definition = exports.definitionForImage(image, definition)
return imagefs.interact(
definition.image
definition.partition
(_fs) ->
writeFileAsync = Promise.promisify(_fs.writeFile)
return writeFileAsync(definition.path, config)
)
|
[
{
"context": "h) ->\n $scope.user =\n name: null\n password: null\n\n $scope.signup = (user) ->\n Auth.signup(user",
"end": 140,
"score": 0.9963943958282471,
"start": 136,
"tag": "PASSWORD",
"value": "null"
}
] | app/pages/signup/controller.coffee | asartalo/axya | 0 | "use strict"
angular.module 'axyaApp'
.controller 'SignupCtrl', ($scope, $state, Auth) ->
$scope.user =
name: null
password: null
$scope.signup = (user) ->
Auth.signup(user).then(
(resp) ->
$state.go('dashboard')
(err) ->
console.log err, err.data
)
| 97456 | "use strict"
angular.module 'axyaApp'
.controller 'SignupCtrl', ($scope, $state, Auth) ->
$scope.user =
name: null
password: <PASSWORD>
$scope.signup = (user) ->
Auth.signup(user).then(
(resp) ->
$state.go('dashboard')
(err) ->
console.log err, err.data
)
| true | "use strict"
angular.module 'axyaApp'
.controller 'SignupCtrl', ($scope, $state, Auth) ->
$scope.user =
name: null
password: PI:PASSWORD:<PASSWORD>END_PI
$scope.signup = (user) ->
Auth.signup(user).then(
(resp) ->
$state.go('dashboard')
(err) ->
console.log err, err.data
)
|
[
{
"context": "###\n backbone-orm.js 0.5.12\n Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm\n Lice",
"end": 58,
"score": 0.9990522861480713,
"start": 50,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": " Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm\n License: MIT (http://www.opensourc",
"end": 88,
"score": 0.9997290968894958,
"start": 80,
"tag": "USERNAME",
"value": "vidigami"
}
] | src/cache/memory_store.coffee | michaelBenin/backbone-orm | 1 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
LRU = require 'lru-cache'
inflection = require 'inflection'
module.exports = class MemoryStore
constructor: (options={}) ->
normalized_options = {}
for key, value of options
if key is 'destroy'
normalized_options.dispose = value
else
normalized_options[@_normalizeKey(key)] = value
@cache = new LRU(normalized_options)
set: (key, value, callback) =>
return callback?(null, value) or @ if value._orm_never_cache # skip cache
@cache.set(key, value)
callback?(null, value)
return @
get: (key, callback) =>
value = @cache.get(key)
callback?(null, value)
return value
destroy: (key, callback) =>
@cache.del(key)
callback?()
return @
reset: (callback) =>
@cache.reset()
callback?()
return @
# @private
_normalizeKey: (key) ->
key = inflection.underscore(key)
return key.toLowerCase() if key.indexOf('_') < 0
return inflection.camelize(key)
forEach: (callback) => @cache.forEach(callback)
| 81968 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 <NAME> - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
LRU = require 'lru-cache'
inflection = require 'inflection'
module.exports = class MemoryStore
constructor: (options={}) ->
normalized_options = {}
for key, value of options
if key is 'destroy'
normalized_options.dispose = value
else
normalized_options[@_normalizeKey(key)] = value
@cache = new LRU(normalized_options)
set: (key, value, callback) =>
return callback?(null, value) or @ if value._orm_never_cache # skip cache
@cache.set(key, value)
callback?(null, value)
return @
get: (key, callback) =>
value = @cache.get(key)
callback?(null, value)
return value
destroy: (key, callback) =>
@cache.del(key)
callback?()
return @
reset: (callback) =>
@cache.reset()
callback?()
return @
# @private
_normalizeKey: (key) ->
key = inflection.underscore(key)
return key.toLowerCase() if key.indexOf('_') < 0
return inflection.camelize(key)
forEach: (callback) => @cache.forEach(callback)
| true | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 PI:NAME:<NAME>END_PI - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
LRU = require 'lru-cache'
inflection = require 'inflection'
module.exports = class MemoryStore
constructor: (options={}) ->
normalized_options = {}
for key, value of options
if key is 'destroy'
normalized_options.dispose = value
else
normalized_options[@_normalizeKey(key)] = value
@cache = new LRU(normalized_options)
set: (key, value, callback) =>
return callback?(null, value) or @ if value._orm_never_cache # skip cache
@cache.set(key, value)
callback?(null, value)
return @
get: (key, callback) =>
value = @cache.get(key)
callback?(null, value)
return value
destroy: (key, callback) =>
@cache.del(key)
callback?()
return @
reset: (callback) =>
@cache.reset()
callback?()
return @
# @private
_normalizeKey: (key) ->
key = inflection.underscore(key)
return key.toLowerCase() if key.indexOf('_') < 0
return inflection.camelize(key)
forEach: (callback) => @cache.forEach(callback)
|
[
{
"context": "vc: cvcDots\n expiry: expiryDots\n name: 'Full Name'\n\n componentWillMount: ->\n # add special styl",
"end": 593,
"score": 0.9973412752151489,
"start": 584,
"tag": "NAME",
"value": "Full Name"
}
] | src/components/card-react-component.cjsx | redsift/card-react | 145 | React = require 'react'
createReactClass = require 'create-react-class'
Payment = require 'payment'
ClassNames = require 'classnames'
dot = String.fromCharCode('8226')
ccDots = dot + dot + dot + dot
cvcDots = dot + dot + dot
expiryDots = dot + dot + '/' + dot + dot
CardReact = createReactClass
displayName: "CardReact"
getDefaultProps: ->
messages:
validDate: 'valid\nthru'
monthYear: 'month/year'
baseWidth: 350
defaultValues:
number: ccDots + " " + ccDots + " " + ccDots + " " + ccDots
cvc: cvcDots
expiry: expiryDots
name: 'Full Name'
componentWillMount: ->
# add special styling according to specific browser
@cardBrowserClass = @_addBrowserClass()
_addBrowserClass: ->
# safari can't handle transparent radial gradient right now
if navigator?.userAgent
ua = navigator.userAgent.toLowerCase()
if ua.indexOf('safari') != -1 and ua.indexOf('chrome') == -1
return 'jp-card-safari'
if (/MSIE 10\./i.test(navigator.userAgent))
return 'jp-card-ie-10'
# ie 11 does not support conditional compilation, use user agent instead
if (/rv:11.0/i.test(navigator.userAgent))
return 'jp-card-ie-11'
else return ''
_getCardType: (cardNumber)->
@cardType = Payment.fns.cardType cardNumber
if @cardType and @cardType isnt 'unknown'
return "jp-card-#{@cardType} jp-card-identified"
_isFocusedInput: (inputName)->
currentInputValue = @props.formInputsNames[inputName]
# focused styling is applied to the currently focused element
# and all other elements with user input values
return ((@props.focusedInput is currentInputValue) or @props.inputsValues[currentInputValue])
_getInputValidationState: (inputName)->
validationState = @props.inputsValidationClass[@props.formInputsNames[inputName]]
if typeof validationState is 'undefined'
''
else
validationState
render: ->
containerStyle = {}
# scale the card according to the width prop
if @props.width
scaleWidth = "scale(#{@props.width / @props.baseWidth})"
containerStyle =
WebkitTransform: scaleWidth
MozTransform: scaleWidth
msTransform: scaleWidth
OTransform: scaleWidth
transform: scaleWidth
# format the expiry value
expiryValue = @props.inputsValues[@props.formInputsNames["expiry"]]
if expiryValue
expiryValue = expiryValue.replace(/\s+/g, '')
else
expiryValue = @props.defaultValues.expiry
return (
<div className="jp-card-container" style={containerStyle}>
<div className={ClassNames("jp-card", @cardBrowserClass, @_getCardType(@props.inputsValues[@props.formInputsNames["number"]]), 'jp-card-flipped': @props.cardFlipped)}>
<div className="jp-card-front">
<div className="jp-card-logo jp-card-visa">visa</div>
<div className="jp-card-logo jp-card-mastercard">MasterCard</div>
<div className="jp-card-logo jp-card-maestro">Maestro</div>
<div className="jp-card-logo jp-card-amex"></div>
<div className="jp-card-logo jp-card-discover">discover</div>
<div className="jp-card-logo jp-card-dankort"><div className="dk"><div className="d"></div><div className="k"></div></div></div>
<div className="jp-card-lower">
<div className="jp-card-shiny"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className={ClassNames("jp-card-number", "jp-card-display", "jp-card-focused": @_isFocusedInput("number"), @_getInputValidationState("number"))}>
{@props.inputsValues[@props.formInputsNames["number"]] or @props.defaultValues.number}
</div>
<div className={ClassNames("jp-card-name", "jp-card-display", "jp-card-focused": @_isFocusedInput("name"), @_getInputValidationState("name"))}>
{@props.inputsValues[@props.formInputsNames["name"]] or @props.defaultValues.name}
</div>
<div className={ClassNames("jp-card-expiry", "jp-card-display", "jp-card-focused": @_isFocusedInput("expiry"), @_getInputValidationState("expiry"))}
data-before={@props.messages.monthYear}
data-after={@props.messages.validDate}>
{expiryValue}
</div>
</div>
</div>
<div className="jp-card-back">
<div className="jp-card-bar"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className="jp-card-shiny"></div>
</div>
</div>
</div>
)
module.exports = CardReact
| 146696 | React = require 'react'
createReactClass = require 'create-react-class'
Payment = require 'payment'
ClassNames = require 'classnames'
dot = String.fromCharCode('8226')
ccDots = dot + dot + dot + dot
cvcDots = dot + dot + dot
expiryDots = dot + dot + '/' + dot + dot
CardReact = createReactClass
displayName: "CardReact"
getDefaultProps: ->
messages:
validDate: 'valid\nthru'
monthYear: 'month/year'
baseWidth: 350
defaultValues:
number: ccDots + " " + ccDots + " " + ccDots + " " + ccDots
cvc: cvcDots
expiry: expiryDots
name: '<NAME>'
componentWillMount: ->
# add special styling according to specific browser
@cardBrowserClass = @_addBrowserClass()
_addBrowserClass: ->
# safari can't handle transparent radial gradient right now
if navigator?.userAgent
ua = navigator.userAgent.toLowerCase()
if ua.indexOf('safari') != -1 and ua.indexOf('chrome') == -1
return 'jp-card-safari'
if (/MSIE 10\./i.test(navigator.userAgent))
return 'jp-card-ie-10'
# ie 11 does not support conditional compilation, use user agent instead
if (/rv:11.0/i.test(navigator.userAgent))
return 'jp-card-ie-11'
else return ''
_getCardType: (cardNumber)->
@cardType = Payment.fns.cardType cardNumber
if @cardType and @cardType isnt 'unknown'
return "jp-card-#{@cardType} jp-card-identified"
_isFocusedInput: (inputName)->
currentInputValue = @props.formInputsNames[inputName]
# focused styling is applied to the currently focused element
# and all other elements with user input values
return ((@props.focusedInput is currentInputValue) or @props.inputsValues[currentInputValue])
_getInputValidationState: (inputName)->
validationState = @props.inputsValidationClass[@props.formInputsNames[inputName]]
if typeof validationState is 'undefined'
''
else
validationState
render: ->
containerStyle = {}
# scale the card according to the width prop
if @props.width
scaleWidth = "scale(#{@props.width / @props.baseWidth})"
containerStyle =
WebkitTransform: scaleWidth
MozTransform: scaleWidth
msTransform: scaleWidth
OTransform: scaleWidth
transform: scaleWidth
# format the expiry value
expiryValue = @props.inputsValues[@props.formInputsNames["expiry"]]
if expiryValue
expiryValue = expiryValue.replace(/\s+/g, '')
else
expiryValue = @props.defaultValues.expiry
return (
<div className="jp-card-container" style={containerStyle}>
<div className={ClassNames("jp-card", @cardBrowserClass, @_getCardType(@props.inputsValues[@props.formInputsNames["number"]]), 'jp-card-flipped': @props.cardFlipped)}>
<div className="jp-card-front">
<div className="jp-card-logo jp-card-visa">visa</div>
<div className="jp-card-logo jp-card-mastercard">MasterCard</div>
<div className="jp-card-logo jp-card-maestro">Maestro</div>
<div className="jp-card-logo jp-card-amex"></div>
<div className="jp-card-logo jp-card-discover">discover</div>
<div className="jp-card-logo jp-card-dankort"><div className="dk"><div className="d"></div><div className="k"></div></div></div>
<div className="jp-card-lower">
<div className="jp-card-shiny"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className={ClassNames("jp-card-number", "jp-card-display", "jp-card-focused": @_isFocusedInput("number"), @_getInputValidationState("number"))}>
{@props.inputsValues[@props.formInputsNames["number"]] or @props.defaultValues.number}
</div>
<div className={ClassNames("jp-card-name", "jp-card-display", "jp-card-focused": @_isFocusedInput("name"), @_getInputValidationState("name"))}>
{@props.inputsValues[@props.formInputsNames["name"]] or @props.defaultValues.name}
</div>
<div className={ClassNames("jp-card-expiry", "jp-card-display", "jp-card-focused": @_isFocusedInput("expiry"), @_getInputValidationState("expiry"))}
data-before={@props.messages.monthYear}
data-after={@props.messages.validDate}>
{expiryValue}
</div>
</div>
</div>
<div className="jp-card-back">
<div className="jp-card-bar"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className="jp-card-shiny"></div>
</div>
</div>
</div>
)
module.exports = CardReact
| true | React = require 'react'
createReactClass = require 'create-react-class'
Payment = require 'payment'
ClassNames = require 'classnames'
dot = String.fromCharCode('8226')
ccDots = dot + dot + dot + dot
cvcDots = dot + dot + dot
expiryDots = dot + dot + '/' + dot + dot
CardReact = createReactClass
displayName: "CardReact"
getDefaultProps: ->
messages:
validDate: 'valid\nthru'
monthYear: 'month/year'
baseWidth: 350
defaultValues:
number: ccDots + " " + ccDots + " " + ccDots + " " + ccDots
cvc: cvcDots
expiry: expiryDots
name: 'PI:NAME:<NAME>END_PI'
componentWillMount: ->
# add special styling according to specific browser
@cardBrowserClass = @_addBrowserClass()
_addBrowserClass: ->
# safari can't handle transparent radial gradient right now
if navigator?.userAgent
ua = navigator.userAgent.toLowerCase()
if ua.indexOf('safari') != -1 and ua.indexOf('chrome') == -1
return 'jp-card-safari'
if (/MSIE 10\./i.test(navigator.userAgent))
return 'jp-card-ie-10'
# ie 11 does not support conditional compilation, use user agent instead
if (/rv:11.0/i.test(navigator.userAgent))
return 'jp-card-ie-11'
else return ''
_getCardType: (cardNumber)->
@cardType = Payment.fns.cardType cardNumber
if @cardType and @cardType isnt 'unknown'
return "jp-card-#{@cardType} jp-card-identified"
_isFocusedInput: (inputName)->
currentInputValue = @props.formInputsNames[inputName]
# focused styling is applied to the currently focused element
# and all other elements with user input values
return ((@props.focusedInput is currentInputValue) or @props.inputsValues[currentInputValue])
_getInputValidationState: (inputName)->
validationState = @props.inputsValidationClass[@props.formInputsNames[inputName]]
if typeof validationState is 'undefined'
''
else
validationState
render: ->
containerStyle = {}
# scale the card according to the width prop
if @props.width
scaleWidth = "scale(#{@props.width / @props.baseWidth})"
containerStyle =
WebkitTransform: scaleWidth
MozTransform: scaleWidth
msTransform: scaleWidth
OTransform: scaleWidth
transform: scaleWidth
# format the expiry value
expiryValue = @props.inputsValues[@props.formInputsNames["expiry"]]
if expiryValue
expiryValue = expiryValue.replace(/\s+/g, '')
else
expiryValue = @props.defaultValues.expiry
return (
<div className="jp-card-container" style={containerStyle}>
<div className={ClassNames("jp-card", @cardBrowserClass, @_getCardType(@props.inputsValues[@props.formInputsNames["number"]]), 'jp-card-flipped': @props.cardFlipped)}>
<div className="jp-card-front">
<div className="jp-card-logo jp-card-visa">visa</div>
<div className="jp-card-logo jp-card-mastercard">MasterCard</div>
<div className="jp-card-logo jp-card-maestro">Maestro</div>
<div className="jp-card-logo jp-card-amex"></div>
<div className="jp-card-logo jp-card-discover">discover</div>
<div className="jp-card-logo jp-card-dankort"><div className="dk"><div className="d"></div><div className="k"></div></div></div>
<div className="jp-card-lower">
<div className="jp-card-shiny"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className={ClassNames("jp-card-number", "jp-card-display", "jp-card-focused": @_isFocusedInput("number"), @_getInputValidationState("number"))}>
{@props.inputsValues[@props.formInputsNames["number"]] or @props.defaultValues.number}
</div>
<div className={ClassNames("jp-card-name", "jp-card-display", "jp-card-focused": @_isFocusedInput("name"), @_getInputValidationState("name"))}>
{@props.inputsValues[@props.formInputsNames["name"]] or @props.defaultValues.name}
</div>
<div className={ClassNames("jp-card-expiry", "jp-card-display", "jp-card-focused": @_isFocusedInput("expiry"), @_getInputValidationState("expiry"))}
data-before={@props.messages.monthYear}
data-after={@props.messages.validDate}>
{expiryValue}
</div>
</div>
</div>
<div className="jp-card-back">
<div className="jp-card-bar"></div>
<div className={ClassNames("jp-card-cvc", "jp-card-display", "jp-card-focused": @_isFocusedInput("cvc"), @_getInputValidationState("cvc"))}>
{@props.inputsValues[@props.formInputsNames["cvc"]] or @props.defaultValues.cvc}
</div>
<div className="jp-card-shiny"></div>
</div>
</div>
</div>
)
module.exports = CardReact
|
[
{
"context": "# Copyright (c) 2008-2013 Michael Dvorkin and contributors.\n#\n# Fat Free CRM is freely dist",
"end": 41,
"score": 0.9998599290847778,
"start": 26,
"tag": "NAME",
"value": "Michael Dvorkin"
}
] | app/assets/javascripts/admin/fields.js.coffee | cassioscabral/fat_free_crm | 1 | # Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
$('.fields select[name="field[as]"]').live 'change', ->
$.ajax(
url: '/admin/fields/subform?' + $(this).parents('form').serialize()
dataType: 'html'
context: $(this).closest('form').find('.subform')
success: (data) ->
$(this).html(data)
$(this).find('input').first().focus()
)
$('.fields a.create').live 'click', ->
$('.edit_field').hide()
field_group = $(this).closest('.field_group')
field_group.find('.empty').hide()
field_group.find('.arrow').html(crm.EXPANDED)
field_group.find('.create_field').slideDown().find('input[name="field[label]"]').focus()
false
$('.create_field a.close, .create_field a.cancel').live 'click', ->
$(this).closest('.create_field').hide()
$(this).closest('.field_group').find('.empty').show()
$(this).closest('.field_group').find('.arrow').html(crm.COLLAPSED)
false
$('.fields a.edit').live 'click', ->
$('.edit_field').hide()
$.ajax(
url: $(this).attr('href')
context: $(this).closest('li').find('div.edit_field')
success: (data) ->
$(this).replaceWith(data).first().focus()
)
false
$('.edit_field a.close, .edit_field a.cancel').live 'click', ->
$(this).closest('.edit_field').hide()
false
false
) jQuery
| 62139 | # Copyright (c) 2008-2013 <NAME> and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
$('.fields select[name="field[as]"]').live 'change', ->
$.ajax(
url: '/admin/fields/subform?' + $(this).parents('form').serialize()
dataType: 'html'
context: $(this).closest('form').find('.subform')
success: (data) ->
$(this).html(data)
$(this).find('input').first().focus()
)
$('.fields a.create').live 'click', ->
$('.edit_field').hide()
field_group = $(this).closest('.field_group')
field_group.find('.empty').hide()
field_group.find('.arrow').html(crm.EXPANDED)
field_group.find('.create_field').slideDown().find('input[name="field[label]"]').focus()
false
$('.create_field a.close, .create_field a.cancel').live 'click', ->
$(this).closest('.create_field').hide()
$(this).closest('.field_group').find('.empty').show()
$(this).closest('.field_group').find('.arrow').html(crm.COLLAPSED)
false
$('.fields a.edit').live 'click', ->
$('.edit_field').hide()
$.ajax(
url: $(this).attr('href')
context: $(this).closest('li').find('div.edit_field')
success: (data) ->
$(this).replaceWith(data).first().focus()
)
false
$('.edit_field a.close, .edit_field a.cancel').live 'click', ->
$(this).closest('.edit_field').hide()
false
false
) jQuery
| true | # Copyright (c) 2008-2013 PI:NAME:<NAME>END_PI and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
$('.fields select[name="field[as]"]').live 'change', ->
$.ajax(
url: '/admin/fields/subform?' + $(this).parents('form').serialize()
dataType: 'html'
context: $(this).closest('form').find('.subform')
success: (data) ->
$(this).html(data)
$(this).find('input').first().focus()
)
$('.fields a.create').live 'click', ->
$('.edit_field').hide()
field_group = $(this).closest('.field_group')
field_group.find('.empty').hide()
field_group.find('.arrow').html(crm.EXPANDED)
field_group.find('.create_field').slideDown().find('input[name="field[label]"]').focus()
false
$('.create_field a.close, .create_field a.cancel').live 'click', ->
$(this).closest('.create_field').hide()
$(this).closest('.field_group').find('.empty').show()
$(this).closest('.field_group').find('.arrow').html(crm.COLLAPSED)
false
$('.fields a.edit').live 'click', ->
$('.edit_field').hide()
$.ajax(
url: $(this).attr('href')
context: $(this).closest('li').find('div.edit_field')
success: (data) ->
$(this).replaceWith(data).first().focus()
)
false
$('.edit_field a.close, .edit_field a.cancel').live 'click', ->
$(this).closest('.edit_field').hide()
false
false
) jQuery
|
[
{
"context": "age'\nbus = require('./event-bus')()\n\nTOKEN_KEY = 'commit-live:token'\nID_KEY = 'commit-live:id'\n\nmodule.exports = toke",
"end": 103,
"score": 0.9732964634895325,
"start": 86,
"tag": "KEY",
"value": "commit-live:token"
},
{
"context": "us')()\n\nTOKEN_KEY = 'commit-live:token'\nID_KEY = 'commit-live:id'\n\nmodule.exports = token = {\n get: ->\n localS",
"end": 129,
"score": 0.9865595102310181,
"start": 115,
"tag": "KEY",
"value": "commit-live:id"
}
] | lib/token.coffee | commit-live-admin/dev-greyatom-ide | 5 | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = 'commit-live:token'
ID_KEY = 'commit-live:id'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
getID: ->
localStorage.get(ID_KEY)
getInstanceID: ->
JSON.parse(localStorage.get('commit-live:user-info')).instanceId
setID: (value) ->
localStorage.set(ID_KEY, value)
bus.emit(ID_KEY, value)
unsetID: ->
localStorage.delete(ID_KEY)
bus.emit(ID_KEY, undefined)
observeID: (callback) ->
callback(geID.get())
bus.on(ID_KEY, callback)
}
| 175163 | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = '<KEY>'
ID_KEY = '<KEY>'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
getID: ->
localStorage.get(ID_KEY)
getInstanceID: ->
JSON.parse(localStorage.get('commit-live:user-info')).instanceId
setID: (value) ->
localStorage.set(ID_KEY, value)
bus.emit(ID_KEY, value)
unsetID: ->
localStorage.delete(ID_KEY)
bus.emit(ID_KEY, undefined)
observeID: (callback) ->
callback(geID.get())
bus.on(ID_KEY, callback)
}
| true | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = 'PI:KEY:<KEY>END_PI'
ID_KEY = 'PI:KEY:<KEY>END_PI'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
getID: ->
localStorage.get(ID_KEY)
getInstanceID: ->
JSON.parse(localStorage.get('commit-live:user-info')).instanceId
setID: (value) ->
localStorage.set(ID_KEY, value)
bus.emit(ID_KEY, value)
unsetID: ->
localStorage.delete(ID_KEY)
bus.emit(ID_KEY, undefined)
observeID: (callback) ->
callback(geID.get())
bus.on(ID_KEY, callback)
}
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9979621171951294,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/clientFilePage/clientAlerts.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# PROTOTYPE FEATURE
# We assume for now there will only be 1 alert, as a simple textarea (content),
# which is initially created, and updated.
# As a full feature, this will be an itemized list of individual alerts,
# so the dataModel is modelled on that eventuality.
Imm = require 'immutable'
Moment = require 'moment'
Async = require 'async'
Persist = require '../persist'
load = (win) ->
React = win.React
R = React.DOM
Bootbox = win.bootbox
CrashHandler = require('../crashHandler').load(win)
WithTooltip = require('../withTooltip').load(win)
{FaIcon, renderLineBreaks} = require('../utils').load(win)
ClientAlerts = React.createFactory React.createClass
displayName: 'ClientAlerts'
mixins: [React.addons.PureRenderMixin]
propTypes: {
alerts: React.PropTypes.instanceOf(Imm.List).isRequired
clientFileId: React.PropTypes.string.isRequired
isDisabled: React.PropTypes.bool.isRequired
}
_getSingleAlert: ->
# We currently assume only 1 alert is in the alerts List(),
# which is the only one created and updated
return @props.alerts.first() or Imm.Map()
getInitialState: ->
content = @_getSingleAlert().get('content')
return {
content: content or ''
beginTimestamp: Moment()
isEditing: null
}
componentDidUpdate: (newProps) ->
# Reset component when alert data changes
# TODO: Account for hasChanges/isEditing
if not Imm.is newProps.alerts, @props.alerts
@_reset()
hasChanges: ->
originalContent = @_getSingleAlert().get('content') or ''
return @state.content isnt originalContent
render: ->
R.div({
className: 'clientAlerts animated fadeInUp'
onClick: @_beginEditing unless @state.isEditing
},
R.h3({className: 'animated fadeInUp'},
"Alerts"
)
R.div({id: 'alertsContainer'},
(if @state.isEditing
R.div({id: 'isEditingContent'},
R.textarea({
className: 'alertsTextarea'
ref: 'textarea'
rows: 7
maxLength: 150
value: @state.content
onChange: @_updateContent
})
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-sm btn-default'
onClick: @_cancelEditing
}, "Cancel")
R.button({
className: 'btn btn-sm btn-success'
disabled: not @hasChanges()
onClick: @_save
},
"Save"
' '
FaIcon('check')
)
)
)
else
WithTooltip({
title: "Click here to add/update alerts" if @state.content
placement: 'right'
container: '#container'
},
R.div({id: 'staticContent'},
renderLineBreaks(@state.content or "Click here to add an alert")
)
)
)
)
)
_updateContent: (event) ->
content = event.target.value
@setState {content}
_beginEditing: ->
return if @props.isDisabled
isEditing = true
beginTimestamp = Moment()
@setState {isEditing, beginTimestamp}, =>
@refs.textarea.focus() if @refs.textarea?
_cancelEditing: ->
if @hasChanges()
Bootbox.confirm "Discard changes to this alert?", (ok) =>
if ok then @_reset()
else
@_reset()
_reset: ->
@setState @getInitialState()
_save: ->
clientFileId = @props.clientFileId
content = @state.content
isExistingAlert = @_getSingleAlert().has('id')
saveAlert = if isExistingAlert then @_updateAlert else @_createAlert
saveAlert (err) =>
if err
if err instanceof Persist.IOError
Bootbox.alert """
An error occurred. Please check your network connection and try again.
"""
return
CrashHandler.handle err
return
# Component state will automatically reset when @props.alerts changes
_createAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = Imm.fromJS {
content
clientFileId
status: 'default'
authorProgramId
}
createdAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Reason for the new alert (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.create alert, (err, result) ->
if err
cb err
return
createdAlert = result
cb()
(cb) =>
@_generateQuickNote createdAlert, cb
], cb
_updateAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = @_getSingleAlert()
.set 'clientFileId', clientFileId
.set 'content', content
.set 'authorProgramId', authorProgramId
.remove 'updateReason'
updatedAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Explanation for the alert update (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.createRevision alert, (err, result) ->
if err
cb err
return
updatedAlert = result
cb()
(cb) =>
@_generateQuickNote updatedAlert, cb
], cb
_generateQuickNote: (alert, cb) ->
notes = "Alert info changed to: #{alert.get('content')}"
# Append updateReason to quickNote if exists
if alert.has('updateReason')
notes += "\n\n(Reason: #{alert.get('updateReason')})"
authorProgramId = ActiveSession.programId or ''
beginTimestamp = @state.beginTimestamp.format(Persist.TimestampFormat)
clientFileId = @props.clientFileId
quickNote = Imm.fromJS {
type: 'basic' # aka "Quick Notes"
status: 'default'
notes
backdate: ''
authorProgramId
beginTimestamp
clientFileId
}
ActiveSession.persist.progNotes.create quickNote, cb
return ClientAlerts
module.exports = {load}
| 149633 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# PROTOTYPE FEATURE
# We assume for now there will only be 1 alert, as a simple textarea (content),
# which is initially created, and updated.
# As a full feature, this will be an itemized list of individual alerts,
# so the dataModel is modelled on that eventuality.
Imm = require 'immutable'
Moment = require 'moment'
Async = require 'async'
Persist = require '../persist'
load = (win) ->
React = win.React
R = React.DOM
Bootbox = win.bootbox
CrashHandler = require('../crashHandler').load(win)
WithTooltip = require('../withTooltip').load(win)
{FaIcon, renderLineBreaks} = require('../utils').load(win)
ClientAlerts = React.createFactory React.createClass
displayName: 'ClientAlerts'
mixins: [React.addons.PureRenderMixin]
propTypes: {
alerts: React.PropTypes.instanceOf(Imm.List).isRequired
clientFileId: React.PropTypes.string.isRequired
isDisabled: React.PropTypes.bool.isRequired
}
_getSingleAlert: ->
# We currently assume only 1 alert is in the alerts List(),
# which is the only one created and updated
return @props.alerts.first() or Imm.Map()
getInitialState: ->
content = @_getSingleAlert().get('content')
return {
content: content or ''
beginTimestamp: Moment()
isEditing: null
}
componentDidUpdate: (newProps) ->
# Reset component when alert data changes
# TODO: Account for hasChanges/isEditing
if not Imm.is newProps.alerts, @props.alerts
@_reset()
hasChanges: ->
originalContent = @_getSingleAlert().get('content') or ''
return @state.content isnt originalContent
render: ->
R.div({
className: 'clientAlerts animated fadeInUp'
onClick: @_beginEditing unless @state.isEditing
},
R.h3({className: 'animated fadeInUp'},
"Alerts"
)
R.div({id: 'alertsContainer'},
(if @state.isEditing
R.div({id: 'isEditingContent'},
R.textarea({
className: 'alertsTextarea'
ref: 'textarea'
rows: 7
maxLength: 150
value: @state.content
onChange: @_updateContent
})
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-sm btn-default'
onClick: @_cancelEditing
}, "Cancel")
R.button({
className: 'btn btn-sm btn-success'
disabled: not @hasChanges()
onClick: @_save
},
"Save"
' '
FaIcon('check')
)
)
)
else
WithTooltip({
title: "Click here to add/update alerts" if @state.content
placement: 'right'
container: '#container'
},
R.div({id: 'staticContent'},
renderLineBreaks(@state.content or "Click here to add an alert")
)
)
)
)
)
_updateContent: (event) ->
content = event.target.value
@setState {content}
_beginEditing: ->
return if @props.isDisabled
isEditing = true
beginTimestamp = Moment()
@setState {isEditing, beginTimestamp}, =>
@refs.textarea.focus() if @refs.textarea?
_cancelEditing: ->
if @hasChanges()
Bootbox.confirm "Discard changes to this alert?", (ok) =>
if ok then @_reset()
else
@_reset()
_reset: ->
@setState @getInitialState()
_save: ->
clientFileId = @props.clientFileId
content = @state.content
isExistingAlert = @_getSingleAlert().has('id')
saveAlert = if isExistingAlert then @_updateAlert else @_createAlert
saveAlert (err) =>
if err
if err instanceof Persist.IOError
Bootbox.alert """
An error occurred. Please check your network connection and try again.
"""
return
CrashHandler.handle err
return
# Component state will automatically reset when @props.alerts changes
_createAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = Imm.fromJS {
content
clientFileId
status: 'default'
authorProgramId
}
createdAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Reason for the new alert (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.create alert, (err, result) ->
if err
cb err
return
createdAlert = result
cb()
(cb) =>
@_generateQuickNote createdAlert, cb
], cb
_updateAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = @_getSingleAlert()
.set 'clientFileId', clientFileId
.set 'content', content
.set 'authorProgramId', authorProgramId
.remove 'updateReason'
updatedAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Explanation for the alert update (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.createRevision alert, (err, result) ->
if err
cb err
return
updatedAlert = result
cb()
(cb) =>
@_generateQuickNote updatedAlert, cb
], cb
_generateQuickNote: (alert, cb) ->
notes = "Alert info changed to: #{alert.get('content')}"
# Append updateReason to quickNote if exists
if alert.has('updateReason')
notes += "\n\n(Reason: #{alert.get('updateReason')})"
authorProgramId = ActiveSession.programId or ''
beginTimestamp = @state.beginTimestamp.format(Persist.TimestampFormat)
clientFileId = @props.clientFileId
quickNote = Imm.fromJS {
type: 'basic' # aka "Quick Notes"
status: 'default'
notes
backdate: ''
authorProgramId
beginTimestamp
clientFileId
}
ActiveSession.persist.progNotes.create quickNote, cb
return ClientAlerts
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# PROTOTYPE FEATURE
# We assume for now there will only be 1 alert, as a simple textarea (content),
# which is initially created, and updated.
# As a full feature, this will be an itemized list of individual alerts,
# so the dataModel is modelled on that eventuality.
Imm = require 'immutable'
Moment = require 'moment'
Async = require 'async'
Persist = require '../persist'
load = (win) ->
React = win.React
R = React.DOM
Bootbox = win.bootbox
CrashHandler = require('../crashHandler').load(win)
WithTooltip = require('../withTooltip').load(win)
{FaIcon, renderLineBreaks} = require('../utils').load(win)
ClientAlerts = React.createFactory React.createClass
displayName: 'ClientAlerts'
mixins: [React.addons.PureRenderMixin]
propTypes: {
alerts: React.PropTypes.instanceOf(Imm.List).isRequired
clientFileId: React.PropTypes.string.isRequired
isDisabled: React.PropTypes.bool.isRequired
}
_getSingleAlert: ->
# We currently assume only 1 alert is in the alerts List(),
# which is the only one created and updated
return @props.alerts.first() or Imm.Map()
getInitialState: ->
content = @_getSingleAlert().get('content')
return {
content: content or ''
beginTimestamp: Moment()
isEditing: null
}
componentDidUpdate: (newProps) ->
# Reset component when alert data changes
# TODO: Account for hasChanges/isEditing
if not Imm.is newProps.alerts, @props.alerts
@_reset()
hasChanges: ->
originalContent = @_getSingleAlert().get('content') or ''
return @state.content isnt originalContent
render: ->
R.div({
className: 'clientAlerts animated fadeInUp'
onClick: @_beginEditing unless @state.isEditing
},
R.h3({className: 'animated fadeInUp'},
"Alerts"
)
R.div({id: 'alertsContainer'},
(if @state.isEditing
R.div({id: 'isEditingContent'},
R.textarea({
className: 'alertsTextarea'
ref: 'textarea'
rows: 7
maxLength: 150
value: @state.content
onChange: @_updateContent
})
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-sm btn-default'
onClick: @_cancelEditing
}, "Cancel")
R.button({
className: 'btn btn-sm btn-success'
disabled: not @hasChanges()
onClick: @_save
},
"Save"
' '
FaIcon('check')
)
)
)
else
WithTooltip({
title: "Click here to add/update alerts" if @state.content
placement: 'right'
container: '#container'
},
R.div({id: 'staticContent'},
renderLineBreaks(@state.content or "Click here to add an alert")
)
)
)
)
)
_updateContent: (event) ->
content = event.target.value
@setState {content}
_beginEditing: ->
return if @props.isDisabled
isEditing = true
beginTimestamp = Moment()
@setState {isEditing, beginTimestamp}, =>
@refs.textarea.focus() if @refs.textarea?
_cancelEditing: ->
if @hasChanges()
Bootbox.confirm "Discard changes to this alert?", (ok) =>
if ok then @_reset()
else
@_reset()
_reset: ->
@setState @getInitialState()
_save: ->
clientFileId = @props.clientFileId
content = @state.content
isExistingAlert = @_getSingleAlert().has('id')
saveAlert = if isExistingAlert then @_updateAlert else @_createAlert
saveAlert (err) =>
if err
if err instanceof Persist.IOError
Bootbox.alert """
An error occurred. Please check your network connection and try again.
"""
return
CrashHandler.handle err
return
# Component state will automatically reset when @props.alerts changes
_createAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = Imm.fromJS {
content
clientFileId
status: 'default'
authorProgramId
}
createdAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Reason for the new alert (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.create alert, (err, result) ->
if err
cb err
return
createdAlert = result
cb()
(cb) =>
@_generateQuickNote createdAlert, cb
], cb
_updateAlert: (cb) ->
clientFileId = @props.clientFileId
content = @state.content
authorProgramId = ActiveSession.programId or ''
alert = @_getSingleAlert()
.set 'clientFileId', clientFileId
.set 'content', content
.set 'authorProgramId', authorProgramId
.remove 'updateReason'
updatedAlert = null
Async.series [
(cb) =>
Bootbox.prompt "Explanation for the alert update (optional)", (updateReason) ->
if updateReason
alert = alert.set('updateReason', updateReason)
cb()
(cb) =>
ActiveSession.persist.alerts.createRevision alert, (err, result) ->
if err
cb err
return
updatedAlert = result
cb()
(cb) =>
@_generateQuickNote updatedAlert, cb
], cb
_generateQuickNote: (alert, cb) ->
notes = "Alert info changed to: #{alert.get('content')}"
# Append updateReason to quickNote if exists
if alert.has('updateReason')
notes += "\n\n(Reason: #{alert.get('updateReason')})"
authorProgramId = ActiveSession.programId or ''
beginTimestamp = @state.beginTimestamp.format(Persist.TimestampFormat)
clientFileId = @props.clientFileId
quickNote = Imm.fromJS {
type: 'basic' # aka "Quick Notes"
status: 'default'
notes
backdate: ''
authorProgramId
beginTimestamp
clientFileId
}
ActiveSession.persist.progNotes.create quickNote, cb
return ClientAlerts
module.exports = {load}
|
[
{
"context": "iverableRegExp.exec doc['_id']\n key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]\n\n emit genK",
"end": 259,
"score": 0.5922443270683289,
"start": 257,
"tag": "KEY",
"value": "bs"
},
{
"context": "eRegExp.exec doc['_id']\n key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]\n\n emit genKey(doc), doc\n",
"end": 272,
"score": 0.5212906002998352,
"start": 265,
"tag": "KEY",
"value": "Number("
},
{
"context": "xec doc['_id']\n key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]\n\n emit genKey(doc), doc\n",
"end": 275,
"score": 0.6437734365463257,
"start": 273,
"tag": "KEY",
"value": "bs"
},
{
"context": "['_id']\n key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]\n\n emit genKey(doc), doc\n",
"end": 288,
"score": 0.5861356258392334,
"start": 281,
"tag": "KEY",
"value": "Number("
},
{
"context": " key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]\n\n emit genKey(doc), doc\n",
"end": 291,
"score": 0.6220285892486572,
"start": 289,
"tag": "KEY",
"value": "bs"
}
] | views/deliverablesByWbs/map.coffee | IKS/Proggis | 2 | (doc) ->
return unless doc['@type'] is 'deliverable'
return if doc['ignore']
deliverableRegExp = new RegExp "http://iks-project.eu/deliverable/(\\d+)\.(\\d+)\.(\\d*)"
genKey = (doc) ->
wbs = deliverableRegExp.exec doc['_id']
key = [Number(wbs[1]), Number(wbs[2]), Number(wbs[3])]
emit genKey(doc), doc
| 58872 | (doc) ->
return unless doc['@type'] is 'deliverable'
return if doc['ignore']
deliverableRegExp = new RegExp "http://iks-project.eu/deliverable/(\\d+)\.(\\d+)\.(\\d*)"
genKey = (doc) ->
wbs = deliverableRegExp.exec doc['_id']
key = [Number(w<KEY>[1]), <KEY>w<KEY>[2]), <KEY>w<KEY>[3])]
emit genKey(doc), doc
| true | (doc) ->
return unless doc['@type'] is 'deliverable'
return if doc['ignore']
deliverableRegExp = new RegExp "http://iks-project.eu/deliverable/(\\d+)\.(\\d+)\.(\\d*)"
genKey = (doc) ->
wbs = deliverableRegExp.exec doc['_id']
key = [Number(wPI:KEY:<KEY>END_PI[1]), PI:KEY:<KEY>END_PIwPI:KEY:<KEY>END_PI[2]), PI:KEY:<KEY>END_PIwPI:KEY:<KEY>END_PI[3])]
emit genKey(doc), doc
|
[
{
"context": "rPassword: \"Zaktualizuj swoje hasło\"\n password: \"Hasło\"\n usernameOrEmail: \"Nazwa użytkownika lub email\"",
"end": 391,
"score": 0.758083701133728,
"start": 386,
"tag": "PASSWORD",
"value": "Hasło"
},
{
"context": "je hasło\"\n password: \"Hasło\"\n usernameOrEmail: \"Nazwa użytkownika lub email\"\n email: \"Email\"\n ifYouAlreadyHaveAnA",
"end": 430,
"score": 0.933379054069519,
"start": 413,
"tag": "NAME",
"value": "Nazwa użytkownika"
},
{
"context": "rejestruj się używając adresu email\"\n username: \"Nazwa użytkownika\"\n optional: \"Nieobowiązkowe\"\n signu",
"end": 600,
"score": 0.7463271617889404,
"start": 595,
"tag": "NAME",
"value": "Nazwa"
},
{
"context": "truj się używając adresu email\"\n username: \"Nazwa użytkownika\"\n optional: \"Nieobowiązkowe\"\n signupCode: \"K",
"end": 609,
"score": 0.8054618239402771,
"start": 601,
"tag": "USERNAME",
"value": "użytkown"
},
{
"context": "używając adresu email\"\n username: \"Nazwa użytkownika\"\n optional: \"Nieobowiązkowe\"\n signupCode: \"Kod ",
"end": 612,
"score": 0.8539727926254272,
"start": 609,
"tag": "NAME",
"value": "ika"
},
{
"context": "cyfrę.\"\n usernameRequired: \"Wymagana jest nazwa użytkownika.\"\n emailRequired: \"Wymagany jest adres email.\"",
"end": 1375,
"score": 0.7509393692016602,
"start": 1364,
"tag": "NAME",
"value": "użytkownika"
}
] | client/t9n/polish.coffee | JutoApp/accounts-entry | 167 | pl =
signIn: "Zaloguj się"
signin: "zaloguj się"
signOut: "Wyloguj się"
signUp: "Zarejestruj się"
OR: "LUB"
forgotPassword: "Zapomniałeś hasła?"
emailAddress: "Adres email"
emailResetLink: "Wyślij email z linkiem do zmiany hasła"
dontHaveAnAccount: "Nie masz konta?"
resetYourPassword: "Ustaw nowe hasło"
updateYourPassword: "Zaktualizuj swoje hasło"
password: "Hasło"
usernameOrEmail: "Nazwa użytkownika lub email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Jeżeli już masz konto"
signUpWithYourEmailAddress: "Zarejestruj się używając adresu email"
username: "Nazwa użytkownika"
optional: "Nieobowiązkowe"
signupCode: "Kod rejestracji"
clickAgree: "Klikając na Zarejestruj się zgadzasz się z naszą"
privacyPolicy: "polityką prywatności"
terms: "warunkami korzystania z serwisu"
sign: "Podpisz"
configure: "Konfiguruj"
with: "z"
createAccount: "Utwórz konto"
verificationPending: "Confirm your email address"
verificationPendingDetails: "A confirmation email has been sent to the email address you provided. Click on the confirmation link in the email to activate your account."
and: "i"
error:
minChar: "7 znaków to minimalna długość hasła."
pwOneLetter: "Hasło musi zawierać 1 literę."
pwOneDigit: "Hasło musi zawierać przynajmniej jedną cyfrę."
usernameRequired: "Wymagana jest nazwa użytkownika."
emailRequired: "Wymagany jest adres email."
signupCodeRequired: "Wymagany jest kod rejestracji."
signupCodeIncorrect: "Kod rejestracji jest nieprawidłowy."
signInRequired: "Musisz być zalogowany, aby to zrobić."
usernameIsEmail: "Nazwa użytkownika nie może być adres e-mail."
T9n.map "pl", pl
| 80537 | pl =
signIn: "Zaloguj się"
signin: "zaloguj się"
signOut: "Wyloguj się"
signUp: "Zarejestruj się"
OR: "LUB"
forgotPassword: "Zapomniałeś hasła?"
emailAddress: "Adres email"
emailResetLink: "Wyślij email z linkiem do zmiany hasła"
dontHaveAnAccount: "Nie masz konta?"
resetYourPassword: "Ustaw nowe hasło"
updateYourPassword: "Zaktualizuj swoje hasło"
password: "<PASSWORD>"
usernameOrEmail: "<NAME> lub email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Jeżeli już masz konto"
signUpWithYourEmailAddress: "Zarejestruj się używając adresu email"
username: "<NAME> użytkown<NAME>"
optional: "Nieobowiązkowe"
signupCode: "Kod rejestracji"
clickAgree: "Klikając na Zarejestruj się zgadzasz się z naszą"
privacyPolicy: "polityką prywatności"
terms: "warunkami korzystania z serwisu"
sign: "Podpisz"
configure: "Konfiguruj"
with: "z"
createAccount: "Utwórz konto"
verificationPending: "Confirm your email address"
verificationPendingDetails: "A confirmation email has been sent to the email address you provided. Click on the confirmation link in the email to activate your account."
and: "i"
error:
minChar: "7 znaków to minimalna długość hasła."
pwOneLetter: "Hasło musi zawierać 1 literę."
pwOneDigit: "Hasło musi zawierać przynajmniej jedną cyfrę."
usernameRequired: "Wymagana jest nazwa <NAME>."
emailRequired: "Wymagany jest adres email."
signupCodeRequired: "Wymagany jest kod rejestracji."
signupCodeIncorrect: "Kod rejestracji jest nieprawidłowy."
signInRequired: "Musisz być zalogowany, aby to zrobić."
usernameIsEmail: "Nazwa użytkownika nie może być adres e-mail."
T9n.map "pl", pl
| true | pl =
signIn: "Zaloguj się"
signin: "zaloguj się"
signOut: "Wyloguj się"
signUp: "Zarejestruj się"
OR: "LUB"
forgotPassword: "Zapomniałeś hasła?"
emailAddress: "Adres email"
emailResetLink: "Wyślij email z linkiem do zmiany hasła"
dontHaveAnAccount: "Nie masz konta?"
resetYourPassword: "Ustaw nowe hasło"
updateYourPassword: "Zaktualizuj swoje hasło"
password: "PI:PASSWORD:<PASSWORD>END_PI"
usernameOrEmail: "PI:NAME:<NAME>END_PI lub email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Jeżeli już masz konto"
signUpWithYourEmailAddress: "Zarejestruj się używając adresu email"
username: "PI:NAME:<NAME>END_PI użytkownPI:NAME:<NAME>END_PI"
optional: "Nieobowiązkowe"
signupCode: "Kod rejestracji"
clickAgree: "Klikając na Zarejestruj się zgadzasz się z naszą"
privacyPolicy: "polityką prywatności"
terms: "warunkami korzystania z serwisu"
sign: "Podpisz"
configure: "Konfiguruj"
with: "z"
createAccount: "Utwórz konto"
verificationPending: "Confirm your email address"
verificationPendingDetails: "A confirmation email has been sent to the email address you provided. Click on the confirmation link in the email to activate your account."
and: "i"
error:
minChar: "7 znaków to minimalna długość hasła."
pwOneLetter: "Hasło musi zawierać 1 literę."
pwOneDigit: "Hasło musi zawierać przynajmniej jedną cyfrę."
usernameRequired: "Wymagana jest nazwa PI:NAME:<NAME>END_PI."
emailRequired: "Wymagany jest adres email."
signupCodeRequired: "Wymagany jest kod rejestracji."
signupCodeIncorrect: "Kod rejestracji jest nieprawidłowy."
signInRequired: "Musisz być zalogowany, aby to zrobić."
usernameIsEmail: "Nazwa użytkownika nie może być adres e-mail."
T9n.map "pl", pl
|
[
{
"context": "exposedProperties\nconfigmodule.clientSecretKey = \"b964dfab0b7d9fe43022e9584fd782f83e5fab11bb1a409464f5fbdebc62df6f\"\nconfigmodule.clientPublicKey = \"cab958a8f0402ebf",
"end": 487,
"score": 0.9997613430023193,
"start": 423,
"tag": "KEY",
"value": "b964dfab0b7d9fe43022e9584fd782f83e5fab11bb1a409464f5fbdebc62df6f"
},
{
"context": "64f5fbdebc62df6f\"\nconfigmodule.clientPublicKey = \"cab958a8f0402ebf305535935da74f756a685adb6db9bb15257a64389f641a35\"\n\nconfigmodule.masterSecretKey = \"003464606f47e65",
"end": 585,
"score": 0.9997639656066895,
"start": 521,
"tag": "KEY",
"value": "cab958a8f0402ebf305535935da74f756a685adb6db9bb15257a64389f641a35"
},
{
"context": "57a64389f641a35\"\n\nconfigmodule.masterSecretKey = \"003464606f47e65cf514c2c8f1f007d432a46fbeba616781bbff52def803e6e8\"\nconfigmodule.masterPublicKey = \"7102da6282ec3169",
"end": 684,
"score": 0.9997554421424866,
"start": 620,
"tag": "KEY",
"value": "003464606f47e65cf514c2c8f1f007d432a46fbeba616781bbff52def803e6e8"
},
{
"context": "bbff52def803e6e8\"\nconfigmodule.masterPublicKey = \"7102da6282ec316974e7ea6ad7d24bd077f70a969ffe865c8b99b12b314f644c\"\n\nconfigmodule.serverSecretKey = \"447dd15e1d5ded3",
"end": 782,
"score": 0.9997607469558716,
"start": 718,
"tag": "KEY",
"value": "7102da6282ec316974e7ea6ad7d24bd077f70a969ffe865c8b99b12b314f644c"
},
{
"context": "b99b12b314f644c\"\n\nconfigmodule.serverSecretKey = \"447dd15e1d5ded3d662c26e0e61bf299ccab60b2d4bc86f1ec07e4561a09df63\"\nconfigmodule.serverPublicKey = \"dc29860abee9d578",
"end": 881,
"score": 0.9997571110725403,
"start": 817,
"tag": "KEY",
"value": "447dd15e1d5ded3d662c26e0e61bf299ccab60b2d4bc86f1ec07e4561a09df63"
},
{
"context": "ec07e4561a09df63\"\nconfigmodule.serverPublicKey = \"dc29860abee9d5783d1689ef249f0d3aa18bda443a524ae610e3c3f71c6b5bca\"\n\nconfigmodule.timestampFrameMS = 10000\n\nconfigmo",
"end": 979,
"score": 0.9997656345367432,
"start": 915,
"tag": "KEY",
"value": "dc29860abee9d5783d1689ef249f0d3aa18bda443a524ae610e3c3f71c6b5bca"
}
] | source/configmodule/configmodule.coffee | JhonnyJason/interface-tester-pwa-sources | 0 | configmodule = {name: "configmodule", uimodule: false}
#region logPrintFunctions
log = (arg) ->
if allModules.debugmodule.modulesToDebug["configmodule"]? then console.log "[configmodule]: " + arg
return
#endregion
########################################################
configmodule.initialize = () ->
log "configmodule.initialize"
return
#region exposedProperties
configmodule.clientSecretKey = "b964dfab0b7d9fe43022e9584fd782f83e5fab11bb1a409464f5fbdebc62df6f"
configmodule.clientPublicKey = "cab958a8f0402ebf305535935da74f756a685adb6db9bb15257a64389f641a35"
configmodule.masterSecretKey = "003464606f47e65cf514c2c8f1f007d432a46fbeba616781bbff52def803e6e8"
configmodule.masterPublicKey = "7102da6282ec316974e7ea6ad7d24bd077f70a969ffe865c8b99b12b314f644c"
configmodule.serverSecretKey = "447dd15e1d5ded3d662c26e0e61bf299ccab60b2d4bc86f1ec07e4561a09df63"
configmodule.serverPublicKey = "dc29860abee9d5783d1689ef249f0d3aa18bda443a524ae610e3c3f71c6b5bca"
configmodule.timestampFrameMS = 10000
configmodule.interfaceServers =
authenticationinterface: "https://localhost:6999"
#endregion
export default configmodule
| 157485 | configmodule = {name: "configmodule", uimodule: false}
#region logPrintFunctions
log = (arg) ->
if allModules.debugmodule.modulesToDebug["configmodule"]? then console.log "[configmodule]: " + arg
return
#endregion
########################################################
configmodule.initialize = () ->
log "configmodule.initialize"
return
#region exposedProperties
configmodule.clientSecretKey = "<KEY>"
configmodule.clientPublicKey = "<KEY>"
configmodule.masterSecretKey = "<KEY>"
configmodule.masterPublicKey = "<KEY>"
configmodule.serverSecretKey = "<KEY>"
configmodule.serverPublicKey = "<KEY>"
configmodule.timestampFrameMS = 10000
configmodule.interfaceServers =
authenticationinterface: "https://localhost:6999"
#endregion
export default configmodule
| true | configmodule = {name: "configmodule", uimodule: false}
#region logPrintFunctions
log = (arg) ->
if allModules.debugmodule.modulesToDebug["configmodule"]? then console.log "[configmodule]: " + arg
return
#endregion
########################################################
configmodule.initialize = () ->
log "configmodule.initialize"
return
#region exposedProperties
configmodule.clientSecretKey = "PI:KEY:<KEY>END_PI"
configmodule.clientPublicKey = "PI:KEY:<KEY>END_PI"
configmodule.masterSecretKey = "PI:KEY:<KEY>END_PI"
configmodule.masterPublicKey = "PI:KEY:<KEY>END_PI"
configmodule.serverSecretKey = "PI:KEY:<KEY>END_PI"
configmodule.serverPublicKey = "PI:KEY:<KEY>END_PI"
configmodule.timestampFrameMS = 10000
configmodule.interfaceServers =
authenticationinterface: "https://localhost:6999"
#endregion
export default configmodule
|
[
{
"context": "CODE\n# ==============================\n\nharry = new Harry\ntom = new Tom harry\ndick = new Dick harry\ncon",
"end": 193,
"score": 0.5277975797653198,
"start": 192,
"tag": "NAME",
"value": "H"
},
{
"context": "\n\nharry = new Harry\ntom = new Tom harry\ndick = new Dick harry\nconversation = \"\"\n\nconversation += tom.s",
"end": 230,
"score": 0.6673738956451416,
"start": 229,
"tag": "NAME",
"value": "D"
}
] | GoF/classic/Behavioral/Mediator/CoffeeScript/client.coffee | irynaO/JavaScript-Design-Patterns | 293 | 'use strict'
Harry = require './API/Harry'
Tom = require './API/Tom'
Dick = require './API/Dick'
# ==============================
# CLIENT CODE
# ==============================
harry = new Harry
tom = new Tom harry
dick = new Dick harry
conversation = ""
conversation += tom.send "Could you lower the volume of your music, please?"
conversation += dick.send "Are you serious? The volume is actually very low..."
console.log conversation
| 161894 | 'use strict'
Harry = require './API/Harry'
Tom = require './API/Tom'
Dick = require './API/Dick'
# ==============================
# CLIENT CODE
# ==============================
harry = new <NAME>arry
tom = new Tom harry
dick = new <NAME>ick harry
conversation = ""
conversation += tom.send "Could you lower the volume of your music, please?"
conversation += dick.send "Are you serious? The volume is actually very low..."
console.log conversation
| true | 'use strict'
Harry = require './API/Harry'
Tom = require './API/Tom'
Dick = require './API/Dick'
# ==============================
# CLIENT CODE
# ==============================
harry = new PI:NAME:<NAME>END_PIarry
tom = new Tom harry
dick = new PI:NAME:<NAME>END_PIick harry
conversation = ""
conversation += tom.send "Could you lower the volume of your music, please?"
conversation += dick.send "Are you serious? The volume is actually very low..."
console.log conversation
|
[
{
"context": " label : 'underline',\n key : 'bold',\n string : 'green',\n number : ",
"end": 4001,
"score": 0.9536123871803284,
"start": 3997,
"tag": "KEY",
"value": "bold"
}
] | src/vows/stylize.coffee | amccollum/ender-vows | 1 | vows.stylizers = stylizers = {}
vows.stylize = (ob) ->
s = new vows.stylizer(ob)
for arg in Array.prototype.slice.call(arguments)[1..]
s.stylize(arg)
return s
vows.format = (str) ->
str = str.replace /`([^`]+)`/g, (_, part) => vows.stylize(part).italic()
str = str.replace /\*([^*]+)\*/g, (_, part) => vows.stylize(part).bold()
str = str.replace /_([^_]+)_/g, (_, str) => vows.stylize(part).underline()
return str
_stack = []
vows.stringify = (obj) ->
len = (obj) ->
if typeof obj != 'object' then 0
else if 'length' of obj then obj.length
else Object.keys(obj).length
typeOf = (value) ->
s = typeof value
types = [Object, Array, String, RegExp, Number, Function, Boolean, Date]
if s == 'object' or s == 'function'
if value?
for type in types
if value instanceof type
s = type.name.toLowerCase()
else
s = 'null'
return s
if obj in _stack
before = _stack.length - _stack.indexOf(obj)
return vows.stylize(('.' for i in [0..before]).join(''), 'special')
_stack.push(obj)
result = switch typeOf(obj)
when 'regexp' then vows.stylize('/' + obj.source + '/', 'regexp')
when 'number' then vows.stylize(obj.toString(), 'number')
when 'boolean' then vows.stylize(obj.toString(), 'boolean')
when 'null' then vows.stylize('null', 'special')
when 'undefined' then vows.stylize('undefined', 'special')
when 'function' then vows.stylize('[Function]', 'function')
when 'date' then vows.stylize(obj.toUTCString(), 'default')
when 'string'
obj = if /'/.test(obj) then "\"#{obj}\"" else "'#{obj}'"
obj = obj.replace(/\\/g, '\\\\')
.replace(/\n/g, '\\n')
.replace(/[\u0001-\u001F]/g, (match) -> '\\0' + match[0].charCodeAt(0).toString(8))
vows.stylize(obj, 'string')
when 'array'
pretty = len(obj) > 4 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stringify(o) for o in obj).join(sep)
if contents then "[#{start}#{contents}#{end}]" else '[]'
when 'object'
pretty = len(obj) > 2 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stylize(k).key() + ': ' + vows.stringify(v) for k, v of obj).join(sep)
if contents then "{#{start}#{contents}#{end}}" else '{}'
_stack.pop()
return result
class stylizers.BaseStylizer
constructor: (ob) -> @str = '' + ob
toString: () -> @str
class stylizers.ConsoleStylizer extends stylizers.BaseStylizer
styles: {
plain : null,
bold : [1, 22],
light : [2, 22], # not widely supported
italic : [3, 23], # not widely supported
underline : [4, 24],
negative : [7, 27],
concealed : [8, 28],
struck : [9, 29],
black : [30, 39],
red : [31, 39],
green : [32, 39],
yellow : [33, 39],
blue : [34, 39],
magenta : [35, 39],
cyan : [36, 39],
white : [37, 39],
grey : [90, 39],
}
mapping: {
success : 'green',
error : 'red',
warning : 'yellow',
pending : 'cyan',
message : 'grey',
result : 'plain',
label : 'underline',
key : 'bold',
string : 'green',
number : 'magenta',
boolean : 'blue',
special : 'grey',
regexp : 'green',
function : 'negative',
comment : 'cyan',
}
for k, v of @::mapping
@::styles[k] = @::styles[v]
for style of @::styles
do (style) =>
@::[style] = () -> @stylize(style)
stylize: (style) ->
@str = "\033[#{@styles[style][0]}m#{@str}\033[#{@styles[style][1]}m" if @styles[style]
return this
class stylizers.HTMLStylizer extends stylizers.BaseStylizer
styles: {
bold : ['b', null],
italic : ['i', null],
underline : ['u', null],
}
divs: [
'success',
'error',
'warning',
'pending',
'result',
'message',
]
spans: [
'label',
'key',
'string',
'number',
'boolean',
'special',
'regexp',
'function',
'comment',
]
for c in @::divs
@::styles[c] = ['div', c]
for c in @::spans
@::styles[c] = ['span', c]
for style of @::styles
do (style) =>
@::[style] = () -> @stylize(style)
stylize: (style) ->
[tagName, className] = @styles[style]
classAttr = if className then " class=\"#{className}\"" else ""
@str = "<#{tagName}#{classAttr}>#{@str}</#{tagName}>"
return this
| 179856 | vows.stylizers = stylizers = {}
vows.stylize = (ob) ->
s = new vows.stylizer(ob)
for arg in Array.prototype.slice.call(arguments)[1..]
s.stylize(arg)
return s
vows.format = (str) ->
str = str.replace /`([^`]+)`/g, (_, part) => vows.stylize(part).italic()
str = str.replace /\*([^*]+)\*/g, (_, part) => vows.stylize(part).bold()
str = str.replace /_([^_]+)_/g, (_, str) => vows.stylize(part).underline()
return str
_stack = []
vows.stringify = (obj) ->
len = (obj) ->
if typeof obj != 'object' then 0
else if 'length' of obj then obj.length
else Object.keys(obj).length
typeOf = (value) ->
s = typeof value
types = [Object, Array, String, RegExp, Number, Function, Boolean, Date]
if s == 'object' or s == 'function'
if value?
for type in types
if value instanceof type
s = type.name.toLowerCase()
else
s = 'null'
return s
if obj in _stack
before = _stack.length - _stack.indexOf(obj)
return vows.stylize(('.' for i in [0..before]).join(''), 'special')
_stack.push(obj)
result = switch typeOf(obj)
when 'regexp' then vows.stylize('/' + obj.source + '/', 'regexp')
when 'number' then vows.stylize(obj.toString(), 'number')
when 'boolean' then vows.stylize(obj.toString(), 'boolean')
when 'null' then vows.stylize('null', 'special')
when 'undefined' then vows.stylize('undefined', 'special')
when 'function' then vows.stylize('[Function]', 'function')
when 'date' then vows.stylize(obj.toUTCString(), 'default')
when 'string'
obj = if /'/.test(obj) then "\"#{obj}\"" else "'#{obj}'"
obj = obj.replace(/\\/g, '\\\\')
.replace(/\n/g, '\\n')
.replace(/[\u0001-\u001F]/g, (match) -> '\\0' + match[0].charCodeAt(0).toString(8))
vows.stylize(obj, 'string')
when 'array'
pretty = len(obj) > 4 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stringify(o) for o in obj).join(sep)
if contents then "[#{start}#{contents}#{end}]" else '[]'
when 'object'
pretty = len(obj) > 2 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stylize(k).key() + ': ' + vows.stringify(v) for k, v of obj).join(sep)
if contents then "{#{start}#{contents}#{end}}" else '{}'
_stack.pop()
return result
class stylizers.BaseStylizer
constructor: (ob) -> @str = '' + ob
toString: () -> @str
class stylizers.ConsoleStylizer extends stylizers.BaseStylizer
styles: {
plain : null,
bold : [1, 22],
light : [2, 22], # not widely supported
italic : [3, 23], # not widely supported
underline : [4, 24],
negative : [7, 27],
concealed : [8, 28],
struck : [9, 29],
black : [30, 39],
red : [31, 39],
green : [32, 39],
yellow : [33, 39],
blue : [34, 39],
magenta : [35, 39],
cyan : [36, 39],
white : [37, 39],
grey : [90, 39],
}
mapping: {
success : 'green',
error : 'red',
warning : 'yellow',
pending : 'cyan',
message : 'grey',
result : 'plain',
label : 'underline',
key : '<KEY>',
string : 'green',
number : 'magenta',
boolean : 'blue',
special : 'grey',
regexp : 'green',
function : 'negative',
comment : 'cyan',
}
for k, v of @::mapping
@::styles[k] = @::styles[v]
for style of @::styles
do (style) =>
@::[style] = () -> @stylize(style)
stylize: (style) ->
@str = "\033[#{@styles[style][0]}m#{@str}\033[#{@styles[style][1]}m" if @styles[style]
return this
class stylizers.HTMLStylizer extends stylizers.BaseStylizer
styles: {
bold : ['b', null],
italic : ['i', null],
underline : ['u', null],
}
divs: [
'success',
'error',
'warning',
'pending',
'result',
'message',
]
spans: [
'label',
'key',
'string',
'number',
'boolean',
'special',
'regexp',
'function',
'comment',
]
for c in @::divs
@::styles[c] = ['div', c]
for c in @::spans
@::styles[c] = ['span', c]
for style of @::styles
do (style) =>
@::[style] = () -> @stylize(style)
stylize: (style) ->
[tagName, className] = @styles[style]
classAttr = if className then " class=\"#{className}\"" else ""
@str = "<#{tagName}#{classAttr}>#{@str}</#{tagName}>"
return this
| true | vows.stylizers = stylizers = {}
vows.stylize = (ob) ->
s = new vows.stylizer(ob)
for arg in Array.prototype.slice.call(arguments)[1..]
s.stylize(arg)
return s
vows.format = (str) ->
str = str.replace /`([^`]+)`/g, (_, part) => vows.stylize(part).italic()
str = str.replace /\*([^*]+)\*/g, (_, part) => vows.stylize(part).bold()
str = str.replace /_([^_]+)_/g, (_, str) => vows.stylize(part).underline()
return str
_stack = []
vows.stringify = (obj) ->
len = (obj) ->
if typeof obj != 'object' then 0
else if 'length' of obj then obj.length
else Object.keys(obj).length
typeOf = (value) ->
s = typeof value
types = [Object, Array, String, RegExp, Number, Function, Boolean, Date]
if s == 'object' or s == 'function'
if value?
for type in types
if value instanceof type
s = type.name.toLowerCase()
else
s = 'null'
return s
if obj in _stack
before = _stack.length - _stack.indexOf(obj)
return vows.stylize(('.' for i in [0..before]).join(''), 'special')
_stack.push(obj)
result = switch typeOf(obj)
when 'regexp' then vows.stylize('/' + obj.source + '/', 'regexp')
when 'number' then vows.stylize(obj.toString(), 'number')
when 'boolean' then vows.stylize(obj.toString(), 'boolean')
when 'null' then vows.stylize('null', 'special')
when 'undefined' then vows.stylize('undefined', 'special')
when 'function' then vows.stylize('[Function]', 'function')
when 'date' then vows.stylize(obj.toUTCString(), 'default')
when 'string'
obj = if /'/.test(obj) then "\"#{obj}\"" else "'#{obj}'"
obj = obj.replace(/\\/g, '\\\\')
.replace(/\n/g, '\\n')
.replace(/[\u0001-\u001F]/g, (match) -> '\\0' + match[0].charCodeAt(0).toString(8))
vows.stylize(obj, 'string')
when 'array'
pretty = len(obj) > 4 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stringify(o) for o in obj).join(sep)
if contents then "[#{start}#{contents}#{end}]" else '[]'
when 'object'
pretty = len(obj) > 2 or (o for o in obj when len(o) > 0).length
start = if pretty then '\n' + (' ' for i in [0..4*_stack.length]).join('') else ' '
end = if pretty then ws.slice(0, -4) else ' '
sep = ",#{start}"
contents = (vows.stylize(k).key() + ': ' + vows.stringify(v) for k, v of obj).join(sep)
if contents then "{#{start}#{contents}#{end}}" else '{}'
_stack.pop()
return result
# Common base for stylizers: captures the value's string form in @str
# so subclasses can decorate it.
class stylizers.BaseStylizer
    constructor: (value) ->
        @str = '' + value
    # The styled result is just the accumulated string.
    toString: ->
        @str
# Stylizer that decorates text with ANSI/VT100 escape sequences for
# terminal output.
class stylizers.ConsoleStylizer extends stylizers.BaseStylizer
    # style name -> [opening SGR code, closing SGR code]
    # (null = leave the text unchanged).
    styles: {
        plain     : null,
        bold      : [1, 22],
        light     : [2, 22],    # not widely supported
        italic    : [3, 23],    # not widely supported
        underline : [4, 24],
        negative  : [7, 27],
        concealed : [8, 28],
        struck    : [9, 29],
        black     : [30, 39],
        red       : [31, 39],
        green     : [32, 39],
        yellow    : [33, 39],
        blue      : [34, 39],
        magenta   : [35, 39],
        cyan      : [36, 39],
        white     : [37, 39],
        grey      : [90, 39],
    }
    # Semantic name -> base style name; resolved into `styles` below.
    mapping: {
        success   : 'green',
        error     : 'red',
        warning   : 'yellow',
        pending   : 'cyan',
        message   : 'grey',
        result    : 'plain',
        label     : 'underline',
        # NOTE(review): this value is a redaction placeholder, not a real
        # style name, so the lookup below resolves to `undefined` and
        # 'key' text renders unstyled.  Original value lost — restore it
        # from upstream history before relying on keyed output styling.
        key       : 'PI:KEY:<KEY>END_PI',
        string    : 'green',
        number    : 'magenta',
        boolean   : 'blue',
        special   : 'grey',
        regexp    : 'green',
        function  : 'negative',
        comment   : 'cyan',
    }
    # Resolve each semantic mapping to its concrete SGR code pair.
    for k, v of @::mapping
        @::styles[k] = @::styles[v]
    # Generate one chainable no-arg method per style (e.g. `.red()`).
    for style of @::styles
        do (style) =>
            @::[style] = () -> @stylize(style)
    # Wrap @str in the style's escape codes; unknown/null styles no-op.
    stylize: (style) ->
        @str = "\033[#{@styles[style][0]}m#{@str}\033[#{@styles[style][1]}m" if @styles[style]
        return this
# Stylizer that wraps text in HTML tags for browser-based output.
class stylizers.HTMLStylizer extends stylizers.BaseStylizer
    # Plain text styles map straight onto formatting tags (no class).
    styles: {
        bold      : ['b', null],
        italic    : ['i', null],
        underline : ['u', null],
    }
    # Block-level semantic styles: rendered as <div class="...">.
    divs: [
        'success'
        'error'
        'warning'
        'pending'
        'result'
        'message'
    ]
    # Inline semantic styles: rendered as <span class="...">.
    spans: [
        'label'
        'key'
        'string'
        'number'
        'boolean'
        'special'
        'regexp'
        'function'
        'comment'
    ]
    # Expand the div/span name lists into [tagName, className] entries.
    @::styles[name] = ['div', name] for name in @::divs
    @::styles[name] = ['span', name] for name in @::spans
    # Generate one chainable no-arg method per style (e.g. `.error()`).
    for styleName of @::styles
        do (styleName) =>
            @::[styleName] = -> @stylize(styleName)
    # Wrap the accumulated string in the tag (and class) for `style`.
    stylize: (style) ->
        [tag, cls] = @styles[style]
        attr = if cls then " class=\"#{cls}\"" else ""
        @str = "<#{tag}#{attr}>#{@str}</#{tag}>"
        this
|
[
{
"context": " b: 6\n name: \"child3\"\n }\n ]\n",
"end": 3294,
"score": 0.6852139234542847,
"start": 3288,
"tag": "NAME",
"value": "child3"
}
] | tree/test-tree.coffee | jneuendorf/js_utils | 1 | describe "Tree", () ->
describe "Tree", () ->
describe "creating a tree", () ->
it "Tree.new (== Tree.fromRecursive), defaults to Tree.newByChildRef", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
expect tree.data.name
.toBe "root"
expect tree.children.length
.toBe 3
expect (node.data.name for node in tree.children)
.toEqual ["child1", "child2", "child3"]
expect tree.children[0].data.a
.toBe 1
expect tree.children[1].children[0].data.name
.toBe "child2-child"
xit "Tree.newByParentRef", () ->
it "magically access properties on the data object (attached to a node)", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
]
}
expect tree.data.name
.toBe tree.name
expect tree.data.a
.toBe tree.a
expect tree.children[0].data.name
.toBe tree.children[0].name
describe "modifying a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "addChild (== appendChild)", () ->
expect @tree.getDescendants().length
.toBe 4
expect @tree.children.length
.toBe 3
@tree.addChild {
newNode: true
}
expect @tree.children.length
.toBe 4
expect @tree.children[3].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 5
@tree.addChild {prop: "asdf"}, 1
expect @tree.children.length
.toBe 5
expect @tree.children[1].prop
.toBe "asdf"
expect @tree.children[4].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 6
it "addChildren (== appendChildren)", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.addChildren(
[
{
name: "new node 1"
}
{
name: "new node 2"
}
]
1
)
expect @tree.children.length
.toBe 5
expect @tree.children[0].name
.toBe "child1"
expect @tree.children[1].name
.toBe "new node 1"
expect @tree.children[2].name
.toBe "new node 2"
expect @tree.children[3].name
.toBe "child2"
expect @tree.children[4].name
.toBe "child3"
expect @tree.getDescendants().length
.toBe 6
it "setChildren", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.setChildren [
{
name: "new child 1"
}
{
name: "new child 2"
}
]
expect @tree.children.length
.toBe 2
expect @tree.children[0].name
.toBe "new child 1"
expect @tree.children[1].name
.toBe "new child 2"
expect @tree.getDescendants().length
.toBe 2
it "moveTo (== appendTo)", () ->
@tree.children[1].children[0].moveTo @tree, 0
expect @tree.children[0].name
.toBe "child2-child"
expect @tree.children[1].children.length
.toBe 0
expect @tree.children.length
.toBe 4
it "remove", () ->
removed = @tree.children[1].remove()
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
expect removed.children[0].name
.toBe "child2-child"
it "removeChild", () ->
@tree.removeChild 1
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
@tree.removeChild @tree.children[0]
expect @tree.children.length
.toBe 1
expect @tree.children[0].name
.toBe "child3"
describe "traversing a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "postorder (== each)", () ->
result = []
@tree.postorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
result = []
@tree.each (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
it "preorder", () ->
result = []
@tree.preorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child2-child", "child3"]
it "inorder", () ->
result = []
@tree.inorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "root", "child2-child", "child2", "child3"]
it "levelorder", () ->
result = []
@tree.levelorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child3", "child2-child"]
describe "getting information about a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "getDepth()", () ->
expect @tree.getDepth()
.toBe 2
@tree.children[1].children[0].remove()
expect @tree.getDepth()
.toBe 1
it "getSize()", () ->
expect @tree.getSize()
.toBe 5
it "getLevel()", () ->
expect @tree.getLevel()
.toBe 0
expect @tree.children[1].getLevel()
.toBe 1
expect @tree.children[1].children[0].getLevel()
.toBe 2
@tree.children[1].children[0].addChild {
name: "child2-child-child"
}
expect @tree.children[1].children[0].children[0].getLevel()
.toBe 3
it "getRoot()", () ->
expect @tree.getRoot()
.toBe @tree
expect @tree.children[1].getRoot()
.toBe @tree
expect @tree.children[1].children[0].getRoot()
.toBe @tree
it "hasNode", () ->
expect @tree.hasNode(@tree)
.toBe true
expect @tree.hasNode(@tree.children[0])
.toBe true
expect @tree.hasNode(@tree.children[1])
.toBe true
expect @tree.hasNode(@tree.children[1].children[0])
.toBe true
expect @tree.hasNode(@tree.children[2])
.toBe true
expect @tree.hasNode(JSUtils.Tree.new())
.toBe false
it "findNode (== findDescendant)", () ->
expect @tree.findNode (node) -> node.name is "child2"
.toBe @tree.children[1]
it "findNodes (== findDescendants)", () ->
nodes = @tree.findNodes (node) ->
return node.name.length > 4
nodes.sort (a, b) ->
return a - b
names = (node.name for node in nodes)
expect names
.toEqual ["child1", "child2", "child3", "child2-child"]
it "getLeaves", () ->
expect (leaf.name for leaf in @tree.getLeaves())
.toEqual ["child1", "child2-child", "child3"]
it "isLeaf", () ->
leaves = @tree.getLeaves()
for leaf in leaves
expect leaf.isLeaf()
.toBe true
expect @tree.children[1].isLeaf()
.toBe false
it "getSiblings", () ->
# @tree.children[1].addChild {name: "child2-child2"}
expect @tree.children[1].children[0].getSiblings()
.toEqual []
expect (node.name for node in @tree.children[1].getSiblings())
.toEqual ["child1", "child3"]
expect @tree.getSiblings()
.toEqual []
it "getLevelSiblings", () ->
# subset -> siblings with same parent
expect (node.name for node in @tree.children[1].getLevelSiblings())
.toEqual ["child1", "child3"]
# siblings with different parents
@tree.children[0].addChild {name: "child1-child"}
expect @tree.children[0].children[0].getLevelSiblings()[0].name
.toBe "child2-child"
it "getParent", () ->
expect @tree.children[0].getParent()
.toBe @tree.children[0].parent
it "getChildren", () ->
expect @tree.getChildren()
.toBe @tree.children
it "pathToRoot", () ->
expect (node.name for node in @tree.pathToRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathToRoot())
.toEqual ["child1", "root"]
expect (node.name for node in @tree.children[1].children[0].pathToRoot())
.toEqual ["child2-child", "child2", "root"]
it "pathFromRoot", () ->
expect (node.name for node in @tree.pathFromRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathFromRoot())
.toEqual ["root", "child1"]
expect (node.name for node in @tree.children[1].children[0].pathFromRoot())
.toEqual ["root", "child2", "child2-child"]
it "closest", () ->
expect @tree.closest(() -> "something that doesn't matter because there is no parent anyway")
.toEqual null
expect @tree.children[1].children[0].closest((node) -> return node.name is "root")
.toEqual @tree
expect @tree.children[1].children[0].closest((node) -> return node.name is "child2")
.toEqual @tree.children[1]
expect @tree.children[1].children[0].closest((node) -> return node.name is "child3")
.toEqual null
describe "converting a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new({
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
})
it "serialize (== toObject)", () ->
expect @tree.serialize (node, children) -> $.extend({children}, node.data)
.toEqual {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "deserialize (a.k.a. in-place constructing)", () ->
@tree.deserialize {
a: 10
name: "new root"
children: [
{
a: 1
name: "new child1"
children: []
}
{
name: "new child2"
children: [
{
name: "new child2-child"
children: []
}
]
}
]
}
expect @tree.name
.toBe "new root"
for child, idx in @tree.children
expect child.name
.toBe "new child#{idx + 1}"
expect child.parent
.toBe @tree
expect @tree.children[1].children[0].name
.toBe "new child2-child"
expect @tree.children[1].children[0].parent
.toBe @tree.children[1]
| 222038 | describe "Tree", () ->
describe "Tree", () ->
describe "creating a tree", () ->
it "Tree.new (== Tree.fromRecursive), defaults to Tree.newByChildRef", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
expect tree.data.name
.toBe "root"
expect tree.children.length
.toBe 3
expect (node.data.name for node in tree.children)
.toEqual ["child1", "child2", "child3"]
expect tree.children[0].data.a
.toBe 1
expect tree.children[1].children[0].data.name
.toBe "child2-child"
xit "Tree.newByParentRef", () ->
it "magically access properties on the data object (attached to a node)", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
]
}
expect tree.data.name
.toBe tree.name
expect tree.data.a
.toBe tree.a
expect tree.children[0].data.name
.toBe tree.children[0].name
describe "modifying a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "<NAME>"
}
]
}
it "addChild (== appendChild)", () ->
expect @tree.getDescendants().length
.toBe 4
expect @tree.children.length
.toBe 3
@tree.addChild {
newNode: true
}
expect @tree.children.length
.toBe 4
expect @tree.children[3].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 5
@tree.addChild {prop: "asdf"}, 1
expect @tree.children.length
.toBe 5
expect @tree.children[1].prop
.toBe "asdf"
expect @tree.children[4].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 6
it "addChildren (== appendChildren)", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.addChildren(
[
{
name: "new node 1"
}
{
name: "new node 2"
}
]
1
)
expect @tree.children.length
.toBe 5
expect @tree.children[0].name
.toBe "child1"
expect @tree.children[1].name
.toBe "new node 1"
expect @tree.children[2].name
.toBe "new node 2"
expect @tree.children[3].name
.toBe "child2"
expect @tree.children[4].name
.toBe "child3"
expect @tree.getDescendants().length
.toBe 6
it "setChildren", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.setChildren [
{
name: "new child 1"
}
{
name: "new child 2"
}
]
expect @tree.children.length
.toBe 2
expect @tree.children[0].name
.toBe "new child 1"
expect @tree.children[1].name
.toBe "new child 2"
expect @tree.getDescendants().length
.toBe 2
it "moveTo (== appendTo)", () ->
@tree.children[1].children[0].moveTo @tree, 0
expect @tree.children[0].name
.toBe "child2-child"
expect @tree.children[1].children.length
.toBe 0
expect @tree.children.length
.toBe 4
it "remove", () ->
removed = @tree.children[1].remove()
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
expect removed.children[0].name
.toBe "child2-child"
it "removeChild", () ->
@tree.removeChild 1
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
@tree.removeChild @tree.children[0]
expect @tree.children.length
.toBe 1
expect @tree.children[0].name
.toBe "child3"
describe "traversing a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "postorder (== each)", () ->
result = []
@tree.postorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
result = []
@tree.each (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
it "preorder", () ->
result = []
@tree.preorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child2-child", "child3"]
it "inorder", () ->
result = []
@tree.inorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "root", "child2-child", "child2", "child3"]
it "levelorder", () ->
result = []
@tree.levelorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child3", "child2-child"]
describe "getting information about a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "getDepth()", () ->
expect @tree.getDepth()
.toBe 2
@tree.children[1].children[0].remove()
expect @tree.getDepth()
.toBe 1
it "getSize()", () ->
expect @tree.getSize()
.toBe 5
it "getLevel()", () ->
expect @tree.getLevel()
.toBe 0
expect @tree.children[1].getLevel()
.toBe 1
expect @tree.children[1].children[0].getLevel()
.toBe 2
@tree.children[1].children[0].addChild {
name: "child2-child-child"
}
expect @tree.children[1].children[0].children[0].getLevel()
.toBe 3
it "getRoot()", () ->
expect @tree.getRoot()
.toBe @tree
expect @tree.children[1].getRoot()
.toBe @tree
expect @tree.children[1].children[0].getRoot()
.toBe @tree
it "hasNode", () ->
expect @tree.hasNode(@tree)
.toBe true
expect @tree.hasNode(@tree.children[0])
.toBe true
expect @tree.hasNode(@tree.children[1])
.toBe true
expect @tree.hasNode(@tree.children[1].children[0])
.toBe true
expect @tree.hasNode(@tree.children[2])
.toBe true
expect @tree.hasNode(JSUtils.Tree.new())
.toBe false
it "findNode (== findDescendant)", () ->
expect @tree.findNode (node) -> node.name is "child2"
.toBe @tree.children[1]
it "findNodes (== findDescendants)", () ->
nodes = @tree.findNodes (node) ->
return node.name.length > 4
nodes.sort (a, b) ->
return a - b
names = (node.name for node in nodes)
expect names
.toEqual ["child1", "child2", "child3", "child2-child"]
it "getLeaves", () ->
expect (leaf.name for leaf in @tree.getLeaves())
.toEqual ["child1", "child2-child", "child3"]
it "isLeaf", () ->
leaves = @tree.getLeaves()
for leaf in leaves
expect leaf.isLeaf()
.toBe true
expect @tree.children[1].isLeaf()
.toBe false
it "getSiblings", () ->
# @tree.children[1].addChild {name: "child2-child2"}
expect @tree.children[1].children[0].getSiblings()
.toEqual []
expect (node.name for node in @tree.children[1].getSiblings())
.toEqual ["child1", "child3"]
expect @tree.getSiblings()
.toEqual []
it "getLevelSiblings", () ->
# subset -> siblings with same parent
expect (node.name for node in @tree.children[1].getLevelSiblings())
.toEqual ["child1", "child3"]
# siblings with different parents
@tree.children[0].addChild {name: "child1-child"}
expect @tree.children[0].children[0].getLevelSiblings()[0].name
.toBe "child2-child"
it "getParent", () ->
expect @tree.children[0].getParent()
.toBe @tree.children[0].parent
it "getChildren", () ->
expect @tree.getChildren()
.toBe @tree.children
it "pathToRoot", () ->
expect (node.name for node in @tree.pathToRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathToRoot())
.toEqual ["child1", "root"]
expect (node.name for node in @tree.children[1].children[0].pathToRoot())
.toEqual ["child2-child", "child2", "root"]
it "pathFromRoot", () ->
expect (node.name for node in @tree.pathFromRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathFromRoot())
.toEqual ["root", "child1"]
expect (node.name for node in @tree.children[1].children[0].pathFromRoot())
.toEqual ["root", "child2", "child2-child"]
it "closest", () ->
expect @tree.closest(() -> "something that doesn't matter because there is no parent anyway")
.toEqual null
expect @tree.children[1].children[0].closest((node) -> return node.name is "root")
.toEqual @tree
expect @tree.children[1].children[0].closest((node) -> return node.name is "child2")
.toEqual @tree.children[1]
expect @tree.children[1].children[0].closest((node) -> return node.name is "child3")
.toEqual null
describe "converting a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new({
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
})
it "serialize (== toObject)", () ->
expect @tree.serialize (node, children) -> $.extend({children}, node.data)
.toEqual {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "deserialize (a.k.a. in-place constructing)", () ->
@tree.deserialize {
a: 10
name: "new root"
children: [
{
a: 1
name: "new child1"
children: []
}
{
name: "new child2"
children: [
{
name: "new child2-child"
children: []
}
]
}
]
}
expect @tree.name
.toBe "new root"
for child, idx in @tree.children
expect child.name
.toBe "new child#{idx + 1}"
expect child.parent
.toBe @tree
expect @tree.children[1].children[0].name
.toBe "new child2-child"
expect @tree.children[1].children[0].parent
.toBe @tree.children[1]
| true | describe "Tree", () ->
describe "Tree", () ->
describe "creating a tree", () ->
it "Tree.new (== Tree.fromRecursive), defaults to Tree.newByChildRef", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
expect tree.data.name
.toBe "root"
expect tree.children.length
.toBe 3
expect (node.data.name for node in tree.children)
.toEqual ["child1", "child2", "child3"]
expect tree.children[0].data.a
.toBe 1
expect tree.children[1].children[0].data.name
.toBe "child2-child"
xit "Tree.newByParentRef", () ->
it "magically access properties on the data object (attached to a node)", () ->
tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
]
}
expect tree.data.name
.toBe tree.name
expect tree.data.a
.toBe tree.a
expect tree.children[0].data.name
.toBe tree.children[0].name
describe "modifying a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "PI:NAME:<NAME>END_PI"
}
]
}
it "addChild (== appendChild)", () ->
expect @tree.getDescendants().length
.toBe 4
expect @tree.children.length
.toBe 3
@tree.addChild {
newNode: true
}
expect @tree.children.length
.toBe 4
expect @tree.children[3].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 5
@tree.addChild {prop: "asdf"}, 1
expect @tree.children.length
.toBe 5
expect @tree.children[1].prop
.toBe "asdf"
expect @tree.children[4].newNode
.toBe true
expect @tree.getDescendants().length
.toBe 6
it "addChildren (== appendChildren)", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.addChildren(
[
{
name: "new node 1"
}
{
name: "new node 2"
}
]
1
)
expect @tree.children.length
.toBe 5
expect @tree.children[0].name
.toBe "child1"
expect @tree.children[1].name
.toBe "new node 1"
expect @tree.children[2].name
.toBe "new node 2"
expect @tree.children[3].name
.toBe "child2"
expect @tree.children[4].name
.toBe "child3"
expect @tree.getDescendants().length
.toBe 6
it "setChildren", () ->
expect @tree.children.length
.toBe 3
expect @tree.getDescendants().length
.toBe 4
@tree.setChildren [
{
name: "new child 1"
}
{
name: "new child 2"
}
]
expect @tree.children.length
.toBe 2
expect @tree.children[0].name
.toBe "new child 1"
expect @tree.children[1].name
.toBe "new child 2"
expect @tree.getDescendants().length
.toBe 2
it "moveTo (== appendTo)", () ->
@tree.children[1].children[0].moveTo @tree, 0
expect @tree.children[0].name
.toBe "child2-child"
expect @tree.children[1].children.length
.toBe 0
expect @tree.children.length
.toBe 4
it "remove", () ->
removed = @tree.children[1].remove()
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
expect removed.children[0].name
.toBe "child2-child"
it "removeChild", () ->
@tree.removeChild 1
expect @tree.children.length
.toBe 2
expect @tree.children[1].name
.toBe "child3"
@tree.removeChild @tree.children[0]
expect @tree.children.length
.toBe 1
expect @tree.children[0].name
.toBe "child3"
describe "traversing a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "postorder (== each)", () ->
result = []
@tree.postorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
result = []
@tree.each (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "child2-child", "child2", "child3", "root"]
it "preorder", () ->
result = []
@tree.preorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child2-child", "child3"]
it "inorder", () ->
result = []
@tree.inorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["child1", "root", "child2-child", "child2", "child3"]
it "levelorder", () ->
result = []
@tree.levelorder (node, relativeLevel, index) ->
result.push node.name
expect result
.toEqual ["root", "child1", "child2", "child3", "child2-child"]
describe "getting information about a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "getDepth()", () ->
expect @tree.getDepth()
.toBe 2
@tree.children[1].children[0].remove()
expect @tree.getDepth()
.toBe 1
it "getSize()", () ->
expect @tree.getSize()
.toBe 5
it "getLevel()", () ->
expect @tree.getLevel()
.toBe 0
expect @tree.children[1].getLevel()
.toBe 1
expect @tree.children[1].children[0].getLevel()
.toBe 2
@tree.children[1].children[0].addChild {
name: "child2-child-child"
}
expect @tree.children[1].children[0].children[0].getLevel()
.toBe 3
it "getRoot()", () ->
expect @tree.getRoot()
.toBe @tree
expect @tree.children[1].getRoot()
.toBe @tree
expect @tree.children[1].children[0].getRoot()
.toBe @tree
it "hasNode", () ->
expect @tree.hasNode(@tree)
.toBe true
expect @tree.hasNode(@tree.children[0])
.toBe true
expect @tree.hasNode(@tree.children[1])
.toBe true
expect @tree.hasNode(@tree.children[1].children[0])
.toBe true
expect @tree.hasNode(@tree.children[2])
.toBe true
expect @tree.hasNode(JSUtils.Tree.new())
.toBe false
it "findNode (== findDescendant)", () ->
expect @tree.findNode (node) -> node.name is "child2"
.toBe @tree.children[1]
it "findNodes (== findDescendants)", () ->
nodes = @tree.findNodes (node) ->
return node.name.length > 4
nodes.sort (a, b) ->
return a - b
names = (node.name for node in nodes)
expect names
.toEqual ["child1", "child2", "child3", "child2-child"]
it "getLeaves", () ->
expect (leaf.name for leaf in @tree.getLeaves())
.toEqual ["child1", "child2-child", "child3"]
it "isLeaf", () ->
leaves = @tree.getLeaves()
for leaf in leaves
expect leaf.isLeaf()
.toBe true
expect @tree.children[1].isLeaf()
.toBe false
it "getSiblings", () ->
# @tree.children[1].addChild {name: "child2-child2"}
expect @tree.children[1].children[0].getSiblings()
.toEqual []
expect (node.name for node in @tree.children[1].getSiblings())
.toEqual ["child1", "child3"]
expect @tree.getSiblings()
.toEqual []
it "getLevelSiblings", () ->
# subset -> siblings with same parent
expect (node.name for node in @tree.children[1].getLevelSiblings())
.toEqual ["child1", "child3"]
# siblings with different parents
@tree.children[0].addChild {name: "child1-child"}
expect @tree.children[0].children[0].getLevelSiblings()[0].name
.toBe "child2-child"
it "getParent", () ->
expect @tree.children[0].getParent()
.toBe @tree.children[0].parent
it "getChildren", () ->
expect @tree.getChildren()
.toBe @tree.children
it "pathToRoot", () ->
expect (node.name for node in @tree.pathToRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathToRoot())
.toEqual ["child1", "root"]
expect (node.name for node in @tree.children[1].children[0].pathToRoot())
.toEqual ["child2-child", "child2", "root"]
it "pathFromRoot", () ->
expect (node.name for node in @tree.pathFromRoot())
.toEqual ["root"]
expect (node.name for node in @tree.children[0].pathFromRoot())
.toEqual ["root", "child1"]
expect (node.name for node in @tree.children[1].children[0].pathFromRoot())
.toEqual ["root", "child2", "child2-child"]
it "closest", () ->
expect @tree.closest(() -> "something that doesn't matter because there is no parent anyway")
.toEqual null
expect @tree.children[1].children[0].closest((node) -> return node.name is "root")
.toEqual @tree
expect @tree.children[1].children[0].closest((node) -> return node.name is "child2")
.toEqual @tree.children[1]
expect @tree.children[1].children[0].closest((node) -> return node.name is "child3")
.toEqual null
describe "converting a tree", () ->
beforeEach () ->
@tree = JSUtils.Tree.new({
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
})
it "serialize (== toObject)", () ->
expect @tree.serialize (node, children) -> $.extend({children}, node.data)
.toEqual {
a: 10
b: 20
name: "root"
children: [
{
a: 1
b: 2
name: "child1"
}
{
a: 3
b: 4
name: "child2"
children: [
{
a: 0
b: 0
name: "child2-child"
}
]
}
{
a: 5
b: 6
name: "child3"
}
]
}
it "deserialize (a.k.a. in-place constructing)", () ->
@tree.deserialize {
a: 10
name: "new root"
children: [
{
a: 1
name: "new child1"
children: []
}
{
name: "new child2"
children: [
{
name: "new child2-child"
children: []
}
]
}
]
}
expect @tree.name
.toBe "new root"
for child, idx in @tree.children
expect child.name
.toBe "new child#{idx + 1}"
expect child.parent
.toBe @tree
expect @tree.children[1].children[0].name
.toBe "new child2-child"
expect @tree.children[1].children[0].parent
.toBe @tree.children[1]
|
[
{
"context": " Console Player)\n\n critters:\n name : \"critters\"\n animated : false\n height : 80\n ",
"end": 5880,
"score": 0.9989222884178162,
"start": 5876,
"tag": "NAME",
"value": "crit"
},
{
"context": "sole Player)\n\n critters:\n name : \"critters\"\n animated : false\n height : 80\n ",
"end": 5884,
"score": 0.7478612661361694,
"start": 5880,
"tag": "NAME",
"value": "ters"
}
] | app-src/config.coffee | hyperbotic/crowdgame-trivially | 0 | # ==================================================================================================================
Hy.Config =
AppId: "??"
platformAndroid: false
Production: false
kMaxRemotePlayers : 11
Dynamics:
panicAnswerTime: 3
revealAnswerTime: 3
maxNumQuestions: 50 # Max number of questions that can be played at a time
Version:
copyright: "??"
Console:
kConsoleMajorVersion : 2
kConsoleMinorVersion : 6
kConsoleMinor2Version : 5
Remote:
kMinRemoteMajorVersion : 1
kMinRemoteMinorVersion : 0
isiOS4Plus: ()->
result = false
# add iphone specific tests
if Ti.Platform.name is 'iPhone OS'
version = Ti.Platform.version.split "."
major = parseInt(version[0])
# can only test this support on a 3.2+ device
result = major >= 4
result
Bonjour:
serviceType: '_cg_trivially._tcp'
domain: 'local.'
port: 40401
mode: Ti.Network.READ_WRITE_MODE
hostName: Ti.Network.INADDR_ANY
Commerce:
kFAKEPURCHASED : true # 2.7, 2017-06-10
kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
kReceiptTimeout : 60 * 1000
kPurchaseTimeout : 2 * 60 * 1000
kRestoreTimeout : 30 * 1000
StoreKit:
kUseSandbox : false
kVerifyReceipt: true
PlayerNetwork:
kHelpPage : "??"
ActivityMonitor:
kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
# This is set to a value that's more than
# double the interval that clients are actually sending pings at, so that a client can
# miss a ping but still be counted as "active"
#
kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
# We set it to greater than 4 ping cycles.
#
RunsInOwnThread: false # Whether player network runs in its own thread.
HTTPServerRunsInOwnThread: true # Whether or not the HTTP Server runs in its own thread. If false, runs in the same
# thread as PlayerNetwork.
NetworkService:
kQueueImmediateInterval : 1 * 1000
kQueueBackgroundInterval : 10 * 1000
kDefaultEventTimeout : 20 * 1000 # changed from 10 to 20 for 2.5.0
Rendezvous:
URL : "??"
URLDisplayName : "?"
API : "??"
MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
Update:
kUpdateBaseURL : "??"
# Changed protocol for naming the update manifest, as of 2.3:
# Now there's one manifest per shipped version of the app
#
kUpdateCheckInterval : 10*60*1000 # 10 minutes - changed for 2.0
kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
Trace:
messagesOn : false
memoryLogging : false
uiTrace : false
# HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
Content:
kContentMajorVersionSupported : "003"
kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
kUsageDatabaseVersion : "001"
# This is the "documents" directory
kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kInventoryInterval : 60 * 1000
kInventoryTimeout : 30 * 1000
kContentPackMaxNameLength : 50
kContentPackMaxLongDescriptionLength : 175
kContentPackMaxIconSpecLength : 30
kContentPackMaxQuestionLength : 120
kContentPackMaxAnswerLength : 55
kContentPackMaxAuthorVersionInfoLength : 10
kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
kContentPackWithHeaderMaxNumHeaderProps : 20
kThirdPartyContentPackMinNumRecords : 5
kThirdPartyContentPackMaxNumRecords : 200
kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
kHelpPage : "??"
kContentPackMaxBytes : -1
kThirdPartyContentPackMaxBytes : 1000 * 1024 # 100k
kThirdPartyContentBuyText: "buy"
kThirdPartyContentNewText: "new"
kThirdPartyContentInfoText: "info"
Analytics:
active : true
Namespace : "Hy.Analytics"
Version : "1.0"
Google:
accountID : "??"
Support:
email : "??"
contactUs : "??"
Avatars:
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "assets/avatars"
kMaxNumAvatars : 12 # kMaxRemotePlayers + 1 (for Console Player)
critters:
name : "critters"
animated : false
height : 80
width : 80
padding : 1
gameshow:
name : "gameshow"
animated : true
height : 115
width : 80
padding : 5
UI:
kTouchAndHoldDuration: 900
kTouchAndHoldDurationStarting : 300
kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
Hy.Config.Update.kUpdateFilename = "trivially-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
if not Hy.Config.Production
Hy.Config.Trace.messagesOn = true
Hy.Config.Trace.memoryLogging = true
Hy.Config.Trace.uiTrace = true
Hy.Config.Commerce.StoreKit.kUseSandbox = true
Hy.Config.Update.kUpdateCheckInterval = 3 * 60 * 1000
Hy.Config.Analytics.Namespace = "Hy.Analytics"
Hy.Config.Analytics.Version = "1.0"
Hy.Config.PlayerNetwork.HTTPServerRunsInOwnThread = false
Hy.Config.PlayerNetwork.RunsInOwnThread = false
# Really important to unset this prior to shipping
Hy.Config.Commerce.kFAKEPURCHASED = true
| 201808 | # ==================================================================================================================
Hy.Config =
AppId: "??"
platformAndroid: false
Production: false
kMaxRemotePlayers : 11
Dynamics:
panicAnswerTime: 3
revealAnswerTime: 3
maxNumQuestions: 50 # Max number of questions that can be played at a time
Version:
copyright: "??"
Console:
kConsoleMajorVersion : 2
kConsoleMinorVersion : 6
kConsoleMinor2Version : 5
Remote:
kMinRemoteMajorVersion : 1
kMinRemoteMinorVersion : 0
isiOS4Plus: ()->
result = false
# add iphone specific tests
if Ti.Platform.name is 'iPhone OS'
version = Ti.Platform.version.split "."
major = parseInt(version[0])
# can only test this support on a 3.2+ device
result = major >= 4
result
Bonjour:
serviceType: '_cg_trivially._tcp'
domain: 'local.'
port: 40401
mode: Ti.Network.READ_WRITE_MODE
hostName: Ti.Network.INADDR_ANY
Commerce:
kFAKEPURCHASED : true # 2.7, 2017-06-10
kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
kReceiptTimeout : 60 * 1000
kPurchaseTimeout : 2 * 60 * 1000
kRestoreTimeout : 30 * 1000
StoreKit:
kUseSandbox : false
kVerifyReceipt: true
PlayerNetwork:
kHelpPage : "??"
ActivityMonitor:
kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
# This is set to a value that's more than
# double the interval that clients are actually sending pings at, so that a client can
# miss a ping but still be counted as "active"
#
kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
# We set it to greater than 4 ping cycles.
#
RunsInOwnThread: false # Whether player network runs in its own thread.
HTTPServerRunsInOwnThread: true # Whether or not the HTTP Server runs in its own thread. If false, runs in the same
# thread as PlayerNetwork.
NetworkService:
kQueueImmediateInterval : 1 * 1000
kQueueBackgroundInterval : 10 * 1000
kDefaultEventTimeout : 20 * 1000 # changed from 10 to 20 for 2.5.0
Rendezvous:
URL : "??"
URLDisplayName : "?"
API : "??"
MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
Update:
kUpdateBaseURL : "??"
# Changed protocol for naming the update manifest, as of 2.3:
# Now there's one manifest per shipped version of the app
#
kUpdateCheckInterval : 10*60*1000 # 10 minutes - changed for 2.0
kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
Trace:
messagesOn : false
memoryLogging : false
uiTrace : false
# HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
Content:
kContentMajorVersionSupported : "003"
kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
kUsageDatabaseVersion : "001"
# This is the "documents" directory
kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kInventoryInterval : 60 * 1000
kInventoryTimeout : 30 * 1000
kContentPackMaxNameLength : 50
kContentPackMaxLongDescriptionLength : 175
kContentPackMaxIconSpecLength : 30
kContentPackMaxQuestionLength : 120
kContentPackMaxAnswerLength : 55
kContentPackMaxAuthorVersionInfoLength : 10
kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
kContentPackWithHeaderMaxNumHeaderProps : 20
kThirdPartyContentPackMinNumRecords : 5
kThirdPartyContentPackMaxNumRecords : 200
kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
kHelpPage : "??"
kContentPackMaxBytes : -1
kThirdPartyContentPackMaxBytes : 1000 * 1024 # 100k
kThirdPartyContentBuyText: "buy"
kThirdPartyContentNewText: "new"
kThirdPartyContentInfoText: "info"
Analytics:
active : true
Namespace : "Hy.Analytics"
Version : "1.0"
Google:
accountID : "??"
Support:
email : "??"
contactUs : "??"
Avatars:
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "assets/avatars"
kMaxNumAvatars : 12 # kMaxRemotePlayers + 1 (for Console Player)
critters:
name : "<NAME> <NAME>"
animated : false
height : 80
width : 80
padding : 1
gameshow:
name : "gameshow"
animated : true
height : 115
width : 80
padding : 5
UI:
kTouchAndHoldDuration: 900
kTouchAndHoldDurationStarting : 300
kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
Hy.Config.Update.kUpdateFilename = "trivially-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
if not Hy.Config.Production
Hy.Config.Trace.messagesOn = true
Hy.Config.Trace.memoryLogging = true
Hy.Config.Trace.uiTrace = true
Hy.Config.Commerce.StoreKit.kUseSandbox = true
Hy.Config.Update.kUpdateCheckInterval = 3 * 60 * 1000
Hy.Config.Analytics.Namespace = "Hy.Analytics"
Hy.Config.Analytics.Version = "1.0"
Hy.Config.PlayerNetwork.HTTPServerRunsInOwnThread = false
Hy.Config.PlayerNetwork.RunsInOwnThread = false
# Really important to unset this prior to shipping
Hy.Config.Commerce.kFAKEPURCHASED = true
| true | # ==================================================================================================================
Hy.Config =
AppId: "??"
platformAndroid: false
Production: false
kMaxRemotePlayers : 11
Dynamics:
panicAnswerTime: 3
revealAnswerTime: 3
maxNumQuestions: 50 # Max number of questions that can be played at a time
Version:
copyright: "??"
Console:
kConsoleMajorVersion : 2
kConsoleMinorVersion : 6
kConsoleMinor2Version : 5
Remote:
kMinRemoteMajorVersion : 1
kMinRemoteMinorVersion : 0
isiOS4Plus: ()->
result = false
# add iphone specific tests
if Ti.Platform.name is 'iPhone OS'
version = Ti.Platform.version.split "."
major = parseInt(version[0])
# can only test this support on a 3.2+ device
result = major >= 4
result
Bonjour:
serviceType: '_cg_trivially._tcp'
domain: 'local.'
port: 40401
mode: Ti.Network.READ_WRITE_MODE
hostName: Ti.Network.INADDR_ANY
Commerce:
kFAKEPURCHASED : true # 2.7, 2017-06-10
kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
kReceiptTimeout : 60 * 1000
kPurchaseTimeout : 2 * 60 * 1000
kRestoreTimeout : 30 * 1000
StoreKit:
kUseSandbox : false
kVerifyReceipt: true
PlayerNetwork:
kHelpPage : "??"
ActivityMonitor:
kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
# This is set to a value that's more than
# double the interval that clients are actually sending pings at, so that a client can
# miss a ping but still be counted as "active"
#
kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
# We set it to greater than 4 ping cycles.
#
RunsInOwnThread: false # Whether player network runs in its own thread.
HTTPServerRunsInOwnThread: true # Whether or not the HTTP Server runs in its own thread. If false, runs in the same
# thread as PlayerNetwork.
NetworkService:
kQueueImmediateInterval : 1 * 1000
kQueueBackgroundInterval : 10 * 1000
kDefaultEventTimeout : 20 * 1000 # changed from 10 to 20 for 2.5.0
Rendezvous:
URL : "??"
URLDisplayName : "?"
API : "??"
MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
Update:
kUpdateBaseURL : "??"
# Changed protocol for naming the update manifest, as of 2.3:
# Now there's one manifest per shipped version of the app
#
kUpdateCheckInterval : 10*60*1000 # 10 minutes - changed for 2.0
kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
Trace:
messagesOn : false
memoryLogging : false
uiTrace : false
# HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
Content:
kContentMajorVersionSupported : "003"
kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
kUsageDatabaseVersion : "001"
# This is the "documents" directory
kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kInventoryInterval : 60 * 1000
kInventoryTimeout : 30 * 1000
kContentPackMaxNameLength : 50
kContentPackMaxLongDescriptionLength : 175
kContentPackMaxIconSpecLength : 30
kContentPackMaxQuestionLength : 120
kContentPackMaxAnswerLength : 55
kContentPackMaxAuthorVersionInfoLength : 10
kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
kContentPackWithHeaderMaxNumHeaderProps : 20
kThirdPartyContentPackMinNumRecords : 5
kThirdPartyContentPackMaxNumRecords : 200
kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
kHelpPage : "??"
kContentPackMaxBytes : -1
kThirdPartyContentPackMaxBytes : 1000 * 1024 # 100k
kThirdPartyContentBuyText: "buy"
kThirdPartyContentNewText: "new"
kThirdPartyContentInfoText: "info"
Analytics:
active : true
Namespace : "Hy.Analytics"
Version : "1.0"
Google:
accountID : "??"
Support:
email : "??"
contactUs : "??"
Avatars:
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "assets/avatars"
kMaxNumAvatars : 12 # kMaxRemotePlayers + 1 (for Console Player)
critters:
name : "PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI"
animated : false
height : 80
width : 80
padding : 1
gameshow:
name : "gameshow"
animated : true
height : 115
width : 80
padding : 5
UI:
kTouchAndHoldDuration: 900
kTouchAndHoldDurationStarting : 300
kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
Hy.Config.Update.kUpdateFilename = "trivially-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
if not Hy.Config.Production
Hy.Config.Trace.messagesOn = true
Hy.Config.Trace.memoryLogging = true
Hy.Config.Trace.uiTrace = true
Hy.Config.Commerce.StoreKit.kUseSandbox = true
Hy.Config.Update.kUpdateCheckInterval = 3 * 60 * 1000
Hy.Config.Analytics.Namespace = "Hy.Analytics"
Hy.Config.Analytics.Version = "1.0"
Hy.Config.PlayerNetwork.HTTPServerRunsInOwnThread = false
Hy.Config.PlayerNetwork.RunsInOwnThread = false
# Really important to unset this prior to shipping
Hy.Config.Commerce.kFAKEPURCHASED = true
|
[
{
"context": "cribed in the README:\n # https://github.com/AppGyver/PushNotifications/blob/master/README.md\n #\n # N",
"end": 3167,
"score": 0.9942229390144348,
"start": 3159,
"tag": "USERNAME",
"value": "AppGyver"
},
{
"context": "opers.google.com/project/composer-2\n # using richard.anderson@appgyver.com\n #\n # N.B. You must restart Scanner app if you ",
"end": 3401,
"score": 0.9999056458473206,
"start": 3372,
"tag": "EMAIL",
"value": "richard.anderson@appgyver.com"
},
{
"context": "navigator.contacts.create {\n \"displayName\": \"Dolan Duck\"\n \"nickname\": \"Dolan Duck\"\n }\n myConta",
"end": 13123,
"score": 0.9995828866958618,
"start": 13113,
"tag": "NAME",
"value": "Dolan Duck"
},
{
"context": " \"displayName\": \"Dolan Duck\"\n \"nickname\": \"Dolan Duck\"\n }\n myContact.note = \"GOOBY PLZ\"\n\n name",
"end": 13154,
"score": 0.9964167475700378,
"start": 13144,
"tag": "NAME",
"value": "Dolan Duck"
},
{
"context": "ckname\": \"Dolan Duck\"\n }\n myContact.note = \"GOOBY PLZ\"\n\n name = new ContactName()\n name.givenName",
"end": 13193,
"score": 0.8407835960388184,
"start": 13184,
"tag": "NAME",
"value": "GOOBY PLZ"
},
{
"context": " name = new ContactName()\n name.givenName = \"Dolan\"\n name.familyName = \"Duck\"\n myContact.name ",
"end": 13252,
"score": 0.999739944934845,
"start": 13247,
"tag": "NAME",
"value": "Dolan"
},
{
"context": " name.givenName = \"Dolan\"\n name.familyName = \"Duck\"\n myContact.name = name\n\n myContact.save co",
"end": 13281,
"score": 0.9935597777366638,
"start": 13277,
"tag": "NAME",
"value": "Duck"
},
{
"context": " contactError.code\n\n\n # find all contacts with 'Dolan' in any name field\n @contactsFindTest = () ->\n ",
"end": 13682,
"score": 0.5987818837165833,
"start": 13678,
"tag": "NAME",
"value": "olan"
},
{
"context": "confirm \"Hello world!\", null, \"Cordova confirm\", \"Regards, Uncle, Dolan\"\n\n @promptTest = () ->\n nav",
"end": 25896,
"score": 0.6995733380317688,
"start": 25893,
"tag": "NAME",
"value": "Reg"
},
{
"context": "\"Hello world!\", null, \"Cordova confirm\", \"Regards, Uncle, Dolan\"\n\n @promptTest = () ->\n navigator.notificatio",
"end": 25914,
"score": 0.9831321835517883,
"start": 25902,
"tag": "NAME",
"value": "Uncle, Dolan"
}
] | testApp/app/controllers/plugin.coffee | AppGyver/steroids-js | 10 | class window.PluginController
# always put everything inside PhoneGap deviceready
document.addEventListener "deviceready", ->
# Make Navigation Bar to appear with a custom title text
steroids.navigationBar.show { title: "Plugin" }
now = new Date()
diff = now.getTime() - window.___START_TIME.getTime()
list = document.getElementById("ready")
el = document.createElement("li")
el.innerHTML = now.toLocaleTimeString() + " Cordova READY - " + diff + " ms since page load"
list.appendChild(el)
appendToDom = (content) =>
parent = document.getElementById("app-status-ul")
li = document.createElement("li")
li.innerHTML = content
parent.appendChild li
printNotification = (notification) =>
# android and ios have difference payloads
message = notification.message || notification.alert
sound = notification.sound || notification.soundname
appendToDom "notification .message || .alert: #{message}"
appendToDom "notification.badge: #{notification.badge}"
appendToDom "notification .sound || .soundname: #{sound}"
appendToDom "notification.msgcnt: #{notification.msgcnt}"
@testPushHandleBackGroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notifications happened while the app was in the background
# when notification are received in the background the app might be running
# or not running.
backgroundNotifications = (notification) =>
console.log "BACKGROUND Notifications : #{JSON.stringify(notification)}"
appendToDom "BACKGROUND NOTIFICATION"
# coldstart indicates that the application was not running
# and it was started by the notification
if notification.coldstart
#ios is always true ?
appendToDom "COLDSTART - App was started by the notification :-)"
else
appendToDom "RUNNIG - App received and handled the notification while running in background"
printNotification notification
window.plugins.pushNotification.onMessageInBackground backgroundNotifications, errorHandler
@testPushHandleForegroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notification happened while we were in the foreground.
# you might want to play a sound to get the user's attention, throw up a dialog, etc.
foregroundNotifications = (notification) =>
console.log "foregroundNotifications : #{JSON.stringify(notification)}"
appendToDom "FOREGROUND NOTIFICATION"
# if the notification contains a soundname, play it.
sound = notification.sound || notification.soundname
myMedia = new Media "#{steroids.app.absolutePath}/#{sound}"
myMedia.play()
printNotification notification
# register the notification handlers
window.plugins.pushNotification.onMessageInForeground foregroundNotifications, errorHandler
# Push Plugin
#
# API is described in the README:
# https://github.com/AppGyver/PushNotifications/blob/master/README.md
#
# N.B. senderID must match your project defined in GCM Developer console
# See https://console.developers.google.com/project/composer-2
# using richard.anderson@appgyver.com
#
# N.B. You must restart Scanner app if you change the GCM senderID
#
# N.B. It seems that you cannot set senderID in config.android.xml although
# the docs say otherwise.
@testPushRegister = () =>
successHandler = (token) =>
console.log "pushNotification.register success : #{token}"
# save the device registration/token in the server
appendToDom "Registration Complete -> Token/DeviceID -> #{token}"
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.register successHandler, errorHandler, {
"senderID": "695873862319" # android only option
"badge": true # ios only options
"sound": true
"alert": true
}
# senderID can also be configured in the -> config.android.xml
@testPushUnregister = () =>
successHandler = (msg) =>
console.log "pushNotification.unregister success : #{msg}"
# save the device registration/token in the server
appendToDom "Unregister complete: #{msg}"
errorHandler = (error) =>
console.log "pushNotification.unregister error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.unregister successHandler, errorHandler
@testBadgeReset = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success : #{msg}"
# save the device registration/token in the server
appendToDom "Badges reset!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 0
@testBadgeSet = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success: #{msg}"
# save the device registration/token in the server
appendToDom "Badge set to 2!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 2
# ACCELEROMETER TEST
accelerometerOnSuccess = (acceleration) ->
accel_x.innerHTML = acceleration.x;
accel_y.innerHTML = acceleration.y;
accel_z.innerHTML = acceleration.z;
accel_timestamp.innerHTML = acceleration.timestamp;
accelerometerOnError= () ->
navigator.notification.alert 'accelerometer onError!'
# TODO: the success callback function continues to fire forever and ever
@accelerometerTest = () ->
navigator.accelerometer.getCurrentAcceleration accelerometerOnSuccess, accelerometerOnError
watchId = undefined
@watchAccelerationTest = () ->
options =
frequency: 100
watchId = navigator.accelerometer.watchAcceleration accelerometerOnSuccess, accelerometerOnError
navigator.notification.alert "watching acceleration"
@clearAccelerationWatchTest = () ->
navigator.accelerometer.clearWatch watchId
accel_x.innerHTML = "";
accel_y.innerHTML = "";
accel_z.innerHTML = "";
accel_timestamp.innerHTML = "";
# BARCODE TEST
@barCodeScanTest = () ->
cordova.plugins.barcodeScanner.scan (result) ->
if result.cancelled
navigator.notification.alert "the user cancelled the scan"
else
qr_result.innerHTML = result.text
,
(error) ->
navigator.notification.alert "scanning failed: " + error
# CAMERA TEST
cameraOnSuccess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
cameraOnFail = (message) ->
navigator.notification.alert 'Failed because: ' + message
@cameraGetPictureTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
modalOpenedSuccess = () ->
navigator.notification.alert "modal opened on the camera callback !"
openModalOnSucess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
#open a modal
steroids.modal.show
view: new steroids.views.WebView "/views/modal/hide.html"
,
onSuccess: modalOpenedSuccess
@cameraFromPhotoLibraryOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraGetPictureOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraCleanupTest = () ->
navigator.camera.cleanup(
() ->
navigator.notification.alert "Camera cleanup success"
(message) ->
navigator.notification.alert "Camera cleanup failed: " + message
)
@cameraFromPhotoLibraryTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
fileError = (error)->
navigator.notification.alert "Cordova error code: " + error.code, null, "File system error!"
fileMoved = (file)->
image = document.querySelector '#cameraTest'
image.src = "/#{file.name}?#{(new Date()).getTime()}"
gotFileObject = (file)->
targetDirURI = "file://" + steroids.app.absoluteUserFilesPath
fileName = "user_pic.png"
window.resolveLocalFileSystemURL(
targetDirURI
(directory)->
file.moveTo directory, fileName, fileMoved, fileError
fileError
)
saveInUserFilesOnSuccess = (imageURI) ->
window.resolveLocalFileSystemURL imageURI, gotFileObject, fileError
@cameraGetPictureSaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraFromPhotoLibrarySaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
# CAPTURE TEST
captureOnSuccess = (mediaFiles) ->
for item in mediaFiles
navigator.notification.alert item.fullPath
captureOnError = (error) ->
navigator.notification.alert 'Capture error, error code: ' + error.code
@captureAudioTest = () ->
navigator.device.capture.captureAudio captureOnSuccess, captureOnError, {}
@captureImageTest = () ->
navigator.device.capture.captureImage captureOnSuccess, captureOnError, {limit:1}
@captureVideoTest = () ->
navigator.device.capture.captureVideo captureOnSuccess, captureOnError, {}
# COMPASS TEST
compassOnSuccess = (heading) ->
compass_result.innerHTML = heading.magneticHeading
compassOnError = (error) ->
navigator.notification.alert 'CompassError: ' + error.code
@compassTest = () ->
navigator.compass.getCurrentHeading compassOnSuccess, compassOnError
# CONNECTION TEST
@connectionTest = () ->
networkState = navigator.network.connection.type;
states = {}
states[Connection.UNKNOWN] = 'Unknown connection'
states[Connection.ETHERNET] = 'Ethernet connection'
states[Connection.WIFI] = 'WiFi connection'
states[Connection.CELL_2G] = 'Cell 2G connection'
states[Connection.CELL_3G] = 'Cell 3G connection'
states[Connection.CELL_4G] = 'Cell 4G connection'
states[Connection.NONE] = 'No network connection'
connection_result.innerHTML = states[networkState]
# CONTACTS TEST
myContact = undefined
contactsSaveOnSuccess = (contact) ->
contacts_result.innerHTML = contact.nickname + " created in Contacts."
contactsSaveOnError = (contactError) ->
contacts_result.innerHTML = "Contact save error = " + contactError.code
@contactsSaveTest = () ->
myContact = navigator.contacts.create {
"displayName": "Dolan Duck"
"nickname": "Dolan Duck"
}
myContact.note = "GOOBY PLZ"
name = new ContactName()
name.givenName = "Dolan"
name.familyName = "Duck"
myContact.name = name
myContact.save contactsSaveOnSuccess, contactsSaveOnError
# CONTACTS FIND TEST
contactsFindOnSuccess = (contacts) ->
contacts_result.innerHTML = 'Found ' + contacts.length + ' contacts matching Dolan.'
contactsFindOnError = (contactError) ->
contacts_result.innerHTML = 'Contacts find onError:' + contactError.code
# find all contacts with 'Dolan' in any name field
@contactsFindTest = () ->
options = new ContactFindOptions
options.filter = "Dolan"
options.multiple = true
fields = ["displayName", "name"]
navigator.contacts.find fields, contactsFindOnSuccess, contactsFindOnError, options
# DEVICE TESTS
@deviceTest = () ->
device_result.innerHTML =
"Device model: " + device.model + "<br>" +
"Device Cordova: " + device.cordova + "<br>" +
"Device platform: " + device.platform + "<br>" +
"Device UUID: " + device.uuid + "<br>" +
"Device version: " + device.version + "<br>"
  # EVENTS TESTS
  # Each add* method registers a Cordova lifecycle, battery or hardware-button
  # listener and confirms registration with a native alert. The result
  # elements record the first firing ("YES: <time>") and append subsequent
  # ones.
  @addPause = () ->
    document.addEventListener "pause", () ->
      if pause_result.innerHTML == "NO"
        pause_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
      else
        pause_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
    navigator.notification.alert "Event listener added: pause"
  @addResume = () ->
    # alert needs to be wrapped in setTimeout to work
    document.addEventListener "resume", () ->
      if resume_result.innerHTML == "NO"
        resume_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
      else
        resume_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
    navigator.notification.alert "Event listener added: resume"
  @addOnline = () ->
    document.addEventListener "online", () ->
      online_result.innerHTML = "YES"
    navigator.notification.alert "Event listener added: online"
  @addOffline = () ->
    document.addEventListener "offline", () ->
      offline_result.innerHTML = "YES"
    navigator.notification.alert "Event listener added: offline"
  # Battery events are dispatched on window. The alert text is a CoffeeScript
  # multi-line string, so the source line breaks are part of the message.
  @addBatteryCritical = () ->
    window.addEventListener "batterycritical", (status) ->
      navigator.notification.alert "Device's battery level is critical, with #{status.level}
      % battery life. \n
      Is it plugged in? #{status.isPlugged}"
    navigator.notification.alert "Event listener added: batterycritical"
  @addBatteryLow = () ->
    window.addEventListener "batterylow", (status) ->
      navigator.notification.alert "Device's battery level is low, with #{status.level}
      % battery life. \n
      Is it plugged in? #{status.isPlugged}"
    navigator.notification.alert "Event listener added: batterylow"
  @addBatteryStatus = () ->
    window.addEventListener "batterystatus", (status) ->
      navigator.notification.alert "Device's battery level was changed by at least 1%, with #{status.level}
      % battery life. \n
      Is it plugged in? #{status.isPlugged}"
    navigator.notification.alert "Event listener added: batterystatus"
  # Hardware button events are registered on document with capture=false.
  @addBackButton = () ->
    handler = () ->
      navigator.notification.alert "Device's back button pressed !"
    document.addEventListener "backbutton", handler, false
    navigator.notification.alert "Event listener added: backbutton"
  @addMenuButton = () ->
    handler = () ->
      navigator.notification.alert "Menu button pressed !"
    document.addEventListener "menubutton", handler, false
    navigator.notification.alert "Event listener added: menubutton"
  @addSearchButton = () ->
    handler = () ->
      navigator.notification.alert "searchbutton button pressed !"
    document.addEventListener "searchbutton", handler, false
    navigator.notification.alert "Event listener added: searchbutton"
  # FILE TEST
  # Cached LocalFileSystem handle; populated by @getFileSystemTest and
  # required by every other file test.
  testFS = undefined
  @getFileSystemTest = () ->
    window.requestFileSystem LocalFileSystem.PERSISTENT, 0, gotFS, fileTestFail
  @readFileTest = () ->
    if testFS?
      testFS.root.getFile "lol.txt", {create:true}, gotFileEntry, fileTestFail
    else
      navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
  @writeFileTest = () ->
    if testFS?
      testFS.root.getFile "lol.txt", {create:true}, gotFileToWrite, fileTestFail
    else
      navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
  @deleteFileTest = () ->
    if testFS?
      # create:false — deleting a file that does not exist should fail.
      testFS.root.getFile "lol.txt", {create:false}, gotFileToDelete, fileTestFail
    else
      navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
  # requestFileSystem success: cache the filesystem handle for later tests.
  gotFS = (fileSystem) ->
    navigator.notification.alert "Got file system with root path: " + fileSystem.root.fullPath
    testFS = fileSystem
  # Read-test success: resolve the FileEntry into a File object.
  gotFileEntry = (fileEntry) ->
    navigator.notification.alert "Got file entry with path: " + fileEntry.fullPath
    fileEntry.file gotFile, fileTestFail
  # Write-test success: write a small text Blob through a FileWriter.
  gotFileToWrite = (fileEntry) ->
    fileEntry.createWriter (fileWriter) ->
      fileWriter.onwriteend = (e) ->
        file_result.innerHTML = "Write completed"
      fileWriter.onerror = (e) ->
        file_result.innerHTML = 'Write failed: ' + JSON.stringify e
      # Create a new Blob and write it to log.txt.
      blob = new Blob ['Lorem Ipsum'], {type: 'text/plain'}
      fileWriter.write blob
    , fileTestFail
gotFileToDelete = (fileEntry) ->
fileEntry.remove( () ->
file_result.innerHTML = "File: #{fileEntry.name} deleted from path: #{fileEntry.fullPath}"
,
(error) ->
fileTestFail
)
  # gotFileEntry success: alert the File metadata, then read the file both as
  # a data URL and as text. The alert text is a multi-line string, so the
  # source line breaks are part of the message.
  gotFile = (file) ->
    navigator.notification.alert "Got file: #{file.name} \n
    Full path: #{file.fullPath} \n
    Mime type: #{file.type} \n
    Last modified: #{file.lastModifiedDate} \n
    Size in bytes: #{file.size}"
    readDataUrl(file);
    readAsText(file);
  # Read the file as a base64 data URL and alert the result.
  readDataUrl = (file) ->
    reader = new FileReader()
    reader.onloadend = (evt) ->
      navigator.notification.alert "Read as data URL: " + evt.target.result
    reader.readAsDataURL file
  # Read the file as text and show its contents in file_result.
  readAsText = (file) ->
    reader = new FileReader()
    reader.onloadend = (evt) ->
      file_result.innerHTML = "
      Contents of #{file.name}: \n
      #{evt.target.result}"
    reader.readAsText file
  # Shared failure handler for all file tests.
  fileTestFail = (evt) ->
    navigator.notification.alert "FILETESTFAIL: " + JSON.stringify evt
  # URL of the image created by @fileURLTest; consumed by @URLtoFileEntryTest.
  imageFileURL = undefined
  # Create image.png in the cached filesystem and remember its URL.
  @fileURLTest = () ->
    if testFS?
      testFS.root.getFile "image.png", {create:true}, gotImage, imageTestFail
    else
      navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
  # Resolve the remembered URL back into a FileEntry.
  @URLtoFileEntryTest = () ->
    if testFS?
      window.resolveLocalFileSystemURL imageFileURL, (fileEntry)->
        fileURL_result.innerHTML = "fileEntry.fullPath: " + fileEntry.fullPath
      , imageTestFail
    else
      navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
  gotImage = (fileEntry) ->
    imageFileURL = fileEntry.toURL()
    fileURL_result.innerHTML = "fileEntry.toURl(): #{imageFileURL}"
  imageTestFail = (error) ->
    fileURL_result.innerHTML = "Error resolving fileEntry: " + JSON.stringify error
  #FILETRANSFER TEST
  # Read a downloaded file back as text and show its contents.
  readDownloadAsText = (file) ->
    reader = new FileReader()
    reader.onloadend = (evt) ->
      fileTransfer_result.innerHTML = "
      Contents of #{file.name}: \n
      #{evt.target.result}"
    reader.readAsText file
  # Download `uri` into the app's user-files directory under `fileName`, then
  # read the result back to prove the transfer completed. `options` is passed
  # through to FileTransfer.download (e.g. auth headers); trustAllHosts is
  # hard-coded to false.
  downloadFromURL = (uri, fileName, options={}) ->
    fileTransfer = new FileTransfer()
    filePath = steroids.app.absoluteUserFilesPath + fileName
    uri = encodeURI uri
    fileTransfer.download(
      uri
      filePath
      (entry) ->
        fileTransfer_result.innerHTML = "Download complete: #{entry.fullPath}, attempting to read file (if test stalls here, not download problem)"
        win = (fileObject) ->
          readDownloadAsText fileObject
        fail = (error) ->
          fileTransfer_result.innerHTML = "Failed to read file: #{JSON.stringify error}"
        entry.file win, fail
      (error) ->
        fileTransfer_result.innerHTML = "
        download error source: #{error.source} \n
        download error target: #{error.target} \n
        download error code: #{error.code}"
      false
      options
    )
  @downloadTest = () ->
    fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com/en/stable/index.html"
    downloadFromURL "http://docs.appgyver.com/en/stable/index.html", "/test.response"
  # Server should issue a redirect; the transfer must follow it.
  @downloadRedirectTest = () ->
    fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com, should redirect to /en/stable/index.html"
    downloadFromURL "http://docs.appgyver.com", "/test.redirect.response"
  # Download with a basic-auth Authorization header.
  @downloadAuthTest = () ->
    fileTransfer_result.innerHTML = "Downloading with basic auth"
    downloadFromURL(
      "https://api.flowdock.com/flows/flemy/main/files/XaD24A7P0l_M__E4B1YBUw/20130624_130747.jpg"
      "test.auth.response"
      { headers: { "Authorization": "Basic NjBlMDQ1MTE5NWZhZDY4OTg5OTU5NGE4Zjg0YzNjYmE6bnVsbA==" } }
    )
  @clearDownloadResult = () ->
    fileTransfer_result.innerHTML = "result cleared"
# GEOLOCATION TEST
onGeolocationSuccess = (position) ->
geoLocation_result.innerHTML = "
Latitude: #{position.coords.latitude} \n
Longitude: #{position.coords.longitude} \n
Altitude: #{position.coords.altitude} \n
Accuracy: #{position.coords.accuracy} \n
Altitude Accuracy: #{position.coords.altitudeAccuracy} \n
Heading: #{position.coords.heading} \n
Speed: #{position.coords.speed} \n
Timestamp: #{position.timestamp} \n"
# onError Callback receives a PositionError object
onGeolocationError = (error) ->
navigator.notification.alert "
code: #{error.code} \n
message: #{error.message} "
@geolocationTest = () ->
navigator.geolocation.getCurrentPosition onGeolocationSuccess, onGeolocationError
watchId = undefined
@watchPositionTest = () ->
watchId = navigator.geolocation.watchPosition onGeolocationSuccess, onGeolocationError
@clearPositionWatchTest = () ->
if watchId?
navigator.geolocation.clearWatch(watchId)
geoLocation_result.innerHTML = "Watch cleared"
else
geoLocation_result.innerHTML = "No position watch to clear"
  # GLOBALIZATION TEST
  # Show the device's current locale identifier.
  @localeTest = () ->
    navigator.globalization.getLocaleName (locale) ->
      globalization_result.innerHTML = locale.value
  # Report whether today's date falls inside daylight-saving time.
  @DSTTest = () ->
    navigator.globalization.isDayLightSavingsTime(
      new Date()
      (date) ->
        globalization_result.innerHTML = "Is Daylight savings: " + date.dst
      (error) ->
        globalization_result.innerHTML = "Error: " + JSON.stringify error
    )
  # INAPPBROWSER TEST
  # Shared state for @IABRedirectTest: the browser ref and the number of
  # loadstart events seen (a redirect produces more than one).
  ref = undefined
  loadNum = 0
  # On exit: detach the listeners, log SUCCESS iff more than one loadstart
  # fired (i.e. the redirect happened), then reset the counter.
  exit = () ->
    ref.removeEventListener 'loadstart', loadStart
    ref.removeEventListener 'exit', exit
    if loadNum > 1
      steroids.logger.log "SUCCESS in IABRedirectTest, loadstarts: #{loadNum}"
    else
      steroids.logger.log "FAILURE in IABRedirectTest, loadstarts: #{loadNum}"
    loadNum = 0
  loadStart = (e) ->
    loadNum++
  # Plain InAppBrowser open, no instrumentation.
  @openIABTest = () ->
    ref = window.open 'http://www.google.com', '_blank', 'location=yes'
  # Open a misspelled host that redirects, counting loadstart events.
  @IABRedirectTest = () ->
    ref = window.open 'http://www.gooogle.com', '_blank', 'location=yes'
    ref.addEventListener('loadstart', loadStart);
    ref.addEventListener('exit', exit);
# MEDIA TEST
my_media = null;
# Play audio
@playExistingAudioFile = () ->
some_media = new Media "http://audio.ibeat.org/content/p1rj1s/p1rj1s_-_rockGuitar.mp3"
some_media.play()
@playAudio = () ->
if my_media == null
my_media = new Media "documents://"+"lol.wav"
my_media.play()
else
my_media.play()
@pauseAudio = () ->
if my_media?
my_media.pause()
media_record_result.innerHTML = "Playback paused!"
@stopAudio = () ->
if my_media?
my_media.stop()
media_record_result.innerHTML = "Playback stopped!"
@recordAudio = () ->
my_media = new Media "lol.wav"
my_media.startRecord()
media_record_result.innerHTML = "Recording!"
setTimeout () ->
my_media.stopRecord()
media_record_result.innerHTML = "Recording complete!"
, 5000
getPath = () ->
location.pathname.substring 0, location.pathname.lastIndexOf('/')+1
# NOTIFICATION TEST
@alertTest = () ->
navigator.notification.alert "Hello world!", null, "Cordova alert", "Lol"
@confirmTest = () ->
navigator.notification.confirm "Hello world!", null, "Cordova confirm", "Regards, Uncle, Dolan"
@promptTest = () ->
navigator.notification.prompt "Hello world!", null
@beepTest = () ->
navigator.notification.beep 5
@vibrateTest = () ->
navigator.notification.vibrate 500
# LOCAL STORAGE TEST
@setItemTest = () ->
window.localStorage.setItem "items", "apples"
localStorage_result.innerHTML = "Set an item 'items' with the value 'apples'."
@getItemTest = () ->
item = window.localStorage.getItem "items"
if item?
localStorage_result.innerHTML = "Got '#{item}'"
else
localStorage_result.innerHTML = "Error: could not find the item item-text-wrap"
@removeItemTest = () ->
item = window.localStorage.getItem "items"
if item?
window.localStorage.removeItem "items"
localStorage_result.innerHTML = "'items' removed"
else
localStorage_result.innerHTML = "Error: could not find the item to be removed"
# EXIT APP
@exitAppTest = () ->
navigator.app.exitApp()
@testInAppBrowserNOBar = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=no"
ref = window.open url, target, options
@testInAppBrowserRedirect = () ->
url = "https://jigsaw.w3.org/HTTP/300/"
target = "_blank"
options = "location=yes"
newWindow = window.open url, target, options
exit = ->
newWindow.removeEventListener 'loadstart', loadStart
newWindow.removeEventListener 'exit', exit
loadStart = (e) ->
console.log 'URL : ', e.url
newWindow.addEventListener 'loadstart', loadStart
newWindow.addEventListener 'exit', exit
@testInAppBrowserClearCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearcache=yes"
ref = window.open url, target, options
@testInAppBrowserClearSessionCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearsessioncache=yes"
ref = window.open url, target, options
  # Open google.com with a location bar and append every InAppBrowser
  # lifecycle event (loadstart/loadstop/loaderror/exit) with a timestamp to
  # the status list.
  @testInAppBrowserWithBar = () ->
    # Append one timestamped status line to the #in-app-browser-status list.
    appendEvent = (content) =>
      parent = document.getElementById("in-app-browser-status")
      li = document.createElement("li")
      li.innerHTML = "#{content} - time: #{new Date()}"
      parent.appendChild li
    url = "http://www.google.com"
    target = "_blank"
    options = "location=yes"
    appendEvent "open window"
    ref = window.open url, target, options
    ref.addEventListener "loadstart", () ->
      appendEvent "loadstart"
    ref.addEventListener "loadstop", () ->
      appendEvent "loadstop"
    ref.addEventListener "loaderror", () ->
      appendEvent "loaderror"
    ref.addEventListener "exit", () ->
      appendEvent "exit"
@testNativeInputShow = () ->
params =
rightButton:
styleClass: 'send-button'
input:
placeHolder: 'Type your message here'
type: 'normal'
lines: 1
cordova.plugins.NativeInput.show ""
  # Show the native input with heavily styled components: inline CSS on the
  # left button, class/id styling on the right button, panel and a two-line
  # URI-keyboard input field.
  @testNativeInputShow_style = () ->
    params =
      leftButton:
        styleCSS: 'text:Up;color:white;background-color:gray;'
      rightButton:
        styleClass: 'myRightButtonClass'
        cssId: 'myRightButton'
      panel:
        styleClass: 'grey-panel'
      input:
        placeHolder: 'Type your message here'
        type: 'uri'
        lines: 2
        styleClass: 'myInputClass'
        styleId: 'myInputId'
    cordova.plugins.NativeInput.show params
  # Show the native input with an email keyboard and a single line.
  @testNativeInputShow_email = () ->
    params =
      input:
        placeHolder: 'Chat box'
        type: 'email'
        lines: 1
    cordova.plugins.NativeInput.show params
  # Append each keyboard action name to the #keyboardAction element.
  @testNativeInputKeyboardAction = () ->
    cordova.plugins.NativeInput.onKeyboardAction true, (action) ->
      keyboardAction = document.getElementById("keyboardAction")
      keyboardAction.innerHTML = keyboardAction.innerHTML + "action: #{action}<br>"
  @testNativeInputCloseKeyboard = () ->
    cordova.plugins.NativeInput.closeKeyboard()
  # Append each pressed button identifier to the #buttonAction element.
  @testNativeInputOnButtonAction = () ->
    cordova.plugins.NativeInput.onButtonAction (button) ->
      buttonAction = document.getElementById("buttonAction")
      buttonAction.innerHTML = buttonAction.innerHTML + "button: #{button}<br>"
  @testNativeInputHide = () ->
    cordova.plugins.NativeInput.hide()
  # Mirror the input's value into #nativeInputValue1 on every change.
  @testNativeInputOnChange = () ->
    cordova.plugins.NativeInput.onChange (value) ->
      nativeInputValue1 = document.getElementById("nativeInputValue1")
      nativeInputValue1.innerHTML = value
  # Fetch the input's current value into #nativeInputValue2 on demand.
  @testNativeInputGetValue = () ->
    cordova.plugins.NativeInput.getValue (value) ->
      nativeInputValue2 = document.getElementById("nativeInputValue2")
      nativeInputValue2.innerHTML = value
| 159709 | class window.PluginController
# always put everything inside PhoneGap deviceready
document.addEventListener "deviceready", ->
# Make Navigation Bar to appear with a custom title text
steroids.navigationBar.show { title: "Plugin" }
now = new Date()
diff = now.getTime() - window.___START_TIME.getTime()
list = document.getElementById("ready")
el = document.createElement("li")
el.innerHTML = now.toLocaleTimeString() + " Cordova READY - " + diff + " ms since page load"
list.appendChild(el)
appendToDom = (content) =>
parent = document.getElementById("app-status-ul")
li = document.createElement("li")
li.innerHTML = content
parent.appendChild li
printNotification = (notification) =>
# android and ios have difference payloads
message = notification.message || notification.alert
sound = notification.sound || notification.soundname
appendToDom "notification .message || .alert: #{message}"
appendToDom "notification.badge: #{notification.badge}"
appendToDom "notification .sound || .soundname: #{sound}"
appendToDom "notification.msgcnt: #{notification.msgcnt}"
@testPushHandleBackGroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notifications happened while the app was in the background
# when notification are received in the background the app might be running
# or not running.
backgroundNotifications = (notification) =>
console.log "BACKGROUND Notifications : #{JSON.stringify(notification)}"
appendToDom "BACKGROUND NOTIFICATION"
# coldstart indicates that the application was not running
# and it was started by the notification
if notification.coldstart
#ios is always true ?
appendToDom "COLDSTART - App was started by the notification :-)"
else
appendToDom "RUNNIG - App received and handled the notification while running in background"
printNotification notification
window.plugins.pushNotification.onMessageInBackground backgroundNotifications, errorHandler
@testPushHandleForegroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notification happened while we were in the foreground.
# you might want to play a sound to get the user's attention, throw up a dialog, etc.
foregroundNotifications = (notification) =>
console.log "foregroundNotifications : #{JSON.stringify(notification)}"
appendToDom "FOREGROUND NOTIFICATION"
# if the notification contains a soundname, play it.
sound = notification.sound || notification.soundname
myMedia = new Media "#{steroids.app.absolutePath}/#{sound}"
myMedia.play()
printNotification notification
# register the notification handlers
window.plugins.pushNotification.onMessageInForeground foregroundNotifications, errorHandler
# Push Plugin
#
# API is described in the README:
# https://github.com/AppGyver/PushNotifications/blob/master/README.md
#
# N.B. senderID must match your project defined in GCM Developer console
# See https://console.developers.google.com/project/composer-2
# using <EMAIL>
#
# N.B. You must restart Scanner app if you change the GCM senderID
#
# N.B. It seems that you cannot set senderID in config.android.xml although
# the docs say otherwise.
@testPushRegister = () =>
successHandler = (token) =>
console.log "pushNotification.register success : #{token}"
# save the device registration/token in the server
appendToDom "Registration Complete -> Token/DeviceID -> #{token}"
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.register successHandler, errorHandler, {
"senderID": "695873862319" # android only option
"badge": true # ios only options
"sound": true
"alert": true
}
# senderID can also be configured in the -> config.android.xml
@testPushUnregister = () =>
successHandler = (msg) =>
console.log "pushNotification.unregister success : #{msg}"
# save the device registration/token in the server
appendToDom "Unregister complete: #{msg}"
errorHandler = (error) =>
console.log "pushNotification.unregister error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.unregister successHandler, errorHandler
@testBadgeReset = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success : #{msg}"
# save the device registration/token in the server
appendToDom "Badges reset!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 0
@testBadgeSet = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success: #{msg}"
# save the device registration/token in the server
appendToDom "Badge set to 2!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 2
# ACCELEROMETER TEST
accelerometerOnSuccess = (acceleration) ->
accel_x.innerHTML = acceleration.x;
accel_y.innerHTML = acceleration.y;
accel_z.innerHTML = acceleration.z;
accel_timestamp.innerHTML = acceleration.timestamp;
accelerometerOnError= () ->
navigator.notification.alert 'accelerometer onError!'
# TODO: the success callback function continues to fire forever and ever
@accelerometerTest = () ->
navigator.accelerometer.getCurrentAcceleration accelerometerOnSuccess, accelerometerOnError
watchId = undefined
@watchAccelerationTest = () ->
options =
frequency: 100
watchId = navigator.accelerometer.watchAcceleration accelerometerOnSuccess, accelerometerOnError
navigator.notification.alert "watching acceleration"
@clearAccelerationWatchTest = () ->
navigator.accelerometer.clearWatch watchId
accel_x.innerHTML = "";
accel_y.innerHTML = "";
accel_z.innerHTML = "";
accel_timestamp.innerHTML = "";
# BARCODE TEST
@barCodeScanTest = () ->
cordova.plugins.barcodeScanner.scan (result) ->
if result.cancelled
navigator.notification.alert "the user cancelled the scan"
else
qr_result.innerHTML = result.text
,
(error) ->
navigator.notification.alert "scanning failed: " + error
# CAMERA TEST
cameraOnSuccess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
cameraOnFail = (message) ->
navigator.notification.alert 'Failed because: ' + message
@cameraGetPictureTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
modalOpenedSuccess = () ->
navigator.notification.alert "modal opened on the camera callback !"
openModalOnSucess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
#open a modal
steroids.modal.show
view: new steroids.views.WebView "/views/modal/hide.html"
,
onSuccess: modalOpenedSuccess
@cameraFromPhotoLibraryOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraGetPictureOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraCleanupTest = () ->
navigator.camera.cleanup(
() ->
navigator.notification.alert "Camera cleanup success"
(message) ->
navigator.notification.alert "Camera cleanup failed: " + message
)
@cameraFromPhotoLibraryTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
fileError = (error)->
navigator.notification.alert "Cordova error code: " + error.code, null, "File system error!"
fileMoved = (file)->
image = document.querySelector '#cameraTest'
image.src = "/#{file.name}?#{(new Date()).getTime()}"
gotFileObject = (file)->
targetDirURI = "file://" + steroids.app.absoluteUserFilesPath
fileName = "user_pic.png"
window.resolveLocalFileSystemURL(
targetDirURI
(directory)->
file.moveTo directory, fileName, fileMoved, fileError
fileError
)
saveInUserFilesOnSuccess = (imageURI) ->
window.resolveLocalFileSystemURL imageURI, gotFileObject, fileError
@cameraGetPictureSaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraFromPhotoLibrarySaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
# CAPTURE TEST
captureOnSuccess = (mediaFiles) ->
for item in mediaFiles
navigator.notification.alert item.fullPath
captureOnError = (error) ->
navigator.notification.alert 'Capture error, error code: ' + error.code
@captureAudioTest = () ->
navigator.device.capture.captureAudio captureOnSuccess, captureOnError, {}
@captureImageTest = () ->
navigator.device.capture.captureImage captureOnSuccess, captureOnError, {limit:1}
@captureVideoTest = () ->
navigator.device.capture.captureVideo captureOnSuccess, captureOnError, {}
# COMPASS TEST
compassOnSuccess = (heading) ->
compass_result.innerHTML = heading.magneticHeading
compassOnError = (error) ->
navigator.notification.alert 'CompassError: ' + error.code
@compassTest = () ->
navigator.compass.getCurrentHeading compassOnSuccess, compassOnError
# CONNECTION TEST
@connectionTest = () ->
networkState = navigator.network.connection.type;
states = {}
states[Connection.UNKNOWN] = 'Unknown connection'
states[Connection.ETHERNET] = 'Ethernet connection'
states[Connection.WIFI] = 'WiFi connection'
states[Connection.CELL_2G] = 'Cell 2G connection'
states[Connection.CELL_3G] = 'Cell 3G connection'
states[Connection.CELL_4G] = 'Cell 4G connection'
states[Connection.NONE] = 'No network connection'
connection_result.innerHTML = states[networkState]
# CONTACTS TEST
myContact = undefined
contactsSaveOnSuccess = (contact) ->
contacts_result.innerHTML = contact.nickname + " created in Contacts."
contactsSaveOnError = (contactError) ->
contacts_result.innerHTML = "Contact save error = " + contactError.code
@contactsSaveTest = () ->
myContact = navigator.contacts.create {
"displayName": "<NAME>"
"nickname": "<NAME>"
}
myContact.note = "<NAME>"
name = new ContactName()
name.givenName = "<NAME>"
name.familyName = "<NAME>"
myContact.name = name
myContact.save contactsSaveOnSuccess, contactsSaveOnError
# CONTACTS FIND TEST
contactsFindOnSuccess = (contacts) ->
contacts_result.innerHTML = 'Found ' + contacts.length + ' contacts matching Dolan.'
contactsFindOnError = (contactError) ->
contacts_result.innerHTML = 'Contacts find onError:' + contactError.code
# find all contacts with 'D<NAME>' in any name field
@contactsFindTest = () ->
options = new ContactFindOptions
options.filter = "Dolan"
options.multiple = true
fields = ["displayName", "name"]
navigator.contacts.find fields, contactsFindOnSuccess, contactsFindOnError, options
# DEVICE TESTS
@deviceTest = () ->
device_result.innerHTML =
"Device model: " + device.model + "<br>" +
"Device Cordova: " + device.cordova + "<br>" +
"Device platform: " + device.platform + "<br>" +
"Device UUID: " + device.uuid + "<br>" +
"Device version: " + device.version + "<br>"
# EVENTS TESTS
@addPause = () ->
document.addEventListener "pause", () ->
if pause_result.innerHTML == "NO"
pause_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
else
pause_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
navigator.notification.alert "Event listener added: pause"
@addResume = () ->
# alert needs to be wrapped in setTimeout to work
document.addEventListener "resume", () ->
if resume_result.innerHTML == "NO"
resume_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
else
resume_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
navigator.notification.alert "Event listener added: resume"
@addOnline = () ->
document.addEventListener "online", () ->
online_result.innerHTML = "YES"
navigator.notification.alert "Event listener added: online"
@addOffline = () ->
document.addEventListener "offline", () ->
offline_result.innerHTML = "YES"
navigator.notification.alert "Event listener added: offline"
@addBatteryCritical = () ->
window.addEventListener "batterycritical", (status) ->
navigator.notification.alert "Device's battery level is critical, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterycritical"
@addBatteryLow = () ->
window.addEventListener "batterylow", (status) ->
navigator.notification.alert "Device's battery level is low, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterylow"
@addBatteryStatus = () ->
window.addEventListener "batterystatus", (status) ->
navigator.notification.alert "Device's battery level was changed by at least 1%, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterystatus"
@addBackButton = () ->
handler = () ->
navigator.notification.alert "Device's back button pressed !"
document.addEventListener "backbutton", handler, false
navigator.notification.alert "Event listener added: backbutton"
@addMenuButton = () ->
handler = () ->
navigator.notification.alert "Menu button pressed !"
document.addEventListener "menubutton", handler, false
navigator.notification.alert "Event listener added: menubutton"
@addSearchButton = () ->
handler = () ->
navigator.notification.alert "searchbutton button pressed !"
document.addEventListener "searchbutton", handler, false
navigator.notification.alert "Event listener added: searchbutton"
# FILE TEST
testFS = undefined
@getFileSystemTest = () ->
window.requestFileSystem LocalFileSystem.PERSISTENT, 0, gotFS, fileTestFail
@readFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:true}, gotFileEntry, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@writeFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:true}, gotFileToWrite, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@deleteFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:false}, gotFileToDelete, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
gotFS = (fileSystem) ->
navigator.notification.alert "Got file system with root path: " + fileSystem.root.fullPath
testFS = fileSystem
gotFileEntry = (fileEntry) ->
navigator.notification.alert "Got file entry with path: " + fileEntry.fullPath
fileEntry.file gotFile, fileTestFail
gotFileToWrite = (fileEntry) ->
fileEntry.createWriter (fileWriter) ->
fileWriter.onwriteend = (e) ->
file_result.innerHTML = "Write completed"
fileWriter.onerror = (e) ->
file_result.innerHTML = 'Write failed: ' + JSON.stringify e
# Create a new Blob and write it to log.txt.
blob = new Blob ['Lorem Ipsum'], {type: 'text/plain'}
fileWriter.write blob
, fileTestFail
gotFileToDelete = (fileEntry) ->
fileEntry.remove( () ->
file_result.innerHTML = "File: #{fileEntry.name} deleted from path: #{fileEntry.fullPath}"
,
(error) ->
fileTestFail
)
gotFile = (file) ->
navigator.notification.alert "Got file: #{file.name} \n
Full path: #{file.fullPath} \n
Mime type: #{file.type} \n
Last modified: #{file.lastModifiedDate} \n
Size in bytes: #{file.size}"
readDataUrl(file);
readAsText(file);
readDataUrl = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
navigator.notification.alert "Read as data URL: " + evt.target.result
reader.readAsDataURL file
readAsText = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
file_result.innerHTML = "
Contents of #{file.name}: \n
#{evt.target.result}"
reader.readAsText file
fileTestFail = (evt) ->
navigator.notification.alert "FILETESTFAIL: " + JSON.stringify evt
imageFileURL = undefined
@fileURLTest = () ->
if testFS?
testFS.root.getFile "image.png", {create:true}, gotImage, imageTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@URLtoFileEntryTest = () ->
if testFS?
window.resolveLocalFileSystemURL imageFileURL, (fileEntry)->
fileURL_result.innerHTML = "fileEntry.fullPath: " + fileEntry.fullPath
, imageTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
gotImage = (fileEntry) ->
imageFileURL = fileEntry.toURL()
fileURL_result.innerHTML = "fileEntry.toURl(): #{imageFileURL}"
imageTestFail = (error) ->
fileURL_result.innerHTML = "Error resolving fileEntry: " + JSON.stringify error
#FILETRANSFER TEST
readDownloadAsText = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
fileTransfer_result.innerHTML = "
Contents of #{file.name}: \n
#{evt.target.result}"
reader.readAsText file
    downloadFromURL = (uri, fileName, options={}) ->
      # Download `uri` into the app's user-files directory as `fileName` via
      # the Cordova FileTransfer plugin, then read the file back as text to
      # prove the download produced a readable file.
      # `options` is forwarded to FileTransfer.download (e.g. auth headers).
      fileTransfer = new FileTransfer()
      # Absolute target path inside the app's writable files directory.
      filePath = steroids.app.absoluteUserFilesPath + fileName
      uri = encodeURI uri
      fileTransfer.download(
        uri
        filePath
        (entry) ->
          fileTransfer_result.innerHTML = "Download complete: #{entry.fullPath}, attempting to read file (if test stalls here, not download problem)"
          # Read the downloaded entry back; failure here is a *read* error,
          # distinct from a download error.
          win = (fileObject) ->
            readDownloadAsText fileObject
          fail = (error) ->
            fileTransfer_result.innerHTML = "Failed to read file: #{JSON.stringify error}"
          entry.file win, fail
        (error) ->
          fileTransfer_result.innerHTML = "
            download error source: #{error.source} \n
            download error target: #{error.target} \n
            download error code: #{error.code}"
        false
        options
      )
@downloadTest = () ->
fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com/en/stable/index.html"
downloadFromURL "http://docs.appgyver.com/en/stable/index.html", "/test.response"
@downloadRedirectTest = () ->
fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com, should redirect to /en/stable/index.html"
downloadFromURL "http://docs.appgyver.com", "/test.redirect.response"
@downloadAuthTest = () ->
fileTransfer_result.innerHTML = "Downloading with basic auth"
downloadFromURL(
"https://api.flowdock.com/flows/flemy/main/files/XaD24A7P0l_M__E4B1YBUw/20130624_130747.jpg"
"test.auth.response"
{ headers: { "Authorization": "Basic NjBlMDQ1MTE5NWZhZDY4OTg5OTU5NGE4Zjg0YzNjYmE6bnVsbA==" } }
)
@clearDownloadResult = () ->
fileTransfer_result.innerHTML = "result cleared"
# GEOLOCATION TEST
onGeolocationSuccess = (position) ->
geoLocation_result.innerHTML = "
Latitude: #{position.coords.latitude} \n
Longitude: #{position.coords.longitude} \n
Altitude: #{position.coords.altitude} \n
Accuracy: #{position.coords.accuracy} \n
Altitude Accuracy: #{position.coords.altitudeAccuracy} \n
Heading: #{position.coords.heading} \n
Speed: #{position.coords.speed} \n
Timestamp: #{position.timestamp} \n"
# onError Callback receives a PositionError object
onGeolocationError = (error) ->
navigator.notification.alert "
code: #{error.code} \n
message: #{error.message} "
@geolocationTest = () ->
navigator.geolocation.getCurrentPosition onGeolocationSuccess, onGeolocationError
watchId = undefined
@watchPositionTest = () ->
watchId = navigator.geolocation.watchPosition onGeolocationSuccess, onGeolocationError
@clearPositionWatchTest = () ->
if watchId?
navigator.geolocation.clearWatch(watchId)
geoLocation_result.innerHTML = "Watch cleared"
else
geoLocation_result.innerHTML = "No position watch to clear"
# GLOBALIZATION TEST
@localeTest = () ->
navigator.globalization.getLocaleName (locale) ->
globalization_result.innerHTML = locale.value
@DSTTest = () ->
navigator.globalization.isDayLightSavingsTime(
new Date()
(date) ->
globalization_result.innerHTML = "Is Daylight savings: " + date.dst
(error) ->
globalization_result.innerHTML = "Error: " + JSON.stringify error
)
# INAPPBROWSER TEST
ref = undefined
loadNum = 0
exit = () ->
ref.removeEventListener 'loadstart', loadStart
ref.removeEventListener 'exit', exit
if loadNum > 1
steroids.logger.log "SUCCESS in IABRedirectTest, loadstarts: #{loadNum}"
else
steroids.logger.log "FAILURE in IABRedirectTest, loadstarts: #{loadNum}"
loadNum = 0
loadStart = (e) ->
loadNum++
@openIABTest = () ->
ref = window.open 'http://www.google.com', '_blank', 'location=yes'
@IABRedirectTest = () ->
ref = window.open 'http://www.gooogle.com', '_blank', 'location=yes'
ref.addEventListener('loadstart', loadStart);
ref.addEventListener('exit', exit);
# MEDIA TEST
my_media = null;
# Play audio
@playExistingAudioFile = () ->
some_media = new Media "http://audio.ibeat.org/content/p1rj1s/p1rj1s_-_rockGuitar.mp3"
some_media.play()
@playAudio = () ->
if my_media == null
my_media = new Media "documents://"+"lol.wav"
my_media.play()
else
my_media.play()
@pauseAudio = () ->
if my_media?
my_media.pause()
media_record_result.innerHTML = "Playback paused!"
@stopAudio = () ->
if my_media?
my_media.stop()
media_record_result.innerHTML = "Playback stopped!"
@recordAudio = () ->
my_media = new Media "lol.wav"
my_media.startRecord()
media_record_result.innerHTML = "Recording!"
setTimeout () ->
my_media.stopRecord()
media_record_result.innerHTML = "Recording complete!"
, 5000
getPath = () ->
location.pathname.substring 0, location.pathname.lastIndexOf('/')+1
# NOTIFICATION TEST
@alertTest = () ->
navigator.notification.alert "Hello world!", null, "Cordova alert", "Lol"
@confirmTest = () ->
navigator.notification.confirm "Hello world!", null, "Cordova confirm", "<NAME>ards, <NAME>"
@promptTest = () ->
navigator.notification.prompt "Hello world!", null
@beepTest = () ->
navigator.notification.beep 5
@vibrateTest = () ->
navigator.notification.vibrate 500
# LOCAL STORAGE TEST
@setItemTest = () ->
window.localStorage.setItem "items", "apples"
localStorage_result.innerHTML = "Set an item 'items' with the value 'apples'."
@getItemTest = () ->
item = window.localStorage.getItem "items"
if item?
localStorage_result.innerHTML = "Got '#{item}'"
else
localStorage_result.innerHTML = "Error: could not find the item item-text-wrap"
@removeItemTest = () ->
item = window.localStorage.getItem "items"
if item?
window.localStorage.removeItem "items"
localStorage_result.innerHTML = "'items' removed"
else
localStorage_result.innerHTML = "Error: could not find the item to be removed"
# EXIT APP
@exitAppTest = () ->
navigator.app.exitApp()
@testInAppBrowserNOBar = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=no"
ref = window.open url, target, options
@testInAppBrowserRedirect = () ->
url = "https://jigsaw.w3.org/HTTP/300/"
target = "_blank"
options = "location=yes"
newWindow = window.open url, target, options
exit = ->
newWindow.removeEventListener 'loadstart', loadStart
newWindow.removeEventListener 'exit', exit
loadStart = (e) ->
console.log 'URL : ', e.url
newWindow.addEventListener 'loadstart', loadStart
newWindow.addEventListener 'exit', exit
@testInAppBrowserClearCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearcache=yes"
ref = window.open url, target, options
@testInAppBrowserClearSessionCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearsessioncache=yes"
ref = window.open url, target, options
@testInAppBrowserWithBar = () ->
appendEvent = (content) =>
parent = document.getElementById("in-app-browser-status")
li = document.createElement("li")
li.innerHTML = "#{content} - time: #{new Date()}"
parent.appendChild li
url = "http://www.google.com"
target = "_blank"
options = "location=yes"
appendEvent "open window"
ref = window.open url, target, options
ref.addEventListener "loadstart", () ->
appendEvent "loadstart"
ref.addEventListener "loadstop", () ->
appendEvent "loadstop"
ref.addEventListener "loaderror", () ->
appendEvent "loaderror"
ref.addEventListener "exit", () ->
appendEvent "exit"
@testNativeInputShow = () ->
params =
rightButton:
styleClass: 'send-button'
input:
placeHolder: 'Type your message here'
type: 'normal'
lines: 1
cordova.plugins.NativeInput.show ""
@testNativeInputShow_style = () ->
params =
leftButton:
styleCSS: 'text:Up;color:white;background-color:gray;'
rightButton:
styleClass: 'myRightButtonClass'
cssId: 'myRightButton'
panel:
styleClass: 'grey-panel'
input:
placeHolder: 'Type your message here'
type: 'uri'
lines: 2
styleClass: 'myInputClass'
styleId: 'myInputId'
cordova.plugins.NativeInput.show params
@testNativeInputShow_email = () ->
params =
input:
placeHolder: 'Chat box'
type: 'email'
lines: 1
cordova.plugins.NativeInput.show params
@testNativeInputKeyboardAction = () ->
cordova.plugins.NativeInput.onKeyboardAction true, (action) ->
keyboardAction = document.getElementById("keyboardAction")
keyboardAction.innerHTML = keyboardAction.innerHTML + "action: #{action}<br>"
@testNativeInputCloseKeyboard = () ->
cordova.plugins.NativeInput.closeKeyboard()
@testNativeInputOnButtonAction = () ->
cordova.plugins.NativeInput.onButtonAction (button) ->
buttonAction = document.getElementById("buttonAction")
buttonAction.innerHTML = buttonAction.innerHTML + "button: #{button}<br>"
@testNativeInputHide = () ->
cordova.plugins.NativeInput.hide()
@testNativeInputOnChange = () ->
cordova.plugins.NativeInput.onChange (value) ->
nativeInputValue1 = document.getElementById("nativeInputValue1")
nativeInputValue1.innerHTML = value
@testNativeInputGetValue = () ->
cordova.plugins.NativeInput.getValue (value) ->
nativeInputValue2 = document.getElementById("nativeInputValue2")
nativeInputValue2.innerHTML = value
| true | class window.PluginController
# always put everything inside PhoneGap deviceready
document.addEventListener "deviceready", ->
# Make Navigation Bar to appear with a custom title text
steroids.navigationBar.show { title: "Plugin" }
now = new Date()
diff = now.getTime() - window.___START_TIME.getTime()
list = document.getElementById("ready")
el = document.createElement("li")
el.innerHTML = now.toLocaleTimeString() + " Cordova READY - " + diff + " ms since page load"
list.appendChild(el)
appendToDom = (content) =>
parent = document.getElementById("app-status-ul")
li = document.createElement("li")
li.innerHTML = content
parent.appendChild li
printNotification = (notification) =>
# android and ios have difference payloads
message = notification.message || notification.alert
sound = notification.sound || notification.soundname
appendToDom "notification .message || .alert: #{message}"
appendToDom "notification.badge: #{notification.badge}"
appendToDom "notification .sound || .soundname: #{sound}"
appendToDom "notification.msgcnt: #{notification.msgcnt}"
@testPushHandleBackGroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notifications happened while the app was in the background
# when notification are received in the background the app might be running
# or not running.
backgroundNotifications = (notification) =>
console.log "BACKGROUND Notifications : #{JSON.stringify(notification)}"
appendToDom "BACKGROUND NOTIFICATION"
# coldstart indicates that the application was not running
# and it was started by the notification
if notification.coldstart
#ios is always true ?
appendToDom "COLDSTART - App was started by the notification :-)"
else
appendToDom "RUNNIG - App received and handled the notification while running in background"
printNotification notification
window.plugins.pushNotification.onMessageInBackground backgroundNotifications, errorHandler
@testPushHandleForegroundMessages = () =>
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error.msg}"
# this notification happened while we were in the foreground.
# you might want to play a sound to get the user's attention, throw up a dialog, etc.
foregroundNotifications = (notification) =>
console.log "foregroundNotifications : #{JSON.stringify(notification)}"
appendToDom "FOREGROUND NOTIFICATION"
# if the notification contains a soundname, play it.
sound = notification.sound || notification.soundname
myMedia = new Media "#{steroids.app.absolutePath}/#{sound}"
myMedia.play()
printNotification notification
# register the notification handlers
window.plugins.pushNotification.onMessageInForeground foregroundNotifications, errorHandler
# Push Plugin
#
# API is described in the README:
# https://github.com/AppGyver/PushNotifications/blob/master/README.md
#
# N.B. senderID must match your project defined in GCM Developer console
# See https://console.developers.google.com/project/composer-2
# using PI:EMAIL:<EMAIL>END_PI
#
# N.B. You must restart Scanner app if you change the GCM senderID
#
# N.B. It seems that you cannot set senderID in config.android.xml although
# the docs say otherwise.
@testPushRegister = () =>
successHandler = (token) =>
console.log "pushNotification.register success : #{token}"
# save the device registration/token in the server
appendToDom "Registration Complete -> Token/DeviceID -> #{token}"
errorHandler = (error) =>
console.log "pushNotification.register error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.register successHandler, errorHandler, {
"senderID": "695873862319" # android only option
"badge": true # ios only options
"sound": true
"alert": true
}
# senderID can also be configured in the -> config.android.xml
@testPushUnregister = () =>
successHandler = (msg) =>
console.log "pushNotification.unregister success : #{msg}"
# save the device registration/token in the server
appendToDom "Unregister complete: #{msg}"
errorHandler = (error) =>
console.log "pushNotification.unregister error: ", error
appendToDom "ERROR -> MSG: #{error}"
window.plugins.pushNotification.unregister successHandler, errorHandler
@testBadgeReset = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success : #{msg}"
# save the device registration/token in the server
appendToDom "Badges reset!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 0
@testBadgeSet = () =>
successHandler = (msg) =>
console.log "pushNotification.setApplicationIconBadgeNumber success: #{msg}"
# save the device registration/token in the server
appendToDom "Badge set to 2!"
errorHandler = (error) =>
console.log "pushNotification.setApplicationIconBadgeNumber error: ", error
appendToDom "ERROR -> MSG: #{error}"
plugins.pushNotification.setApplicationIconBadgeNumber successHandler, errorHandler, 2
# ACCELEROMETER TEST
accelerometerOnSuccess = (acceleration) ->
accel_x.innerHTML = acceleration.x;
accel_y.innerHTML = acceleration.y;
accel_z.innerHTML = acceleration.z;
accel_timestamp.innerHTML = acceleration.timestamp;
accelerometerOnError= () ->
navigator.notification.alert 'accelerometer onError!'
# TODO: the success callback function continues to fire forever and ever
@accelerometerTest = () ->
navigator.accelerometer.getCurrentAcceleration accelerometerOnSuccess, accelerometerOnError
watchId = undefined
@watchAccelerationTest = () ->
options =
frequency: 100
watchId = navigator.accelerometer.watchAcceleration accelerometerOnSuccess, accelerometerOnError
navigator.notification.alert "watching acceleration"
@clearAccelerationWatchTest = () ->
navigator.accelerometer.clearWatch watchId
accel_x.innerHTML = "";
accel_y.innerHTML = "";
accel_z.innerHTML = "";
accel_timestamp.innerHTML = "";
# BARCODE TEST
@barCodeScanTest = () ->
cordova.plugins.barcodeScanner.scan (result) ->
if result.cancelled
navigator.notification.alert "the user cancelled the scan"
else
qr_result.innerHTML = result.text
,
(error) ->
navigator.notification.alert "scanning failed: " + error
# CAMERA TEST
cameraOnSuccess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
cameraOnFail = (message) ->
navigator.notification.alert 'Failed because: ' + message
@cameraGetPictureTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
modalOpenedSuccess = () ->
navigator.notification.alert "modal opened on the camera callback !"
openModalOnSucess = (imageData) ->
image = document.querySelector '#cameraTest'
image.src = "data:image/jpeg;base64," + imageData;
#open a modal
steroids.modal.show
view: new steroids.views.WebView "/views/modal/hide.html"
,
onSuccess: modalOpenedSuccess
@cameraFromPhotoLibraryOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraGetPictureOpenModalTest = () ->
navigator.camera.getPicture openModalOnSucess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraCleanupTest = () ->
navigator.camera.cleanup(
() ->
navigator.notification.alert "Camera cleanup success"
(message) ->
navigator.notification.alert "Camera cleanup failed: " + message
)
@cameraFromPhotoLibraryTest = () ->
navigator.camera.getPicture cameraOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.DATA_URL
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
fileError = (error)->
navigator.notification.alert "Cordova error code: " + error.code, null, "File system error!"
fileMoved = (file)->
image = document.querySelector '#cameraTest'
image.src = "/#{file.name}?#{(new Date()).getTime()}"
gotFileObject = (file)->
targetDirURI = "file://" + steroids.app.absoluteUserFilesPath
fileName = "user_pic.png"
window.resolveLocalFileSystemURL(
targetDirURI
(directory)->
file.moveTo directory, fileName, fileMoved, fileError
fileError
)
saveInUserFilesOnSuccess = (imageURI) ->
window.resolveLocalFileSystemURL imageURI, gotFileObject, fileError
@cameraGetPictureSaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.CAMERA
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
@cameraFromPhotoLibrarySaveInUserFilesTest = () ->
navigator.camera.getPicture saveInUserFilesOnSuccess, cameraOnFail, {
quality: 50
destinationType: Camera.DestinationType.FILE_URI
sourceType: Camera.PictureSourceType.PHOTOLIBRARY
targetWidth: 300
targetHeight: 300
encodingType: Camera.EncodingType.JPEG
mediaType: Camera.MediaType.PICTURE
allowEdit : false
correctOrientation: true
saveToPhotoAlbum: false
}
# CAPTURE TEST
captureOnSuccess = (mediaFiles) ->
for item in mediaFiles
navigator.notification.alert item.fullPath
captureOnError = (error) ->
navigator.notification.alert 'Capture error, error code: ' + error.code
@captureAudioTest = () ->
navigator.device.capture.captureAudio captureOnSuccess, captureOnError, {}
@captureImageTest = () ->
navigator.device.capture.captureImage captureOnSuccess, captureOnError, {limit:1}
@captureVideoTest = () ->
navigator.device.capture.captureVideo captureOnSuccess, captureOnError, {}
# COMPASS TEST
compassOnSuccess = (heading) ->
compass_result.innerHTML = heading.magneticHeading
compassOnError = (error) ->
navigator.notification.alert 'CompassError: ' + error.code
@compassTest = () ->
navigator.compass.getCurrentHeading compassOnSuccess, compassOnError
# CONNECTION TEST
@connectionTest = () ->
networkState = navigator.network.connection.type;
states = {}
states[Connection.UNKNOWN] = 'Unknown connection'
states[Connection.ETHERNET] = 'Ethernet connection'
states[Connection.WIFI] = 'WiFi connection'
states[Connection.CELL_2G] = 'Cell 2G connection'
states[Connection.CELL_3G] = 'Cell 3G connection'
states[Connection.CELL_4G] = 'Cell 4G connection'
states[Connection.NONE] = 'No network connection'
connection_result.innerHTML = states[networkState]
# CONTACTS TEST
myContact = undefined
contactsSaveOnSuccess = (contact) ->
contacts_result.innerHTML = contact.nickname + " created in Contacts."
contactsSaveOnError = (contactError) ->
contacts_result.innerHTML = "Contact save error = " + contactError.code
@contactsSaveTest = () ->
myContact = navigator.contacts.create {
"displayName": "PI:NAME:<NAME>END_PI"
"nickname": "PI:NAME:<NAME>END_PI"
}
myContact.note = "PI:NAME:<NAME>END_PI"
name = new ContactName()
name.givenName = "PI:NAME:<NAME>END_PI"
name.familyName = "PI:NAME:<NAME>END_PI"
myContact.name = name
myContact.save contactsSaveOnSuccess, contactsSaveOnError
# CONTACTS FIND TEST
contactsFindOnSuccess = (contacts) ->
contacts_result.innerHTML = 'Found ' + contacts.length + ' contacts matching Dolan.'
contactsFindOnError = (contactError) ->
contacts_result.innerHTML = 'Contacts find onError:' + contactError.code
# find all contacts with 'DPI:NAME:<NAME>END_PI' in any name field
@contactsFindTest = () ->
options = new ContactFindOptions
options.filter = "Dolan"
options.multiple = true
fields = ["displayName", "name"]
navigator.contacts.find fields, contactsFindOnSuccess, contactsFindOnError, options
# DEVICE TESTS
@deviceTest = () ->
device_result.innerHTML =
"Device model: " + device.model + "<br>" +
"Device Cordova: " + device.cordova + "<br>" +
"Device platform: " + device.platform + "<br>" +
"Device UUID: " + device.uuid + "<br>" +
"Device version: " + device.version + "<br>"
# EVENTS TESTS
@addPause = () ->
document.addEventListener "pause", () ->
if pause_result.innerHTML == "NO"
pause_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
else
pause_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
navigator.notification.alert "Event listener added: pause"
@addResume = () ->
# alert needs to be wrapped in setTimeout to work
document.addEventListener "resume", () ->
if resume_result.innerHTML == "NO"
resume_result.innerHTML = "YES: " + new Date().toLocaleTimeString();
else
resume_result.innerHTML += "<br>another one: " + new Date().toLocaleTimeString();
navigator.notification.alert "Event listener added: resume"
@addOnline = () ->
document.addEventListener "online", () ->
online_result.innerHTML = "YES"
navigator.notification.alert "Event listener added: online"
@addOffline = () ->
document.addEventListener "offline", () ->
offline_result.innerHTML = "YES"
navigator.notification.alert "Event listener added: offline"
@addBatteryCritical = () ->
window.addEventListener "batterycritical", (status) ->
navigator.notification.alert "Device's battery level is critical, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterycritical"
@addBatteryLow = () ->
window.addEventListener "batterylow", (status) ->
navigator.notification.alert "Device's battery level is low, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterylow"
@addBatteryStatus = () ->
window.addEventListener "batterystatus", (status) ->
navigator.notification.alert "Device's battery level was changed by at least 1%, with #{status.level}
% battery life. \n
Is it plugged in? #{status.isPlugged}"
navigator.notification.alert "Event listener added: batterystatus"
@addBackButton = () ->
handler = () ->
navigator.notification.alert "Device's back button pressed !"
document.addEventListener "backbutton", handler, false
navigator.notification.alert "Event listener added: backbutton"
@addMenuButton = () ->
handler = () ->
navigator.notification.alert "Menu button pressed !"
document.addEventListener "menubutton", handler, false
navigator.notification.alert "Event listener added: menubutton"
@addSearchButton = () ->
handler = () ->
navigator.notification.alert "searchbutton button pressed !"
document.addEventListener "searchbutton", handler, false
navigator.notification.alert "Event listener added: searchbutton"
# FILE TEST
testFS = undefined
@getFileSystemTest = () ->
window.requestFileSystem LocalFileSystem.PERSISTENT, 0, gotFS, fileTestFail
@readFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:true}, gotFileEntry, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@writeFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:true}, gotFileToWrite, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@deleteFileTest = () ->
if testFS?
testFS.root.getFile "lol.txt", {create:false}, gotFileToDelete, fileTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
gotFS = (fileSystem) ->
navigator.notification.alert "Got file system with root path: " + fileSystem.root.fullPath
testFS = fileSystem
gotFileEntry = (fileEntry) ->
navigator.notification.alert "Got file entry with path: " + fileEntry.fullPath
fileEntry.file gotFile, fileTestFail
gotFileToWrite = (fileEntry) ->
fileEntry.createWriter (fileWriter) ->
fileWriter.onwriteend = (e) ->
file_result.innerHTML = "Write completed"
fileWriter.onerror = (e) ->
file_result.innerHTML = 'Write failed: ' + JSON.stringify e
# Create a new Blob and write it to log.txt.
blob = new Blob ['Lorem Ipsum'], {type: 'text/plain'}
fileWriter.write blob
, fileTestFail
gotFileToDelete = (fileEntry) ->
fileEntry.remove( () ->
file_result.innerHTML = "File: #{fileEntry.name} deleted from path: #{fileEntry.fullPath}"
,
(error) ->
fileTestFail
)
gotFile = (file) ->
navigator.notification.alert "Got file: #{file.name} \n
Full path: #{file.fullPath} \n
Mime type: #{file.type} \n
Last modified: #{file.lastModifiedDate} \n
Size in bytes: #{file.size}"
readDataUrl(file);
readAsText(file);
readDataUrl = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
navigator.notification.alert "Read as data URL: " + evt.target.result
reader.readAsDataURL file
readAsText = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
file_result.innerHTML = "
Contents of #{file.name}: \n
#{evt.target.result}"
reader.readAsText file
fileTestFail = (evt) ->
navigator.notification.alert "FILETESTFAIL: " + JSON.stringify evt
imageFileURL = undefined
@fileURLTest = () ->
if testFS?
testFS.root.getFile "image.png", {create:true}, gotImage, imageTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
@URLtoFileEntryTest = () ->
if testFS?
window.resolveLocalFileSystemURL imageFileURL, (fileEntry)->
fileURL_result.innerHTML = "fileEntry.fullPath: " + fileEntry.fullPath
, imageTestFail
else
navigator.notification.alert "Request a fileSystem with the 'Get fileSystem' test first"
gotImage = (fileEntry) ->
imageFileURL = fileEntry.toURL()
fileURL_result.innerHTML = "fileEntry.toURl(): #{imageFileURL}"
imageTestFail = (error) ->
fileURL_result.innerHTML = "Error resolving fileEntry: " + JSON.stringify error
#FILETRANSFER TEST
readDownloadAsText = (file) ->
reader = new FileReader()
reader.onloadend = (evt) ->
fileTransfer_result.innerHTML = "
Contents of #{file.name}: \n
#{evt.target.result}"
reader.readAsText file
downloadFromURL = (uri, fileName, options={}) ->
fileTransfer = new FileTransfer()
filePath = steroids.app.absoluteUserFilesPath + fileName
uri = encodeURI uri
fileTransfer.download(
uri
filePath
(entry) ->
fileTransfer_result.innerHTML = "Download complete: #{entry.fullPath}, attempting to read file (if test stalls here, not download problem)"
win = (fileObject) ->
readDownloadAsText fileObject
fail = (error) ->
fileTransfer_result.innerHTML = "Failed to read file: #{JSON.stringify error}"
entry.file win, fail
(error) ->
fileTransfer_result.innerHTML = "
download error source: #{error.source} \n
download error target: #{error.target} \n
download error code: #{error.code}"
false
options
)
@downloadTest = () ->
fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com/en/stable/index.html"
downloadFromURL "http://docs.appgyver.com/en/stable/index.html", "/test.response"
@downloadRedirectTest = () ->
fileTransfer_result.innerHTML = "Downloading from docs.appgyver.com, should redirect to /en/stable/index.html"
downloadFromURL "http://docs.appgyver.com", "/test.redirect.response"
@downloadAuthTest = () ->
fileTransfer_result.innerHTML = "Downloading with basic auth"
downloadFromURL(
"https://api.flowdock.com/flows/flemy/main/files/XaD24A7P0l_M__E4B1YBUw/20130624_130747.jpg"
"test.auth.response"
{ headers: { "Authorization": "Basic NjBlMDQ1MTE5NWZhZDY4OTg5OTU5NGE4Zjg0YzNjYmE6bnVsbA==" } }
)
@clearDownloadResult = () ->
fileTransfer_result.innerHTML = "result cleared"
# GEOLOCATION TEST
onGeolocationSuccess = (position) ->
geoLocation_result.innerHTML = "
Latitude: #{position.coords.latitude} \n
Longitude: #{position.coords.longitude} \n
Altitude: #{position.coords.altitude} \n
Accuracy: #{position.coords.accuracy} \n
Altitude Accuracy: #{position.coords.altitudeAccuracy} \n
Heading: #{position.coords.heading} \n
Speed: #{position.coords.speed} \n
Timestamp: #{position.timestamp} \n"
# onError Callback receives a PositionError object
onGeolocationError = (error) ->
navigator.notification.alert "
code: #{error.code} \n
message: #{error.message} "
@geolocationTest = () ->
navigator.geolocation.getCurrentPosition onGeolocationSuccess, onGeolocationError
watchId = undefined
@watchPositionTest = () ->
watchId = navigator.geolocation.watchPosition onGeolocationSuccess, onGeolocationError
@clearPositionWatchTest = () ->
if watchId?
navigator.geolocation.clearWatch(watchId)
geoLocation_result.innerHTML = "Watch cleared"
else
geoLocation_result.innerHTML = "No position watch to clear"
# GLOBALIZATION TEST
@localeTest = () ->
navigator.globalization.getLocaleName (locale) ->
globalization_result.innerHTML = locale.value
@DSTTest = () ->
navigator.globalization.isDayLightSavingsTime(
new Date()
(date) ->
globalization_result.innerHTML = "Is Daylight savings: " + date.dst
(error) ->
globalization_result.innerHTML = "Error: " + JSON.stringify error
)
# INAPPBROWSER TEST
ref = undefined
loadNum = 0
exit = () ->
ref.removeEventListener 'loadstart', loadStart
ref.removeEventListener 'exit', exit
if loadNum > 1
steroids.logger.log "SUCCESS in IABRedirectTest, loadstarts: #{loadNum}"
else
steroids.logger.log "FAILURE in IABRedirectTest, loadstarts: #{loadNum}"
loadNum = 0
loadStart = (e) ->
loadNum++
@openIABTest = () ->
ref = window.open 'http://www.google.com', '_blank', 'location=yes'
@IABRedirectTest = () ->
ref = window.open 'http://www.gooogle.com', '_blank', 'location=yes'
ref.addEventListener('loadstart', loadStart);
ref.addEventListener('exit', exit);
# MEDIA TEST
my_media = null;
# Play audio
@playExistingAudioFile = () ->
some_media = new Media "http://audio.ibeat.org/content/p1rj1s/p1rj1s_-_rockGuitar.mp3"
some_media.play()
@playAudio = () ->
if my_media == null
my_media = new Media "documents://"+"lol.wav"
my_media.play()
else
my_media.play()
@pauseAudio = () ->
if my_media?
my_media.pause()
media_record_result.innerHTML = "Playback paused!"
@stopAudio = () ->
if my_media?
my_media.stop()
media_record_result.innerHTML = "Playback stopped!"
@recordAudio = () ->
my_media = new Media "lol.wav"
my_media.startRecord()
media_record_result.innerHTML = "Recording!"
setTimeout () ->
my_media.stopRecord()
media_record_result.innerHTML = "Recording complete!"
, 5000
getPath = () ->
location.pathname.substring 0, location.pathname.lastIndexOf('/')+1
# NOTIFICATION TEST
@alertTest = () ->
navigator.notification.alert "Hello world!", null, "Cordova alert", "Lol"
@confirmTest = () ->
navigator.notification.confirm "Hello world!", null, "Cordova confirm", "PI:NAME:<NAME>END_PIards, PI:NAME:<NAME>END_PI"
@promptTest = () ->
navigator.notification.prompt "Hello world!", null
@beepTest = () ->
navigator.notification.beep 5
@vibrateTest = () ->
navigator.notification.vibrate 500
# LOCAL STORAGE TEST
@setItemTest = () ->
window.localStorage.setItem "items", "apples"
localStorage_result.innerHTML = "Set an item 'items' with the value 'apples'."
@getItemTest = () ->
item = window.localStorage.getItem "items"
if item?
localStorage_result.innerHTML = "Got '#{item}'"
else
localStorage_result.innerHTML = "Error: could not find the item item-text-wrap"
@removeItemTest = () ->
item = window.localStorage.getItem "items"
if item?
window.localStorage.removeItem "items"
localStorage_result.innerHTML = "'items' removed"
else
localStorage_result.innerHTML = "Error: could not find the item to be removed"
# EXIT APP
@exitAppTest = () ->
navigator.app.exitApp()
@testInAppBrowserNOBar = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=no"
ref = window.open url, target, options
@testInAppBrowserRedirect = () ->
url = "https://jigsaw.w3.org/HTTP/300/"
target = "_blank"
options = "location=yes"
newWindow = window.open url, target, options
exit = ->
newWindow.removeEventListener 'loadstart', loadStart
newWindow.removeEventListener 'exit', exit
loadStart = (e) ->
console.log 'URL : ', e.url
newWindow.addEventListener 'loadstart', loadStart
newWindow.addEventListener 'exit', exit
@testInAppBrowserClearCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearcache=yes"
ref = window.open url, target, options
@testInAppBrowserClearSessionCache = () ->
url = "http://localhost/views/plugin/index.html"
target = "_blank"
options = "location=yes,clearsessioncache=yes"
ref = window.open url, target, options
@testInAppBrowserWithBar = () ->
appendEvent = (content) =>
parent = document.getElementById("in-app-browser-status")
li = document.createElement("li")
li.innerHTML = "#{content} - time: #{new Date()}"
parent.appendChild li
url = "http://www.google.com"
target = "_blank"
options = "location=yes"
appendEvent "open window"
ref = window.open url, target, options
ref.addEventListener "loadstart", () ->
appendEvent "loadstart"
ref.addEventListener "loadstop", () ->
appendEvent "loadstop"
ref.addEventListener "loaderror", () ->
appendEvent "loaderror"
ref.addEventListener "exit", () ->
appendEvent "exit"
@testNativeInputShow = () ->
params =
rightButton:
styleClass: 'send-button'
input:
placeHolder: 'Type your message here'
type: 'normal'
lines: 1
cordova.plugins.NativeInput.show ""
@testNativeInputShow_style = () ->
params =
leftButton:
styleCSS: 'text:Up;color:white;background-color:gray;'
rightButton:
styleClass: 'myRightButtonClass'
cssId: 'myRightButton'
panel:
styleClass: 'grey-panel'
input:
placeHolder: 'Type your message here'
type: 'uri'
lines: 2
styleClass: 'myInputClass'
styleId: 'myInputId'
cordova.plugins.NativeInput.show params
@testNativeInputShow_email = () ->
params =
input:
placeHolder: 'Chat box'
type: 'email'
lines: 1
cordova.plugins.NativeInput.show params
@testNativeInputKeyboardAction = () ->
cordova.plugins.NativeInput.onKeyboardAction true, (action) ->
keyboardAction = document.getElementById("keyboardAction")
keyboardAction.innerHTML = keyboardAction.innerHTML + "action: #{action}<br>"
@testNativeInputCloseKeyboard = () ->
cordova.plugins.NativeInput.closeKeyboard()
@testNativeInputOnButtonAction = () ->
cordova.plugins.NativeInput.onButtonAction (button) ->
buttonAction = document.getElementById("buttonAction")
buttonAction.innerHTML = buttonAction.innerHTML + "button: #{button}<br>"
@testNativeInputHide = () ->
cordova.plugins.NativeInput.hide()
@testNativeInputOnChange = () ->
cordova.plugins.NativeInput.onChange (value) ->
nativeInputValue1 = document.getElementById("nativeInputValue1")
nativeInputValue1.innerHTML = value
@testNativeInputGetValue = () ->
cordova.plugins.NativeInput.getValue (value) ->
nativeInputValue2 = document.getElementById("nativeInputValue2")
nativeInputValue2.innerHTML = value
|
[
{
"context": "# Copyright 2016 Clement Bramy\n#\n# Licensed under the Apache License, Version 2.",
"end": 30,
"score": 0.9998596906661987,
"start": 17,
"tag": "NAME",
"value": "Clement Bramy"
}
] | lib/publishers/publisher.coffee | cbramy/snippy | 0 | # Copyright 2016 Clement Bramy
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_ = require 'lodash'
module.exports = class Publisher
http_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
https_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
get_http_headers: (headers) ->
_.merge http_default_headers, headers
get_https_headers: (headers) ->
_.merge https_default_headers, headers
success: (url) =>
atom.clipboard.write(url)
atom.notifications?.addSuccess 'Code shared.',
dismissable: true
icon: 'cloud-upload'
detail: """
Your code snippet has been successfully shared on #{@name}.
Snippet url has been copied in your clipboard.
Snippet available at #{url}!
"""
failure: (error) =>
atom.notifications?addError 'Code sharing failure.',
dismissable: true
icon: 'hubot'
detail: """
Your code could not be uploaded to #{@name} due to errors:
#{error}
"""
share: (timeout) ->
# TODO: show loading pane
| 104297 | # Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_ = require 'lodash'
module.exports = class Publisher
http_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
https_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
get_http_headers: (headers) ->
_.merge http_default_headers, headers
get_https_headers: (headers) ->
_.merge https_default_headers, headers
success: (url) =>
atom.clipboard.write(url)
atom.notifications?.addSuccess 'Code shared.',
dismissable: true
icon: 'cloud-upload'
detail: """
Your code snippet has been successfully shared on #{@name}.
Snippet url has been copied in your clipboard.
Snippet available at #{url}!
"""
failure: (error) =>
atom.notifications?addError 'Code sharing failure.',
dismissable: true
icon: 'hubot'
detail: """
Your code could not be uploaded to #{@name} due to errors:
#{error}
"""
share: (timeout) ->
# TODO: show loading pane
| true | # Copyright 2016 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_ = require 'lodash'
module.exports = class Publisher
http_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
https_default_headers =
'Accept-Charset': 'utf-8'
'Cache-Control': 'no-cache'
'Content-Type': 'application/x-www-form-urlencoded'
get_http_headers: (headers) ->
_.merge http_default_headers, headers
get_https_headers: (headers) ->
_.merge https_default_headers, headers
success: (url) =>
atom.clipboard.write(url)
atom.notifications?.addSuccess 'Code shared.',
dismissable: true
icon: 'cloud-upload'
detail: """
Your code snippet has been successfully shared on #{@name}.
Snippet url has been copied in your clipboard.
Snippet available at #{url}!
"""
failure: (error) =>
atom.notifications?addError 'Code sharing failure.',
dismissable: true
icon: 'hubot'
detail: """
Your code could not be uploaded to #{@name} due to errors:
#{error}
"""
share: (timeout) ->
# TODO: show loading pane
|
[
{
"context": "http'\n resolution: '@2x.png'\n accessToken: 'pk.eyJ1IjoibWFwYm94IiwiYSI6IlhHVkZmaW8ifQ.hAMX5hSW-QnTeRCMAy9A8Q'\n",
"end": 341,
"score": 0.9997044205665588,
"start": 277,
"tag": "KEY",
"value": "pk.eyJ1IjoibWFwYm94IiwiYSI6IlhHVkZmaW8ifQ.hAMX5hSW-QnTeRCMAy9A8Q"
}
] | config/production.coffee | googoid/tile-cache | 0 | module.exports =
cacheLifetime: 48 * 60 * 60
port: 3005
mongodb: 'mongodb://localhost:27017/tile-cache'
tileServer:
hostname: 'b.tiles.mapbox.com'
path: '/v4/smarttaxi.c8c68db2/{zoom}/{x}/{y}{res}'
scheme: 'http'
resolution: '@2x.png'
accessToken: 'pk.eyJ1IjoibWFwYm94IiwiYSI6IlhHVkZmaW8ifQ.hAMX5hSW-QnTeRCMAy9A8Q'
| 156473 | module.exports =
cacheLifetime: 48 * 60 * 60
port: 3005
mongodb: 'mongodb://localhost:27017/tile-cache'
tileServer:
hostname: 'b.tiles.mapbox.com'
path: '/v4/smarttaxi.c8c68db2/{zoom}/{x}/{y}{res}'
scheme: 'http'
resolution: '@2x.png'
accessToken: '<KEY>'
| true | module.exports =
cacheLifetime: 48 * 60 * 60
port: 3005
mongodb: 'mongodb://localhost:27017/tile-cache'
tileServer:
hostname: 'b.tiles.mapbox.com'
path: '/v4/smarttaxi.c8c68db2/{zoom}/{x}/{y}{res}'
scheme: 'http'
resolution: '@2x.png'
accessToken: 'PI:KEY:<KEY>END_PI'
|
[
{
"context": "d\"\n name: \"old_password\"\n key: \"old_password\"\n id: \"old_password\"\n floatingL",
"end": 356,
"score": 0.6401354670524597,
"start": 344,
"tag": "PASSWORD",
"value": "old_password"
},
{
"context": "d\"\n name: \"new_password\"\n key: \"new_password\"\n id: \"new_password\"\n ",
"end": 547,
"score": 0.9435485601425171,
"start": 544,
"tag": "PASSWORD",
"value": "new"
},
{
"context": " name: \"new_password\"\n key: \"new_password\"\n id: \"new_password\"\n f",
"end": 547,
"score": 0.4964905381202698,
"start": 547,
"tag": "KEY",
"value": ""
},
{
"context": " name: \"new_password\"\n key: \"new_password\"\n id: \"new_password\"\n floatingL",
"end": 556,
"score": 0.6187922954559326,
"start": 548,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " }, {\n type: \"password\"\n name: \"confirm_password\"\n key: \"confirm_password\"\n ",
"end": 695,
"score": 0.5058619379997253,
"start": 688,
"tag": "PASSWORD",
"value": "confirm"
},
{
"context": " name: \"confirm_password\"\n key: \"confirm_password\"\n id: \"confirm_password\"\n ",
"end": 729,
"score": 0.9071031212806702,
"start": 722,
"tag": "PASSWORD",
"value": "confirm"
},
{
"context": " name: \"confirm_password\"\n key: \"confirm_password\"\n id: \"confirm_password\"\n ",
"end": 729,
"score": 0.6891109943389893,
"start": 729,
"tag": "KEY",
"value": ""
},
{
"context": " name: \"confirm_password\"\n key: \"confirm_password\"\n id: \"confirm_password\"\n float",
"end": 738,
"score": 0.4278823733329773,
"start": 730,
"tag": "PASSWORD",
"value": "password"
}
] | src/components/user/change_password.coffee | dwetterau/life | 2 | React = require 'react'
{FormPage} = require './form_page'
ChangePassword = React.createClass
displayName: 'ChangePassword'
render: () ->
React.createElement FormPage,
pageHeader: 'Change Password'
action: '/user/password'
inputs: [
{
type: "password"
name: "old_password"
key: "old_password"
id: "old_password"
floatingLabelText: "Old Password"
autofocus: true
}, {
type: "password"
name: "new_password"
key: "new_password"
id: "new_password"
floatingLabelText: "New Password"
}, {
type: "password"
name: "confirm_password"
key: "confirm_password"
id: "confirm_password"
floatingLabelText: "Confirm Password"
}
]
submitLabel: 'Change password'
module.exports = {ChangePassword}
| 132154 | React = require 'react'
{FormPage} = require './form_page'
ChangePassword = React.createClass
displayName: 'ChangePassword'
render: () ->
React.createElement FormPage,
pageHeader: 'Change Password'
action: '/user/password'
inputs: [
{
type: "password"
name: "old_password"
key: "<PASSWORD>"
id: "old_password"
floatingLabelText: "Old Password"
autofocus: true
}, {
type: "password"
name: "new_password"
key: "<PASSWORD> <KEY>_<PASSWORD>"
id: "new_password"
floatingLabelText: "New Password"
}, {
type: "password"
name: "<PASSWORD>_password"
key: "<PASSWORD> <KEY>_<PASSWORD>"
id: "confirm_password"
floatingLabelText: "Confirm Password"
}
]
submitLabel: 'Change password'
module.exports = {ChangePassword}
| true | React = require 'react'
{FormPage} = require './form_page'
ChangePassword = React.createClass
displayName: 'ChangePassword'
render: () ->
React.createElement FormPage,
pageHeader: 'Change Password'
action: '/user/password'
inputs: [
{
type: "password"
name: "old_password"
key: "PI:PASSWORD:<PASSWORD>END_PI"
id: "old_password"
floatingLabelText: "Old Password"
autofocus: true
}, {
type: "password"
name: "new_password"
key: "PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI_PI:PASSWORD:<PASSWORD>END_PI"
id: "new_password"
floatingLabelText: "New Password"
}, {
type: "password"
name: "PI:PASSWORD:<PASSWORD>END_PI_password"
key: "PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI_PI:PASSWORD:<PASSWORD>END_PI"
id: "confirm_password"
floatingLabelText: "Confirm Password"
}
]
submitLabel: 'Change password'
module.exports = {ChangePassword}
|
[
{
"context": "\"Welcome to the official Jeff Koons page on Artsy. Jeff Koons plays with ideas of taste, pleasure, celebrity, a",
"end": 1659,
"score": 0.9814559817314148,
"start": 1649,
"tag": "NAME",
"value": "Jeff Koons"
},
{
"context": "containEql \"<meta property=\\\"og:title\\\" content=\\\"Jeff Koons - 100+ Artworks\"\n @html.should.not.containEq",
"end": 1856,
"score": 0.9998067617416382,
"start": 1846,
"tag": "NAME",
"value": "Jeff Koons"
},
{
"context": "\"Welcome to the official Jeff Koons page on Artsy. Jeff Koons plays with ideas of taste, pleasure, celebrity, a",
"end": 2521,
"score": 0.9997175931930542,
"start": 2511,
"tag": "NAME",
"value": "Jeff Koons"
},
{
"context": "containEql \"<meta property=\\\"og:title\\\" content=\\\"Jeff Koons - 100+ Artworks\"\n @html.should.not.containEq",
"end": 2718,
"score": 0.9997574687004089,
"start": 2708,
"tag": "NAME",
"value": "Jeff Koons"
},
{
"context": "artist: @artist\n @sd.CURRENT_PATH = \"/artist/jeff-koons-1/works\"\n @html = jade.render fs.readFileSync(",
"end": 4175,
"score": 0.7459829449653625,
"start": 4163,
"tag": "USERNAME",
"value": "jeff-koons-1"
}
] | desktop/apps/artist/test/meta.coffee | craigspaeth/_force-merge | 1 | fs = require 'fs'
jade = require 'jade'
sinon = require 'sinon'
path = require 'path'
Artist = require '../../../models/artist'
{ fabricate } = require 'antigravity'
artistJSON = require './fixtures'
helpers = require '../view_helpers'
Nav = require '../nav'
_ = require 'underscore'
describe 'Meta tags', ->
before ->
@file = "#{path.resolve __dirname, '../'}/templates/meta.jade"
@sd =
APP_URL: 'http://localhost:5000'
CANONICAL_MOBILE_URL: 'http://m.localhost:5000'
MOBILE_MEDIA_QUERY: 'mobile-media-query'
CURRENT_PATH: '/artist/jeff-koons-1'
describe 'basic artist with name and short blurb', ->
beforeEach ->
@artist = _.pick artistJSON, '_id', 'id', 'name', 'gender', 'blurb', 'stastuses', 'counts', 'meta'
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
viewHelpers: helpers
nav: @nav
it 'includes mobile alternate, canonical, twitter card, og tags, next/prev links', ->
@html.should.containEql "<link rel=\"alternate\" media=\"mobile-media-query\" href=\"http://m.localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary"
@html.should.containEql "<link rel=\"canonical\" href=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:url\" content=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. Jeff Koons plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"Jeff Koons - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'artist with name no blurb, nationality, or years', ->
beforeEach ->
@artist = _.extend nationality: null, blurb: null, years: null, artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'renders correctly', ->
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. Jeff Koons plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"Jeff Koons - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with an image', ->
beforeEach ->
@artist = artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'includes og:image and twitter card', ->
@html.should.containEql "<meta property=\"og:image\" content=\"/foo/bar/large.jpg"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary_large_image"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with no blurb or artworks', ->
beforeEach ->
@artist = _.extend artistJSON, counts: { artworks: 0 }, blurb: null
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
describe 'on the works page (but with artworks)', ->
beforeEach ->
@artist = _.extend artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@sd.CURRENT_PATH = "/artist/jeff-koons-1/works"
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: rels: sinon.stub().returns {}
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
| 99379 | fs = require 'fs'
jade = require 'jade'
sinon = require 'sinon'
path = require 'path'
Artist = require '../../../models/artist'
{ fabricate } = require 'antigravity'
artistJSON = require './fixtures'
helpers = require '../view_helpers'
Nav = require '../nav'
_ = require 'underscore'
describe 'Meta tags', ->
before ->
@file = "#{path.resolve __dirname, '../'}/templates/meta.jade"
@sd =
APP_URL: 'http://localhost:5000'
CANONICAL_MOBILE_URL: 'http://m.localhost:5000'
MOBILE_MEDIA_QUERY: 'mobile-media-query'
CURRENT_PATH: '/artist/jeff-koons-1'
describe 'basic artist with name and short blurb', ->
beforeEach ->
@artist = _.pick artistJSON, '_id', 'id', 'name', 'gender', 'blurb', 'stastuses', 'counts', 'meta'
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
viewHelpers: helpers
nav: @nav
it 'includes mobile alternate, canonical, twitter card, og tags, next/prev links', ->
@html.should.containEql "<link rel=\"alternate\" media=\"mobile-media-query\" href=\"http://m.localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary"
@html.should.containEql "<link rel=\"canonical\" href=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:url\" content=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. <NAME> plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"<NAME> - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'artist with name no blurb, nationality, or years', ->
beforeEach ->
@artist = _.extend nationality: null, blurb: null, years: null, artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'renders correctly', ->
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. <NAME> plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"<NAME> - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with an image', ->
beforeEach ->
@artist = artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'includes og:image and twitter card', ->
@html.should.containEql "<meta property=\"og:image\" content=\"/foo/bar/large.jpg"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary_large_image"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with no blurb or artworks', ->
beforeEach ->
@artist = _.extend artistJSON, counts: { artworks: 0 }, blurb: null
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
describe 'on the works page (but with artworks)', ->
beforeEach ->
@artist = _.extend artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@sd.CURRENT_PATH = "/artist/jeff-koons-1/works"
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: rels: sinon.stub().returns {}
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
| true | fs = require 'fs'
jade = require 'jade'
sinon = require 'sinon'
path = require 'path'
Artist = require '../../../models/artist'
{ fabricate } = require 'antigravity'
artistJSON = require './fixtures'
helpers = require '../view_helpers'
Nav = require '../nav'
_ = require 'underscore'
describe 'Meta tags', ->
before ->
@file = "#{path.resolve __dirname, '../'}/templates/meta.jade"
@sd =
APP_URL: 'http://localhost:5000'
CANONICAL_MOBILE_URL: 'http://m.localhost:5000'
MOBILE_MEDIA_QUERY: 'mobile-media-query'
CURRENT_PATH: '/artist/jeff-koons-1'
describe 'basic artist with name and short blurb', ->
beforeEach ->
@artist = _.pick artistJSON, '_id', 'id', 'name', 'gender', 'blurb', 'stastuses', 'counts', 'meta'
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
viewHelpers: helpers
nav: @nav
it 'includes mobile alternate, canonical, twitter card, og tags, next/prev links', ->
@html.should.containEql "<link rel=\"alternate\" media=\"mobile-media-query\" href=\"http://m.localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary"
@html.should.containEql "<link rel=\"canonical\" href=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:url\" content=\"http://localhost:5000/artist/jeff-koons-1"
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. PI:NAME:<NAME>END_PI plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"PI:NAME:<NAME>END_PI - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'artist with name no blurb, nationality, or years', ->
beforeEach ->
@artist = _.extend nationality: null, blurb: null, years: null, artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'renders correctly', ->
@html.should.containEql "<meta property=\"og:description\" content=\"Welcome to the official Jeff Koons page on Artsy. PI:NAME:<NAME>END_PI plays with ideas of taste, pleasure, celebrity, and commerce. “I believe in advertisement and media completely” ..."
@html.should.containEql "<meta property=\"og:title\" content=\"PI:NAME:<NAME>END_PI - 100+ Artworks"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with an image', ->
beforeEach ->
@artist = artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'includes og:image and twitter card', ->
@html.should.containEql "<meta property=\"og:image\" content=\"/foo/bar/large.jpg"
@html.should.containEql "<meta property=\"twitter:card\" content=\"summary_large_image"
@html.should.not.containEql "<meta name=\"robots\" content=\"noindex"
describe 'with no blurb or artworks', ->
beforeEach ->
@artist = _.extend artistJSON, counts: { artworks: 0 }, blurb: null
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: @nav
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
describe 'on the works page (but with artworks)', ->
beforeEach ->
@artist = _.extend artistJSON
@artist.statuses = _.clone artistJSON.statuses
@nav = new Nav artist: @artist
@sd.CURRENT_PATH = "/artist/jeff-koons-1/works"
@html = jade.render fs.readFileSync(@file).toString(),
artist: @artist
sd: @sd
nav: rels: sinon.stub().returns {}
viewHelpers: helpers
it 'should have a noindex, follow tag', ->
@html.should.containEql "<meta name=\"robots\" content=\"noindex"
|
[
{
"context": "ets, (widget) ->\n Widget\n key: \"widget-#{widget.name}\"\n name: widget.name\n ",
"end": 444,
"score": 0.7812166213989258,
"start": 444,
"tag": "KEY",
"value": ""
}
] | src/components/widgets.coffee | brianshaler/kerplunk-blog-hexa | 0 | _ = require 'lodash'
React = require 'react'
Widget = require './widget'
{DOM} = React
module.exports = React.createFactory React.createClass
render: ->
widgets = [
{
name: 'Comments'
content: DOM.div null, 'hi'
}
{
name: 'Tags'
content: DOM.div null, 'hello'
}
]
DOM.div
className: 'widget-areas'
, _.map widgets, (widget) ->
Widget
key: "widget-#{widget.name}"
name: widget.name
contents: widget.content
| 114035 | _ = require 'lodash'
React = require 'react'
Widget = require './widget'
{DOM} = React
module.exports = React.createFactory React.createClass
render: ->
widgets = [
{
name: 'Comments'
content: DOM.div null, 'hi'
}
{
name: 'Tags'
content: DOM.div null, 'hello'
}
]
DOM.div
className: 'widget-areas'
, _.map widgets, (widget) ->
Widget
key: "widget<KEY>-#{widget.name}"
name: widget.name
contents: widget.content
| true | _ = require 'lodash'
React = require 'react'
Widget = require './widget'
{DOM} = React
module.exports = React.createFactory React.createClass
render: ->
widgets = [
{
name: 'Comments'
content: DOM.div null, 'hi'
}
{
name: 'Tags'
content: DOM.div null, 'hello'
}
]
DOM.div
className: 'widget-areas'
, _.map widgets, (widget) ->
Widget
key: "widgetPI:KEY:<KEY>END_PI-#{widget.name}"
name: widget.name
contents: widget.content
|
[
{
"context": "# Copyright (C) 2013 John Judnich\n# Released under The MIT License - see \"LICENSE\" ",
"end": 33,
"score": 0.999872088432312,
"start": 21,
"tag": "NAME",
"value": "John Judnich"
}
] | shaders/NormalMapGeneratorShader.coffee | anandprabhakar0507/Kosmos | 46 | # Copyright (C) 2013 John Judnich
# Released under The MIT License - see "LICENSE" file for details.
frag = """precision highp float;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform sampler2D sampler;
#define ONE_TEXEL (1.0/4096.0)
vec4 positionAndHeight(vec3 cubePos, vec2 uv)
{
vec3 pos = normalize(cubePos);
float h = texture2D(sampler, uv).a;
pos *= 0.997 + h * 0.003;
return vec4(pos, h);
}
void main(void) {
// ========================== Compute normal vector =======================
vec4 hCenter = positionAndHeight(vPos, vUV);
vec4 hR = positionAndHeight(vPos + ONE_TEXEL * vBinormal, vUV + vec2(ONE_TEXEL, 0));
vec4 hF = positionAndHeight(vPos + ONE_TEXEL * vTangent, vUV + vec2(0, ONE_TEXEL));
vec4 hL = positionAndHeight(vPos - ONE_TEXEL * vBinormal, vUV - vec2(ONE_TEXEL, 0));
vec4 hB = positionAndHeight(vPos - ONE_TEXEL * vTangent, vUV - vec2(0, ONE_TEXEL));
vec3 right = (hR.xyz - hL.xyz);
vec3 forward = (hF.xyz - hB.xyz);
vec3 normal = normalize(cross(right, forward));
// ========================== Compute horizon angle ==========================
/*float horizon = 0.0;
vec3 vUnitPos = normalize(vPos);
for (int i = 1; i < 8; ++i) {
float n = float(i);
float a = n * .0981748;
float x, y;
x = sin(a);
y = cos(a);
vec3 hR = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632);
y = cos(a + 1.57079632);
vec3 hF = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 2.0);
y = cos(a + 1.57079632 * 2.0);
vec3 hL = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 3.0);
y = cos(a + 1.57079632 * 3.0);
vec3 hB = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
float d1 = dot(normalize(hR), vUnitPos);
float d2 = dot(normalize(hF), vUnitPos);
float d3 = dot(normalize(hL), vUnitPos);
float d4 = dot(normalize(hB), vUnitPos);
float d = max(d1, max(d2, max(d3, d4)));
horizon = max(horizon, d);
}
horizon = clamp(horizon, 0.0, 1.0);*/
// this is a very unique and extremely efficient hack
// basically we encode the ambient occlusion map / horizon map as the normal vector length!
// not only does this efficiently pack this info, but actually ENHANCES the normal map quality
// because wide open areas determined by the horizon map scale down the vector length, resulting
// in a "sharpening" effect for these areas, and a smoothing effect for curved surfaces. the end
// result is sharpened normal maps in general appearing 2x as high resolution! mainly this is because
// mountain peaks are sharpened, and thus dont appear as blurry as regular normals do.
// Note: The reason scaling down normal vectors sharpens them is when interpolating linearly between
// a large vector to a small vector, and renormalizing in the fragment shader, this has the effect of
// producing a nonlinear interpolation. Specifically, the smaller the destination vector, the faster
// it is approached, thus creating a "sharpened" look.
float ave = (hR.a + hF.a + hL.a + hB.a) * 0.25;
float diff = abs(hCenter.a - ave) * 500.0;
normal /= (1.0 + diff);
//normal *= ((1.0-horizon) * 0.9375 + 0.0625);
float height = hCenter.a;
gl_FragColor = vec4((normal + 1.0) * 0.5, height);
}
"""
vert = """
attribute vec2 aUV;
attribute vec3 aPos;
attribute vec3 aTangent;
attribute vec3 aBinormal;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform vec2 verticalViewport;
void main(void) {
vUV = aUV;
vPos = aPos;
vTangent = aTangent;
vBinormal = aBinormal;
vec2 pos = aUV;
pos.y = (pos.y - verticalViewport.x) / verticalViewport.y;
pos = pos * 2.0 - 1.0;
gl_Position = vec4(pos, 0.0, 1.0);
}
"""
xgl.addProgram("normalMapGenerator", vert, frag)
| 224114 | # Copyright (C) 2013 <NAME>
# Released under The MIT License - see "LICENSE" file for details.
frag = """precision highp float;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform sampler2D sampler;
#define ONE_TEXEL (1.0/4096.0)
vec4 positionAndHeight(vec3 cubePos, vec2 uv)
{
vec3 pos = normalize(cubePos);
float h = texture2D(sampler, uv).a;
pos *= 0.997 + h * 0.003;
return vec4(pos, h);
}
void main(void) {
// ========================== Compute normal vector =======================
vec4 hCenter = positionAndHeight(vPos, vUV);
vec4 hR = positionAndHeight(vPos + ONE_TEXEL * vBinormal, vUV + vec2(ONE_TEXEL, 0));
vec4 hF = positionAndHeight(vPos + ONE_TEXEL * vTangent, vUV + vec2(0, ONE_TEXEL));
vec4 hL = positionAndHeight(vPos - ONE_TEXEL * vBinormal, vUV - vec2(ONE_TEXEL, 0));
vec4 hB = positionAndHeight(vPos - ONE_TEXEL * vTangent, vUV - vec2(0, ONE_TEXEL));
vec3 right = (hR.xyz - hL.xyz);
vec3 forward = (hF.xyz - hB.xyz);
vec3 normal = normalize(cross(right, forward));
// ========================== Compute horizon angle ==========================
/*float horizon = 0.0;
vec3 vUnitPos = normalize(vPos);
for (int i = 1; i < 8; ++i) {
float n = float(i);
float a = n * .0981748;
float x, y;
x = sin(a);
y = cos(a);
vec3 hR = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632);
y = cos(a + 1.57079632);
vec3 hF = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 2.0);
y = cos(a + 1.57079632 * 2.0);
vec3 hL = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 3.0);
y = cos(a + 1.57079632 * 3.0);
vec3 hB = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
float d1 = dot(normalize(hR), vUnitPos);
float d2 = dot(normalize(hF), vUnitPos);
float d3 = dot(normalize(hL), vUnitPos);
float d4 = dot(normalize(hB), vUnitPos);
float d = max(d1, max(d2, max(d3, d4)));
horizon = max(horizon, d);
}
horizon = clamp(horizon, 0.0, 1.0);*/
// this is a very unique and extremely efficient hack
// basically we encode the ambient occlusion map / horizon map as the normal vector length!
// not only does this efficiently pack this info, but actually ENHANCES the normal map quality
// because wide open areas determined by the horizon map scale down the vector length, resulting
// in a "sharpening" effect for these areas, and a smoothing effect for curved surfaces. the end
// result is sharpened normal maps in general appearing 2x as high resolution! mainly this is because
// mountain peaks are sharpened, and thus dont appear as blurry as regular normals do.
// Note: The reason scaling down normal vectors sharpens them is when interpolating linearly between
// a large vector to a small vector, and renormalizing in the fragment shader, this has the effect of
// producing a nonlinear interpolation. Specifically, the smaller the destination vector, the faster
// it is approached, thus creating a "sharpened" look.
float ave = (hR.a + hF.a + hL.a + hB.a) * 0.25;
float diff = abs(hCenter.a - ave) * 500.0;
normal /= (1.0 + diff);
//normal *= ((1.0-horizon) * 0.9375 + 0.0625);
float height = hCenter.a;
gl_FragColor = vec4((normal + 1.0) * 0.5, height);
}
"""
vert = """
attribute vec2 aUV;
attribute vec3 aPos;
attribute vec3 aTangent;
attribute vec3 aBinormal;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform vec2 verticalViewport;
void main(void) {
vUV = aUV;
vPos = aPos;
vTangent = aTangent;
vBinormal = aBinormal;
vec2 pos = aUV;
pos.y = (pos.y - verticalViewport.x) / verticalViewport.y;
pos = pos * 2.0 - 1.0;
gl_Position = vec4(pos, 0.0, 1.0);
}
"""
xgl.addProgram("normalMapGenerator", vert, frag)
| true | # Copyright (C) 2013 PI:NAME:<NAME>END_PI
# Released under The MIT License - see "LICENSE" file for details.
frag = """precision highp float;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform sampler2D sampler;
#define ONE_TEXEL (1.0/4096.0)
vec4 positionAndHeight(vec3 cubePos, vec2 uv)
{
vec3 pos = normalize(cubePos);
float h = texture2D(sampler, uv).a;
pos *= 0.997 + h * 0.003;
return vec4(pos, h);
}
void main(void) {
// ========================== Compute normal vector =======================
vec4 hCenter = positionAndHeight(vPos, vUV);
vec4 hR = positionAndHeight(vPos + ONE_TEXEL * vBinormal, vUV + vec2(ONE_TEXEL, 0));
vec4 hF = positionAndHeight(vPos + ONE_TEXEL * vTangent, vUV + vec2(0, ONE_TEXEL));
vec4 hL = positionAndHeight(vPos - ONE_TEXEL * vBinormal, vUV - vec2(ONE_TEXEL, 0));
vec4 hB = positionAndHeight(vPos - ONE_TEXEL * vTangent, vUV - vec2(0, ONE_TEXEL));
vec3 right = (hR.xyz - hL.xyz);
vec3 forward = (hF.xyz - hB.xyz);
vec3 normal = normalize(cross(right, forward));
// ========================== Compute horizon angle ==========================
/*float horizon = 0.0;
vec3 vUnitPos = normalize(vPos);
for (int i = 1; i < 8; ++i) {
float n = float(i);
float a = n * .0981748;
float x, y;
x = sin(a);
y = cos(a);
vec3 hR = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632);
y = cos(a + 1.57079632);
vec3 hF = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 2.0);
y = cos(a + 1.57079632 * 2.0);
vec3 hL = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
x = sin(a + 1.57079632 * 3.0);
y = cos(a + 1.57079632 * 3.0);
vec3 hB = positionAndHeight(vPos + x * ONE_TEXEL * vBinormal * n + y * ONE_TEXEL * vTangent * n, vUV + vec2(x, y) * ONE_TEXEL * n).xyz - hCenter.xyz;
float d1 = dot(normalize(hR), vUnitPos);
float d2 = dot(normalize(hF), vUnitPos);
float d3 = dot(normalize(hL), vUnitPos);
float d4 = dot(normalize(hB), vUnitPos);
float d = max(d1, max(d2, max(d3, d4)));
horizon = max(horizon, d);
}
horizon = clamp(horizon, 0.0, 1.0);*/
// this is a very unique and extremely efficient hack
// basically we encode the ambient occlusion map / horizon map as the normal vector length!
// not only does this efficiently pack this info, but actually ENHANCES the normal map quality
// because wide open areas determined by the horizon map scale down the vector length, resulting
// in a "sharpening" effect for these areas, and a smoothing effect for curved surfaces. the end
// result is sharpened normal maps in general appearing 2x as high resolution! mainly this is because
// mountain peaks are sharpened, and thus dont appear as blurry as regular normals do.
// Note: The reason scaling down normal vectors sharpens them is when interpolating linearly between
// a large vector to a small vector, and renormalizing in the fragment shader, this has the effect of
// producing a nonlinear interpolation. Specifically, the smaller the destination vector, the faster
// it is approached, thus creating a "sharpened" look.
float ave = (hR.a + hF.a + hL.a + hB.a) * 0.25;
float diff = abs(hCenter.a - ave) * 500.0;
normal /= (1.0 + diff);
//normal *= ((1.0-horizon) * 0.9375 + 0.0625);
float height = hCenter.a;
gl_FragColor = vec4((normal + 1.0) * 0.5, height);
}
"""
vert = """
attribute vec2 aUV;
attribute vec3 aPos;
attribute vec3 aTangent;
attribute vec3 aBinormal;
varying vec3 vPos;
varying vec3 vTangent;
varying vec3 vBinormal;
varying vec2 vUV;
uniform vec2 verticalViewport;
void main(void) {
vUV = aUV;
vPos = aPos;
vTangent = aTangent;
vBinormal = aBinormal;
vec2 pos = aUV;
pos.y = (pos.y - verticalViewport.x) / verticalViewport.y;
pos = pos * 2.0 - 1.0;
gl_Position = vec4(pos, 0.0, 1.0);
}
"""
xgl.addProgram("normalMapGenerator", vert, frag)
|
[
{
"context": "usEventName: 'popstate'\n refreshPageStorageKey: 'refreshPage:scrollTop'\n\n windowNavigationInitialize: ->\n if scrollT",
"end": 121,
"score": 0.9536093473434448,
"start": 100,
"tag": "KEY",
"value": "refreshPage:scrollTop"
}
] | app/assets/javascripts/evrobone/app-mixins/window-navigation.js.coffee | KODerFunk/evrobone | 7 | Evrobone.AppMixins.WindowNavigation =
popstateBusEventName: 'popstate'
refreshPageStorageKey: 'refreshPage:scrollTop'
windowNavigationInitialize: ->
if scrollTop = sessionStorage?.getItem(@refreshPageStorageKey)
@$window.scrollTop scrollTop
sessionStorage.removeItem @refreshPageStorageKey
@$window.on 'popstate', _.bind(@historyChangeHandler, @)
return
historyChangeHandler: ->
@trigger @popstateBusEventName, arguments...
return
onPopState: (callback, context) ->
context.listenTo @, @popstateBusEventName, callback
offPopState: (callback, context) ->
context.stopListening @, @popstateBusEventName, callback
changeLocation: (url, push = false, title = '', options) ->
historyMethod = window.history[if push then 'pushState' else 'replaceState']
if historyMethod
unless /^\w+:\/\//.test(url)
url = "#{window.location.protocol}//#{window.location.host}#{url}"
historyMethod.call window.history, _.extend({ turbolinks: Turbolinks?, url: url }, options), title, url
else
window.location.hash = url
return
visit: (location) ->
if Turbolinks?
Turbolinks.visit location
else
window.location = location
return
reloadPage: ->
@visit window.location
refreshPage: ->
if Turbolinks?
# TODO may be should use similar mech as for non-turbo variant with sessionStorage
$document = $(document)
scrollTop = 0
$document.one 'page:before-unload.refreshPage', =>
scrollTop = @$window.scrollTop()
return
# TODO on or one ?
$document.one 'page:load.refreshPage page:restore.refreshPage', =>
@$window.scrollTop scrollTop
return
else
# TODO need store url and timestamp for more strong collision protection
sessionStorage?.setItem @refreshPageStorageKey, @$window.scrollTop()
@reloadPage()
return
| 91610 | Evrobone.AppMixins.WindowNavigation =
popstateBusEventName: 'popstate'
refreshPageStorageKey: '<KEY>'
windowNavigationInitialize: ->
if scrollTop = sessionStorage?.getItem(@refreshPageStorageKey)
@$window.scrollTop scrollTop
sessionStorage.removeItem @refreshPageStorageKey
@$window.on 'popstate', _.bind(@historyChangeHandler, @)
return
historyChangeHandler: ->
@trigger @popstateBusEventName, arguments...
return
onPopState: (callback, context) ->
context.listenTo @, @popstateBusEventName, callback
offPopState: (callback, context) ->
context.stopListening @, @popstateBusEventName, callback
changeLocation: (url, push = false, title = '', options) ->
historyMethod = window.history[if push then 'pushState' else 'replaceState']
if historyMethod
unless /^\w+:\/\//.test(url)
url = "#{window.location.protocol}//#{window.location.host}#{url}"
historyMethod.call window.history, _.extend({ turbolinks: Turbolinks?, url: url }, options), title, url
else
window.location.hash = url
return
visit: (location) ->
if Turbolinks?
Turbolinks.visit location
else
window.location = location
return
reloadPage: ->
@visit window.location
refreshPage: ->
if Turbolinks?
# TODO may be should use similar mech as for non-turbo variant with sessionStorage
$document = $(document)
scrollTop = 0
$document.one 'page:before-unload.refreshPage', =>
scrollTop = @$window.scrollTop()
return
# TODO on or one ?
$document.one 'page:load.refreshPage page:restore.refreshPage', =>
@$window.scrollTop scrollTop
return
else
# TODO need store url and timestamp for more strong collision protection
sessionStorage?.setItem @refreshPageStorageKey, @$window.scrollTop()
@reloadPage()
return
| true | Evrobone.AppMixins.WindowNavigation =
popstateBusEventName: 'popstate'
refreshPageStorageKey: 'PI:KEY:<KEY>END_PI'
windowNavigationInitialize: ->
if scrollTop = sessionStorage?.getItem(@refreshPageStorageKey)
@$window.scrollTop scrollTop
sessionStorage.removeItem @refreshPageStorageKey
@$window.on 'popstate', _.bind(@historyChangeHandler, @)
return
historyChangeHandler: ->
@trigger @popstateBusEventName, arguments...
return
onPopState: (callback, context) ->
context.listenTo @, @popstateBusEventName, callback
offPopState: (callback, context) ->
context.stopListening @, @popstateBusEventName, callback
changeLocation: (url, push = false, title = '', options) ->
historyMethod = window.history[if push then 'pushState' else 'replaceState']
if historyMethod
unless /^\w+:\/\//.test(url)
url = "#{window.location.protocol}//#{window.location.host}#{url}"
historyMethod.call window.history, _.extend({ turbolinks: Turbolinks?, url: url }, options), title, url
else
window.location.hash = url
return
visit: (location) ->
if Turbolinks?
Turbolinks.visit location
else
window.location = location
return
reloadPage: ->
@visit window.location
refreshPage: ->
if Turbolinks?
# TODO may be should use similar mech as for non-turbo variant with sessionStorage
$document = $(document)
scrollTop = 0
$document.one 'page:before-unload.refreshPage', =>
scrollTop = @$window.scrollTop()
return
# TODO on or one ?
$document.one 'page:load.refreshPage page:restore.refreshPage', =>
@$window.scrollTop scrollTop
return
else
# TODO need store url and timestamp for more strong collision protection
sessionStorage?.setItem @refreshPageStorageKey, @$window.scrollTop()
@reloadPage()
return
|
[
{
"context": "and Coffeescript (Version 5)\n\n Copyright 2011-12, Ronald Holshausen (https://github.com/uglyog)\n Released under the ",
"end": 114,
"score": 0.9998854994773865,
"start": 97,
"tag": "NAME",
"value": "Ronald Holshausen"
},
{
"context": "ht 2011-12, Ronald Holshausen (https://github.com/uglyog)\n Released under the MIT License (http://www.ope",
"end": 141,
"score": 0.9993138909339905,
"start": 135,
"tag": "USERNAME",
"value": "uglyog"
}
] | src/header.coffee | mtoribio/clientside-haml-js | 19 | ###
clientside HAML compiler for Javascript and Coffeescript (Version 5)
Copyright 2011-12, Ronald Holshausen (https://github.com/uglyog)
Released under the MIT License (http://www.opensource.org/licenses/MIT)
###
root = this | 208001 | ###
clientside HAML compiler for Javascript and Coffeescript (Version 5)
Copyright 2011-12, <NAME> (https://github.com/uglyog)
Released under the MIT License (http://www.opensource.org/licenses/MIT)
###
root = this | true | ###
clientside HAML compiler for Javascript and Coffeescript (Version 5)
Copyright 2011-12, PI:NAME:<NAME>END_PI (https://github.com/uglyog)
Released under the MIT License (http://www.opensource.org/licenses/MIT)
###
root = this |
[
{
"context": " dist:\n options:\n mysqlHost: '10.6.22.97'\n redisHost: '10.6.25.201'\n src: ",
"end": 2579,
"score": 0.9996002316474915,
"start": 2569,
"tag": "IP_ADDRESS",
"value": "10.6.22.97"
},
{
"context": " mysqlHost: '10.6.22.97'\n redisHost: '10.6.25.201'\n src: 'dist/<%= pkg.name %>/config/index.",
"end": 2614,
"score": 0.9996091723442078,
"start": 2603,
"tag": "IP_ADDRESS",
"value": "10.6.25.201"
},
{
"context": "'test']\n 'DB_PASSWD': ['database password', '123456']\n 'RDS_HOST': ['redis host', 'localhost']\n ",
"end": 3568,
"score": 0.9993407130241394,
"start": 3562,
"tag": "PASSWORD",
"value": "123456"
},
{
"context": "lUser if mysqlUser\n config.mysql.password = mysqlPassword if mysqlPassword\n config.redis.host = redi",
"end": 7551,
"score": 0.995760440826416,
"start": 7538,
"tag": "PASSWORD",
"value": "mysqlPassword"
}
] | Gruntfile.coffee | zzhouj/node_framework | 6 | _ = require 'underscore'
path = require 'path'
prompt = require './utils/prompt'
myUtils = require './utils/myUtils'
module.exports = (grunt) ->
grunt.initConfig
pkg: grunt.file.readJSON 'package.json'
init:
files: [
'package.json'
'restart.sh'
'config/index.json'
'routes/index.coffee'
'routes/index.js'
]
add:
controllers:
cwd: 'template/controllers/'
filter: 'isFile'
src: '**'
dest: 'controllers/'
models:
cwd: 'template/models/'
filter: 'isFile'
src: '**'
dest: 'models/'
permissions:
cwd: 'template/permissions/'
filter: 'isFile'
src: '**'
dest: 'permissions/'
restful:
cwd: 'template/restful/'
filter: 'isFile'
src: '**'
dest: 'restful/'
app:
cwd: 'template/app/'
filter: 'isFile'
src: '**'
dest: 'public/app/'
nav:
cwd: 'template/'
src: 'nav.html'
dest: 'views/baseApp.ejs'
replaceText: '<!--{{nav}}-->'
remove:
controllers:
src: [
'controllers/{{name}}Controller.coffee'
'controllers/{{name}}Controller.js'
]
models:
src: [
'models/{{name}}.coffee'
'models/{{name}}.js'
]
permissions:
src: [
'permissions/{{name}}Permission.coffee'
'permissions/{{name}}Permission.js'
]
restful:
src: [
'restful/{{name}}Restful.coffee'
'restful/{{name}}Restful.js'
]
app:
src: [
'public/app/{{name}}/'
]
sql:
src: [
'sql/{{name}}.sql'
]
clean:
dist: ['dist/']
coffee_js: ['dist/**/*.coffee.js']
coffee:
dist:
expand: true
src: ['{controllers,crons,models,permissions,public,restful,routes,utils}/**/*.coffee']
dest: 'dist/<%= pkg.name %>/'
ext: '.coffee.js'
copy:
dist:
expand: true
src: [
'{bin,config,views,public}/**'
'!**/*.{coffee,js}'
'!public/download/**'
'public/javascripts/vendor/**/*.js'
'app.js'
'Gruntfile.js'
'package.json'
'restart.sh'
]
dest: 'dist/<%= pkg.name %>/'
uglify:
dist:
expand: true
src: ['dist/<%= pkg.name %>/**/*.coffee.js']
ext: '.js'
fixconfig:
dist:
options:
mysqlHost: '10.6.22.97'
redisHost: '10.6.25.201'
src: 'dist/<%= pkg.name %>/config/index.json'
compress:
dist:
options:
archive: 'dist/<%= pkg.name %>_<%= grunt.template.today("yyyy-mm-dd") %>.tar.gz'
mode: 'tgz'
expand: true
cwd: 'dist/'
src: '**'
sql:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'sql/'
tpl:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'public/app/'
grunt.registerMultiTask 'init', ->
done = @async()
{filesSrc} = @
prompt
'PRJ_NAME': ['project name', 'node_framework']
'TITLE': ['web app title', 'node_framework']
'PORT': ['web app port', '3000']
'DB_HOST': ['mysql host', 'localhost']
'DB_PORT': ['mysql server port', '3306']
'DB_NAME': ['database name', 'test']
'DB_USER': ['database username', 'test']
'DB_PASSWD': ['database password', '123456']
'RDS_HOST': ['redis host', 'localhost']
'RDS_PORT': ['redis server port', '6379']
'ROOT_SECRET': ['root secret of web app', myUtils.genNumPass()]
'DEFAULT_APP': ['default app of web app', 'user']
, (err, answers) ->
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
_.each filesSrc, (file) ->
content = grunt.file.read file
if content
_.each answers, (val, key) ->
content = myUtils.replaceAll content, "{{#{key}}}", val
grunt.file.write file, content
done()
grunt.registerMultiTask 'add', ->
name = grunt.config 'add.name'
label = grunt.config 'add.label'
if name and label
addTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'add.name', name
grunt.config 'add.label', label
addTask.call @, name, label, done
grunt.registerMultiTask 'remove', ->
name = grunt.config 'remove.name'
label = grunt.config 'remove.label'
if name and label
removeTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'remove.name', name
grunt.config 'remove.label', label
removeTask.call @, name, label, done
addTask = (name, label, done) ->
{replaceText} = @data
if replaceText
_.each @files, (file) ->
withText = ''
_.each file.src, (srcFile) ->
withText += grunt.file.read path.join file.cwd, srcFile
withText = myUtils.replaceAll withText, "{{name}}", name
withText = myUtils.replaceAll withText, "{{label}}", label
withText += "\n " + replaceText
grunt.file.write file.dest, myUtils.replaceAll grunt.file.read(file.dest), replaceText, withText
else
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = srcFile.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
destFile = path.join file.dest, destFile
srcFile = path.join file.cwd, srcFile
grunt.file.copy srcFile, destFile,
process: (content) ->
content = myUtils.replaceAll content, "{{name}}", name
content = myUtils.replaceAll content, "{{label}}", label
done?()
removeTask = (name, label, done) ->
for src in @data.src
grunt.file.delete src.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
done?()
grunt.loadNpmTasks 'grunt-contrib-clean'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-copy'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-compress'
grunt.loadNpmTasks 'grunt-text-replace'
grunt.registerTask 'dist', [
'clean:dist'
'copy:dist'
'coffee:dist'
'uglify:dist'
'clean:coffee_js'
'fixconfig:dist'
'compress:dist'
]
grunt.registerMultiTask 'fixconfig', ->
return grunt.log.error 'no options' unless @data.options
{port, mysqlHost, mysqlPort, mysqlUser, mysqlPassword, redisHost, rootSecret, cdnUrl} = @data.options
for src in @filesSrc
config = grunt.file.readJSON src
if config
config.port = port if port
config.mysql.host = mysqlHost if mysqlHost
config.mysql.port = mysqlPort if mysqlPort
config.mysql.user = mysqlUser if mysqlUser
config.mysql.password = mysqlPassword if mysqlPassword
config.redis.host = redisHost if redisHost
config.rootSecret = rootSecret if rootSecret
config.cdnUrl = cdnUrl if cdnUrl
grunt.file.write src, JSON.stringify config, null, 4
grunt.registerMultiTask 'sql', ->
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = path.join file.dest, srcFile.replace /\.js$/, '.sql'
srcFile = path.join file.cwd, srcFile
try
model = require "./#{srcFile}"
catch e
console.log e
sql = model?.createTableSql?()
if sql
grunt.log.writeln "writing >> #{destFile}"
grunt.file.write destFile, sql
grunt.registerMultiTask 'tpl', ->
done = @async()
_.each @files, (file) ->
candidates = _.map file.src, (srcFile) ->
srcFile: path.join file.cwd, srcFile
model: srcFile.match(/(.+)\.js/)[1]
hint = _.map(candidates, (candidate, i) ->
"#{i}: #{candidate.model}"
).join '\n'
prompt
'candidateIndex': ["candidate index\n#{hint}\n", 'no default']
, (err, answers) ->
grunt.log.error err if err
return done false if err
candidate = candidates[answers.candidateIndex]
grunt.log.error 'invalid candidate index' unless candidate
return done false unless candidate
candidate.destFiles = _.filter [
path.join file.dest, candidate.model, 'tpl/edit.tpl.html'
path.join file.dest, candidate.model, 'tpl/list.tpl.html'
], (destFile) ->
grunt.file.exists(destFile)
tplTask candidate
done()
tplTask = (candidate) ->
model = require "./#{candidate.srcFile}"
labels = model.table?.labels || {}
{schema} = model.table
options = _.mapObject labels, (label, field) ->
option = schema?[field]
option = if option?.type? then option else {type: option}
option.isNotNull = true unless option.isNotNull?
option
replaceMap = {}
replaceMap['{{name.label}}'] = labels.name if labels.name
replaceMap['{{model.label}}'] = labels.$model if labels.$model
destContents = _.map candidate.destFiles, (destFile) ->
grunt.file.read(destFile)
labels = _.omit labels, (label, field) ->
return true if field == '$model'
for destContent in destContents
return true if destContent?.match new RegExp "item\\.#{myUtils.RegExpEscape field}"
false
indent = ' '
replaceMap["<!--{{field.label}}-->"] = _.map(_.values(labels), (label) ->
"<td>#{label}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.label}}-->"
replaceMap["<!--{{field.value}}-->"] = _.map(_.keys(labels), (field) ->
if options[field]?.type == Number
"<td>{{item.#{field} | number}}</td>"
else if options[field]?.type == Date
"<td>{{item.#{field} | date:'MM-dd HH:mm'}}</td>"
else
"<td>{{item.#{field}}}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.value}}-->"
labels = _.omit labels, 'createTime', 'updateTime'
indent = ' '
replaceMap["#{indent}<!--{{field.input}}-->"] = _.map(labels, (label, field) ->
typeAttr = ''
if options[field]?.type == Number
typeAttr = ' type="number"'
else if options[field]?.type == Date
typeAttr = ' type="datetime-local"'
requireAttr = if options[field]?.isNotNull then ' required' else ''
"""
#{indent}<div class="form-group">
#{indent} <label for="#{field}">#{label}:</label>
#{indent} <input class="form-control" id="#{field}" ng-model="item.#{field}"#{typeAttr}#{requireAttr}>
#{indent}</div>
"""
).join('\n') + "\n#{indent}<!--{{field.input}}-->"
_.each candidate.destFiles, (destFile) ->
_.each replaceMap, (withText, replaceText) ->
grunt.file.write destFile, myUtils.replaceAll grunt.file.read(destFile), replaceText, withText
| 213039 | _ = require 'underscore'
path = require 'path'
prompt = require './utils/prompt'
myUtils = require './utils/myUtils'
module.exports = (grunt) ->
grunt.initConfig
pkg: grunt.file.readJSON 'package.json'
init:
files: [
'package.json'
'restart.sh'
'config/index.json'
'routes/index.coffee'
'routes/index.js'
]
add:
controllers:
cwd: 'template/controllers/'
filter: 'isFile'
src: '**'
dest: 'controllers/'
models:
cwd: 'template/models/'
filter: 'isFile'
src: '**'
dest: 'models/'
permissions:
cwd: 'template/permissions/'
filter: 'isFile'
src: '**'
dest: 'permissions/'
restful:
cwd: 'template/restful/'
filter: 'isFile'
src: '**'
dest: 'restful/'
app:
cwd: 'template/app/'
filter: 'isFile'
src: '**'
dest: 'public/app/'
nav:
cwd: 'template/'
src: 'nav.html'
dest: 'views/baseApp.ejs'
replaceText: '<!--{{nav}}-->'
remove:
controllers:
src: [
'controllers/{{name}}Controller.coffee'
'controllers/{{name}}Controller.js'
]
models:
src: [
'models/{{name}}.coffee'
'models/{{name}}.js'
]
permissions:
src: [
'permissions/{{name}}Permission.coffee'
'permissions/{{name}}Permission.js'
]
restful:
src: [
'restful/{{name}}Restful.coffee'
'restful/{{name}}Restful.js'
]
app:
src: [
'public/app/{{name}}/'
]
sql:
src: [
'sql/{{name}}.sql'
]
clean:
dist: ['dist/']
coffee_js: ['dist/**/*.coffee.js']
coffee:
dist:
expand: true
src: ['{controllers,crons,models,permissions,public,restful,routes,utils}/**/*.coffee']
dest: 'dist/<%= pkg.name %>/'
ext: '.coffee.js'
copy:
dist:
expand: true
src: [
'{bin,config,views,public}/**'
'!**/*.{coffee,js}'
'!public/download/**'
'public/javascripts/vendor/**/*.js'
'app.js'
'Gruntfile.js'
'package.json'
'restart.sh'
]
dest: 'dist/<%= pkg.name %>/'
uglify:
dist:
expand: true
src: ['dist/<%= pkg.name %>/**/*.coffee.js']
ext: '.js'
fixconfig:
dist:
options:
mysqlHost: '10.6.22.97'
redisHost: '10.6.25.201'
src: 'dist/<%= pkg.name %>/config/index.json'
compress:
dist:
options:
archive: 'dist/<%= pkg.name %>_<%= grunt.template.today("yyyy-mm-dd") %>.tar.gz'
mode: 'tgz'
expand: true
cwd: 'dist/'
src: '**'
sql:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'sql/'
tpl:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'public/app/'
grunt.registerMultiTask 'init', ->
done = @async()
{filesSrc} = @
prompt
'PRJ_NAME': ['project name', 'node_framework']
'TITLE': ['web app title', 'node_framework']
'PORT': ['web app port', '3000']
'DB_HOST': ['mysql host', 'localhost']
'DB_PORT': ['mysql server port', '3306']
'DB_NAME': ['database name', 'test']
'DB_USER': ['database username', 'test']
'DB_PASSWD': ['database password', '<PASSWORD>']
'RDS_HOST': ['redis host', 'localhost']
'RDS_PORT': ['redis server port', '6379']
'ROOT_SECRET': ['root secret of web app', myUtils.genNumPass()]
'DEFAULT_APP': ['default app of web app', 'user']
, (err, answers) ->
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
_.each filesSrc, (file) ->
content = grunt.file.read file
if content
_.each answers, (val, key) ->
content = myUtils.replaceAll content, "{{#{key}}}", val
grunt.file.write file, content
done()
grunt.registerMultiTask 'add', ->
name = grunt.config 'add.name'
label = grunt.config 'add.label'
if name and label
addTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'add.name', name
grunt.config 'add.label', label
addTask.call @, name, label, done
grunt.registerMultiTask 'remove', ->
name = grunt.config 'remove.name'
label = grunt.config 'remove.label'
if name and label
removeTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'remove.name', name
grunt.config 'remove.label', label
removeTask.call @, name, label, done
addTask = (name, label, done) ->
{replaceText} = @data
if replaceText
_.each @files, (file) ->
withText = ''
_.each file.src, (srcFile) ->
withText += grunt.file.read path.join file.cwd, srcFile
withText = myUtils.replaceAll withText, "{{name}}", name
withText = myUtils.replaceAll withText, "{{label}}", label
withText += "\n " + replaceText
grunt.file.write file.dest, myUtils.replaceAll grunt.file.read(file.dest), replaceText, withText
else
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = srcFile.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
destFile = path.join file.dest, destFile
srcFile = path.join file.cwd, srcFile
grunt.file.copy srcFile, destFile,
process: (content) ->
content = myUtils.replaceAll content, "{{name}}", name
content = myUtils.replaceAll content, "{{label}}", label
done?()
removeTask = (name, label, done) ->
for src in @data.src
grunt.file.delete src.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
done?()
grunt.loadNpmTasks 'grunt-contrib-clean'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-copy'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-compress'
grunt.loadNpmTasks 'grunt-text-replace'
grunt.registerTask 'dist', [
'clean:dist'
'copy:dist'
'coffee:dist'
'uglify:dist'
'clean:coffee_js'
'fixconfig:dist'
'compress:dist'
]
grunt.registerMultiTask 'fixconfig', ->
return grunt.log.error 'no options' unless @data.options
{port, mysqlHost, mysqlPort, mysqlUser, mysqlPassword, redisHost, rootSecret, cdnUrl} = @data.options
for src in @filesSrc
config = grunt.file.readJSON src
if config
config.port = port if port
config.mysql.host = mysqlHost if mysqlHost
config.mysql.port = mysqlPort if mysqlPort
config.mysql.user = mysqlUser if mysqlUser
config.mysql.password = <PASSWORD> if mysqlPassword
config.redis.host = redisHost if redisHost
config.rootSecret = rootSecret if rootSecret
config.cdnUrl = cdnUrl if cdnUrl
grunt.file.write src, JSON.stringify config, null, 4
grunt.registerMultiTask 'sql', ->
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = path.join file.dest, srcFile.replace /\.js$/, '.sql'
srcFile = path.join file.cwd, srcFile
try
model = require "./#{srcFile}"
catch e
console.log e
sql = model?.createTableSql?()
if sql
grunt.log.writeln "writing >> #{destFile}"
grunt.file.write destFile, sql
grunt.registerMultiTask 'tpl', ->
done = @async()
_.each @files, (file) ->
candidates = _.map file.src, (srcFile) ->
srcFile: path.join file.cwd, srcFile
model: srcFile.match(/(.+)\.js/)[1]
hint = _.map(candidates, (candidate, i) ->
"#{i}: #{candidate.model}"
).join '\n'
prompt
'candidateIndex': ["candidate index\n#{hint}\n", 'no default']
, (err, answers) ->
grunt.log.error err if err
return done false if err
candidate = candidates[answers.candidateIndex]
grunt.log.error 'invalid candidate index' unless candidate
return done false unless candidate
candidate.destFiles = _.filter [
path.join file.dest, candidate.model, 'tpl/edit.tpl.html'
path.join file.dest, candidate.model, 'tpl/list.tpl.html'
], (destFile) ->
grunt.file.exists(destFile)
tplTask candidate
done()
tplTask = (candidate) ->
model = require "./#{candidate.srcFile}"
labels = model.table?.labels || {}
{schema} = model.table
options = _.mapObject labels, (label, field) ->
option = schema?[field]
option = if option?.type? then option else {type: option}
option.isNotNull = true unless option.isNotNull?
option
replaceMap = {}
replaceMap['{{name.label}}'] = labels.name if labels.name
replaceMap['{{model.label}}'] = labels.$model if labels.$model
destContents = _.map candidate.destFiles, (destFile) ->
grunt.file.read(destFile)
labels = _.omit labels, (label, field) ->
return true if field == '$model'
for destContent in destContents
return true if destContent?.match new RegExp "item\\.#{myUtils.RegExpEscape field}"
false
indent = ' '
replaceMap["<!--{{field.label}}-->"] = _.map(_.values(labels), (label) ->
"<td>#{label}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.label}}-->"
replaceMap["<!--{{field.value}}-->"] = _.map(_.keys(labels), (field) ->
if options[field]?.type == Number
"<td>{{item.#{field} | number}}</td>"
else if options[field]?.type == Date
"<td>{{item.#{field} | date:'MM-dd HH:mm'}}</td>"
else
"<td>{{item.#{field}}}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.value}}-->"
labels = _.omit labels, 'createTime', 'updateTime'
indent = ' '
replaceMap["#{indent}<!--{{field.input}}-->"] = _.map(labels, (label, field) ->
typeAttr = ''
if options[field]?.type == Number
typeAttr = ' type="number"'
else if options[field]?.type == Date
typeAttr = ' type="datetime-local"'
requireAttr = if options[field]?.isNotNull then ' required' else ''
"""
#{indent}<div class="form-group">
#{indent} <label for="#{field}">#{label}:</label>
#{indent} <input class="form-control" id="#{field}" ng-model="item.#{field}"#{typeAttr}#{requireAttr}>
#{indent}</div>
"""
).join('\n') + "\n#{indent}<!--{{field.input}}-->"
_.each candidate.destFiles, (destFile) ->
_.each replaceMap, (withText, replaceText) ->
grunt.file.write destFile, myUtils.replaceAll grunt.file.read(destFile), replaceText, withText
| true | _ = require 'underscore'
path = require 'path'
prompt = require './utils/prompt'
myUtils = require './utils/myUtils'
module.exports = (grunt) ->
grunt.initConfig
pkg: grunt.file.readJSON 'package.json'
init:
files: [
'package.json'
'restart.sh'
'config/index.json'
'routes/index.coffee'
'routes/index.js'
]
add:
controllers:
cwd: 'template/controllers/'
filter: 'isFile'
src: '**'
dest: 'controllers/'
models:
cwd: 'template/models/'
filter: 'isFile'
src: '**'
dest: 'models/'
permissions:
cwd: 'template/permissions/'
filter: 'isFile'
src: '**'
dest: 'permissions/'
restful:
cwd: 'template/restful/'
filter: 'isFile'
src: '**'
dest: 'restful/'
app:
cwd: 'template/app/'
filter: 'isFile'
src: '**'
dest: 'public/app/'
nav:
cwd: 'template/'
src: 'nav.html'
dest: 'views/baseApp.ejs'
replaceText: '<!--{{nav}}-->'
remove:
controllers:
src: [
'controllers/{{name}}Controller.coffee'
'controllers/{{name}}Controller.js'
]
models:
src: [
'models/{{name}}.coffee'
'models/{{name}}.js'
]
permissions:
src: [
'permissions/{{name}}Permission.coffee'
'permissions/{{name}}Permission.js'
]
restful:
src: [
'restful/{{name}}Restful.coffee'
'restful/{{name}}Restful.js'
]
app:
src: [
'public/app/{{name}}/'
]
sql:
src: [
'sql/{{name}}.sql'
]
clean:
dist: ['dist/']
coffee_js: ['dist/**/*.coffee.js']
coffee:
dist:
expand: true
src: ['{controllers,crons,models,permissions,public,restful,routes,utils}/**/*.coffee']
dest: 'dist/<%= pkg.name %>/'
ext: '.coffee.js'
copy:
dist:
expand: true
src: [
'{bin,config,views,public}/**'
'!**/*.{coffee,js}'
'!public/download/**'
'public/javascripts/vendor/**/*.js'
'app.js'
'Gruntfile.js'
'package.json'
'restart.sh'
]
dest: 'dist/<%= pkg.name %>/'
uglify:
dist:
expand: true
src: ['dist/<%= pkg.name %>/**/*.coffee.js']
ext: '.js'
fixconfig:
dist:
options:
mysqlHost: '10.6.22.97'
redisHost: '10.6.25.201'
src: 'dist/<%= pkg.name %>/config/index.json'
compress:
dist:
options:
archive: 'dist/<%= pkg.name %>_<%= grunt.template.today("yyyy-mm-dd") %>.tar.gz'
mode: 'tgz'
expand: true
cwd: 'dist/'
src: '**'
sql:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'sql/'
tpl:
models:
cwd: 'models/'
src: ['**.js', '!baseModel.js']
dest: 'public/app/'
grunt.registerMultiTask 'init', ->
done = @async()
{filesSrc} = @
prompt
'PRJ_NAME': ['project name', 'node_framework']
'TITLE': ['web app title', 'node_framework']
'PORT': ['web app port', '3000']
'DB_HOST': ['mysql host', 'localhost']
'DB_PORT': ['mysql server port', '3306']
'DB_NAME': ['database name', 'test']
'DB_USER': ['database username', 'test']
'DB_PASSWD': ['database password', 'PI:PASSWORD:<PASSWORD>END_PI']
'RDS_HOST': ['redis host', 'localhost']
'RDS_PORT': ['redis server port', '6379']
'ROOT_SECRET': ['root secret of web app', myUtils.genNumPass()]
'DEFAULT_APP': ['default app of web app', 'user']
, (err, answers) ->
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
_.each filesSrc, (file) ->
content = grunt.file.read file
if content
_.each answers, (val, key) ->
content = myUtils.replaceAll content, "{{#{key}}}", val
grunt.file.write file, content
done()
grunt.registerMultiTask 'add', ->
name = grunt.config 'add.name'
label = grunt.config 'add.label'
if name and label
addTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'add.name', name
grunt.config 'add.label', label
addTask.call @, name, label, done
grunt.registerMultiTask 'remove', ->
name = grunt.config 'remove.name'
label = grunt.config 'remove.label'
if name and label
removeTask.call @, name, label
else
done = @async()
prompt
'name': ['web app name', 'user']
'label': ['web app label', '用户管理']
, (err, answers) =>
grunt.log.error err if err
return done false if err
grunt.log.writeln JSON.stringify answers, null, 4
{name, label} = answers
grunt.config 'remove.name', name
grunt.config 'remove.label', label
removeTask.call @, name, label, done
addTask = (name, label, done) ->
{replaceText} = @data
if replaceText
_.each @files, (file) ->
withText = ''
_.each file.src, (srcFile) ->
withText += grunt.file.read path.join file.cwd, srcFile
withText = myUtils.replaceAll withText, "{{name}}", name
withText = myUtils.replaceAll withText, "{{label}}", label
withText += "\n " + replaceText
grunt.file.write file.dest, myUtils.replaceAll grunt.file.read(file.dest), replaceText, withText
else
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = srcFile.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
destFile = path.join file.dest, destFile
srcFile = path.join file.cwd, srcFile
grunt.file.copy srcFile, destFile,
process: (content) ->
content = myUtils.replaceAll content, "{{name}}", name
content = myUtils.replaceAll content, "{{label}}", label
done?()
removeTask = (name, label, done) ->
for src in @data.src
grunt.file.delete src.replace new RegExp(myUtils.RegExpEscape("{{name}}"), 'g'), name
done?()
grunt.loadNpmTasks 'grunt-contrib-clean'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-copy'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-compress'
grunt.loadNpmTasks 'grunt-text-replace'
grunt.registerTask 'dist', [
'clean:dist'
'copy:dist'
'coffee:dist'
'uglify:dist'
'clean:coffee_js'
'fixconfig:dist'
'compress:dist'
]
grunt.registerMultiTask 'fixconfig', ->
return grunt.log.error 'no options' unless @data.options
{port, mysqlHost, mysqlPort, mysqlUser, mysqlPassword, redisHost, rootSecret, cdnUrl} = @data.options
for src in @filesSrc
config = grunt.file.readJSON src
if config
config.port = port if port
config.mysql.host = mysqlHost if mysqlHost
config.mysql.port = mysqlPort if mysqlPort
config.mysql.user = mysqlUser if mysqlUser
config.mysql.password = PI:PASSWORD:<PASSWORD>END_PI if mysqlPassword
config.redis.host = redisHost if redisHost
config.rootSecret = rootSecret if rootSecret
config.cdnUrl = cdnUrl if cdnUrl
grunt.file.write src, JSON.stringify config, null, 4
grunt.registerMultiTask 'sql', ->
_.each @files, (file) ->
_.each file.src, (srcFile) ->
destFile = path.join file.dest, srcFile.replace /\.js$/, '.sql'
srcFile = path.join file.cwd, srcFile
try
model = require "./#{srcFile}"
catch e
console.log e
sql = model?.createTableSql?()
if sql
grunt.log.writeln "writing >> #{destFile}"
grunt.file.write destFile, sql
grunt.registerMultiTask 'tpl', ->
done = @async()
_.each @files, (file) ->
candidates = _.map file.src, (srcFile) ->
srcFile: path.join file.cwd, srcFile
model: srcFile.match(/(.+)\.js/)[1]
hint = _.map(candidates, (candidate, i) ->
"#{i}: #{candidate.model}"
).join '\n'
prompt
'candidateIndex': ["candidate index\n#{hint}\n", 'no default']
, (err, answers) ->
grunt.log.error err if err
return done false if err
candidate = candidates[answers.candidateIndex]
grunt.log.error 'invalid candidate index' unless candidate
return done false unless candidate
candidate.destFiles = _.filter [
path.join file.dest, candidate.model, 'tpl/edit.tpl.html'
path.join file.dest, candidate.model, 'tpl/list.tpl.html'
], (destFile) ->
grunt.file.exists(destFile)
tplTask candidate
done()
tplTask = (candidate) ->
model = require "./#{candidate.srcFile}"
labels = model.table?.labels || {}
{schema} = model.table
options = _.mapObject labels, (label, field) ->
option = schema?[field]
option = if option?.type? then option else {type: option}
option.isNotNull = true unless option.isNotNull?
option
replaceMap = {}
replaceMap['{{name.label}}'] = labels.name if labels.name
replaceMap['{{model.label}}'] = labels.$model if labels.$model
destContents = _.map candidate.destFiles, (destFile) ->
grunt.file.read(destFile)
labels = _.omit labels, (label, field) ->
return true if field == '$model'
for destContent in destContents
return true if destContent?.match new RegExp "item\\.#{myUtils.RegExpEscape field}"
false
indent = ' '
replaceMap["<!--{{field.label}}-->"] = _.map(_.values(labels), (label) ->
"<td>#{label}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.label}}-->"
replaceMap["<!--{{field.value}}-->"] = _.map(_.keys(labels), (field) ->
if options[field]?.type == Number
"<td>{{item.#{field} | number}}</td>"
else if options[field]?.type == Date
"<td>{{item.#{field} | date:'MM-dd HH:mm'}}</td>"
else
"<td>{{item.#{field}}}</td>"
).join("\n#{indent}") + "\n#{indent}<!--{{field.value}}-->"
labels = _.omit labels, 'createTime', 'updateTime'
indent = ' '
replaceMap["#{indent}<!--{{field.input}}-->"] = _.map(labels, (label, field) ->
typeAttr = ''
if options[field]?.type == Number
typeAttr = ' type="number"'
else if options[field]?.type == Date
typeAttr = ' type="datetime-local"'
requireAttr = if options[field]?.isNotNull then ' required' else ''
"""
#{indent}<div class="form-group">
#{indent} <label for="#{field}">#{label}:</label>
#{indent} <input class="form-control" id="#{field}" ng-model="item.#{field}"#{typeAttr}#{requireAttr}>
#{indent}</div>
"""
).join('\n') + "\n#{indent}<!--{{field.input}}-->"
_.each candidate.destFiles, (destFile) ->
_.each replaceMap, (withText, replaceText) ->
grunt.file.write destFile, myUtils.replaceAll grunt.file.read(destFile), replaceText, withText
|
[
{
"context": " \"vm_forwarding_email\":\"eswari@choochee.com\",\n \"dnd_enabled\":false",
"end": 1700,
"score": 0.9999251961708069,
"start": 1681,
"tag": "EMAIL",
"value": "eswari@choochee.com"
}
] | models/directory/fixture/add_bundles.coffee | signonsridhar/sridhar_hbs | 0 | define([ 'can_fixture'], (can)->
can.fixture('POST /bss/tenant?action=addbundles', (req, res)->
return `{
"response":{
"service":"addbundles",
"response_code":100,
"execution_time":20516,
"timestamp":"2013-12-18T21:03:27+0000",
"response_data":{
"purchase_summary":{
"line_name":"T-Mobile Line Bundle Prepaid Semi-Annually (Dev)",
"total_line_quantity":1.0,
"line_quantity":1.0,
"extension_quantity":1.0,
"did_quantity":1.0,
"device_quantity":1.0,
"master_paln_extensioin_quantity":0.0,
"master_plan_did_quantity":0.0,
"master_plan_device_quantity":0.0,
"billing_period":6,
"amount_per_unit":240.0,
"tax_amount":0.0,
"total_amount_before_taxes":238.68,
"total_amount_after_taxes":238.68
},
"bundles":[
{
"bundle_id":10000118,
"tenant_id":10000042,
"status":"OSSPROVISIONED",
"is_assigned":false,
"extensions":[
{
"extensionid":10000282,
"tenantid":10000042,
"extension_number":203,
"vm_forwarding_email":"eswari@choochee.com",
"dnd_enabled":false,
"call_waiting_enabled":false,
"forwarding_enabled":false,
"vm_forwarding_enabled":false,
"international_calls_enabled":false,
"devices":[
{
"deviceid":10000118,
"is_assigned":false,
"extension":203,
"productid":"10261667",
"product_sku":"SKU-CSO-IP303-001",
"status":"unselected",
"device_name":"Cisco IP303 Default Device",
"creation_date":"2013-12-18T21:03:23+0000"
}
],
"phone_numbers":[
{
"didid":40000054,
"partner_code":"tmus",
"phonenumber":16508000054,
"caller_id":0,
"area_code":"650",
"country_code":"1",
"city":"Mountain View",
"state":"CA",
"is_conference":false,
"is_toll_free":false,
"is_ported":false,
"is_assigned":true,
"extension":0,
"type":"user_line"
}
],
"group_members":[
]
}
]
}
]
},
"version":"1.0"
}
}`
)
) | 87504 | define([ 'can_fixture'], (can)->
can.fixture('POST /bss/tenant?action=addbundles', (req, res)->
return `{
"response":{
"service":"addbundles",
"response_code":100,
"execution_time":20516,
"timestamp":"2013-12-18T21:03:27+0000",
"response_data":{
"purchase_summary":{
"line_name":"T-Mobile Line Bundle Prepaid Semi-Annually (Dev)",
"total_line_quantity":1.0,
"line_quantity":1.0,
"extension_quantity":1.0,
"did_quantity":1.0,
"device_quantity":1.0,
"master_paln_extensioin_quantity":0.0,
"master_plan_did_quantity":0.0,
"master_plan_device_quantity":0.0,
"billing_period":6,
"amount_per_unit":240.0,
"tax_amount":0.0,
"total_amount_before_taxes":238.68,
"total_amount_after_taxes":238.68
},
"bundles":[
{
"bundle_id":10000118,
"tenant_id":10000042,
"status":"OSSPROVISIONED",
"is_assigned":false,
"extensions":[
{
"extensionid":10000282,
"tenantid":10000042,
"extension_number":203,
"vm_forwarding_email":"<EMAIL>",
"dnd_enabled":false,
"call_waiting_enabled":false,
"forwarding_enabled":false,
"vm_forwarding_enabled":false,
"international_calls_enabled":false,
"devices":[
{
"deviceid":10000118,
"is_assigned":false,
"extension":203,
"productid":"10261667",
"product_sku":"SKU-CSO-IP303-001",
"status":"unselected",
"device_name":"Cisco IP303 Default Device",
"creation_date":"2013-12-18T21:03:23+0000"
}
],
"phone_numbers":[
{
"didid":40000054,
"partner_code":"tmus",
"phonenumber":16508000054,
"caller_id":0,
"area_code":"650",
"country_code":"1",
"city":"Mountain View",
"state":"CA",
"is_conference":false,
"is_toll_free":false,
"is_ported":false,
"is_assigned":true,
"extension":0,
"type":"user_line"
}
],
"group_members":[
]
}
]
}
]
},
"version":"1.0"
}
}`
)
) | true | define([ 'can_fixture'], (can)->
can.fixture('POST /bss/tenant?action=addbundles', (req, res)->
return `{
"response":{
"service":"addbundles",
"response_code":100,
"execution_time":20516,
"timestamp":"2013-12-18T21:03:27+0000",
"response_data":{
"purchase_summary":{
"line_name":"T-Mobile Line Bundle Prepaid Semi-Annually (Dev)",
"total_line_quantity":1.0,
"line_quantity":1.0,
"extension_quantity":1.0,
"did_quantity":1.0,
"device_quantity":1.0,
"master_paln_extensioin_quantity":0.0,
"master_plan_did_quantity":0.0,
"master_plan_device_quantity":0.0,
"billing_period":6,
"amount_per_unit":240.0,
"tax_amount":0.0,
"total_amount_before_taxes":238.68,
"total_amount_after_taxes":238.68
},
"bundles":[
{
"bundle_id":10000118,
"tenant_id":10000042,
"status":"OSSPROVISIONED",
"is_assigned":false,
"extensions":[
{
"extensionid":10000282,
"tenantid":10000042,
"extension_number":203,
"vm_forwarding_email":"PI:EMAIL:<EMAIL>END_PI",
"dnd_enabled":false,
"call_waiting_enabled":false,
"forwarding_enabled":false,
"vm_forwarding_enabled":false,
"international_calls_enabled":false,
"devices":[
{
"deviceid":10000118,
"is_assigned":false,
"extension":203,
"productid":"10261667",
"product_sku":"SKU-CSO-IP303-001",
"status":"unselected",
"device_name":"Cisco IP303 Default Device",
"creation_date":"2013-12-18T21:03:23+0000"
}
],
"phone_numbers":[
{
"didid":40000054,
"partner_code":"tmus",
"phonenumber":16508000054,
"caller_id":0,
"area_code":"650",
"country_code":"1",
"city":"Mountain View",
"state":"CA",
"is_conference":false,
"is_toll_free":false,
"is_ported":false,
"is_assigned":true,
"extension":0,
"type":"user_line"
}
],
"group_members":[
]
}
]
}
]
},
"version":"1.0"
}
}`
)
) |
[
{
"context": " git clone -q git://github.com/joshdmiller/ng-boilerplate \"#{instancesDir}/#{name}\"\n ",
"end": 4528,
"score": 0.9995385408401489,
"start": 4517,
"tag": "USERNAME",
"value": "joshdmiller"
},
{
"context": " <i>note: your sudo password is your koding password. </i>\n </div>\n ",
"end": 8085,
"score": 0.7409213185310364,
"start": 8070,
"tag": "PASSWORD",
"value": "koding password"
}
] | installpane.coffee | ellotheth/kodularjs.kdapp | 1 | kite = KD.getSingleton "kiteController"
{nickname} = KD.whoami().profile
appStorage = new AppStorage "kodularjs-installer", "1.0"
class InstallPane extends KodularJSPane
constructor:->
super
@form = new KDFormViewWithFields
callback : @bound "installAngularJS"
buttons :
install :
title : "Create AngularJS app"
style : "cupid-green"
type : "submit"
loader :
color : "#444444"
diameter : 12
fields :
name :
label : "Name of AngularJS App:"
name : "name"
placeholder : "type a name for your app..."
defaultValue : "my_angularjs_app"
validate :
rules :
required : "yes"
regExp : /(^$)|(^[a-z\d]+([_][a-z\d]+)*$)/i
messages :
required : "a name for your angularjs app is required!"
nextElement :
timestamp :
name : "timestamp"
type : "hidden"
defaultValue : Date.now()
domain :
label : "Domain :"
name : "domain"
itemClass : KDSelectBox
defaultValue : "#{nickname}.kd.io"
angularjsversion :
label : "AngularJS Version :"
name : "angularjsversion"
itemClass : KDSelectBox
defaultValue : "1.0.3"
@form.on "FormValidationFailed", => @form.buttons["Create AngularJS app"].hideLoader()
vmc = KD.getSingleton 'vmController'
vmc.fetchVMs (err, vms)=>
if err then console.log err
else
vms.forEach (vm) =>
vmc.fetchVMDomains vm, (err, domains) =>
newSelectOptions = []
usableDomains = [domain for domain in domains when not /^(vm|shared)-[0-9]/.test domain].first
usableDomains.forEach (domain) =>
newSelectOptions.push {title : domain, value : domain}
{domain} = @form.inputs
domain.setSelectOptions newSelectOptions
newVersionOptions = []
#newVersionOptions.push {title : "Latest (git)", value : "git"}
newVersionOptions.push {title : "1.0.3 (stable)", value : "1.0.3"}
{angularjsversion} = @form.inputs
angularjsversion.setSelectOptions newVersionOptions
completeInputs:(fromPath = no)->
{path, name, pathExtension} = @form.inputs
if fromPath
val = path.getValue()
slug = KD.utils.slugify val
path.setValue val.replace('/', '') if /\//.test val
else
slug = KD.utils.slugify name.getValue()
path.setValue slug
slug += "/" if slug
pathExtension.inputLabel.updateTitle "/#{slug}"
checkPath: (name, callback)->
instancesDir = "kodularjs"
kite.run "[ -d /home/#{nickname}/#{instancesDir}/#{name} ] && echo 'These directories exist'"
, (err, response)->
if response
console.log "You have already a AngularJS app with the name \"#{name}\". Please delete it or choose another path"
callback? err, response
showInstallFail: ->
new KDNotificationView
title : "AngularJS app exists already. Please delete it or choose another name"
duration : 3000
installAngularJS: =>
domain = @form.inputs.domain.getValue()
name = @form.inputs.name.getValue()
angularjsversion = @form.inputs.angularjsversion.getValue()
timestamp = parseInt @form.inputs.timestamp.getValue(), 10
console.log "ANGULARJS VERSION", angularjsversion
@checkPath name, (err, response)=>
if err # means there is no such folder
console.log "Starting install with formData", @form
#If you change it, grep the source file because this variable is used
instancesDir = "/home/#{nickname}/kodularjs"
webDir = "/home/#{nickname}/Web/kodularjs/#{name}"
tmpAppDir = "#{instancesDir}/tmp"
kite.run "mkdir -p '#{tmpAppDir}'", (err, res)=>
if err then console.log err
else
kodularjsScript = """
#!/bin/bash
echo "Grabbing the AngularJS boilerplate project"
git clone -q git://github.com/joshdmiller/ng-boilerplate "#{instancesDir}/#{name}"
cd "#{instancesDir}/#{name}"
git checkout -b master
echo "Checking for grunt (configuration management)"
[ `which grunt` ] || sudo npm -g --loglevel warn install grunt-cli
echo "Checking for karma (unit testing)"
[ `which karma` ] || sudo npm -g --loglevel warn install karma
echo "Checking for bower (package management)"
[ `which bower` ] || sudo npm -g --loglevel warn install bower
sudo chown -R #{nickname}:#{nickname} `npm -g config get tmp`
echo "Installing boilerplate dependencies"
npm --loglevel warn install
npm cache clean
bower --quiet cache clean
bower --quiet install
# use phantomjs by default, because this environment is headless
mv karma/karma-unit.tpl.js karma/karma-unit.tpl.js.bak
sed "s/'Firefox'/'PhantomJS'/" karma/karma-unit.tpl.js.bak > karma/karma-unit.tpl.js
rm karma/karma-unit.tpl.js.bak
# move the compile dir to the web root
mv build.config.js build.config.js.bak
sed "s,compile_dir: 'bin',compile_dir: '#{webDir}'," build.config.js.bak > build.config.js
rm build.config.js.bak
# commit config changes
git commit -am "modify config for kodularjs"
echo "Setting up the first build"
grunt clean html2js jshint coffeelint coffee recess:build concat:build_css copy index:build compile
echo -e '\nNew AngularJS project "#{name}" created:'
echo -e ' Source directory : #{instancesDir}/#{name}'
echo -e ' Web directory : #{webDir}'
echo -e ' Web address : http://#{nickname}.kd.io/kodularjs/#{name}\n'
"""
newFile = FSHelper.createFile
type : 'file'
path : "#{tmpAppDir}/kodularjsScript.sh"
vmName : @vmName
newFile.save kodularjsScript, (err, res)=>
if err then warn err
else
@emit "fs.saveAs.finished", newFile, @
installCmd = "bash #{tmpAppDir}/kodularjsScript.sh && rm -rf #{tmpAppDir}\n"
formData = {timestamp: timestamp, domain: domain, name: name, angularjsversion: angularjsversion}
modal = new ModalViewWithTerminal
title : "Creating AngularJS App: '#{name}'"
width : 700
overlay : no
terminal:
height: 500
command: installCmd
hidden: no
content : """
<div class='modalformline'>
<p>Using AngularJS <strong>#{angularjsversion}</strong></p>
<br>
<i>note: your sudo password is your koding password. </i>
</div>
"""
@form.buttons.install.hideLoader()
appStorage.fetchValue 'blogs', (blogs)->
blogs or= []
blogs.push formData
appStorage.setValue "blogs", blogs
@emit "AngularJSInstalled", formData
else # there is a folder on the same path so fail.
@form.buttons.install.hideLoader()
@showInstallFail()
pistachio:-> "{{> this.form}}"
| 139167 | kite = KD.getSingleton "kiteController"
{nickname} = KD.whoami().profile
appStorage = new AppStorage "kodularjs-installer", "1.0"
class InstallPane extends KodularJSPane
constructor:->
super
@form = new KDFormViewWithFields
callback : @bound "installAngularJS"
buttons :
install :
title : "Create AngularJS app"
style : "cupid-green"
type : "submit"
loader :
color : "#444444"
diameter : 12
fields :
name :
label : "Name of AngularJS App:"
name : "name"
placeholder : "type a name for your app..."
defaultValue : "my_angularjs_app"
validate :
rules :
required : "yes"
regExp : /(^$)|(^[a-z\d]+([_][a-z\d]+)*$)/i
messages :
required : "a name for your angularjs app is required!"
nextElement :
timestamp :
name : "timestamp"
type : "hidden"
defaultValue : Date.now()
domain :
label : "Domain :"
name : "domain"
itemClass : KDSelectBox
defaultValue : "#{nickname}.kd.io"
angularjsversion :
label : "AngularJS Version :"
name : "angularjsversion"
itemClass : KDSelectBox
defaultValue : "1.0.3"
@form.on "FormValidationFailed", => @form.buttons["Create AngularJS app"].hideLoader()
vmc = KD.getSingleton 'vmController'
vmc.fetchVMs (err, vms)=>
if err then console.log err
else
vms.forEach (vm) =>
vmc.fetchVMDomains vm, (err, domains) =>
newSelectOptions = []
usableDomains = [domain for domain in domains when not /^(vm|shared)-[0-9]/.test domain].first
usableDomains.forEach (domain) =>
newSelectOptions.push {title : domain, value : domain}
{domain} = @form.inputs
domain.setSelectOptions newSelectOptions
newVersionOptions = []
#newVersionOptions.push {title : "Latest (git)", value : "git"}
newVersionOptions.push {title : "1.0.3 (stable)", value : "1.0.3"}
{angularjsversion} = @form.inputs
angularjsversion.setSelectOptions newVersionOptions
completeInputs:(fromPath = no)->
{path, name, pathExtension} = @form.inputs
if fromPath
val = path.getValue()
slug = KD.utils.slugify val
path.setValue val.replace('/', '') if /\//.test val
else
slug = KD.utils.slugify name.getValue()
path.setValue slug
slug += "/" if slug
pathExtension.inputLabel.updateTitle "/#{slug}"
checkPath: (name, callback)->
instancesDir = "kodularjs"
kite.run "[ -d /home/#{nickname}/#{instancesDir}/#{name} ] && echo 'These directories exist'"
, (err, response)->
if response
console.log "You have already a AngularJS app with the name \"#{name}\". Please delete it or choose another path"
callback? err, response
showInstallFail: ->
new KDNotificationView
title : "AngularJS app exists already. Please delete it or choose another name"
duration : 3000
installAngularJS: =>
domain = @form.inputs.domain.getValue()
name = @form.inputs.name.getValue()
angularjsversion = @form.inputs.angularjsversion.getValue()
timestamp = parseInt @form.inputs.timestamp.getValue(), 10
console.log "ANGULARJS VERSION", angularjsversion
@checkPath name, (err, response)=>
if err # means there is no such folder
console.log "Starting install with formData", @form
#If you change it, grep the source file because this variable is used
instancesDir = "/home/#{nickname}/kodularjs"
webDir = "/home/#{nickname}/Web/kodularjs/#{name}"
tmpAppDir = "#{instancesDir}/tmp"
kite.run "mkdir -p '#{tmpAppDir}'", (err, res)=>
if err then console.log err
else
kodularjsScript = """
#!/bin/bash
echo "Grabbing the AngularJS boilerplate project"
git clone -q git://github.com/joshdmiller/ng-boilerplate "#{instancesDir}/#{name}"
cd "#{instancesDir}/#{name}"
git checkout -b master
echo "Checking for grunt (configuration management)"
[ `which grunt` ] || sudo npm -g --loglevel warn install grunt-cli
echo "Checking for karma (unit testing)"
[ `which karma` ] || sudo npm -g --loglevel warn install karma
echo "Checking for bower (package management)"
[ `which bower` ] || sudo npm -g --loglevel warn install bower
sudo chown -R #{nickname}:#{nickname} `npm -g config get tmp`
echo "Installing boilerplate dependencies"
npm --loglevel warn install
npm cache clean
bower --quiet cache clean
bower --quiet install
# use phantomjs by default, because this environment is headless
mv karma/karma-unit.tpl.js karma/karma-unit.tpl.js.bak
sed "s/'Firefox'/'PhantomJS'/" karma/karma-unit.tpl.js.bak > karma/karma-unit.tpl.js
rm karma/karma-unit.tpl.js.bak
# move the compile dir to the web root
mv build.config.js build.config.js.bak
sed "s,compile_dir: 'bin',compile_dir: '#{webDir}'," build.config.js.bak > build.config.js
rm build.config.js.bak
# commit config changes
git commit -am "modify config for kodularjs"
echo "Setting up the first build"
grunt clean html2js jshint coffeelint coffee recess:build concat:build_css copy index:build compile
echo -e '\nNew AngularJS project "#{name}" created:'
echo -e ' Source directory : #{instancesDir}/#{name}'
echo -e ' Web directory : #{webDir}'
echo -e ' Web address : http://#{nickname}.kd.io/kodularjs/#{name}\n'
"""
newFile = FSHelper.createFile
type : 'file'
path : "#{tmpAppDir}/kodularjsScript.sh"
vmName : @vmName
newFile.save kodularjsScript, (err, res)=>
if err then warn err
else
@emit "fs.saveAs.finished", newFile, @
installCmd = "bash #{tmpAppDir}/kodularjsScript.sh && rm -rf #{tmpAppDir}\n"
formData = {timestamp: timestamp, domain: domain, name: name, angularjsversion: angularjsversion}
modal = new ModalViewWithTerminal
title : "Creating AngularJS App: '#{name}'"
width : 700
overlay : no
terminal:
height: 500
command: installCmd
hidden: no
content : """
<div class='modalformline'>
<p>Using AngularJS <strong>#{angularjsversion}</strong></p>
<br>
<i>note: your sudo password is your <PASSWORD>. </i>
</div>
"""
@form.buttons.install.hideLoader()
appStorage.fetchValue 'blogs', (blogs)->
blogs or= []
blogs.push formData
appStorage.setValue "blogs", blogs
@emit "AngularJSInstalled", formData
else # there is a folder on the same path so fail.
@form.buttons.install.hideLoader()
@showInstallFail()
pistachio:-> "{{> this.form}}"
| true | kite = KD.getSingleton "kiteController"
{nickname} = KD.whoami().profile
appStorage = new AppStorage "kodularjs-installer", "1.0"
class InstallPane extends KodularJSPane
constructor:->
super
@form = new KDFormViewWithFields
callback : @bound "installAngularJS"
buttons :
install :
title : "Create AngularJS app"
style : "cupid-green"
type : "submit"
loader :
color : "#444444"
diameter : 12
fields :
name :
label : "Name of AngularJS App:"
name : "name"
placeholder : "type a name for your app..."
defaultValue : "my_angularjs_app"
validate :
rules :
required : "yes"
regExp : /(^$)|(^[a-z\d]+([_][a-z\d]+)*$)/i
messages :
required : "a name for your angularjs app is required!"
nextElement :
timestamp :
name : "timestamp"
type : "hidden"
defaultValue : Date.now()
domain :
label : "Domain :"
name : "domain"
itemClass : KDSelectBox
defaultValue : "#{nickname}.kd.io"
angularjsversion :
label : "AngularJS Version :"
name : "angularjsversion"
itemClass : KDSelectBox
defaultValue : "1.0.3"
@form.on "FormValidationFailed", => @form.buttons["Create AngularJS app"].hideLoader()
vmc = KD.getSingleton 'vmController'
vmc.fetchVMs (err, vms)=>
if err then console.log err
else
vms.forEach (vm) =>
vmc.fetchVMDomains vm, (err, domains) =>
newSelectOptions = []
usableDomains = [domain for domain in domains when not /^(vm|shared)-[0-9]/.test domain].first
usableDomains.forEach (domain) =>
newSelectOptions.push {title : domain, value : domain}
{domain} = @form.inputs
domain.setSelectOptions newSelectOptions
newVersionOptions = []
#newVersionOptions.push {title : "Latest (git)", value : "git"}
newVersionOptions.push {title : "1.0.3 (stable)", value : "1.0.3"}
{angularjsversion} = @form.inputs
angularjsversion.setSelectOptions newVersionOptions
completeInputs:(fromPath = no)->
{path, name, pathExtension} = @form.inputs
if fromPath
val = path.getValue()
slug = KD.utils.slugify val
path.setValue val.replace('/', '') if /\//.test val
else
slug = KD.utils.slugify name.getValue()
path.setValue slug
slug += "/" if slug
pathExtension.inputLabel.updateTitle "/#{slug}"
checkPath: (name, callback)->
instancesDir = "kodularjs"
kite.run "[ -d /home/#{nickname}/#{instancesDir}/#{name} ] && echo 'These directories exist'"
, (err, response)->
if response
console.log "You have already a AngularJS app with the name \"#{name}\". Please delete it or choose another path"
callback? err, response
showInstallFail: ->
new KDNotificationView
title : "AngularJS app exists already. Please delete it or choose another name"
duration : 3000
installAngularJS: =>
domain = @form.inputs.domain.getValue()
name = @form.inputs.name.getValue()
angularjsversion = @form.inputs.angularjsversion.getValue()
timestamp = parseInt @form.inputs.timestamp.getValue(), 10
console.log "ANGULARJS VERSION", angularjsversion
@checkPath name, (err, response)=>
if err # means there is no such folder
console.log "Starting install with formData", @form
#If you change it, grep the source file because this variable is used
instancesDir = "/home/#{nickname}/kodularjs"
webDir = "/home/#{nickname}/Web/kodularjs/#{name}"
tmpAppDir = "#{instancesDir}/tmp"
kite.run "mkdir -p '#{tmpAppDir}'", (err, res)=>
if err then console.log err
else
kodularjsScript = """
#!/bin/bash
echo "Grabbing the AngularJS boilerplate project"
git clone -q git://github.com/joshdmiller/ng-boilerplate "#{instancesDir}/#{name}"
cd "#{instancesDir}/#{name}"
git checkout -b master
echo "Checking for grunt (configuration management)"
[ `which grunt` ] || sudo npm -g --loglevel warn install grunt-cli
echo "Checking for karma (unit testing)"
[ `which karma` ] || sudo npm -g --loglevel warn install karma
echo "Checking for bower (package management)"
[ `which bower` ] || sudo npm -g --loglevel warn install bower
sudo chown -R #{nickname}:#{nickname} `npm -g config get tmp`
echo "Installing boilerplate dependencies"
npm --loglevel warn install
npm cache clean
bower --quiet cache clean
bower --quiet install
# use phantomjs by default, because this environment is headless
mv karma/karma-unit.tpl.js karma/karma-unit.tpl.js.bak
sed "s/'Firefox'/'PhantomJS'/" karma/karma-unit.tpl.js.bak > karma/karma-unit.tpl.js
rm karma/karma-unit.tpl.js.bak
# move the compile dir to the web root
mv build.config.js build.config.js.bak
sed "s,compile_dir: 'bin',compile_dir: '#{webDir}'," build.config.js.bak > build.config.js
rm build.config.js.bak
# commit config changes
git commit -am "modify config for kodularjs"
echo "Setting up the first build"
grunt clean html2js jshint coffeelint coffee recess:build concat:build_css copy index:build compile
echo -e '\nNew AngularJS project "#{name}" created:'
echo -e ' Source directory : #{instancesDir}/#{name}'
echo -e ' Web directory : #{webDir}'
echo -e ' Web address : http://#{nickname}.kd.io/kodularjs/#{name}\n'
"""
newFile = FSHelper.createFile
type : 'file'
path : "#{tmpAppDir}/kodularjsScript.sh"
vmName : @vmName
newFile.save kodularjsScript, (err, res)=>
if err then warn err
else
@emit "fs.saveAs.finished", newFile, @
installCmd = "bash #{tmpAppDir}/kodularjsScript.sh && rm -rf #{tmpAppDir}\n"
formData = {timestamp: timestamp, domain: domain, name: name, angularjsversion: angularjsversion}
modal = new ModalViewWithTerminal
title : "Creating AngularJS App: '#{name}'"
width : 700
overlay : no
terminal:
height: 500
command: installCmd
hidden: no
content : """
<div class='modalformline'>
<p>Using AngularJS <strong>#{angularjsversion}</strong></p>
<br>
<i>note: your sudo password is your PI:PASSWORD:<PASSWORD>END_PI. </i>
</div>
"""
@form.buttons.install.hideLoader()
appStorage.fetchValue 'blogs', (blogs)->
blogs or= []
blogs.push formData
appStorage.setValue "blogs", blogs
@emit "AngularJSInstalled", formData
else # there is a folder on the same path so fail.
@form.buttons.install.hideLoader()
@showInstallFail()
pistachio:-> "{{> this.form}}"
|
[
{
"context": "tem with other id', ->\n qItem =\n id: 'qiita2'\n body: \"ccc\"\n title: 'qiita1 updat",
"end": 717,
"score": 0.9949614405632019,
"start": 711,
"tag": "USERNAME",
"value": "qiita2"
},
{
"context": "tem = FactoryDog.build 'qiita-item',\n id: 'qiita1'\n tags: []\n title: 'qiita1 updated'",
"end": 1084,
"score": 0.9897711873054504,
"start": 1078,
"tag": "USERNAME",
"value": "qiita1"
},
{
"context": "future updated_at', ->\n qItem =\n id: 'qiita1'\n body: \"bbb\"\n tags: []\n tit",
"end": 1398,
"score": 0.9957583546638489,
"start": 1392,
"tag": "USERNAME",
"value": "qiita1"
},
{
"context": "m = FactoryDog.build 'qiita-item',\n id: 'qiita1'\n updated_at: basetime\n title: ",
"end": 1995,
"score": 0.9872055053710938,
"start": 1989,
"tag": "USERNAME",
"value": "qiita1"
},
{
"context": "m = FactoryDog.build 'qiita-item',\n id: 'qiita1'\n tag: []\n title: 'qiita1 updat",
"end": 2564,
"score": 0.9781806468963623,
"start": 2558,
"tag": "USERNAME",
"value": "qiita1"
},
{
"context": "->\n localStorage.setItem 'api-token', '--token--'\n localStorage.setItem 'login-id', '--",
"end": 3210,
"score": 0.6788386702537537,
"start": 3205,
"tag": "KEY",
"value": "token"
}
] | application/api/test/commands/initialize/sync-items-test.coffee | CHU-BURA/clone-app-kobito-oss | 215 | require '../../spec-helper'
m = require('moment')
describe 'domains/commands', ->
context '#syncItem', ->
stubDatabases()
basetime = 'Mon Feb 23 2015 15:09:48 GMT+0900 (JST)'
future1 = 'Mon Feb 23 2015 15:10:00 GMT+0900 (JST)'
future2 = 'Mon Feb 23 2015 15:11:00 GMT+0900 (JST)'
beforeEach ->
Team.save(name: 'foo', _id: 'foo', local: false)
.then =>
Item.save(
title: "aaa"
body: "bbb"
tags: [{name: 'a'}]
remote_updated_at: m(basetime).unix()
synced_at: m(basetime).unix()
syncedItemId: 'qiita1'
teamId: 'foo'
)
it 'save as new item with other id', ->
qItem =
id: 'qiita2'
body: "ccc"
title: 'qiita1 updated'
tags: []
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.NEW
it 'pass if remote_updated_at is not changed', ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tags: []
title: 'qiita1 updated'
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
it 'update with future updated_at', ->
qItem =
id: 'qiita1'
body: "bbb"
tags: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.UPDATE
Item.find(itemId)
.then (item) ->
assert item.title is 'qiita1 updated'
it 'pass if only local is changed', ->
Item.first(-> true)
.then (item) ->
item.title = 'local'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
updated_at: basetime
title: 'remote'
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
Item.find itemId
.then (item) ->
assert item.title is 'local'
it 'detect conflict when both local and remote updated', ->
Item.first(-> true)
.then (item) ->
item.title = 'touched'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tag: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.CONFLICT
Item.find itemId
.then (item) ->
assert !!item.conflict_item
context '#syncItems', ->
context 'without token', ->
it 'throw', (done) ->
kobito.commands.sync.syncItems()
.catch -> done()
context 'with databases', ->
context 'with token', ->
stubDatabases()
beforeEach ->
localStorage.setItem 'api-token', '--token--'
localStorage.setItem 'login-id', '--xxx--'
@sinon.stub(kobito.qiita, 'fetchLoginUserItems')
.returns [
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
]
# it 'save results', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
# it 'override same id item', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
| 165254 | require '../../spec-helper'
m = require('moment')
describe 'domains/commands', ->
context '#syncItem', ->
stubDatabases()
basetime = 'Mon Feb 23 2015 15:09:48 GMT+0900 (JST)'
future1 = 'Mon Feb 23 2015 15:10:00 GMT+0900 (JST)'
future2 = 'Mon Feb 23 2015 15:11:00 GMT+0900 (JST)'
beforeEach ->
Team.save(name: 'foo', _id: 'foo', local: false)
.then =>
Item.save(
title: "aaa"
body: "bbb"
tags: [{name: 'a'}]
remote_updated_at: m(basetime).unix()
synced_at: m(basetime).unix()
syncedItemId: 'qiita1'
teamId: 'foo'
)
it 'save as new item with other id', ->
qItem =
id: 'qiita2'
body: "ccc"
title: 'qiita1 updated'
tags: []
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.NEW
it 'pass if remote_updated_at is not changed', ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tags: []
title: 'qiita1 updated'
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
it 'update with future updated_at', ->
qItem =
id: 'qiita1'
body: "bbb"
tags: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.UPDATE
Item.find(itemId)
.then (item) ->
assert item.title is 'qiita1 updated'
it 'pass if only local is changed', ->
Item.first(-> true)
.then (item) ->
item.title = 'local'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
updated_at: basetime
title: 'remote'
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
Item.find itemId
.then (item) ->
assert item.title is 'local'
it 'detect conflict when both local and remote updated', ->
Item.first(-> true)
.then (item) ->
item.title = 'touched'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tag: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.CONFLICT
Item.find itemId
.then (item) ->
assert !!item.conflict_item
context '#syncItems', ->
context 'without token', ->
it 'throw', (done) ->
kobito.commands.sync.syncItems()
.catch -> done()
context 'with databases', ->
context 'with token', ->
stubDatabases()
beforeEach ->
localStorage.setItem 'api-token', '--<KEY>--'
localStorage.setItem 'login-id', '--xxx--'
@sinon.stub(kobito.qiita, 'fetchLoginUserItems')
.returns [
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
]
# it 'save results', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
# it 'override same id item', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
| true | require '../../spec-helper'
m = require('moment')
describe 'domains/commands', ->
context '#syncItem', ->
stubDatabases()
basetime = 'Mon Feb 23 2015 15:09:48 GMT+0900 (JST)'
future1 = 'Mon Feb 23 2015 15:10:00 GMT+0900 (JST)'
future2 = 'Mon Feb 23 2015 15:11:00 GMT+0900 (JST)'
beforeEach ->
Team.save(name: 'foo', _id: 'foo', local: false)
.then =>
Item.save(
title: "aaa"
body: "bbb"
tags: [{name: 'a'}]
remote_updated_at: m(basetime).unix()
synced_at: m(basetime).unix()
syncedItemId: 'qiita1'
teamId: 'foo'
)
it 'save as new item with other id', ->
qItem =
id: 'qiita2'
body: "ccc"
title: 'qiita1 updated'
tags: []
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.NEW
it 'pass if remote_updated_at is not changed', ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tags: []
title: 'qiita1 updated'
updated_at: basetime
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
it 'update with future updated_at', ->
qItem =
id: 'qiita1'
body: "bbb"
tags: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.UPDATE
Item.find(itemId)
.then (item) ->
assert item.title is 'qiita1 updated'
it 'pass if only local is changed', ->
Item.first(-> true)
.then (item) ->
item.title = 'local'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
updated_at: basetime
title: 'remote'
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.PASS
Item.find itemId
.then (item) ->
assert item.title is 'local'
it 'detect conflict when both local and remote updated', ->
Item.first(-> true)
.then (item) ->
item.title = 'touched'
kobito.commands.updateItem(item)
.then ->
qItem = FactoryDog.build 'qiita-item',
id: 'qiita1'
tag: []
title: 'qiita1 updated'
updated_at: future1
kobito.commands.sync.syncItem(qItem, 'foo')
.then ({result, itemId}) ->
assert result is kobito.commands.sync.SyncItemResultStatus.CONFLICT
Item.find itemId
.then (item) ->
assert !!item.conflict_item
context '#syncItems', ->
context 'without token', ->
it 'throw', (done) ->
kobito.commands.sync.syncItems()
.catch -> done()
context 'with databases', ->
context 'with token', ->
stubDatabases()
beforeEach ->
localStorage.setItem 'api-token', '--PI:KEY:<KEY>END_PI--'
localStorage.setItem 'login-id', '--xxx--'
@sinon.stub(kobito.qiita, 'fetchLoginUserItems')
.returns [
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
FactoryDog.build 'qiita-item', tags: []
]
# it 'save results', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
# it 'override same id item', ->
# kobito.commands.sync.syncItems('#foo')
# .then -> kobito.commands.sync.syncItems('#foo')
# .then -> Item.select (i) -> i.teamId is '#foo'
# .then (items) ->
# assert items.length is 3
|
[
{
"context": "username: conn[environment].user\n password: conn[environment].password\n changeLogFile: \"_wo",
"end": 1376,
"score": 0.9736809134483337,
"start": 1372,
"tag": "PASSWORD",
"value": "conn"
},
{
"context": "vironment].user\n password: conn[environment].password\n changeLogFile: \"_workshop/liquibase/#{cha",
"end": 1398,
"score": 0.8125140070915222,
"start": 1390,
"tag": "PASSWORD",
"value": "password"
}
] | src/lib/configuration.coffee | assignittous/knodeo_workshop | 0 | # configuration.coffee
utils = require('aitutils').aitutils
configuration = utils.configuration
cwd = process.env.PWD || process.cwd()
console.log configuration
exports.Configuration = {
forLiquibase: (database, environment, cliParameters, changelogOverride) ->
configuration.load("#{cwd}/config.workshop.cson")
environment = environment || configuration.current.defaults.environment
database = database || configuration.current.defaults.database
console.log "database: " + database
console.log "environment: " + environment
changelog = changelogOverride || database
conn = configuration.current.databases[database]
console.log conn
db_driver = configuration.current.databases[database][environment].driver
driver = configuration.current.databases.drivers[db_driver]
cwd = process.env.PWD || process.cwd()
return {
database: database
environment: environment
sourcePath: "_src/database_models/#{database}.jade"
outputPath: "_workshop/liquibase/#{database}.xml"
cliParameters: cliParameters
runParameters:
driver: driver.class
classpath: "#{driver.classPath.replace(/{{cwd}}/g,cwd)}"
url: "#{driver.baseUrl}#{conn[environment].host}:#{conn[environment].port}/#{conn[environment].database}"
username: conn[environment].user
password: conn[environment].password
changeLogFile: "_workshop/liquibase/#{changelog}.xml"
}
forScriptella: ()->
return {
}
# ## Cloud SErvice related stuff
# configs for a cloud service
forService: (service)->
return @current.cloud[service.replace(/-/g,'_')]
# data directory for a cloud service
dataDirectoryForService: (service)->
return "#{@cwd()}/#{@current.cloud[service.replace(/-/g,'_')].data_path}"
doSlackForService: (service)->
return false
doEmailForService: (service)->
return false
} | 130683 | # configuration.coffee
utils = require('aitutils').aitutils
configuration = utils.configuration
cwd = process.env.PWD || process.cwd()
console.log configuration
exports.Configuration = {
forLiquibase: (database, environment, cliParameters, changelogOverride) ->
configuration.load("#{cwd}/config.workshop.cson")
environment = environment || configuration.current.defaults.environment
database = database || configuration.current.defaults.database
console.log "database: " + database
console.log "environment: " + environment
changelog = changelogOverride || database
conn = configuration.current.databases[database]
console.log conn
db_driver = configuration.current.databases[database][environment].driver
driver = configuration.current.databases.drivers[db_driver]
cwd = process.env.PWD || process.cwd()
return {
database: database
environment: environment
sourcePath: "_src/database_models/#{database}.jade"
outputPath: "_workshop/liquibase/#{database}.xml"
cliParameters: cliParameters
runParameters:
driver: driver.class
classpath: "#{driver.classPath.replace(/{{cwd}}/g,cwd)}"
url: "#{driver.baseUrl}#{conn[environment].host}:#{conn[environment].port}/#{conn[environment].database}"
username: conn[environment].user
password: <PASSWORD>[environment].<PASSWORD>
changeLogFile: "_workshop/liquibase/#{changelog}.xml"
}
forScriptella: ()->
return {
}
# ## Cloud SErvice related stuff
# configs for a cloud service
forService: (service)->
return @current.cloud[service.replace(/-/g,'_')]
# data directory for a cloud service
dataDirectoryForService: (service)->
return "#{@cwd()}/#{@current.cloud[service.replace(/-/g,'_')].data_path}"
doSlackForService: (service)->
return false
doEmailForService: (service)->
return false
} | true | # configuration.coffee
utils = require('aitutils').aitutils
configuration = utils.configuration
cwd = process.env.PWD || process.cwd()
console.log configuration
exports.Configuration = {
forLiquibase: (database, environment, cliParameters, changelogOverride) ->
configuration.load("#{cwd}/config.workshop.cson")
environment = environment || configuration.current.defaults.environment
database = database || configuration.current.defaults.database
console.log "database: " + database
console.log "environment: " + environment
changelog = changelogOverride || database
conn = configuration.current.databases[database]
console.log conn
db_driver = configuration.current.databases[database][environment].driver
driver = configuration.current.databases.drivers[db_driver]
cwd = process.env.PWD || process.cwd()
return {
database: database
environment: environment
sourcePath: "_src/database_models/#{database}.jade"
outputPath: "_workshop/liquibase/#{database}.xml"
cliParameters: cliParameters
runParameters:
driver: driver.class
classpath: "#{driver.classPath.replace(/{{cwd}}/g,cwd)}"
url: "#{driver.baseUrl}#{conn[environment].host}:#{conn[environment].port}/#{conn[environment].database}"
username: conn[environment].user
password: PI:PASSWORD:<PASSWORD>END_PI[environment].PI:PASSWORD:<PASSWORD>END_PI
changeLogFile: "_workshop/liquibase/#{changelog}.xml"
}
forScriptella: ()->
return {
}
# ## Cloud SErvice related stuff
# configs for a cloud service
forService: (service)->
return @current.cloud[service.replace(/-/g,'_')]
# data directory for a cloud service
dataDirectoryForService: (service)->
return "#{@cwd()}/#{@current.cloud[service.replace(/-/g,'_')].data_path}"
doSlackForService: (service)->
return false
doEmailForService: (service)->
return false
} |
[
{
"context": "# -*- coding: utf-8 -*-\n#\n# Copyright 2015 Roy Liu\n#\n# Licensed under the Apache License, Version 2.",
"end": 50,
"score": 0.9996466040611267,
"start": 43,
"tag": "NAME",
"value": "Roy Liu"
}
] | app/assets/javascripts/application.coffee | carsomyr/nurf-stats | 0 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Roy Liu
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
((factory) ->
if typeof define is "function" and define.amd?
define ["ember",
"ember-data",
"twitter/bootstrap",
"application-base",
"components/all",
"controllers/all",
"models/all",
"routes/all",
"templates/all",
"views/all"], factory
).call(@, (Ember, #
DS, #
Bootstrap, #
app, #
AllControllers, #
AllModels, #
AllRoutes, #
AllTemplates, #
AllViews) ->
window.App = app
app.advanceReadiness()
app
)
| 117088 | # -*- coding: utf-8 -*-
#
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
((factory) ->
if typeof define is "function" and define.amd?
define ["ember",
"ember-data",
"twitter/bootstrap",
"application-base",
"components/all",
"controllers/all",
"models/all",
"routes/all",
"templates/all",
"views/all"], factory
).call(@, (Ember, #
DS, #
Bootstrap, #
app, #
AllControllers, #
AllModels, #
AllRoutes, #
AllTemplates, #
AllViews) ->
window.App = app
app.advanceReadiness()
app
)
| true | # -*- coding: utf-8 -*-
#
# Copyright 2015 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
((factory) ->
if typeof define is "function" and define.amd?
define ["ember",
"ember-data",
"twitter/bootstrap",
"application-base",
"components/all",
"controllers/all",
"models/all",
"routes/all",
"templates/all",
"views/all"], factory
).call(@, (Ember, #
DS, #
Bootstrap, #
app, #
AllControllers, #
AllModels, #
AllRoutes, #
AllTemplates, #
AllViews) ->
window.App = app
app.advanceReadiness()
app
)
|
[
{
"context": "mEvents(\n -> Bacon.once({ bacon: Bacon.once(\"sir francis\")}).flatMapWithConcurrencyLimit(1, \".bacon\")\n ",
"end": 896,
"score": 0.9977118372917175,
"start": 885,
"tag": "NAME",
"value": "sir francis"
},
{
"context": ".flatMapWithConcurrencyLimit(1, \".bacon\")\n [\"sir francis\"])\n it \"toString\", ->\n expect(Bacon.once(1).f",
"end": 961,
"score": 0.9985500574111938,
"start": 950,
"tag": "NAME",
"value": "sir francis"
}
] | spec/specs/flatmapwithconcurrencylimit.coffee | ArtFacts/bacon.js | 0 | require("../../src/flatmapwithconcurrencylimit")
Bacon = require("../../src/core").default
expect = require("chai").expect
{
expectStreamEvents,
error,
fromArray,
series,
semiunstable,
t
} = require("../SpecHelper")
describe "EventStream.flatMapWithConcurrencyLimit", ->
describe "limits the number of concurrently active spawned streams by queuing", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMapWithConcurrencyLimit(1, (value) ->
series(t(2), [value, error(), value]))
[1, error(), 1, 2, error(), 2], semiunstable)
describe "works with n=2", ->
expectStreamEvents(
-> series(1, [1,2,3]).flatMapWithConcurrencyLimit(2, (value) ->
series(t(2), [value, value]))
[1, 2, 1, 2, 3, 3], semiunstable)
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMapWithConcurrencyLimit(1, ".bacon")
["sir francis"])
it "toString", ->
expect(Bacon.once(1).flatMapWithConcurrencyLimit(2, ->).toString())
.to.equal("Bacon.once(1).flatMapWithConcurrencyLimit(2,function)")
| 835 | require("../../src/flatmapwithconcurrencylimit")
Bacon = require("../../src/core").default
expect = require("chai").expect
{
expectStreamEvents,
error,
fromArray,
series,
semiunstable,
t
} = require("../SpecHelper")
describe "EventStream.flatMapWithConcurrencyLimit", ->
describe "limits the number of concurrently active spawned streams by queuing", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMapWithConcurrencyLimit(1, (value) ->
series(t(2), [value, error(), value]))
[1, error(), 1, 2, error(), 2], semiunstable)
describe "works with n=2", ->
expectStreamEvents(
-> series(1, [1,2,3]).flatMapWithConcurrencyLimit(2, (value) ->
series(t(2), [value, value]))
[1, 2, 1, 2, 3, 3], semiunstable)
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMapWithConcurrencyLimit(1, ".bacon")
["<NAME>"])
it "toString", ->
expect(Bacon.once(1).flatMapWithConcurrencyLimit(2, ->).toString())
.to.equal("Bacon.once(1).flatMapWithConcurrencyLimit(2,function)")
| true | require("../../src/flatmapwithconcurrencylimit")
Bacon = require("../../src/core").default
expect = require("chai").expect
{
expectStreamEvents,
error,
fromArray,
series,
semiunstable,
t
} = require("../SpecHelper")
describe "EventStream.flatMapWithConcurrencyLimit", ->
describe "limits the number of concurrently active spawned streams by queuing", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMapWithConcurrencyLimit(1, (value) ->
series(t(2), [value, error(), value]))
[1, error(), 1, 2, error(), 2], semiunstable)
describe "works with n=2", ->
expectStreamEvents(
-> series(1, [1,2,3]).flatMapWithConcurrencyLimit(2, (value) ->
series(t(2), [value, value]))
[1, 2, 1, 2, 3, 3], semiunstable)
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMapWithConcurrencyLimit(1, ".bacon")
["PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.once(1).flatMapWithConcurrencyLimit(2, ->).toString())
.to.equal("Bacon.once(1).flatMapWithConcurrencyLimit(2,function)")
|
[
{
"context": "IENT_ID=1234567890 \\\n# READMILL_CLIENT_SECRET=abcdefghijk \\\n# READMILL_CLIENT_CALLBACK=http://localhost",
"end": 852,
"score": 0.9017698168754578,
"start": 841,
"tag": "KEY",
"value": "abcdefghijk"
}
] | proxy.coffee | aron/annotator.readmill.js | 0 | # Crude proxy server for imitating implicit auth on the Readmill API. This
# allows a browser client to authenticate without requiring a local server
# to handle the authentication.
#
# This server requires four environment varibles to be present when running.
#
# PROXY_DOMAIN - Domain that the proxy is hosted on eg. (https://localhost:8080)
# READMILL_CLIENT_ID - Client ID issued by Readmill when creating an app.
# READMILL_CLIENT_SECRET - Secret issued by Readmill when creating an app.
# READMILL_CLIENT_CALLBACK - Full url to the callback.html file on the client
# server.
# PORT - The port to run the app (default: 8000)
#
# Dependancies can be installed by running
#
# $ npm install .
#
# Examples:
#
# $ PORT=8000 \
# PROXY_DOMAIN=http://localhost:8000 \
# READMILL_CLIENT_ID=1234567890 \
# READMILL_CLIENT_SECRET=abcdefghijk \
# READMILL_CLIENT_CALLBACK=http://localhost:8001/callback.html \
# coffee proxy.coffee
uuid = require("node-uuid").v4
http = require "http"
url = require "url"
qs = require "querystring"
ENV = process.env
AUTH_HOST = "readmill.com"
PROXY_DOMAIN = ENV["PROXY_DOMAIN"]
CLIENT_ID = ENV["READMILL_CLIENT_ID"]
CLIENT_SECRET = ENV["READMILL_CLIENT_SECRET"]
CLIENT_CALLBACK = ENV["READMILL_CLIENT_CALLBACK"]
throw "Requires PROXY_DOMAIN environment variable" unless PROXY_DOMAIN
throw "Requires READMILL_CLIENT_ID environment variable" unless CLIENT_ID
throw "Requires READMILL_CLIENT_SECRET environment variable" unless CLIENT_SECRET
throw "Requires READMILL_CLIENT_CALLBACK environment variable" unless CLIENT_CALLBACK
callbacks = {}
decorateWithCORS = (res) ->
headers =
"Access-Control-Allow-Origin": "*"
"Access-Control-Allow-Methods": "HEAD, GET, POST, PUT, DELETE"
"Access-Control-Max-Age": 60 * 60
"Access-Control-Allow-Credentials": false
"Access-Control-Allow-Headers": "Origin, Content-Type, Accept, Authorization"
"Access-Control-Expose-Headers": "Location, Content-Type, Expires"
res.setHeader(key, value) for own key, value of headers
res
authCallback = (req, res) ->
{query:{code, error, callback_id}} = url.parse req.url, true
redirect = callbacks[callback_id]
delete callbacks[callback_id]
respond = (hash) ->
parts = url.parse redirect, true
parts.hash = hash
res.writeHead 303, "Location": url.format(parts)
res.end()
return respond qs.stringify(error: "proxy-error") unless redirect
return respond qs.stringify(error: error) if error
query =
grant_type: "authorization_code"
client_id: CLIENT_ID
client_secret: CLIENT_SECRET
redirect_uri: "#{PROXY_DOMAIN}/callback?callback_id=#{callback_id}"
code: code
queryString = qs.stringify(query)
options =
host: AUTH_HOST
path: "/oauth/token"
method: "POST"
headers:
"Content-Length": queryString.length,
"Content-Type": "application/x-www-form-urlencoded"
clientRequest = http.request options, (response) ->
body = ""
response.on "data", (data) ->
body += data
response.on "end", ->
json = JSON.parse body
respond qs.stringify(json)
clientRequest.on "error", (err) ->
respond qs.stringify(error: "proxy-error")
clientRequest.end(queryString)
authorize = (req, res) ->
{query, pathname} = url.parse req.url, true
# Fail early if callback uri is invalid.
unless CLIENT_CALLBACK.split("?")[0] is query["redirect_uri"].split("?")[0]
res.writeHead 400
res.end()
return
id = uuid()
callbacks[id] = query.redirect_uri
query.redirect_uri = "#{PROXY_DOMAIN}/callback?callback_id=#{id}"
query.scope = "non-expiring"
location = url.format
host: AUTH_HOST
query: query
pathname: pathname
res.writeHead 303, "Location": location
res.end()
server = http.createServer (req, res) ->
parsed = url.parse req.url
if req.method.toLowerCase() == "options"
res.setHeader("Content-Length", 0)
decorateWithCORS(res).end()
else if parsed.pathname.indexOf("/oauth/authorize") is 0
authorize req, res
else if parsed.pathname.indexOf("/callback") is 0
authCallback req, res
server.listen ENV["PORT"] or 8000
| 164501 | # Crude proxy server for imitating implicit auth on the Readmill API. This
# allows a browser client to authenticate without requiring a local server
# to handle the authentication.
#
# This server requires four environment varibles to be present when running.
#
# PROXY_DOMAIN - Domain that the proxy is hosted on eg. (https://localhost:8080)
# READMILL_CLIENT_ID - Client ID issued by Readmill when creating an app.
# READMILL_CLIENT_SECRET - Secret issued by Readmill when creating an app.
# READMILL_CLIENT_CALLBACK - Full url to the callback.html file on the client
# server.
# PORT - The port to run the app (default: 8000)
#
# Dependancies can be installed by running
#
# $ npm install .
#
# Examples:
#
# $ PORT=8000 \
# PROXY_DOMAIN=http://localhost:8000 \
# READMILL_CLIENT_ID=1234567890 \
# READMILL_CLIENT_SECRET=<KEY> \
# READMILL_CLIENT_CALLBACK=http://localhost:8001/callback.html \
# coffee proxy.coffee
uuid = require("node-uuid").v4
http = require "http"
url = require "url"
qs = require "querystring"
ENV = process.env
AUTH_HOST = "readmill.com"
PROXY_DOMAIN = ENV["PROXY_DOMAIN"]
CLIENT_ID = ENV["READMILL_CLIENT_ID"]
CLIENT_SECRET = ENV["READMILL_CLIENT_SECRET"]
CLIENT_CALLBACK = ENV["READMILL_CLIENT_CALLBACK"]
throw "Requires PROXY_DOMAIN environment variable" unless PROXY_DOMAIN
throw "Requires READMILL_CLIENT_ID environment variable" unless CLIENT_ID
throw "Requires READMILL_CLIENT_SECRET environment variable" unless CLIENT_SECRET
throw "Requires READMILL_CLIENT_CALLBACK environment variable" unless CLIENT_CALLBACK
callbacks = {}
decorateWithCORS = (res) ->
headers =
"Access-Control-Allow-Origin": "*"
"Access-Control-Allow-Methods": "HEAD, GET, POST, PUT, DELETE"
"Access-Control-Max-Age": 60 * 60
"Access-Control-Allow-Credentials": false
"Access-Control-Allow-Headers": "Origin, Content-Type, Accept, Authorization"
"Access-Control-Expose-Headers": "Location, Content-Type, Expires"
res.setHeader(key, value) for own key, value of headers
res
authCallback = (req, res) ->
{query:{code, error, callback_id}} = url.parse req.url, true
redirect = callbacks[callback_id]
delete callbacks[callback_id]
respond = (hash) ->
parts = url.parse redirect, true
parts.hash = hash
res.writeHead 303, "Location": url.format(parts)
res.end()
return respond qs.stringify(error: "proxy-error") unless redirect
return respond qs.stringify(error: error) if error
query =
grant_type: "authorization_code"
client_id: CLIENT_ID
client_secret: CLIENT_SECRET
redirect_uri: "#{PROXY_DOMAIN}/callback?callback_id=#{callback_id}"
code: code
queryString = qs.stringify(query)
options =
host: AUTH_HOST
path: "/oauth/token"
method: "POST"
headers:
"Content-Length": queryString.length,
"Content-Type": "application/x-www-form-urlencoded"
clientRequest = http.request options, (response) ->
body = ""
response.on "data", (data) ->
body += data
response.on "end", ->
json = JSON.parse body
respond qs.stringify(json)
clientRequest.on "error", (err) ->
respond qs.stringify(error: "proxy-error")
clientRequest.end(queryString)
authorize = (req, res) ->
{query, pathname} = url.parse req.url, true
# Fail early if callback uri is invalid.
unless CLIENT_CALLBACK.split("?")[0] is query["redirect_uri"].split("?")[0]
res.writeHead 400
res.end()
return
id = uuid()
callbacks[id] = query.redirect_uri
query.redirect_uri = "#{PROXY_DOMAIN}/callback?callback_id=#{id}"
query.scope = "non-expiring"
location = url.format
host: AUTH_HOST
query: query
pathname: pathname
res.writeHead 303, "Location": location
res.end()
server = http.createServer (req, res) ->
parsed = url.parse req.url
if req.method.toLowerCase() == "options"
res.setHeader("Content-Length", 0)
decorateWithCORS(res).end()
else if parsed.pathname.indexOf("/oauth/authorize") is 0
authorize req, res
else if parsed.pathname.indexOf("/callback") is 0
authCallback req, res
server.listen ENV["PORT"] or 8000
| true | # Crude proxy server for imitating implicit auth on the Readmill API. This
# allows a browser client to authenticate without requiring a local server
# to handle the authentication.
#
# This server requires four environment varibles to be present when running.
#
# PROXY_DOMAIN - Domain that the proxy is hosted on eg. (https://localhost:8080)
# READMILL_CLIENT_ID - Client ID issued by Readmill when creating an app.
# READMILL_CLIENT_SECRET - Secret issued by Readmill when creating an app.
# READMILL_CLIENT_CALLBACK - Full url to the callback.html file on the client
# server.
# PORT - The port to run the app (default: 8000)
#
# Dependancies can be installed by running
#
# $ npm install .
#
# Examples:
#
# $ PORT=8000 \
# PROXY_DOMAIN=http://localhost:8000 \
# READMILL_CLIENT_ID=1234567890 \
# READMILL_CLIENT_SECRET=PI:KEY:<KEY>END_PI \
# READMILL_CLIENT_CALLBACK=http://localhost:8001/callback.html \
# coffee proxy.coffee
uuid = require("node-uuid").v4
http = require "http"
url = require "url"
qs = require "querystring"
ENV = process.env
AUTH_HOST = "readmill.com"
PROXY_DOMAIN = ENV["PROXY_DOMAIN"]
CLIENT_ID = ENV["READMILL_CLIENT_ID"]
CLIENT_SECRET = ENV["READMILL_CLIENT_SECRET"]
CLIENT_CALLBACK = ENV["READMILL_CLIENT_CALLBACK"]
throw "Requires PROXY_DOMAIN environment variable" unless PROXY_DOMAIN
throw "Requires READMILL_CLIENT_ID environment variable" unless CLIENT_ID
throw "Requires READMILL_CLIENT_SECRET environment variable" unless CLIENT_SECRET
throw "Requires READMILL_CLIENT_CALLBACK environment variable" unless CLIENT_CALLBACK
callbacks = {}
decorateWithCORS = (res) ->
headers =
"Access-Control-Allow-Origin": "*"
"Access-Control-Allow-Methods": "HEAD, GET, POST, PUT, DELETE"
"Access-Control-Max-Age": 60 * 60
"Access-Control-Allow-Credentials": false
"Access-Control-Allow-Headers": "Origin, Content-Type, Accept, Authorization"
"Access-Control-Expose-Headers": "Location, Content-Type, Expires"
res.setHeader(key, value) for own key, value of headers
res
authCallback = (req, res) ->
{query:{code, error, callback_id}} = url.parse req.url, true
redirect = callbacks[callback_id]
delete callbacks[callback_id]
respond = (hash) ->
parts = url.parse redirect, true
parts.hash = hash
res.writeHead 303, "Location": url.format(parts)
res.end()
return respond qs.stringify(error: "proxy-error") unless redirect
return respond qs.stringify(error: error) if error
query =
grant_type: "authorization_code"
client_id: CLIENT_ID
client_secret: CLIENT_SECRET
redirect_uri: "#{PROXY_DOMAIN}/callback?callback_id=#{callback_id}"
code: code
queryString = qs.stringify(query)
options =
host: AUTH_HOST
path: "/oauth/token"
method: "POST"
headers:
"Content-Length": queryString.length,
"Content-Type": "application/x-www-form-urlencoded"
clientRequest = http.request options, (response) ->
body = ""
response.on "data", (data) ->
body += data
response.on "end", ->
json = JSON.parse body
respond qs.stringify(json)
clientRequest.on "error", (err) ->
respond qs.stringify(error: "proxy-error")
clientRequest.end(queryString)
authorize = (req, res) ->
{query, pathname} = url.parse req.url, true
# Fail early if callback uri is invalid.
unless CLIENT_CALLBACK.split("?")[0] is query["redirect_uri"].split("?")[0]
res.writeHead 400
res.end()
return
id = uuid()
callbacks[id] = query.redirect_uri
query.redirect_uri = "#{PROXY_DOMAIN}/callback?callback_id=#{id}"
query.scope = "non-expiring"
location = url.format
host: AUTH_HOST
query: query
pathname: pathname
res.writeHead 303, "Location": location
res.end()
server = http.createServer (req, res) ->
parsed = url.parse req.url
if req.method.toLowerCase() == "options"
res.setHeader("Content-Length", 0)
decorateWithCORS(res).end()
else if parsed.pathname.indexOf("/oauth/authorize") is 0
authorize req, res
else if parsed.pathname.indexOf("/callback") is 0
authCallback req, res
server.listen ENV["PORT"] or 8000
|
[
{
"context": "t for reverse engineering the protocol goes to\n @moosd - https://github.com/moosd/ReverseEngineeredMiLig",
"end": 110,
"score": 0.9995291829109192,
"start": 104,
"tag": "USERNAME",
"value": "@moosd"
},
{
"context": " protocol goes to\n @moosd - https://github.com/moosd/ReverseEngineeredMiLightBluetooth ###\n\nasync = re",
"end": 137,
"score": 0.9995818734169006,
"start": 132,
"tag": "USERNAME",
"value": "moosd"
},
{
"context": " async.series [\n (callback) => @send [4, 4, kelvin, 255, 0, 0, 0], callback\n (callback) => @sen",
"end": 1403,
"score": 0.5978935360908508,
"start": 1397,
"tag": "NAME",
"value": "kelvin"
}
] | src/bulb.coffee | jnordberg/milight-ble | 5 | ### Milight Bluetooth bulb control library. All credit for reverse engineering the protocol goes to
@moosd - https://github.com/moosd/ReverseEngineeredMiLightBluetooth ###
async = require 'async'
noble = require 'noble'
noop = ->
getId = (name) ->
num = (parseInt name[1...3], 16) & 0xff
num = num << 8
num = num | ((parseInt name[3...5], 16) & 0xff)
return [num >> 8, num % 256]
createPacket = (input) ->
k = input[0]
i = j = 0
while i <= 10
j += input[i++] & 0xff
checksum = (((k ^ j) & 0xff) + 131) & 0xff
xored = input.map (v) -> (v & 0xff) ^ k
rv = []
for o, i in [0, 16, 24, 1, 129, 55, 169, 87, 35, 70, 23, 0]
if xored[i]?
rv.push xored[i] + o & 0xff
rv[0] = k
rv.push checksum
return Buffer.from rv
class Bulb
constructor: (@name, @tx) ->
@id = getId @name
send: (command, callback) ->
rbit = Math.round Math.random() * 255
data = createPacket [rbit, 161, @id[0], @id[1], command...]
@tx.write data, false, callback
turnOn: (callback=noop) ->
### Turn the lamp on. ###
@send [2, 1, 0, 0, 0, 0, 0], callback
turnOff: (callback=noop) ->
### Turn the lamp off. ###
@send [2, 2, 0, 0, 0, 0, 0], callback
setTemperature: (kelvin, brightness, callback=noop) ->
### Set color temperature to *kelvin* 0-100 with *brightness* 0-100. ###
async.series [
(callback) => @send [4, 4, kelvin, 255, 0, 0, 0], callback
(callback) => @send [4, 5, kelvin, brightness, 0, 0, 0], callback
], callback
setColor: (hue, brightness, callback=noop) ->
### Set color to *hue* 0-255 with *brightness* 0-100. ###
async.series [
(callback) => @send [2, 4, hue, 100, 0, 0, 0], callback
(callback) => @send [2, 5, hue, brightness, 0, 0, 0], callback
], callback
setMode: (mode, callback=noop) ->
### Set lamp to disco *mode* 0-10. ###
@send [6, 4, mode, 0, 0, 0, 0], callback
Bulb.fromPeripheral = (peripheral, callback) ->
### Create a new Bulb instance from *peripheral*. ###
connect = (callback) ->
peripheral.connect callback
discover = (callback) ->
peripheral.discoverSomeServicesAndCharacteristics ['1000'], ['1001'], callback
setup = (services, characteristics, callback) ->
name = peripheral.advertisement.localName.trim()
bulb = new Bulb name, characteristics[0]
callback null, bulb
async.waterfall [connect, discover, setup], callback
Bulb.discover = (timeout, callback) ->
### Discover bulbs, if they don't show up unpair them in the app and try increasing the *timeout*. ###
if arguments.length is 1
callback = timeout
timeout = 2000
bulbs = []
peripherals = []
ensurePoweredOn = (callback) ->
if noble.state is 'poweredOn'
do callback
else
noble.once 'stateChange', (state) ->
unless state is 'poweredOn'
error = new Error "Invalid bluetooth state: #{ state }"
callback error
startScanning = (callback) ->
noble.on 'discover', onDiscover
noble.startScanning ['1000'], false, callback
onDiscover = (peripheral) ->
if peripheral.advertisement?.localName?[0] is 'M'
peripherals.push peripheral
waitForResults = (callback) ->
done = ->
noble.stopScanning()
do callback
setTimeout done, timeout
resolvePeripherals = (callback) ->
async.map peripherals, Bulb.fromPeripheral, (error, results) ->
unless error
bulbs = results
callback error
async.series [
ensurePoweredOn
startScanning
waitForResults
resolvePeripherals
], (error) ->
noble.removeListener 'discover', onDiscover
callback error, bulbs
module.exports = Bulb
| 12057 | ### Milight Bluetooth bulb control library. All credit for reverse engineering the protocol goes to
@moosd - https://github.com/moosd/ReverseEngineeredMiLightBluetooth ###
async = require 'async'
noble = require 'noble'
noop = ->
getId = (name) ->
num = (parseInt name[1...3], 16) & 0xff
num = num << 8
num = num | ((parseInt name[3...5], 16) & 0xff)
return [num >> 8, num % 256]
createPacket = (input) ->
k = input[0]
i = j = 0
while i <= 10
j += input[i++] & 0xff
checksum = (((k ^ j) & 0xff) + 131) & 0xff
xored = input.map (v) -> (v & 0xff) ^ k
rv = []
for o, i in [0, 16, 24, 1, 129, 55, 169, 87, 35, 70, 23, 0]
if xored[i]?
rv.push xored[i] + o & 0xff
rv[0] = k
rv.push checksum
return Buffer.from rv
class Bulb
constructor: (@name, @tx) ->
@id = getId @name
send: (command, callback) ->
rbit = Math.round Math.random() * 255
data = createPacket [rbit, 161, @id[0], @id[1], command...]
@tx.write data, false, callback
turnOn: (callback=noop) ->
### Turn the lamp on. ###
@send [2, 1, 0, 0, 0, 0, 0], callback
turnOff: (callback=noop) ->
### Turn the lamp off. ###
@send [2, 2, 0, 0, 0, 0, 0], callback
setTemperature: (kelvin, brightness, callback=noop) ->
### Set color temperature to *kelvin* 0-100 with *brightness* 0-100. ###
async.series [
(callback) => @send [4, 4, <NAME>, 255, 0, 0, 0], callback
(callback) => @send [4, 5, kelvin, brightness, 0, 0, 0], callback
], callback
setColor: (hue, brightness, callback=noop) ->
### Set color to *hue* 0-255 with *brightness* 0-100. ###
async.series [
(callback) => @send [2, 4, hue, 100, 0, 0, 0], callback
(callback) => @send [2, 5, hue, brightness, 0, 0, 0], callback
], callback
setMode: (mode, callback=noop) ->
### Set lamp to disco *mode* 0-10. ###
@send [6, 4, mode, 0, 0, 0, 0], callback
Bulb.fromPeripheral = (peripheral, callback) ->
### Create a new Bulb instance from *peripheral*. ###
connect = (callback) ->
peripheral.connect callback
discover = (callback) ->
peripheral.discoverSomeServicesAndCharacteristics ['1000'], ['1001'], callback
setup = (services, characteristics, callback) ->
name = peripheral.advertisement.localName.trim()
bulb = new Bulb name, characteristics[0]
callback null, bulb
async.waterfall [connect, discover, setup], callback
Bulb.discover = (timeout, callback) ->
### Discover bulbs, if they don't show up unpair them in the app and try increasing the *timeout*. ###
if arguments.length is 1
callback = timeout
timeout = 2000
bulbs = []
peripherals = []
ensurePoweredOn = (callback) ->
if noble.state is 'poweredOn'
do callback
else
noble.once 'stateChange', (state) ->
unless state is 'poweredOn'
error = new Error "Invalid bluetooth state: #{ state }"
callback error
startScanning = (callback) ->
noble.on 'discover', onDiscover
noble.startScanning ['1000'], false, callback
onDiscover = (peripheral) ->
if peripheral.advertisement?.localName?[0] is 'M'
peripherals.push peripheral
waitForResults = (callback) ->
done = ->
noble.stopScanning()
do callback
setTimeout done, timeout
resolvePeripherals = (callback) ->
async.map peripherals, Bulb.fromPeripheral, (error, results) ->
unless error
bulbs = results
callback error
async.series [
ensurePoweredOn
startScanning
waitForResults
resolvePeripherals
], (error) ->
noble.removeListener 'discover', onDiscover
callback error, bulbs
module.exports = Bulb
| true | ### Milight Bluetooth bulb control library. All credit for reverse engineering the protocol goes to
@moosd - https://github.com/moosd/ReverseEngineeredMiLightBluetooth ###
async = require 'async'
noble = require 'noble'
noop = ->
getId = (name) ->
num = (parseInt name[1...3], 16) & 0xff
num = num << 8
num = num | ((parseInt name[3...5], 16) & 0xff)
return [num >> 8, num % 256]
createPacket = (input) ->
k = input[0]
i = j = 0
while i <= 10
j += input[i++] & 0xff
checksum = (((k ^ j) & 0xff) + 131) & 0xff
xored = input.map (v) -> (v & 0xff) ^ k
rv = []
for o, i in [0, 16, 24, 1, 129, 55, 169, 87, 35, 70, 23, 0]
if xored[i]?
rv.push xored[i] + o & 0xff
rv[0] = k
rv.push checksum
return Buffer.from rv
class Bulb
constructor: (@name, @tx) ->
@id = getId @name
send: (command, callback) ->
rbit = Math.round Math.random() * 255
data = createPacket [rbit, 161, @id[0], @id[1], command...]
@tx.write data, false, callback
turnOn: (callback=noop) ->
### Turn the lamp on. ###
@send [2, 1, 0, 0, 0, 0, 0], callback
turnOff: (callback=noop) ->
### Turn the lamp off. ###
@send [2, 2, 0, 0, 0, 0, 0], callback
setTemperature: (kelvin, brightness, callback=noop) ->
### Set color temperature to *kelvin* 0-100 with *brightness* 0-100. ###
async.series [
(callback) => @send [4, 4, PI:NAME:<NAME>END_PI, 255, 0, 0, 0], callback
(callback) => @send [4, 5, kelvin, brightness, 0, 0, 0], callback
], callback
setColor: (hue, brightness, callback=noop) ->
### Set color to *hue* 0-255 with *brightness* 0-100. ###
async.series [
(callback) => @send [2, 4, hue, 100, 0, 0, 0], callback
(callback) => @send [2, 5, hue, brightness, 0, 0, 0], callback
], callback
setMode: (mode, callback=noop) ->
### Set lamp to disco *mode* 0-10. ###
@send [6, 4, mode, 0, 0, 0, 0], callback
Bulb.fromPeripheral = (peripheral, callback) ->
### Create a new Bulb instance from *peripheral*. ###
connect = (callback) ->
peripheral.connect callback
discover = (callback) ->
peripheral.discoverSomeServicesAndCharacteristics ['1000'], ['1001'], callback
setup = (services, characteristics, callback) ->
name = peripheral.advertisement.localName.trim()
bulb = new Bulb name, characteristics[0]
callback null, bulb
async.waterfall [connect, discover, setup], callback
Bulb.discover = (timeout, callback) ->
### Discover bulbs, if they don't show up unpair them in the app and try increasing the *timeout*. ###
if arguments.length is 1
callback = timeout
timeout = 2000
bulbs = []
peripherals = []
ensurePoweredOn = (callback) ->
if noble.state is 'poweredOn'
do callback
else
noble.once 'stateChange', (state) ->
unless state is 'poweredOn'
error = new Error "Invalid bluetooth state: #{ state }"
callback error
startScanning = (callback) ->
noble.on 'discover', onDiscover
noble.startScanning ['1000'], false, callback
onDiscover = (peripheral) ->
if peripheral.advertisement?.localName?[0] is 'M'
peripherals.push peripheral
waitForResults = (callback) ->
done = ->
noble.stopScanning()
do callback
setTimeout done, timeout
resolvePeripherals = (callback) ->
async.map peripherals, Bulb.fromPeripheral, (error, results) ->
unless error
bulbs = results
callback error
async.series [
ensurePoweredOn
startScanning
waitForResults
resolvePeripherals
], (error) ->
noble.removeListener 'discover', onDiscover
callback error, bulbs
module.exports = Bulb
|
[
{
"context": "\"Sayfalar\"\n \"posts\": \"Yazılar\"\n \"authors\": \"Yazarlar\"\n \"search\": \"Ara\"\n \"bookmarks\": \"Favoriler\"",
"end": 390,
"score": 0.9996938109397888,
"start": 382,
"tag": "NAME",
"value": "Yazarlar"
},
{
"context": " \"shared\": \"Paylaşıldı!\"\n\"authors\":\n \"title\": \"Yazarlar\"\n\"author\":\n \"title\": \"Yazar: {{name}}\"\n\"pages\"",
"end": 1350,
"score": 0.9997397065162659,
"start": 1342,
"tag": "NAME",
"value": "Yazarlar"
}
] | lib/translations/tr.cson | RoeperSchool/lukeproject | 0 | "pullToRefresh": "Yenilemek için çekin!"
"retry": "Tekrar Dene"
"back": "Geri"
"error": "Bir şeyler ters gitti, lütfen tekrar deneyin."
"attemptToConnect": "Tekrar Bağlanılıyor"
"yes": "Evet"
"no" : "Hayır"
"emptyList" : "Burada bir şey yok!"
"menu":
"title": "Menü"
"home": "Ana Sayfa"
"tags": "Etiketler"
"pages": "Sayfalar"
"posts": "Yazılar"
"authors": "Yazarlar"
"search": "Ara"
"bookmarks": "Favoriler"
"socialNetworks": "Sosyal Medya"
"categories": "Kategoriler"
"settings": "Ayarlar"
"customPosts": "Özel Gönderiler"
"customTaxonomy": "Özel Taksonomi"
"pushNotifications":
"newContent":
"title": "Yeni Yazı Yayımlandı!"
"text": "Yeni Yazı/Sayfa: '{{postTitle}}' yayımlandı, açmak ister misiniz?"
"bookmark":
"title": "Favoriler"
"emptyList" : "Henüz favori yazı yok!"
"bookmarked" : "Favorilere eklendi!"
"removed" : "Favorilerden kaldırıldı!"
"tags":
"title": "Etiketler"
"tag":
"title": "Etiket: {{name}}"
"customTaxonomy":
"title": "{{term}}: {{name}}"
"categories":
"title": "Kategoriler"
"category":
"title": "Kategori: {{name}}"
"home":
"title": "Ana sayfa"
"search":
"inputPlaceholder": "Ara"
"title": "Ara"
"titleQuery": "Ara: {{query}}"
"sharing":
"shared": "Paylaşıldı!"
"authors":
"title": "Yazarlar"
"author":
"title": "Yazar: {{name}}"
"pages":
"title": "Sayfalar"
"posts":
"title": "Yazılar"
"featured": "Öne çıkan"
"post":
"comments": "Yorumlar"
"openInBrowser": "Tarayıcıda aç"
"about":
"title": "Hakkımızda"
"params":
"title": "Ayarlar"
"offlineMode": "Çevrimdışı mod (yakında)"
"fontSize": "Yazı Boyutu"
"language": "Dil"
"languages":
"en": "İngilizce"
"tr": "Türkçe"
"fr": "Fransızca"
"zh": "Çince"
"es": "İspanyolca"
"pl": "Lehçe (Polonezce)"
"de": "Almanca"
"pt": "Portekizce"
"it": "İtalyanca"
"nl": "Felemenkçe"
"ru": "Rusça"
"fontSize":
"small": "Küçük"
"medium": "Orta"
"large": "Büyük"
"x-large": "XBüyük"
"xx-large": "XXBüyük"
| 122967 | "pullToRefresh": "Yenilemek için çekin!"
"retry": "Tekrar Dene"
"back": "Geri"
"error": "Bir şeyler ters gitti, lütfen tekrar deneyin."
"attemptToConnect": "Tekrar Bağlanılıyor"
"yes": "Evet"
"no" : "Hayır"
"emptyList" : "Burada bir şey yok!"
"menu":
"title": "Menü"
"home": "Ana Sayfa"
"tags": "Etiketler"
"pages": "Sayfalar"
"posts": "Yazılar"
"authors": "<NAME>"
"search": "Ara"
"bookmarks": "Favoriler"
"socialNetworks": "Sosyal Medya"
"categories": "Kategoriler"
"settings": "Ayarlar"
"customPosts": "Özel Gönderiler"
"customTaxonomy": "Özel Taksonomi"
"pushNotifications":
"newContent":
"title": "Yeni Yazı Yayımlandı!"
"text": "Yeni Yazı/Sayfa: '{{postTitle}}' yayımlandı, açmak ister misiniz?"
"bookmark":
"title": "Favoriler"
"emptyList" : "Henüz favori yazı yok!"
"bookmarked" : "Favorilere eklendi!"
"removed" : "Favorilerden kaldırıldı!"
"tags":
"title": "Etiketler"
"tag":
"title": "Etiket: {{name}}"
"customTaxonomy":
"title": "{{term}}: {{name}}"
"categories":
"title": "Kategoriler"
"category":
"title": "Kategori: {{name}}"
"home":
"title": "Ana sayfa"
"search":
"inputPlaceholder": "Ara"
"title": "Ara"
"titleQuery": "Ara: {{query}}"
"sharing":
"shared": "Paylaşıldı!"
"authors":
"title": "<NAME>"
"author":
"title": "Yazar: {{name}}"
"pages":
"title": "Sayfalar"
"posts":
"title": "Yazılar"
"featured": "Öne çıkan"
"post":
"comments": "Yorumlar"
"openInBrowser": "Tarayıcıda aç"
"about":
"title": "Hakkımızda"
"params":
"title": "Ayarlar"
"offlineMode": "Çevrimdışı mod (yakında)"
"fontSize": "Yazı Boyutu"
"language": "Dil"
"languages":
"en": "İngilizce"
"tr": "Türkçe"
"fr": "Fransızca"
"zh": "Çince"
"es": "İspanyolca"
"pl": "Lehçe (Polonezce)"
"de": "Almanca"
"pt": "Portekizce"
"it": "İtalyanca"
"nl": "Felemenkçe"
"ru": "Rusça"
"fontSize":
"small": "Küçük"
"medium": "Orta"
"large": "Büyük"
"x-large": "XBüyük"
"xx-large": "XXBüyük"
| true | "pullToRefresh": "Yenilemek için çekin!"
"retry": "Tekrar Dene"
"back": "Geri"
"error": "Bir şeyler ters gitti, lütfen tekrar deneyin."
"attemptToConnect": "Tekrar Bağlanılıyor"
"yes": "Evet"
"no" : "Hayır"
"emptyList" : "Burada bir şey yok!"
"menu":
"title": "Menü"
"home": "Ana Sayfa"
"tags": "Etiketler"
"pages": "Sayfalar"
"posts": "Yazılar"
"authors": "PI:NAME:<NAME>END_PI"
"search": "Ara"
"bookmarks": "Favoriler"
"socialNetworks": "Sosyal Medya"
"categories": "Kategoriler"
"settings": "Ayarlar"
"customPosts": "Özel Gönderiler"
"customTaxonomy": "Özel Taksonomi"
"pushNotifications":
"newContent":
"title": "Yeni Yazı Yayımlandı!"
"text": "Yeni Yazı/Sayfa: '{{postTitle}}' yayımlandı, açmak ister misiniz?"
"bookmark":
"title": "Favoriler"
"emptyList" : "Henüz favori yazı yok!"
"bookmarked" : "Favorilere eklendi!"
"removed" : "Favorilerden kaldırıldı!"
"tags":
"title": "Etiketler"
"tag":
"title": "Etiket: {{name}}"
"customTaxonomy":
"title": "{{term}}: {{name}}"
"categories":
"title": "Kategoriler"
"category":
"title": "Kategori: {{name}}"
"home":
"title": "Ana sayfa"
"search":
"inputPlaceholder": "Ara"
"title": "Ara"
"titleQuery": "Ara: {{query}}"
"sharing":
"shared": "Paylaşıldı!"
"authors":
"title": "PI:NAME:<NAME>END_PI"
"author":
"title": "Yazar: {{name}}"
"pages":
"title": "Sayfalar"
"posts":
"title": "Yazılar"
"featured": "Öne çıkan"
"post":
"comments": "Yorumlar"
"openInBrowser": "Tarayıcıda aç"
"about":
"title": "Hakkımızda"
"params":
"title": "Ayarlar"
"offlineMode": "Çevrimdışı mod (yakında)"
"fontSize": "Yazı Boyutu"
"language": "Dil"
"languages":
"en": "İngilizce"
"tr": "Türkçe"
"fr": "Fransızca"
"zh": "Çince"
"es": "İspanyolca"
"pl": "Lehçe (Polonezce)"
"de": "Almanca"
"pt": "Portekizce"
"it": "İtalyanca"
"nl": "Felemenkçe"
"ru": "Rusça"
"fontSize":
"small": "Küçük"
"medium": "Orta"
"large": "Büyük"
"x-large": "XBüyük"
"xx-large": "XXBüyük"
|
[
{
"context": ">\n activeClass = 'active'\n editorKey = 'comment'\n\n #重设编辑器的大小\n resizeCommentEditor = ()-",
"end": 2013,
"score": 0.9444513916969299,
"start": 2006,
"tag": "KEY",
"value": "comment"
}
] | src/js/comment/comment-directives.coffee | Kiteam/kiteam-angular | 0 | define [
'../ng-module'
'../utils'
'./views/comment-all.html'
'../../package/highlight/highlight.pack'
], (_module,_utils, _template, _hljs) ->
_module.directiveModule
#评论列表
.directive('commentList', ['$rootScope', '$stateParams', 'API', '$timeout', ($rootScope, $stateParams, API, $timeout)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-list', _template
link: (scope, element, attr)->
searchComment = (pageIndex, cb)->
API.project($stateParams.project_id)
.issue($stateParams.issue_id)
.comment()
.retrieve({
pageSize: 20
pageIndex: pageIndex
})
.then((result)->
scope.comments = result
$timeout(_hljs.initHighlighting,200)
)
#收到重新加载评论列表的事件
scope.$on 'comment:list:reload', -> searchComment(1,null)
$rootScope.$on 'pagination:change',(event, page, uuid, cb)->
return if uuid isnt 'comment_list'
searchComment(page)
searchComment()
])
#评论详细
.directive('commentCell', ['$stateParams', 'API', 'NOTIFY', ($stateParams, API, NOTIFY)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-cell', _template
link: (scope, element, attr)->
scope.onClickEdit = (event, comment)->
return
scope.onClickDelete = (event, comment)->
return if not confirm('您确定要删除这个评论么,删除将无法恢复')
API.project($stateParams.project_id).issue($stateParams.issue_id)
.comment(comment.id).delete().then ->
NOTIFY.success '删除评论成功'
element.fadeOut()
return
])
#评论的编辑框
.directive('commentEditor', ['$stateParams', '$timeout', 'API', ($stateParams, $timeout, API)->
restrict: 'E'
replace: true
scope: {}
template: _utils.extractTemplate '#tmpl-comment-editor', _template
link: (scope, element, attrs)->
activeClass = 'active'
editorKey = 'comment'
#重设编辑器的大小
resizeCommentEditor = ()->
element.css('width', element.parent().css('width')) if element.parent()
#focus后,弹出大的编辑器
scope.onFocusEditor = ()->
issue_id = $stateParams.issue_id || null
element.addClass activeClass
scope.$broadcast 'editor:content', editorKey, issue_id, null, attrs.uploadUrl
#绑定body的one事件,点击任何地方隐藏当前
$('body').one 'click', -> scope.$broadcast 'editor:will:cancel', editorKey
return true
scope.$on 'editor:cancel', (event, name)->
return if editorKey isnt name
element.removeClass activeClass
scope.$on 'editor:submit', (event, name, data)->
return if editorKey isnt name
element.removeClass activeClass
#阻止click的冒泡
element.bind 'click', (e)-> e.stopPropagation()
#调整编辑器的大小
$(window).on 'onResizeEx', resizeCommentEditor
scope.$on '$destroy', -> $(window).off 'onResizeEx', resizeCommentEditor
#立即监控resize会有问题,暂时用这种方式解决,未来需要调整
$timeout(resizeCommentEditor, 1000)
]) | 68980 | define [
'../ng-module'
'../utils'
'./views/comment-all.html'
'../../package/highlight/highlight.pack'
], (_module,_utils, _template, _hljs) ->
_module.directiveModule
#评论列表
.directive('commentList', ['$rootScope', '$stateParams', 'API', '$timeout', ($rootScope, $stateParams, API, $timeout)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-list', _template
link: (scope, element, attr)->
searchComment = (pageIndex, cb)->
API.project($stateParams.project_id)
.issue($stateParams.issue_id)
.comment()
.retrieve({
pageSize: 20
pageIndex: pageIndex
})
.then((result)->
scope.comments = result
$timeout(_hljs.initHighlighting,200)
)
#收到重新加载评论列表的事件
scope.$on 'comment:list:reload', -> searchComment(1,null)
$rootScope.$on 'pagination:change',(event, page, uuid, cb)->
return if uuid isnt 'comment_list'
searchComment(page)
searchComment()
])
#评论详细
.directive('commentCell', ['$stateParams', 'API', 'NOTIFY', ($stateParams, API, NOTIFY)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-cell', _template
link: (scope, element, attr)->
scope.onClickEdit = (event, comment)->
return
scope.onClickDelete = (event, comment)->
return if not confirm('您确定要删除这个评论么,删除将无法恢复')
API.project($stateParams.project_id).issue($stateParams.issue_id)
.comment(comment.id).delete().then ->
NOTIFY.success '删除评论成功'
element.fadeOut()
return
])
#评论的编辑框
.directive('commentEditor', ['$stateParams', '$timeout', 'API', ($stateParams, $timeout, API)->
restrict: 'E'
replace: true
scope: {}
template: _utils.extractTemplate '#tmpl-comment-editor', _template
link: (scope, element, attrs)->
activeClass = 'active'
editorKey = '<KEY>'
#重设编辑器的大小
resizeCommentEditor = ()->
element.css('width', element.parent().css('width')) if element.parent()
#focus后,弹出大的编辑器
scope.onFocusEditor = ()->
issue_id = $stateParams.issue_id || null
element.addClass activeClass
scope.$broadcast 'editor:content', editorKey, issue_id, null, attrs.uploadUrl
#绑定body的one事件,点击任何地方隐藏当前
$('body').one 'click', -> scope.$broadcast 'editor:will:cancel', editorKey
return true
scope.$on 'editor:cancel', (event, name)->
return if editorKey isnt name
element.removeClass activeClass
scope.$on 'editor:submit', (event, name, data)->
return if editorKey isnt name
element.removeClass activeClass
#阻止click的冒泡
element.bind 'click', (e)-> e.stopPropagation()
#调整编辑器的大小
$(window).on 'onResizeEx', resizeCommentEditor
scope.$on '$destroy', -> $(window).off 'onResizeEx', resizeCommentEditor
#立即监控resize会有问题,暂时用这种方式解决,未来需要调整
$timeout(resizeCommentEditor, 1000)
]) | true | define [
'../ng-module'
'../utils'
'./views/comment-all.html'
'../../package/highlight/highlight.pack'
], (_module,_utils, _template, _hljs) ->
_module.directiveModule
#评论列表
.directive('commentList', ['$rootScope', '$stateParams', 'API', '$timeout', ($rootScope, $stateParams, API, $timeout)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-list', _template
link: (scope, element, attr)->
searchComment = (pageIndex, cb)->
API.project($stateParams.project_id)
.issue($stateParams.issue_id)
.comment()
.retrieve({
pageSize: 20
pageIndex: pageIndex
})
.then((result)->
scope.comments = result
$timeout(_hljs.initHighlighting,200)
)
#收到重新加载评论列表的事件
scope.$on 'comment:list:reload', -> searchComment(1,null)
$rootScope.$on 'pagination:change',(event, page, uuid, cb)->
return if uuid isnt 'comment_list'
searchComment(page)
searchComment()
])
#评论详细
.directive('commentCell', ['$stateParams', 'API', 'NOTIFY', ($stateParams, API, NOTIFY)->
restrict: 'E'
scope: data: '='
replace: true
template: _utils.extractTemplate '#tmpl-comment-cell', _template
link: (scope, element, attr)->
scope.onClickEdit = (event, comment)->
return
scope.onClickDelete = (event, comment)->
return if not confirm('您确定要删除这个评论么,删除将无法恢复')
API.project($stateParams.project_id).issue($stateParams.issue_id)
.comment(comment.id).delete().then ->
NOTIFY.success '删除评论成功'
element.fadeOut()
return
])
#评论的编辑框
.directive('commentEditor', ['$stateParams', '$timeout', 'API', ($stateParams, $timeout, API)->
restrict: 'E'
replace: true
scope: {}
template: _utils.extractTemplate '#tmpl-comment-editor', _template
link: (scope, element, attrs)->
activeClass = 'active'
editorKey = 'PI:KEY:<KEY>END_PI'
#重设编辑器的大小
resizeCommentEditor = ()->
element.css('width', element.parent().css('width')) if element.parent()
#focus后,弹出大的编辑器
scope.onFocusEditor = ()->
issue_id = $stateParams.issue_id || null
element.addClass activeClass
scope.$broadcast 'editor:content', editorKey, issue_id, null, attrs.uploadUrl
#绑定body的one事件,点击任何地方隐藏当前
$('body').one 'click', -> scope.$broadcast 'editor:will:cancel', editorKey
return true
scope.$on 'editor:cancel', (event, name)->
return if editorKey isnt name
element.removeClass activeClass
scope.$on 'editor:submit', (event, name, data)->
return if editorKey isnt name
element.removeClass activeClass
#阻止click的冒泡
element.bind 'click', (e)-> e.stopPropagation()
#调整编辑器的大小
$(window).on 'onResizeEx', resizeCommentEditor
scope.$on '$destroy', -> $(window).off 'onResizeEx', resizeCommentEditor
#立即监控resize会有问题,暂时用这种方式解决,未来需要调整
$timeout(resizeCommentEditor, 1000)
]) |
[
{
"context": "ored on user', (done) ->\n user = new User(salt: 'sodiumchloride')\n user.save( (err, theUser) ->\n return done(",
"end": 239,
"score": 0.8863686323165894,
"start": 225,
"tag": "PASSWORD",
"value": "sodiumchloride"
},
{
"context": "password\", (done) ->\n user = new User(password: \"password\")\n\n user.save( (err, savedUser) ->\n return do",
"end": 1102,
"score": 0.9990542531013489,
"start": 1094,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "done(err) if err?\n\n savedUser.isValidPassword('password').then( (isValid) ->\n assert.isTrue(\n ",
"end": 1208,
"score": 0.9840291142463684,
"start": 1200,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "rue(\n isValid,\n \"Expected password 'password' for user to be valid\"\n )\n\n done()\n ",
"end": 1304,
"score": 0.8337424397468567,
"start": 1296,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "password\", (done) ->\n user = new User(password: \"password\")\n\n user.save( (err, savedUser) ->\n return do",
"end": 1505,
"score": 0.9937509894371033,
"start": 1497,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "done(err) if err?\n\n savedUser.isValidPassword('hats').then( (isValid) ->\n assert.isFalse(\n ",
"end": 1607,
"score": 0.8320217132568359,
"start": 1603,
"tag": "PASSWORD",
"value": "hats"
},
{
"context": "e saving\", (done) ->\n user = new User(password: \"password\")\n\n user.save( (err, savedUser) ->\n return do",
"end": 1875,
"score": 0.8100616335868835,
"start": 1867,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "pers.createUser(\n email: \"hats\"\n password: \"boats\"\n ).then( (user) ->\n authenticationCallback =",
"end": 2966,
"score": 0.9993705749511719,
"start": 2961,
"tag": "PASSWORD",
"value": "boats"
},
{
"context": "pers.createUser(\n email: \"hats\"\n password: \"boats\"\n ).then( (user) ->\n callback = (err, user) -",
"end": 3390,
"score": 0.9993557929992676,
"start": 3385,
"tag": "PASSWORD",
"value": "boats"
}
] | server/test/units/user.coffee | unepwcmc/NRT | 0 | assert = require('chai').assert
Q = require('q')
User = require('../../models/user').model
helpers = require '../helpers'
suite('User')
test('Password salt values can be stored on user', (done) ->
user = new User(salt: 'sodiumchloride')
user.save( (err, theUser) ->
return done(err) if err?
assert.strictEqual(
theUser.salt,
'sodiumchloride',
"Expected user's salt to be sodiumchloride"
)
done()
)
)
test('.canEdit resolves when given a page whose parent is owned by the user', (done) ->
theOwner = theIndicator = thePage = null
helpers.createUser().then((user) ->
theOwner = user
Q.nfcall(
helpers.createIndicator,
owner: theOwner
)
).then((indicator) ->
theIndicator = indicator
helpers.createPage(
parent_id: indicator._id
parent_type: "Indicator"
)
).then((page) ->
thePage = page
theOwner.canEdit(page)
).then(->
done()
).catch(done)
)
test(".isValidPassword returns false if bcrypt(password)
does not match stored password", (done) ->
user = new User(password: "password")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('password').then( (isValid) ->
assert.isTrue(
isValid,
"Expected password 'password' for user to be valid"
)
done()
).catch(done)
)
)
test(".isValidPassword returns true if bcrypt(password)
matches stored password", (done) ->
user = new User(password: "password")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('hats').then( (isValid) ->
assert.isFalse(
isValid,
"Expected password 'hats' for user to be invalid"
)
done()
).catch(done)
)
)
test(".save hashes the user's password before saving", (done) ->
user = new User(password: "password")
user.save( (err, savedUser) ->
return done(err) if err?
assert.notStrictEqual(
"password",
savedUser.password,
"Expected User's saved password to not match the plaintext"
)
done()
)
)
test('.save with a distinguishedName', (done) ->
dn = "CN=The Queen,OU=Royalty"
user = new User(distinguishedName: dn)
user.save( (err, savedUser) ->
return done(err) if err?
assert.strictEqual(
dn,
savedUser.distinguishedName,
"Expected user to have a distinguished name"
)
done()
)
)
test(".isLDAPAccount should return true if the account has a distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".isLDAPAccount should return false if the account does not have a
distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".loginFromLocalDb succeeds if the user's password is correct", (done) ->
helpers.createUser(
email: "hats"
password: "boats"
).then( (user) ->
authenticationCallback = (err, user) ->
assert.ok user, "Expected user to be returned when authentication successful"
assert.strictEqual "hats", user.email
done()
user.loginFromLocalDb("boats", authenticationCallback)
).catch(done)
)
test(".loginFromLocalDb fails if the user's password is incorrect", (done) ->
helpers.createUser(
email: "hats"
password: "boats"
).then( (user) ->
callback = (err, user) ->
assert.notOk user, "Expected user to not be returned when authentication fails"
done()
user.loginFromLocalDb("ships", callback)
).catch(done)
)
test("Usernames must be unique", (done) ->
helpers.createUser().then( (user) ->
helpers.createUser()
).then( ->
done(new Error("Expected duplicate user creation to fail"))
).catch( (err) ->
assert.match err, /duplicate key error index/
done()
)
)
| 119831 | assert = require('chai').assert
Q = require('q')
User = require('../../models/user').model
helpers = require '../helpers'
suite('User')
test('Password salt values can be stored on user', (done) ->
user = new User(salt: '<PASSWORD>')
user.save( (err, theUser) ->
return done(err) if err?
assert.strictEqual(
theUser.salt,
'sodiumchloride',
"Expected user's salt to be sodiumchloride"
)
done()
)
)
test('.canEdit resolves when given a page whose parent is owned by the user', (done) ->
theOwner = theIndicator = thePage = null
helpers.createUser().then((user) ->
theOwner = user
Q.nfcall(
helpers.createIndicator,
owner: theOwner
)
).then((indicator) ->
theIndicator = indicator
helpers.createPage(
parent_id: indicator._id
parent_type: "Indicator"
)
).then((page) ->
thePage = page
theOwner.canEdit(page)
).then(->
done()
).catch(done)
)
test(".isValidPassword returns false if bcrypt(password)
does not match stored password", (done) ->
user = new User(password: "<PASSWORD>")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('<PASSWORD>').then( (isValid) ->
assert.isTrue(
isValid,
"Expected password '<PASSWORD>' for user to be valid"
)
done()
).catch(done)
)
)
test(".isValidPassword returns true if bcrypt(password)
matches stored password", (done) ->
user = new User(password: "<PASSWORD>")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('<PASSWORD>').then( (isValid) ->
assert.isFalse(
isValid,
"Expected password 'hats' for user to be invalid"
)
done()
).catch(done)
)
)
test(".save hashes the user's password before saving", (done) ->
user = new User(password: "<PASSWORD>")
user.save( (err, savedUser) ->
return done(err) if err?
assert.notStrictEqual(
"password",
savedUser.password,
"Expected User's saved password to not match the plaintext"
)
done()
)
)
test('.save with a distinguishedName', (done) ->
dn = "CN=The Queen,OU=Royalty"
user = new User(distinguishedName: dn)
user.save( (err, savedUser) ->
return done(err) if err?
assert.strictEqual(
dn,
savedUser.distinguishedName,
"Expected user to have a distinguished name"
)
done()
)
)
test(".isLDAPAccount should return true if the account has a distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".isLDAPAccount should return false if the account does not have a
distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".loginFromLocalDb succeeds if the user's password is correct", (done) ->
helpers.createUser(
email: "hats"
password: "<PASSWORD>"
).then( (user) ->
authenticationCallback = (err, user) ->
assert.ok user, "Expected user to be returned when authentication successful"
assert.strictEqual "hats", user.email
done()
user.loginFromLocalDb("boats", authenticationCallback)
).catch(done)
)
test(".loginFromLocalDb fails if the user's password is incorrect", (done) ->
helpers.createUser(
email: "hats"
password: "<PASSWORD>"
).then( (user) ->
callback = (err, user) ->
assert.notOk user, "Expected user to not be returned when authentication fails"
done()
user.loginFromLocalDb("ships", callback)
).catch(done)
)
test("Usernames must be unique", (done) ->
helpers.createUser().then( (user) ->
helpers.createUser()
).then( ->
done(new Error("Expected duplicate user creation to fail"))
).catch( (err) ->
assert.match err, /duplicate key error index/
done()
)
)
| true | assert = require('chai').assert
Q = require('q')
User = require('../../models/user').model
helpers = require '../helpers'
suite('User')
test('Password salt values can be stored on user', (done) ->
user = new User(salt: 'PI:PASSWORD:<PASSWORD>END_PI')
user.save( (err, theUser) ->
return done(err) if err?
assert.strictEqual(
theUser.salt,
'sodiumchloride',
"Expected user's salt to be sodiumchloride"
)
done()
)
)
test('.canEdit resolves when given a page whose parent is owned by the user', (done) ->
theOwner = theIndicator = thePage = null
helpers.createUser().then((user) ->
theOwner = user
Q.nfcall(
helpers.createIndicator,
owner: theOwner
)
).then((indicator) ->
theIndicator = indicator
helpers.createPage(
parent_id: indicator._id
parent_type: "Indicator"
)
).then((page) ->
thePage = page
theOwner.canEdit(page)
).then(->
done()
).catch(done)
)
test(".isValidPassword returns false if bcrypt(password)
does not match stored password", (done) ->
user = new User(password: "PI:PASSWORD:<PASSWORD>END_PI")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('PI:PASSWORD:<PASSWORD>END_PI').then( (isValid) ->
assert.isTrue(
isValid,
"Expected password 'PI:PASSWORD:<PASSWORD>END_PI' for user to be valid"
)
done()
).catch(done)
)
)
test(".isValidPassword returns true if bcrypt(password)
matches stored password", (done) ->
user = new User(password: "PI:PASSWORD:<PASSWORD>END_PI")
user.save( (err, savedUser) ->
return done(err) if err?
savedUser.isValidPassword('PI:PASSWORD:<PASSWORD>END_PI').then( (isValid) ->
assert.isFalse(
isValid,
"Expected password 'hats' for user to be invalid"
)
done()
).catch(done)
)
)
test(".save hashes the user's password before saving", (done) ->
user = new User(password: "PI:PASSWORD:<PASSWORD>END_PI")
user.save( (err, savedUser) ->
return done(err) if err?
assert.notStrictEqual(
"password",
savedUser.password,
"Expected User's saved password to not match the plaintext"
)
done()
)
)
test('.save with a distinguishedName', (done) ->
dn = "CN=The Queen,OU=Royalty"
user = new User(distinguishedName: dn)
user.save( (err, savedUser) ->
return done(err) if err?
assert.strictEqual(
dn,
savedUser.distinguishedName,
"Expected user to have a distinguished name"
)
done()
)
)
test(".isLDAPAccount should return true if the account has a distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".isLDAPAccount should return false if the account does not have a
distinguished name", ->
user = new User(distinguishedName: "CN=The Queen,OU=Royalty")
assert.isTrue user.isLDAPAccount()
)
test(".loginFromLocalDb succeeds if the user's password is correct", (done) ->
helpers.createUser(
email: "hats"
password: "PI:PASSWORD:<PASSWORD>END_PI"
).then( (user) ->
authenticationCallback = (err, user) ->
assert.ok user, "Expected user to be returned when authentication successful"
assert.strictEqual "hats", user.email
done()
user.loginFromLocalDb("boats", authenticationCallback)
).catch(done)
)
test(".loginFromLocalDb fails if the user's password is incorrect", (done) ->
helpers.createUser(
email: "hats"
password: "PI:PASSWORD:<PASSWORD>END_PI"
).then( (user) ->
callback = (err, user) ->
assert.notOk user, "Expected user to not be returned when authentication fails"
done()
user.loginFromLocalDb("ships", callback)
).catch(done)
)
test("Usernames must be unique", (done) ->
helpers.createUser().then( (user) ->
helpers.createUser()
).then( ->
done(new Error("Expected duplicate user creation to fail"))
).catch( (err) ->
assert.match err, /duplicate key error index/
done()
)
)
|
[
{
"context": "###\n# @author Will Steinmetz\n# Express middleware to provide basic respond_to ",
"end": 28,
"score": 0.9998465776443481,
"start": 14,
"tag": "NAME",
"value": "Will Steinmetz"
},
{
"context": "ar to that\n# of Ruby on Rails\n# Copyright (c)2016, Will Steinmetz\n# Licensed under the MIT license.\n###\n\nmodule.exp",
"end": 162,
"score": 0.9998070597648621,
"start": 148,
"tag": "NAME",
"value": "Will Steinmetz"
}
] | Gruntfile.coffee | willsteinmetz/express-respondsto | 1 | ###
# @author Will Steinmetz
# Express middleware to provide basic respond_to functionality similar to that
# of Ruby on Rails
# Copyright (c)2016, Will Steinmetz
# Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
coffee:
compile:
options:
sourceMap: true
bare: true
files:
'index.js': 'src/index.coffee'
watch:
coffee:
files: [
'src/*.coffee'
]
tasks: [ 'coffee', 'simplemocha' ]
options:
spawn: false
simplemocha:
all:
src: [ 'test/**/*.coffee', 'text/**/*.js' ]
options:
compilers: 'coffee:coffee-script/register'
)
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-simple-mocha'
grunt.registerTask 'default', [
'coffee'
'simplemocha'
'watch'
]
| 154452 | ###
# @author <NAME>
# Express middleware to provide basic respond_to functionality similar to that
# of Ruby on Rails
# Copyright (c)2016, <NAME>
# Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
coffee:
compile:
options:
sourceMap: true
bare: true
files:
'index.js': 'src/index.coffee'
watch:
coffee:
files: [
'src/*.coffee'
]
tasks: [ 'coffee', 'simplemocha' ]
options:
spawn: false
simplemocha:
all:
src: [ 'test/**/*.coffee', 'text/**/*.js' ]
options:
compilers: 'coffee:coffee-script/register'
)
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-simple-mocha'
grunt.registerTask 'default', [
'coffee'
'simplemocha'
'watch'
]
| true | ###
# @author PI:NAME:<NAME>END_PI
# Express middleware to provide basic respond_to functionality similar to that
# of Ruby on Rails
# Copyright (c)2016, PI:NAME:<NAME>END_PI
# Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
coffee:
compile:
options:
sourceMap: true
bare: true
files:
'index.js': 'src/index.coffee'
watch:
coffee:
files: [
'src/*.coffee'
]
tasks: [ 'coffee', 'simplemocha' ]
options:
spawn: false
simplemocha:
all:
src: [ 'test/**/*.coffee', 'text/**/*.js' ]
options:
compilers: 'coffee:coffee-script/register'
)
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-simple-mocha'
grunt.registerTask 'default', [
'coffee'
'simplemocha'
'watch'
]
|
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998899698257446,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino Garcia <jespin",
"end": 52,
"score": 0.9999305009841919,
"start": 40,
"tag": "EMAIL",
"value": "niwi@niwi.be"
},
{
"context": " Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>\n# Copyright (C) 2014 David B",
"end": 94,
"score": 0.9998807311058044,
"start": 75,
"tag": "NAME",
"value": "Jesús Espino Garcia"
},
{
"context": "iwi.be>\n# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>\n# Copyright (C) 2014 David Barragán Merino <bame",
"end": 114,
"score": 0.9999350905418396,
"start": 96,
"tag": "EMAIL",
"value": "jespinog@gmail.com"
},
{
"context": "o Garcia <jespinog@gmail.com>\n# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>\n#\n# This program is free s",
"end": 158,
"score": 0.9998841881752014,
"start": 137,
"tag": "NAME",
"value": "David Barragán Merino"
},
{
"context": ".com>\n# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>\n#\n# This program is free software: you can redis",
"end": 180,
"score": 0.999935507774353,
"start": 160,
"tag": "EMAIL",
"value": "bameda@dbarragan.com"
}
] | public/taiga-front/app/coffee/modules/user-settings/change-password.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/user-settings/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
debounce = @.taiga.debounce
module = angular.module("taigaUserSettings")
#############################################################################
## User ChangePassword Controller
#############################################################################
class UserChangePasswordController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$tgAuth"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @auth) ->
@scope.sectionName = "Change Password" #i18n
@scope.project = {}
@scope.user = @auth.getUser()
promise = @.loadInitialData()
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
module.controller("UserChangePasswordController", UserChangePasswordController)
#############################################################################
## User ChangePassword Directive
#############################################################################
UserChangePasswordDirective = ($rs, $confirm, $loading) ->
link = ($scope, $el, $attrs, ctrl) ->
submit = debounce 2000, (event) =>
event.preventDefault()
if $scope.newPassword1 != $scope.newPassword2
$confirm.notify('error', "The passwords dosn't match")
return
$loading.start(submitButton)
promise = $rs.userSettings.changePassword($scope.currentPassword, $scope.newPassword1)
promise.then =>
$loading.finish(submitButton)
$confirm.notify('success')
promise.then null, (response) =>
$loading.finish(submitButton)
$confirm.notify('error', response.data._error_message)
submitButton = $el.find(".submit-button")
$el.on "submit", "form", submit
$el.on "click", ".submit-button", submit
$scope.$on "$destroy", ->
$el.off()
return {
link:link
}
module.directive("tgUserChangePassword", ["$tgResources", "$tgConfirm", "$tgLoading", UserChangePasswordDirective])
| 200241 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/user-settings/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
debounce = @.taiga.debounce
module = angular.module("taigaUserSettings")
#############################################################################
## User ChangePassword Controller
#############################################################################
class UserChangePasswordController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$tgAuth"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @auth) ->
@scope.sectionName = "Change Password" #i18n
@scope.project = {}
@scope.user = @auth.getUser()
promise = @.loadInitialData()
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
module.controller("UserChangePasswordController", UserChangePasswordController)
#############################################################################
## User ChangePassword Directive
#############################################################################
UserChangePasswordDirective = ($rs, $confirm, $loading) ->
link = ($scope, $el, $attrs, ctrl) ->
submit = debounce 2000, (event) =>
event.preventDefault()
if $scope.newPassword1 != $scope.newPassword2
$confirm.notify('error', "The passwords dosn't match")
return
$loading.start(submitButton)
promise = $rs.userSettings.changePassword($scope.currentPassword, $scope.newPassword1)
promise.then =>
$loading.finish(submitButton)
$confirm.notify('success')
promise.then null, (response) =>
$loading.finish(submitButton)
$confirm.notify('error', response.data._error_message)
submitButton = $el.find(".submit-button")
$el.on "submit", "form", submit
$el.on "click", ".submit-button", submit
$scope.$on "$destroy", ->
$el.off()
return {
link:link
}
module.directive("tgUserChangePassword", ["$tgResources", "$tgConfirm", "$tgLoading", UserChangePasswordDirective])
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/user-settings/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
debounce = @.taiga.debounce
module = angular.module("taigaUserSettings")
#############################################################################
## User ChangePassword Controller
#############################################################################
class UserChangePasswordController extends mixOf(taiga.Controller, taiga.PageMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$tgNavUrls",
"$tgAuth"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location, @navUrls, @auth) ->
@scope.sectionName = "Change Password" #i18n
@scope.project = {}
@scope.user = @auth.getUser()
promise = @.loadInitialData()
promise.then null, @.onInitialDataError.bind(@)
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.$emit('project:loaded', project)
return project
loadInitialData: ->
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
return data
return promise.then(=> @.loadProject())
module.controller("UserChangePasswordController", UserChangePasswordController)
#############################################################################
## User ChangePassword Directive
#############################################################################
UserChangePasswordDirective = ($rs, $confirm, $loading) ->
link = ($scope, $el, $attrs, ctrl) ->
submit = debounce 2000, (event) =>
event.preventDefault()
if $scope.newPassword1 != $scope.newPassword2
$confirm.notify('error', "The passwords dosn't match")
return
$loading.start(submitButton)
promise = $rs.userSettings.changePassword($scope.currentPassword, $scope.newPassword1)
promise.then =>
$loading.finish(submitButton)
$confirm.notify('success')
promise.then null, (response) =>
$loading.finish(submitButton)
$confirm.notify('error', response.data._error_message)
submitButton = $el.find(".submit-button")
$el.on "submit", "form", submit
$el.on "click", ".submit-button", submit
$scope.$on "$destroy", ->
$el.off()
return {
link:link
}
module.directive("tgUserChangePassword", ["$tgResources", "$tgConfirm", "$tgLoading", UserChangePasswordDirective])
|
[
{
"context": "g 'Beyond magical.'.green\n\tconsole.log 'Created by Etienne Pinchon (@etiennepinchon)'.green\n\tconsole.log 'Copyright ",
"end": 3408,
"score": 0.9998621940612793,
"start": 3393,
"tag": "NAME",
"value": "Etienne Pinchon"
},
{
"context": "l.'.green\n\tconsole.log 'Created by Etienne Pinchon (@etiennepinchon)'.green\n\tconsole.log 'Copyright ©2016'.green\n\tcon",
"end": 3425,
"score": 0.999352216720581,
"start": 3409,
"tag": "USERNAME",
"value": "(@etiennepinchon"
},
{
"context": "restify.createServer\n\t\tname: 'server'\n\t\tversion: '1.0.0'\n\n\tserver.use restify.acceptParser(server.accepta",
"end": 6700,
"score": 0.9830590486526489,
"start": 6695,
"tag": "IP_ADDRESS",
"value": "1.0.0"
},
{
"context": "', ->\n\t\t\tif message\n\t\t\t\turl = server.url.replace('127.0.0.1', 'localhost')\n\t\t\t\t\n\t\t\t\tif not isProd\n\t\t\t\t\tconsol",
"end": 7378,
"score": 0.9995718002319336,
"start": 7369,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
}
] | index.coffee | etiennepinchon/magixjs-npm | 0 | #!/usr/bin/env node
console.error = ->
##############################################################
# REQUIRE
prompt = require 'prompt'
program = require('commander')
colors = require('colors')
fs = require('fs-extra')
walk = require('walk')
path = require('path')
watcher = require('node-watch')
Generate = require('./lib/Generate')
CoffeeScript = require('coffee-script')
uglify = require('uglify-js')
restify = require('restify')
CookieParser = require('restify-cookies')
mkdirp = require('mkdirp')
openurl = require('openurl')
reload = require('reload')
##############################################################
# CONFIG
prompt.message = 'MagiX'
reloadServer = undefined
##############################################################
# HELPERS
# Move the element at old_index to new_index in place and return the array.
# When new_index is past the end, the array is padded with `undefined` first.
# NOTE(review): mutates Array.prototype globally; `while k-- + 1` is the
# classic pad loop — it runs until k reaches -1.
Array::move = (old_index, new_index) ->
	if new_index >= @length
		k = new_index - (@length)
		while k-- + 1
			@push undefined
	# Remove the element at old_index and re-insert it at new_index.
	@splice new_index, 0, @splice(old_index, 1)[0]
	this
# Reorder document descriptors so each class appears after the file it
# extends; descriptors whose parent was not found are moved to the front.
# Returns a new (deep-cloned) array; the input is not reordered in place.
reorderFiles = (files)->
	new_files = JSON.parse(JSON.stringify(files));
	no_match = []
	for item of files
		# Find the position of the file this one extends (last match wins).
		index = -1
		for item2 of new_files
			if new_files[item2].name is files[item].extends
				index = item2
		if index > -1
			# NOTE(review): `item`/`index` are STRING keys from `for..of`, so
			# `>` compares lexicographically ('10' < '9') — verify ordering is
			# still correct for projects with more than 9 documents.
			if index > item
				new_files = new_files.move(index, item)
		else
			no_match.push files[item].id
	# Push every descriptor without a resolvable parent to the front.
	for id in no_match
		if id isnt undefined
			for item of new_files
				if new_files[item].id is id
					new_files.move(item, 0)
	return new_files
# Return `str` with its first character upper-cased; '' stays ''.
capitalizeFirstLetter = (str) ->
	"#{str[0...1].toUpperCase()}#{str[1..]}"
# Return the names of the immediate subdirectories of `srcpath`.
getDirectories = (srcpath) ->
	entries = fs.readdirSync srcpath
	entries.filter (entry) ->
		fs.statSync(path.join(srcpath, entry)).isDirectory()
# Prefix every non-empty line of `str` with `numOfIndents` tab characters.
# If `opt_spacesPerIndent` is given, the leading tab run is rewritten so each
# tab becomes that many spaces. NOTE: the tab→space pass uses /^\t+/g without
# the `m` flag, so (as in the original) it only touches the very first line.
indent = (str, numOfIndents, opt_spacesPerIndent) ->
	tabPrefix = Array(numOfIndents + 1).join('\t')
	indented = str.replace /^(?=.)/gm, tabPrefix
	return indented unless opt_spacesPerIndent
	spaces = Array(opt_spacesPerIndent + 1).join(' ')
	indented.replace /^\t+/g, (tabs) -> tabs.replace(/./g, spaces)
# Recursively delete the directory at `path` and everything inside it.
# Silently does nothing when the directory does not exist.
deleteFolderRecursive = (path) ->
	return unless fs.existsSync(path)
	for entry in fs.readdirSync(path)
		entryPath = path + '/' + entry
		if fs.lstatSync(entryPath).isDirectory()
			# Descend into subdirectories first.
			deleteFolderRecursive entryPath
		else
			fs.unlinkSync entryPath
	# Directory is now empty and can be removed.
	fs.rmdirSync path
	return
# Generate a pseudo-random 8-character id drawn from [a-z0-9].
makeID = ->
	alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
	chars = for _ in [0...8]
		alphabet.charAt Math.floor(Math.random() * alphabet.length)
	chars.join ''
##############################################################
# PROGRAMS FUNCTIONS
# Print the MagiX ASCII banner, credits and a CLI usage summary to stdout.
# Registered as the default commander action.
about = ->
	console.log '\n'
	console.log '       d8bY88b   d88P '
	console.log '       Y8P Y88b d88P '
	console.log '            Y88o88P '
	console.log '88888b.d88b.  8888b.  .d88b. 888 Y888P '
	console.log '888 "888 "88b    "88bd88P"88b888  d888b '
	console.log '888  888  888.d888888888  888888 d88888b '
	console.log '888  888  888888  888Y88b 888888 d88P Y88b '
	console.log '888  888  888"Y888888 "Y88888888d88P   Y88b '
	console.log '                        888 '
	console.log '                   Y8b d88P '
	console.log '                    "Y88P" '
	console.log 'MagiX | magixjs.com'.green
	console.log 'Beyond magical.'.green
	console.log 'Created by Etienne Pinchon (@etiennepinchon)'.green
	console.log 'Copyright ©2016'.green
	console.log '\n'
	console.log 'Usage:'.green
	console.log '* create [name]           | Create a new project.'
	console.log '* launch [dir] [port]     | Create a new project.'
	console.log '* build [dir] [env]       | Build a project.'
	console.log '* clean [dir]             | Clear the build of a project.'
	console.log '* watch [dir]             | Observe change on a project and compile on the fly.'
	console.log '\n'
	return
# Scaffold a new MagiX project directory called `name`:
#   <name>/index.html, <name>/build/{App.js,index.js,catalog.js},
#   <name>/documents/App.coffee and package.json.
# The async file writes are chained: HTML -> App.js -> App.coffee ->
# index.js -> catalog.js -> package.json -> done().
create = (name) ->
	# Only letters, numbers, underscores, spaces or dashes are accepted.
	if not name or not /^[a-zA-Z0-9\_\s\-]{1,100}$/.test name
		console.log 'MagiX: [ERR] Name must be only letters, numbers, underscores, spaces or dashes.'.red
		return
	dir_project = './' + name
	if not fs.existsSync(dir_project)
		fs.mkdirSync(dir_project)
		# Generate App content in both JS and CS
		appJS = Generate.JS()
		appCS = Generate.CS()
		# Final step: cd into the new project and report success.
		done = ->
			process.chdir(dir_project)
			console.log 'MagiX: Project created successfully.'.green
		# Write package.json, then finish.
		createJSON = ->
			packageFile =
				name: name
				version: '0.1.0'
				description: ''
				tags: ''
				created_at: new Date
			fs.writeFile dir_project + '/package.json', JSON.stringify(packageFile, null, 2), (err) ->
				return console.log(err) if err
				done()
		# Write build/index.js, then the asset catalog.
		createIndexJS = ->
			indexJS = Generate.indexJS()
			fs.writeFile dir_project + '/build/index.js', indexJS, (err) ->
				return console.log(err) if err
				createCatalog()
		# Write build/catalog.js, then package.json.
		createCatalog = ->
			catalog = Generate.catalog()
			fs.writeFile dir_project + '/build/catalog.js', catalog, (err) ->
				return console.log(err) if err
				createJSON()
		# Folders and files generation
		dirBuild = dir_project + '/build'
		dirSources = dir_project + '/documents'
		fs.mkdirSync dir_project if not fs.existsSync(dir_project)
		fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
		fs.mkdirSync dirSources if not fs.existsSync(dirSources)
		htmlContent = Generate.HTML(name, '', '', no)
		appNameCS = '/App.coffee'
		appJS = Generate.appRunJS indent(appJS, 1)
		# Write HTML content
		fs.writeFile dir_project + '/index.html', htmlContent, (err) ->
			return console.log(err) if err
			# Write BUILD content
			fs.writeFile dirBuild + '/App.js', appJS, (err) ->
				return console.log(err) if err
				# Write SOURCES content
				fs.writeFile dirSources + appNameCS, appCS, (err) ->
					return console.log(err) if err
					createIndexJS()
					return
				return
			return
		return
# Start a local restify server for the project and (outside production)
# open a browser, attach live-reload and start the file watcher.
# Arguments are positional but forgiving: any of them may actually carry the
# 'production'/'prod'/'p' flag or the port, and are shifted accordingly.
launch = (dir, server_port, env)->
	######################################################
	# PROD CHECK
	prod = ['production', 'prod', 'p']
	isProd = no
	if not env
		# The prod flag may have been passed as `dir` or as `server_port`.
		if prod.indexOf(dir) > -1
			isProd = yes
			dir = undefined
		else if prod.indexOf(server_port) > -1
			isProd = yes
			server_port = undefined
	else
		isProd = yes if prod.indexOf(env) > -1
	######################################################
	if not dir
		dir = undefined
	# A purely numeric `dir` is actually the port.
	i = parseInt(dir)
	if Number.isInteger(i) and i > 0
		server_port = dir
		dir = undefined
	dir = process.cwd() if not dir
	if not fs.existsSync(dir)
		console.log 'MagiX: [ERR] Given folder does not exist.'.red
		return
	# If dir ends with / remove it
	if dir.endsWith('/')
		dir = dir.slice(0, -1)
	server_port = 9000 if not server_port
	######################################################
	# SERVER
	server = restify.createServer
		name: 'server'
		version: '1.0.0'
	server.use restify.acceptParser(server.acceptable)
	server.use restify.queryParser()
	server.use restify.bodyParser()
	server.use CookieParser.parse
	server.use restify.gzipResponse()
	# Static routes for the compiled build and raw documents.
	server.get /^\/build\/?.*/, restify.serveStatic directory: dir
	server.get /^\/documents\/?.*/, restify.serveStatic directory: dir
	# Everything else serves the project's index.html.
	server.get /\/?/, (req, res, next) ->
		res.writeHead 200
		fs.createReadStream(dir + '/index.html').pipe res
		next()
	# Reload server
	if not isProd
		reloadServer = reload(server, server, no)
	server.__port = server_port
	# start(message): listen on localhost; `message` controls whether the
	# launch banner is printed (and the browser opened in dev mode).
	server.start = (message)->
		server.listen server.__port, 'localhost', ->
			if message
				url = server.url.replace('127.0.0.1', 'localhost')
				if not isProd
					console.log(('MagiX: Project launched! Running! Address ' + url).green)
					openurl.open(url)
				else
					console.log(('MagiX: Project launched in Production mode! Running! Address ' + url).green)
			return
	server.start(yes)
	# In dev mode, recompile and reload on every documents/ change.
	if not isProd
		if fs.existsSync(dir + '/documents')
			watch(dir, server)
# Walk <dir>/documents and build the project into <dir>/build:
# .coffee files are compiled via compileFile(), .js/.css files are copied
# verbatim. When env (or dir) is 'production'/'prod'/'p', a production
# bundle is produced afterwards via buildProduction().
build = (dir, env) ->
	prod = ['production', 'prod', 'p']
	isProd = no
	if not env
		# The prod flag may have been passed as the first argument.
		if prod.indexOf(dir) > -1
			isProd = yes
			dir = undefined
	else
		isProd = yes if prod.indexOf(env) > -1
	dir = '.' if not dir
	return if not dirCheck dir
	files = []
	startA = +new Date()
	# Walker options
	walker = walk.walk(dir + '/documents', followLinks: false)
	walker.on 'file', (root, stat, next) ->
		filename = stat.name
		if filename.endsWith('.coffee')
			files.push root + '/' + filename
			# Strip the path and extension to get the class/document name.
			name = filename.split('/')
			name = name[name.length-1]
			name = name.replace('.coffee', '')
			compileFile name, root, next
		# JS files
		else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
			file = root + '/' + filename
			file_build = file.replace('documents', 'build')
			files.push file
			console.log ('MagiX: Copy ' + filename).magenta
			fs.copy file, file_build, (err) ->
				return console.error(err) if err
			next()
		else
			next()
		return
	walker.on 'end', ->
		# Regenerate build/index.js + catalog.js from what was built.
		buildAutoImport(dir)
		endA = +new Date()
		console.log "MagiX: Done: #{files.length} files built in #{(endA-startA)} ms.".green
		# Build production project
		buildProduction(dir) if isProd
	return
# Watch <dir>/documents for changes and rebuild incrementally:
# changed .coffee files are recompiled (with desktop notifications on
# errors), .js/.css files are copied or removed from the build, and the
# live-reload server (if any) is pinged. `server` is optionally restarted
# after every change.
watch = (dir, server) ->
	dir = '.' if not dir
	return if not dirCheck dir
	console.log 'MagiX: Now observing changes in your project..'.green
	watcher dir + '/documents', (filename) ->
		# Fire server-side reload event
		if reloadServer
			reloadServer.reload()
		# If file is coffeescript
		if filename and filename.endsWith('.coffee')
			name = filename.split('/')
			name = name[name.length-1]
			name = name.replace('.coffee', '')
			# Directory containing the changed file.
			path = filename.split('/')
			path.pop()
			path = path.join('/')
			if fs.existsSync(filename)
				# `yes` enables the error desktop notification.
				compileFile name, path, undefined, yes
				buildAutoImport(dir)
		# JS files
		else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
			name = filename.split('/')
			name = name[name.length-1]
			file_build = filename.replace('documents', 'build')
			# If the path exist, simply copy the JS file to the build
			if fs.existsSync(filename)
				console.log ('MagiX: Updating ' + name).magenta
				fs.copy filename, file_build, (err) ->
					return console.error(err) if err
			else if fs.existsSync(file_build)
				# Source was deleted: remove its build counterpart too.
				name = file_build.split('/')
				name = name[name.length-1]
				console.log ('MagiX: Removing ' + name).magenta
				fs.unlink file_build, (err) ->
					console.log err if err
		#else if filename and filename.index#filename isnt '.DS_Store'
		if server and server.close
			server.close()
			server.start(no)
	return
# Delete the project's build directory entirely, then rebuild it from the
# documents sources. `dir` defaults to the current directory.
clean = (dir) ->
	dir or= '.'
	return unless dirCheck dir
	console.log 'MagiX: Cleaning..'.magenta
	deleteFolderRecursive "#{dir}/build"
	build dir
	console.log "MagiX: Done: build cleaned.".green
	return
##############################################################
# PROCESSES
# Validate that `dir` looks like a MagiX project: the folder must exist and
# contain a "documents" directory. A missing "build" directory is created on
# the fly. Returns yes when the layout is usable, no/undefined otherwise.
dirCheck = (dir)->
	dir = '.' if not dir
	dir_build_check = no
	dir_documents_check = no
	if not fs.existsSync(dir)
		console.log 'MagiX: [ERR] Given folder does not exist.'.red
		return
	directories = getDirectories(dir)
	for directory in directories
		if directory is 'build'
			dir_build_check = yes
		else if directory is 'documents'
			dir_documents_check = yes
	if not dir_build_check or not dir_documents_check
		if not dir_build_check and not dir_documents_check
			console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
			console.log 'MagiX: [HELP] Are you sure you are in the right folder? (cd magix-yourProjectName ;) ).'.magenta
			# FIX: previously fell through to `return yes` after reporting the
			# error, so builds proceeded on a folder with neither directory.
			return no
		else
			if not dir_build_check
				# FIX: create "build" inside the PROJECT directory. This used
				# __dirname (the CLI's install path), which created the folder
				# next to this script instead of in the project being checked.
				dirBuild = dir + '/build'
				fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
				return yes
			if not dir_documents_check
				console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
				return no
	return yes
# Compile one CoffeeScript document (<dir>/<name>.coffee) into the build
# directory. Files carrying an "Extends <Type>" header (optionally with
# "Kind"/"Element" headers) are wrapped in a generated class before being
# compiled. `next` is the walker continuation; `notification` enables a
# desktop notification on compile errors (used while watching).
compileFile = (name, dir, next, notification)->
	console.log ('MagiX: Processing ' + name + ' ..').magenta
	fs.readFile dir + '/' + name + '.coffee', 'utf8', (err, data) ->
		return console.log(err) if err
		contentCopy = data
		file = {}
		if name isnt 'App'
			if /(Extends )\w+[ ]*\n/.test(contentCopy)
				# Extract and strip the "Extends <Type>" directive.
				contentCopy = contentCopy.replace /(Extends )\w+[ ]*\n/, (match) ->
					file.type = match.replace('Extends ', '')
					file.type = file.type.replace(/[ ]*\n/, '')
					return ''
				if /(Kind )([-a-zA-Z0-9])*\n/.test(contentCopy)
					# FIX: was `params.contentCopy.replace` — `params` is not
					# defined here, so any file using a "Kind" directive threw
					# a ReferenceError and never compiled.
					contentCopy = contentCopy.replace /(Kind )([-a-zA-Z0-9])*\n/, (match) ->
						file.kind = match.replace('Kind ', '')
						file.kind = file.kind.replace(/\n/, '')
						return ''
				if /(Element )([-a-zA-Z0-9])*\n/.test(contentCopy)
					contentCopy = contentCopy.replace /(Element )([-a-zA-Z0-9])*\n/, (match) ->
						file.element = match.replace('Element ', '')
						file.element = file.element.replace(/\n/, '')
						return ''
				# We signal the code that we are about to add a class wrapper around the code
				addClass = true
				# We indent the code
				contentCopy = indent(contentCopy, 2)
				classes = ['Page', 'View', 'Text', 'Button', 'Link', 'CheckBox', 'Dropdown', 'RadioButton', 'Image', 'List', 'ListItem', 'TextInput', 'SpeechRecognition', 'Say', 'FileInput', 'Player', 'Slider', 'ProgressBar', 'Canvas', 'WebView']
				# If extend framework, inject class with init
				if classes.indexOf(file.type) > -1
					# Create a class that extends another one
					classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
					classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
					classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
					classFile += 'constructor: (options) ->\n\t\t\
					super\n\
					' + contentCopy + '\n\t\t\
					if not @_didAppear and @parent and @didAppear\n\t\t\t\
					@_didAppear = true\n\t\t\t\
					@didAppear(@__options)'
				else
					# Create an empty class
					if file.type is 'None'
						classFile = "class #{name}\n\t"
						classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
						classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
						# FIX: a class without a parent must not call `super` —
						# the generated constructor previously emitted `super`,
						# which crashes at runtime for "Extends None" files.
						classFile += "constructor: (options) ->\n" + contentCopy
					else
						# Create an empty class
						classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
						classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
						classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
						classFile += 'constructor: (options) ->\n\t\t\
						super\n\
						' + contentCopy + '\n\t\t\
						if not @_didAppear and @parent and @didAppear\n\t\t\t\
						@_didAppear = true\n\t\t\t\
						@didAppear(@__options)'
			else if /(class)\s+\w+\s+(extends)\s+\w+/.test(contentCopy)
				# The author wrote an explicit class; record what it extends.
				file.element = contentCopy.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
				classFile = contentCopy
			else
				classFile = contentCopy
		else
			classFile = contentCopy
		# Convert CS to JS
		converted = null
		try
			converted = CoffeeScript.compile(classFile, 'bare': true)
		catch err
			convert_error = err
		# Define paths
		dirBuild = dir.replace('documents', 'build')
		nextStep = ->
			filePathBuild = dirBuild + '/' + name + '.js'
			if converted
				if name is 'App'
					# The App entry point gets wrapped in the runner template.
					convertedFinal = Generate.appRunJS indent(converted, 1)
				else
					convertedFinal = converted
				fs.writeFile filePathBuild, convertedFinal, (err) ->
					#console.log err if err
					console.log 'MagiX: ↳ success'.green
					next() if next
			else
				# Turn "[stdin]:line:col" into a readable error message.
				lines_info = String(convert_error).replace('[stdin]:', '').split(':')
				error = capitalizeFirstLetter "#{convert_error.message} at line #{lines_info[0]} column #{lines_info[1]}"
				console.log "MagiX: ↳ #{error}".red
				# Show user notification when watching changes
				if notification
					notifier = require('node-notifier')
					path = require('path')
					notifier.notify {
						title: 'MagiX | Error on ' + name
						message: error
						icon: path.join(__dirname, 'images/icon.png')
						sound: no
						wait: no
					}, (err, response) ->
						# Response is response from notification
				# FIX: keep the build walker moving on compile errors —
				# previously `next` was never called here, so a single broken
				# file stalled the whole build.
				next() if next
				return
		# Make sure the build directory exists before writing into it.
		if not fs.existsSync(dirBuild)
			mkdirp dirBuild, (err) ->
				if err
					console.error err
				else
					nextStep()
		else
			nextStep()
# Regenerate <dir>/build/index.js (ordered script list, App.js last) and
# <dir>/build/catalog.js (asset list) by walking <dir>/documents.
# Documents whose source starts with '!' are excluded from the import list.
buildAutoImport = (dir)->
	autoImport = []
	catalog = []
	# documents = reorderFiles documents
	documents = []
	# CREATE DOCUMENT ARRAY WITH FILE EXTENDS
	# LOOP THROUGH DOCUMENTS AND REORDER
	walker = walk.walk(dir + '/documents', followLinks: false)
	walker.on 'file', (root, stat, next) ->
		if stat.name.endsWith('.coffee')
			doc = {}
			doc.name = stat.name.split('/')
			doc.name = doc.name[doc.name.length-1]
			doc.name = doc.name.replace('.coffee', '')
			fs.readFile root + '/' + doc.name + '.coffee', 'utf8', (err, data) ->
				return console.log(err) if err
				# A leading '!' marks the file as excluded from auto-import.
				if data[0] isnt '!'
					# Build the /build/... path relative to the project root.
					root = root.substring(root.indexOf("/documents") + 1)
					path = '/' + root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
					if path isnt '/build/App.js'
						# Record what this document extends so reorderFiles can
						# place parents before children.
						if /(Element )([-a-zA-Z0-9])*\n/.test(data)
							doc.extends = data.match(/(Element )([-a-zA-Z0-9])*\n/)[0].replace('Element ', '')
							doc.extends = doc.extends.replace(/\n/, '')
						else if /(class)\s+\w+\s+(extends)\s+\w+/.test(data)
							doc.extends = data.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
						else
							doc.extends = null
						doc.path = path
						doc.id = makeID()
						documents.push doc
				next()
		else if stat.name.endsWith('.png') or stat.name.endsWith('.svg') or stat.name.endsWith('.jpg') or stat.name.endsWith('.jpeg') or stat.name.endsWith('.gif') or stat.name.endsWith('.webm') or stat.name.endsWith('.ogg') or stat.name.endsWith('.mpeg') or stat.name.endsWith('.mp3') or stat.name.endsWith('.wav') or stat.name.endsWith('.webm') or stat.name.endsWith('.mp4') or stat.name.endsWith('.ogg')
			# Media assets go into the catalog (path relative to documents/).
			catalog.push (root.substring(root.indexOf("/documents") + 1)).replace('documents/', '') + '/' + stat.name
			next()
		else
			next()
		return
	walker.on 'end', ->
		# Parents first, App.js always last.
		documents = reorderFiles documents
		for file in documents
			autoImport.push file.path
		autoImport.push '/build/App.js'
		indexJS = Generate.indexJS(JSON.stringify(autoImport))
		fs.writeFile dir + '/build/index.js', indexJS, (err) ->
			return console.log(err) if err
			fs.writeFile dir + '/build/catalog.js', Generate.catalog(catalog), (err) ->
				return console.log(err) if err
	return
# Produce a production bundle: clone the project into
# ../magix-<name>-production, minify every build script into a single
# /build/<randomId>.js, rewrite index.js to load it, then delete the
# now-redundant sources and empty folders.
# Pipeline: copy project -> write index.js -> minify() -> cleaning().
buildProduction = (dir)->
	dir = process.cwd() if not dir
	#if dir is '.'
	dirName = dir.split('/')
	dirName = dirName[dirName.length-1]
	files = []
	folder = path.dirname(dir) + '/' + dirName
	paths_to_remove = []
	# The project's package.json supplies the production folder name.
	magixJSON = fs.readFileSync(folder + '/package.json', 'utf8')
	config = JSON.parse(magixJSON)
	if not config.name
		console.log 'MagiX: [ERR] Invalid JSON project file, name missing.'.red
		return
	prodFolder = folder + "/../magix-#{config.name}-production"
	# Generate scriptID
	scriptID = makeID()
	# fs.copy folder, prodFolder, (err) ->
	# return console.error err if err
	if not fs.existsSync(prodFolder)
		console.log 'MagiX: Cloning working project..'.magenta
		fs.mkdirSync prodFolder
	# Clean build
	if fs.existsSync(prodFolder + '/build')
		deleteFolderRecursive prodFolder + '/build'
	# Copy project folder to production dir
	fs.copy folder, prodFolder, (err) ->
		return console.error err if err
		console.log ('MagiX: Production path: ' + prodFolder).green
		fs.writeFile prodFolder + '/build/index.js', Generate.indexJS(JSON.stringify('/build/' + scriptID + '.js')), (err) ->
			return console.log(err) if err
			minify()
			return
		return
	# Collect the built scripts (App.js last) and minify them into one file.
	minify = ->
		walker = walk.walk(prodFolder + '/documents', followLinks: false)
		walker.on 'file', (root, stat, next) ->
			name = stat.name.split('/')
			name = name[name.length-1]
			# REMOVE DS STORE ON MAC OS
			if stat.name.endsWith('.DS_Store')
				fs.unlinkSync root+'/'+stat.name
			# PUSH PATHS TO REMOVE LATER ON..
			if stat.name.endsWith('.coffee') or stat.name.endsWith('.js') or stat.name.endsWith('.css')
				name = name.replace('.coffee', '').replace('.js', '').replace('.css', '')
				if name isnt 'App'
					paths_to_remove.push root+'/'+stat.name
			# IF COFFEE, PREPARE FOR MINIFING
			if stat.name.endsWith('.coffee')
				name = name.replace('.coffee', '')
				fs.readFile root + '/' + name + '.coffee', 'utf8', (err, data) ->
					return console.log(err) if err
					path = root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
					if data[0] isnt '!'
						if path isnt prodFolder + '/build/App.js'
							files.push path
							next()
						else
							next()
					else
						# '!'-prefixed files stay standalone: minify in place.
						uglified = uglify.minify([path]).code
						fs.writeFile path, uglified, 'utf8', (err) ->
							return console.log(err) if err
							next()
			else
				next()
			return
		walker.on 'end', ->
			files.push prodFolder + '/build/App.js'
			# Minify files
			uglified = '/* Made with MagiX (magixjs.com) and a smile :) */ ' + uglify.minify(files).code
			console.log 'MagiX: Minify script..'.magenta
			appPath = prodFolder + '/build/' + scriptID + '.js'
			fs.writeFile appPath, uglified, (err) ->
				console.log 'MagiX: Cleaning..'.magenta
				cleaning ->
					console.log "MagiX: Done: project built for production.".green
		return
	# Delete the sources that were merged into the bundle, then prune
	# any directories left empty, and finally invoke `cb`.
	cleaning = (cb)->
		# ADD BUILD FOLDER PATHS TO DOCUMENTS PATH
		paths_to_remove = paths_to_remove.concat(files)
		# CLEAN BUILD AND DOCUMENTS FOLDER
		try
			for item in paths_to_remove
				if fs.existsSync(item)
					fs.unlinkSync item
		catch e
			console.log e
		cleanEmptyFoldersRecursively = (folder) ->
			fs = require('fs')
			path = require('path')
			isDir = fs.statSync(folder).isDirectory()
			if !isDir
				return
			files = fs.readdirSync(folder)
			if files.length > 0
				files.forEach (file) ->
					fullPath = path.join(folder, file)
					cleanEmptyFoldersRecursively fullPath
					return
				# re-evaluate files; after deleting subfolder
				# we may have parent folder empty now
				files = fs.readdirSync(folder)
			if files.length == 0
				console.log ('MagiX: removing: '+folder).magenta
				fs.rmdirSync folder
				return
			return
		cleanEmptyFoldersRecursively prodFolder + '/documents/'
		cleanEmptyFoldersRecursively prodFolder + '/build/'
		cb() if cb
##############################################################
# PROGRAMS
# CLI command registration (commander). Each command maps directly to one
# of the functions defined above; `about` runs when no command is given.
program
	.command('about', {isDefault: yes})
	.description('About magiX.')
	.action about
program
	.command('create [name]')
	.description('Create a new project.')
	.action create
program
	.command('launch [dir] [port] [env]')
	.description('Launch a local server to help you code an magix project.')
	.action launch
# Maybe for later
###
program
	.command('forever start [dir] [port]')
	.description('Launch a local server that runs continuously.')
	.action foreverStart
program
	.command('forever stop [dir] [port]')
	.description('Launch a local server that runs continuously.')
	.action foreverStop
###
program
	.command('build [dir] [env]')
	.description('Build a project.')
	.action build
program
	.command('clean [dir]')
	.description('Clear the build of a project.')
	.action clean
program
	.command('watch [dir]')
	.description('Observe change on a project and compile on the fly.')
	.action watch
# program
# 	.command('install [name] [dir]')
# 	.description('Add module to your project.')
# 	.action install
# Parse argv and dispatch to the matching .action handler.
program.parse process.argv
| 114786 | #!/usr/bin/env node
console.error = ->
##############################################################
# REQUIRE
prompt = require 'prompt'
program = require('commander')
colors = require('colors')
fs = require('fs-extra')
walk = require('walk')
path = require('path')
watcher = require('node-watch')
Generate = require('./lib/Generate')
CoffeeScript = require('coffee-script')
uglify = require('uglify-js')
restify = require('restify')
CookieParser = require('restify-cookies')
mkdirp = require('mkdirp')
openurl = require('openurl')
reload = require('reload')
##############################################################
# CONFIG
prompt.message = 'MagiX'
reloadServer = undefined
##############################################################
# HELPERS
Array::move = (old_index, new_index) ->
if new_index >= @length
k = new_index - (@length)
while k-- + 1
@push undefined
@splice new_index, 0, @splice(old_index, 1)[0]
this
reorderFiles = (files)->
new_files = JSON.parse(JSON.stringify(files));
no_match = []
for item of files
index = -1
for item2 of new_files
if new_files[item2].name is files[item].extends
index = item2
if index > -1
if index > item
new_files = new_files.move(index, item)
else
no_match.push files[item].id
for id in no_match
if id isnt undefined
for item of new_files
if new_files[item].id is id
new_files.move(item, 0)
return new_files
capitalizeFirstLetter = (string) ->
string.charAt(0).toUpperCase() + string.slice(1)
getDirectories = (srcpath) ->
fs.readdirSync(srcpath).filter (file) ->
fs.statSync(path.join(srcpath, file)).isDirectory()
# Indent text
indent = (str, numOfIndents, opt_spacesPerIndent) ->
str = str.replace(/^(?=.)/gm, new Array(numOfIndents + 1).join('\t'));
numOfIndents = new Array(opt_spacesPerIndent + 1 or 0).join(' ')
# re-use
if opt_spacesPerIndent then str.replace(/^\t+/g, ((tabs) ->
tabs.replace /./g, numOfIndents
)) else str
deleteFolderRecursive = (path) ->
if fs.existsSync(path)
fs.readdirSync(path).forEach (file, index) ->
curPath = path + '/' + file
if fs.lstatSync(curPath).isDirectory()
# recurse
deleteFolderRecursive curPath
else
# delete file
fs.unlinkSync curPath
return
fs.rmdirSync path
return
makeID = ->
text = ''
possible = 'abcdefghijklmnopqrstuvwxyz0123456789'
i = 0
while i < 8
text += possible.charAt(Math.floor(Math.random() * possible.length))
i++
text
##############################################################
# PROGRAMS FUNCTIONS
about = ->
console.log '\n'
console.log ' d8bY88b d88P '
console.log ' Y8P Y88b d88P '
console.log ' Y88o88P '
console.log '88888b.d88b. 8888b. .d88b. 888 Y888P '
console.log '888 "888 "88b "88bd88P"88b888 d888b '
console.log '888 888 888.d888888888 888888 d88888b '
console.log '888 888 888888 888Y88b 888888 d88P Y88b '
console.log '888 888 888"Y888888 "Y88888888d88P Y88b '
console.log ' 888 '
console.log ' Y8b d88P '
console.log ' "Y88P" '
console.log 'MagiX | magixjs.com'.green
console.log 'Beyond magical.'.green
console.log 'Created by <NAME> (@etiennepinchon)'.green
console.log 'Copyright ©2016'.green
console.log '\n'
console.log 'Usage:'.green
console.log '* create [name] | Create a new project.'
console.log '* launch [dir] [port] | Create a new project.'
console.log '* build [dir] [env] | Build a project.'
console.log '* clean [dir] | Clear the build of a project.'
console.log '* watch [dir] | Observe change on a project and compile on the fly.'
console.log '\n'
return
create = (name) ->
if not name or not /^[a-zA-Z0-9\_\s\-]{1,100}$/.test name
console.log 'MagiX: [ERR] Name must be only letters, numbers, underscores, spaces or dashes.'.red
return
dir_project = './' + name
if not fs.existsSync(dir_project)
fs.mkdirSync(dir_project)
# Generate App content in both JS and CS
appJS = Generate.JS()
appCS = Generate.CS()
done = ->
process.chdir(dir_project)
console.log 'MagiX: Project created successfully.'.green
createJSON = ->
packageFile =
name: name
version: '0.1.0'
description: ''
tags: ''
created_at: new Date
fs.writeFile dir_project + '/package.json', JSON.stringify(packageFile, null, 2), (err) ->
return console.log(err) if err
done()
createIndexJS = ->
indexJS = Generate.indexJS()
fs.writeFile dir_project + '/build/index.js', indexJS, (err) ->
return console.log(err) if err
createCatalog()
createCatalog = ->
catalog = Generate.catalog()
fs.writeFile dir_project + '/build/catalog.js', catalog, (err) ->
return console.log(err) if err
createJSON()
# Folders and files generation
dirBuild = dir_project + '/build'
dirSources = dir_project + '/documents'
fs.mkdirSync dir_project if not fs.existsSync(dir_project)
fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
fs.mkdirSync dirSources if not fs.existsSync(dirSources)
htmlContent = Generate.HTML(name, '', '', no)
appNameCS = '/App.coffee'
appJS = Generate.appRunJS indent(appJS, 1)
# Write HTML content
fs.writeFile dir_project + '/index.html', htmlContent, (err) ->
return console.log(err) if err
# Write BUILD content
fs.writeFile dirBuild + '/App.js', appJS, (err) ->
return console.log(err) if err
# Write SOURCES content
fs.writeFile dirSources + appNameCS, appCS, (err) ->
return console.log(err) if err
createIndexJS()
return
return
return
return
launch = (dir, server_port, env)->
######################################################
# PROD CHECK
prod = ['production', 'prod', 'p']
isProd = no
if not env
if prod.indexOf(dir) > -1
isProd = yes
dir = undefined
else if prod.indexOf(server_port) > -1
isProd = yes
server_port = undefined
else
isProd = yes if prod.indexOf(env) > -1
######################################################
if not dir
dir = undefined
i = parseInt(dir)
if Number.isInteger(i) and i > 0
server_port = dir
dir = undefined
dir = process.cwd() if not dir
if not fs.existsSync(dir)
console.log 'MagiX: [ERR] Given folder does not exist.'.red
return
# If dir ends with / remove it
if dir.endsWith('/')
dir = dir.slice(0, -1)
server_port = 9000 if not server_port
######################################################
# SERVER
server = restify.createServer
name: 'server'
version: '1.0.0'
server.use restify.acceptParser(server.acceptable)
server.use restify.queryParser()
server.use restify.bodyParser()
server.use CookieParser.parse
server.use restify.gzipResponse()
server.get /^\/build\/?.*/, restify.serveStatic directory: dir
server.get /^\/documents\/?.*/, restify.serveStatic directory: dir
server.get /\/?/, (req, res, next) ->
res.writeHead 200
fs.createReadStream(dir + '/index.html').pipe res
next()
# Reload server
if not isProd
reloadServer = reload(server, server, no)
server.__port = server_port
server.start = (message)->
server.listen server.__port, 'localhost', ->
if message
url = server.url.replace('127.0.0.1', 'localhost')
if not isProd
console.log(('MagiX: Project launched! Running! Address ' + url).green)
openurl.open(url)
else
console.log(('MagiX: Project launched in Production mode! Running! Address ' + url).green)
return
server.start(yes)
if not isProd
if fs.existsSync(dir + '/documents')
watch(dir, server)
build = (dir, env) ->
prod = ['production', 'prod', 'p']
isProd = no
if not env
if prod.indexOf(dir) > -1
isProd = yes
dir = undefined
else
isProd = yes if prod.indexOf(env) > -1
dir = '.' if not dir
return if not dirCheck dir
files = []
startA = +new Date()
# Walker options
walker = walk.walk(dir + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
filename = stat.name
if filename.endsWith('.coffee')
files.push root + '/' + filename
name = filename.split('/')
name = name[name.length-1]
name = name.replace('.coffee', '')
compileFile name, root, next
# JS files
else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
file = root + '/' + filename
file_build = file.replace('documents', 'build')
files.push file
console.log ('MagiX: Copy ' + filename).magenta
fs.copy file, file_build, (err) ->
return console.error(err) if err
next()
else
next()
return
walker.on 'end', ->
buildAutoImport(dir)
endA = +new Date()
console.log "MagiX: Done: #{files.length} files built in #{(endA-startA)} ms.".green
# Build production project
buildProduction(dir) if isProd
return
watch = (dir, server) ->
dir = '.' if not dir
return if not dirCheck dir
console.log 'MagiX: Now observing changes in your project..'.green
watcher dir + '/documents', (filename) ->
# Fire server-side reload event
if reloadServer
reloadServer.reload()
# If file is coffeescript
if filename and filename.endsWith('.coffee')
name = filename.split('/')
name = name[name.length-1]
name = name.replace('.coffee', '')
path = filename.split('/')
path.pop()
path = path.join('/')
if fs.existsSync(filename)
compileFile name, path, undefined, yes
buildAutoImport(dir)
# JS files
else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
name = filename.split('/')
name = name[name.length-1]
file_build = filename.replace('documents', 'build')
# If the path exist, simply copy the JS file to the build
if fs.existsSync(filename)
console.log ('MagiX: Updating ' + name).magenta
fs.copy filename, file_build, (err) ->
return console.error(err) if err
else if fs.existsSync(file_build)
name = file_build.split('/')
name = name[name.length-1]
console.log ('MagiX: Removing ' + name).magenta
fs.unlink file_build, (err) ->
console.log err if err
#else if filename and filename.index#filename isnt '.DS_Store'
if server and server.close
server.close()
server.start(no)
return
clean = (dir) ->
dir = '.' if not dir
return if not dirCheck dir
console.log 'MagiX: Cleaning..'.magenta
pathBuild = dir + '/build'
deleteFolderRecursive pathBuild
build(dir)
console.log "MagiX: Done: build cleaned.".green
return
##############################################################
# PROCESSES
dirCheck = (dir)->
dir = '.' if not dir
dir_build_check = no
dir_documents_check = no
if not fs.existsSync(dir)
console.log 'MagiX: [ERR] Given folder does not exist.'.red
return
directories = getDirectories(dir)
for directory in directories
if directory is 'build'
dir_build_check = yes
else if directory is 'documents'
dir_documents_check = yes
if not dir_build_check or not dir_documents_check
if not dir_build_check and not dir_documents_check
console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
console.log 'MagiX: [HELP] Are you sure you are in the right folder? (cd magix-yourProjectName ;) ).'.magenta
else
if not dir_build_check
dirBuild = __dirname + '/build'
fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
return yes
if not dir_documents_check
console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
return no
return yes
compileFile = (name, dir, next, notification)->
console.log ('MagiX: Processing ' + name + ' ..').magenta
fs.readFile dir + '/' + name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
contentCopy = data
file = {}
if name isnt 'App'
if /(Extends )\w+[ ]*\n/.test(contentCopy)
contentCopy = contentCopy.replace /(Extends )\w+[ ]*\n/, (match) ->
file.type = match.replace('Extends ', '')
file.type = file.type.replace(/[ ]*\n/, '')
return ''
if /(Kind )([-a-zA-Z0-9])*\n/.test(contentCopy)
contentCopy = params.contentCopy.replace /(Kind )([-a-zA-Z0-9])*\n/, (match) ->
file.kind = match.replace('Kind ', '')
file.kind = file.kind.replace(/\n/, '')
return ''
if /(Element )([-a-zA-Z0-9])*\n/.test(contentCopy)
contentCopy = contentCopy.replace /(Element )([-a-zA-Z0-9])*\n/, (match) ->
file.element = match.replace('Element ', '')
file.element = file.element.replace(/\n/, '')
return ''
# We signal the code that we are about to add a class wrapper around the code
addClass = true
# We indent the code
contentCopy = indent(contentCopy, 2)
classes = ['Page', 'View', 'Text', 'Button', 'Link', 'CheckBox', 'Dropdown', 'RadioButton', 'Image', 'List', 'ListItem', 'TextInput', 'SpeechRecognition', 'Say', 'FileInput', 'Player', 'Slider', 'ProgressBar', 'Canvas', 'WebView']
# If extend framework, inject class with init
if classes.indexOf(file.type) > -1
# Create a class that extends another one
classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += 'constructor: (options) ->\n\t\t\
super\n\
' + contentCopy + '\n\t\t\
if not @_didAppear and @parent and @didAppear\n\t\t\t\
@_didAppear = true\n\t\t\t\
@didAppear(@__options)'
else
# Create an empty class
if file.type is 'None'
classFile = "class #{name}\n\t"
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += "constructor: (options) ->\n\t\t\
super\n\
#{contentCopy}"
else
# Create an empty class
classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += 'constructor: (options) ->\n\t\t\
super\n\
' + contentCopy + '\n\t\t\
if not @_didAppear and @parent and @didAppear\n\t\t\t\
@_didAppear = true\n\t\t\t\
@didAppear(@__options)'
else if /(class)\s+\w+\s+(extends)\s+\w+/.test(contentCopy)
file.element = contentCopy.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
classFile = contentCopy
else
classFile = contentCopy
else
classFile = contentCopy
# Convert CS to JS
converted = null
try
converted = CoffeeScript.compile(classFile, 'bare': true)
catch err
convert_error = err
# Define paths
dirBuild = dir.replace('documents', 'build')
nextStep = ->
filePathBuild = dirBuild + '/' + name + '.js'
if converted
if name is 'App'
convertedFinal = Generate.appRunJS indent(converted, 1)
else
convertedFinal = converted
fs.writeFile filePathBuild, convertedFinal, (err) ->
#console.log err if err
console.log 'MagiX: ↳ success'.green
next() if next
else
lines_info = String(convert_error).replace('[stdin]:', '').split(':')
error = capitalizeFirstLetter "#{convert_error.message} at line #{lines_info[0]} column #{lines_info[1]}"
console.log "MagiX: ↳ #{error}".red
# Show user notification when watching changes
if notification
notifier = require('node-notifier')
path = require('path')
notifier.notify {
title: 'MagiX | Error on ' + name
message: error
icon: path.join(__dirname, 'images/icon.png')
sound: no
wait: no
}, (err, response) ->
# Response is response from notification
return
if not fs.existsSync(dirBuild)
mkdirp dirBuild, (err) ->
if err
console.error err
else
nextStep()
else
nextStep()
buildAutoImport = (dir)->
autoImport = []
catalog = []
# documents = reorderFiles documents
documents = []
# CREATE DOCUMENT ARRAY WITH FILE EXTENDS
# LOOP THROUGH DOCUMENTS AND REORDER
walker = walk.walk(dir + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
if stat.name.endsWith('.coffee')
doc = {}
doc.name = stat.name.split('/')
doc.name = doc.name[doc.name.length-1]
doc.name = doc.name.replace('.coffee', '')
fs.readFile root + '/' + doc.name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
if data[0] isnt '!'
root = root.substring(root.indexOf("/documents") + 1)
path = '/' + root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
if path isnt '/build/App.js'
if /(Element )([-a-zA-Z0-9])*\n/.test(data)
doc.extends = data.match(/(Element )([-a-zA-Z0-9])*\n/)[0].replace('Element ', '')
doc.extends = doc.extends.replace(/\n/, '')
else if /(class)\s+\w+\s+(extends)\s+\w+/.test(data)
doc.extends = data.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
else
doc.extends = null
doc.path = path
doc.id = makeID()
documents.push doc
next()
else if stat.name.endsWith('.png') or stat.name.endsWith('.svg') or stat.name.endsWith('.jpg') or stat.name.endsWith('.jpeg') or stat.name.endsWith('.gif') or stat.name.endsWith('.webm') or stat.name.endsWith('.ogg') or stat.name.endsWith('.mpeg') or stat.name.endsWith('.mp3') or stat.name.endsWith('.wav') or stat.name.endsWith('.webm') or stat.name.endsWith('.mp4') or stat.name.endsWith('.ogg')
catalog.push (root.substring(root.indexOf("/documents") + 1)).replace('documents/', '') + '/' + stat.name
next()
else
next()
return
walker.on 'end', ->
documents = reorderFiles documents
for file in documents
autoImport.push file.path
autoImport.push '/build/App.js'
indexJS = Generate.indexJS(JSON.stringify(autoImport))
fs.writeFile dir + '/build/index.js', indexJS, (err) ->
return console.log(err) if err
fs.writeFile dir + '/build/catalog.js', Generate.catalog(catalog), (err) ->
return console.log(err) if err
return
buildProduction = (dir)->
dir = process.cwd() if not dir
#if dir is '.'
dirName = dir.split('/')
dirName = dirName[dirName.length-1]
files = []
folder = path.dirname(dir) + '/' + dirName
paths_to_remove = []
magixJSON = fs.readFileSync(folder + '/package.json', 'utf8')
config = JSON.parse(magixJSON)
if not config.name
console.log 'MagiX: [ERR] Invalid JSON project file, name missing.'.red
return
prodFolder = folder + "/../magix-#{config.name}-production"
# Generate scriptID
scriptID = makeID()
# fs.copy folder, prodFolder, (err) ->
# return console.error err if err
if not fs.existsSync(prodFolder)
console.log 'MagiX: Cloning working project..'.magenta
fs.mkdirSync prodFolder
# Clean build
if fs.existsSync(prodFolder + '/build')
deleteFolderRecursive prodFolder + '/build'
# Copy project folder to production dir
fs.copy folder, prodFolder, (err) ->
return console.error err if err
console.log ('MagiX: Production path: ' + prodFolder).green
fs.writeFile prodFolder + '/build/index.js', Generate.indexJS(JSON.stringify('/build/' + scriptID + '.js')), (err) ->
return console.log(err) if err
minify()
return
return
minify = ->
walker = walk.walk(prodFolder + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
name = stat.name.split('/')
name = name[name.length-1]
# REMOVE DS STORE ON MAC OS
if stat.name.endsWith('.DS_Store')
fs.unlinkSync root+'/'+stat.name
# PUSH PATHS TO REMOVE LATER ON..
if stat.name.endsWith('.coffee') or stat.name.endsWith('.js') or stat.name.endsWith('.css')
name = name.replace('.coffee', '').replace('.js', '').replace('.css', '')
if name isnt 'App'
paths_to_remove.push root+'/'+stat.name
# IF COFFEE, PREPARE FOR MINIFING
if stat.name.endsWith('.coffee')
name = name.replace('.coffee', '')
fs.readFile root + '/' + name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
path = root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
if data[0] isnt '!'
if path isnt prodFolder + '/build/App.js'
files.push path
next()
else
next()
else
uglified = uglify.minify([path]).code
fs.writeFile path, uglified, 'utf8', (err) ->
return console.log(err) if err
next()
else
next()
return
walker.on 'end', ->
files.push prodFolder + '/build/App.js'
# Minify files
uglified = '/* Made with MagiX (magixjs.com) and a smile :) */ ' + uglify.minify(files).code
console.log 'MagiX: Minify script..'.magenta
appPath = prodFolder + '/build/' + scriptID + '.js'
fs.writeFile appPath, uglified, (err) ->
console.log 'MagiX: Cleaning..'.magenta
cleaning ->
console.log "MagiX: Done: project built for production.".green
return
cleaning = (cb)->
# ADD BUILD FOLDER PATHS TO DOCUMENTS PATH
paths_to_remove = paths_to_remove.concat(files)
# CLEAN BUILD AND DOCUMENTS FOLDER
try
for item in paths_to_remove
if fs.existsSync(item)
fs.unlinkSync item
catch e
console.log e
cleanEmptyFoldersRecursively = (folder) ->
fs = require('fs')
path = require('path')
isDir = fs.statSync(folder).isDirectory()
if !isDir
return
files = fs.readdirSync(folder)
if files.length > 0
files.forEach (file) ->
fullPath = path.join(folder, file)
cleanEmptyFoldersRecursively fullPath
return
# re-evaluate files; after deleting subfolder
# we may have parent folder empty now
files = fs.readdirSync(folder)
if files.length == 0
console.log ('MagiX: removing: '+folder).magenta
fs.rmdirSync folder
return
return
cleanEmptyFoldersRecursively prodFolder + '/documents/'
cleanEmptyFoldersRecursively prodFolder + '/build/'
cb() if cb
##############################################################
# PROGRAMS
program
.command('about', {isDefault: yes})
.description('About magiX.')
.action about
program
.command('create [name]')
.description('Create a new project.')
.action create
program
.command('launch [dir] [port] [env]')
.description('Launch a local server to help you code an magix project.')
.action launch
# Maybe for later
###
program
.command('forever start [dir] [port]')
.description('Launch a local server that runs continuously.')
.action foreverStart
program
.command('forever stop [dir] [port]')
.description('Launch a local server that runs continuously.')
.action foreverStop
###
program
.command('build [dir] [env]')
.description('Build a project.')
.action build
program
.command('clean [dir]')
.description('Clear the build of a project.')
.action clean
program
.command('watch [dir]')
.description('Observe change on a project and compile on the fly.')
.action watch
# program
# .command('install [name] [dir]')
# .description('Add module to your project.')
# .action install
program.parse process.argv
| true | #!/usr/bin/env node
console.error = ->
##############################################################
# REQUIRE
prompt = require 'prompt'
program = require('commander')
colors = require('colors')
fs = require('fs-extra')
walk = require('walk')
path = require('path')
watcher = require('node-watch')
Generate = require('./lib/Generate')
CoffeeScript = require('coffee-script')
uglify = require('uglify-js')
restify = require('restify')
CookieParser = require('restify-cookies')
mkdirp = require('mkdirp')
openurl = require('openurl')
reload = require('reload')
##############################################################
# CONFIG
prompt.message = 'MagiX'
reloadServer = undefined
##############################################################
# HELPERS
Array::move = (old_index, new_index) ->
if new_index >= @length
k = new_index - (@length)
while k-- + 1
@push undefined
@splice new_index, 0, @splice(old_index, 1)[0]
this
reorderFiles = (files)->
new_files = JSON.parse(JSON.stringify(files));
no_match = []
for item of files
index = -1
for item2 of new_files
if new_files[item2].name is files[item].extends
index = item2
if index > -1
if index > item
new_files = new_files.move(index, item)
else
no_match.push files[item].id
for id in no_match
if id isnt undefined
for item of new_files
if new_files[item].id is id
new_files.move(item, 0)
return new_files
capitalizeFirstLetter = (string) ->
string.charAt(0).toUpperCase() + string.slice(1)
getDirectories = (srcpath) ->
fs.readdirSync(srcpath).filter (file) ->
fs.statSync(path.join(srcpath, file)).isDirectory()
# Indent text
indent = (str, numOfIndents, opt_spacesPerIndent) ->
str = str.replace(/^(?=.)/gm, new Array(numOfIndents + 1).join('\t'));
numOfIndents = new Array(opt_spacesPerIndent + 1 or 0).join(' ')
# re-use
if opt_spacesPerIndent then str.replace(/^\t+/g, ((tabs) ->
tabs.replace /./g, numOfIndents
)) else str
deleteFolderRecursive = (path) ->
if fs.existsSync(path)
fs.readdirSync(path).forEach (file, index) ->
curPath = path + '/' + file
if fs.lstatSync(curPath).isDirectory()
# recurse
deleteFolderRecursive curPath
else
# delete file
fs.unlinkSync curPath
return
fs.rmdirSync path
return
makeID = ->
text = ''
possible = 'abcdefghijklmnopqrstuvwxyz0123456789'
i = 0
while i < 8
text += possible.charAt(Math.floor(Math.random() * possible.length))
i++
text
##############################################################
# PROGRAMS FUNCTIONS
about = ->
console.log '\n'
console.log ' d8bY88b d88P '
console.log ' Y8P Y88b d88P '
console.log ' Y88o88P '
console.log '88888b.d88b. 8888b. .d88b. 888 Y888P '
console.log '888 "888 "88b "88bd88P"88b888 d888b '
console.log '888 888 888.d888888888 888888 d88888b '
console.log '888 888 888888 888Y88b 888888 d88P Y88b '
console.log '888 888 888"Y888888 "Y88888888d88P Y88b '
console.log ' 888 '
console.log ' Y8b d88P '
console.log ' "Y88P" '
console.log 'MagiX | magixjs.com'.green
console.log 'Beyond magical.'.green
console.log 'Created by PI:NAME:<NAME>END_PI (@etiennepinchon)'.green
console.log 'Copyright ©2016'.green
console.log '\n'
console.log 'Usage:'.green
console.log '* create [name] | Create a new project.'
console.log '* launch [dir] [port] | Create a new project.'
console.log '* build [dir] [env] | Build a project.'
console.log '* clean [dir] | Clear the build of a project.'
console.log '* watch [dir] | Observe change on a project and compile on the fly.'
console.log '\n'
return
create = (name) ->
if not name or not /^[a-zA-Z0-9\_\s\-]{1,100}$/.test name
console.log 'MagiX: [ERR] Name must be only letters, numbers, underscores, spaces or dashes.'.red
return
dir_project = './' + name
if not fs.existsSync(dir_project)
fs.mkdirSync(dir_project)
# Generate App content in both JS and CS
appJS = Generate.JS()
appCS = Generate.CS()
done = ->
process.chdir(dir_project)
console.log 'MagiX: Project created successfully.'.green
createJSON = ->
packageFile =
name: name
version: '0.1.0'
description: ''
tags: ''
created_at: new Date
fs.writeFile dir_project + '/package.json', JSON.stringify(packageFile, null, 2), (err) ->
return console.log(err) if err
done()
createIndexJS = ->
indexJS = Generate.indexJS()
fs.writeFile dir_project + '/build/index.js', indexJS, (err) ->
return console.log(err) if err
createCatalog()
createCatalog = ->
catalog = Generate.catalog()
fs.writeFile dir_project + '/build/catalog.js', catalog, (err) ->
return console.log(err) if err
createJSON()
# Folders and files generation
dirBuild = dir_project + '/build'
dirSources = dir_project + '/documents'
fs.mkdirSync dir_project if not fs.existsSync(dir_project)
fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
fs.mkdirSync dirSources if not fs.existsSync(dirSources)
htmlContent = Generate.HTML(name, '', '', no)
appNameCS = '/App.coffee'
appJS = Generate.appRunJS indent(appJS, 1)
# Write HTML content
fs.writeFile dir_project + '/index.html', htmlContent, (err) ->
return console.log(err) if err
# Write BUILD content
fs.writeFile dirBuild + '/App.js', appJS, (err) ->
return console.log(err) if err
# Write SOURCES content
fs.writeFile dirSources + appNameCS, appCS, (err) ->
return console.log(err) if err
createIndexJS()
return
return
return
return
launch = (dir, server_port, env)->
######################################################
# PROD CHECK
prod = ['production', 'prod', 'p']
isProd = no
if not env
if prod.indexOf(dir) > -1
isProd = yes
dir = undefined
else if prod.indexOf(server_port) > -1
isProd = yes
server_port = undefined
else
isProd = yes if prod.indexOf(env) > -1
######################################################
if not dir
dir = undefined
i = parseInt(dir)
if Number.isInteger(i) and i > 0
server_port = dir
dir = undefined
dir = process.cwd() if not dir
if not fs.existsSync(dir)
console.log 'MagiX: [ERR] Given folder does not exist.'.red
return
# If dir ends with / remove it
if dir.endsWith('/')
dir = dir.slice(0, -1)
server_port = 9000 if not server_port
######################################################
# SERVER
server = restify.createServer
name: 'server'
version: '1.0.0'
server.use restify.acceptParser(server.acceptable)
server.use restify.queryParser()
server.use restify.bodyParser()
server.use CookieParser.parse
server.use restify.gzipResponse()
server.get /^\/build\/?.*/, restify.serveStatic directory: dir
server.get /^\/documents\/?.*/, restify.serveStatic directory: dir
server.get /\/?/, (req, res, next) ->
res.writeHead 200
fs.createReadStream(dir + '/index.html').pipe res
next()
# Reload server
if not isProd
reloadServer = reload(server, server, no)
server.__port = server_port
server.start = (message)->
server.listen server.__port, 'localhost', ->
if message
url = server.url.replace('127.0.0.1', 'localhost')
if not isProd
console.log(('MagiX: Project launched! Running! Address ' + url).green)
openurl.open(url)
else
console.log(('MagiX: Project launched in Production mode! Running! Address ' + url).green)
return
server.start(yes)
if not isProd
if fs.existsSync(dir + '/documents')
watch(dir, server)
build = (dir, env) ->
prod = ['production', 'prod', 'p']
isProd = no
if not env
if prod.indexOf(dir) > -1
isProd = yes
dir = undefined
else
isProd = yes if prod.indexOf(env) > -1
dir = '.' if not dir
return if not dirCheck dir
files = []
startA = +new Date()
# Walker options
walker = walk.walk(dir + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
filename = stat.name
if filename.endsWith('.coffee')
files.push root + '/' + filename
name = filename.split('/')
name = name[name.length-1]
name = name.replace('.coffee', '')
compileFile name, root, next
# JS files
else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
file = root + '/' + filename
file_build = file.replace('documents', 'build')
files.push file
console.log ('MagiX: Copy ' + filename).magenta
fs.copy file, file_build, (err) ->
return console.error(err) if err
next()
else
next()
return
walker.on 'end', ->
buildAutoImport(dir)
endA = +new Date()
console.log "MagiX: Done: #{files.length} files built in #{(endA-startA)} ms.".green
# Build production project
buildProduction(dir) if isProd
return
watch = (dir, server) ->
dir = '.' if not dir
return if not dirCheck dir
console.log 'MagiX: Now observing changes in your project..'.green
watcher dir + '/documents', (filename) ->
# Fire server-side reload event
if reloadServer
reloadServer.reload()
# If file is coffeescript
if filename and filename.endsWith('.coffee')
name = filename.split('/')
name = name[name.length-1]
name = name.replace('.coffee', '')
path = filename.split('/')
path.pop()
path = path.join('/')
if fs.existsSync(filename)
compileFile name, path, undefined, yes
buildAutoImport(dir)
# JS files
else if filename and (filename.endsWith('.js') or filename.endsWith('.css'))
name = filename.split('/')
name = name[name.length-1]
file_build = filename.replace('documents', 'build')
# If the path exist, simply copy the JS file to the build
if fs.existsSync(filename)
console.log ('MagiX: Updating ' + name).magenta
fs.copy filename, file_build, (err) ->
return console.error(err) if err
else if fs.existsSync(file_build)
name = file_build.split('/')
name = name[name.length-1]
console.log ('MagiX: Removing ' + name).magenta
fs.unlink file_build, (err) ->
console.log err if err
#else if filename and filename.index#filename isnt '.DS_Store'
if server and server.close
server.close()
server.start(no)
return
clean = (dir) ->
dir = '.' if not dir
return if not dirCheck dir
console.log 'MagiX: Cleaning..'.magenta
pathBuild = dir + '/build'
deleteFolderRecursive pathBuild
build(dir)
console.log "MagiX: Done: build cleaned.".green
return
##############################################################
# PROCESSES
dirCheck = (dir)->
dir = '.' if not dir
dir_build_check = no
dir_documents_check = no
if not fs.existsSync(dir)
console.log 'MagiX: [ERR] Given folder does not exist.'.red
return
directories = getDirectories(dir)
for directory in directories
if directory is 'build'
dir_build_check = yes
else if directory is 'documents'
dir_documents_check = yes
if not dir_build_check or not dir_documents_check
if not dir_build_check and not dir_documents_check
console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
console.log 'MagiX: [HELP] Are you sure you are in the right folder? (cd magix-yourProjectName ;) ).'.magenta
else
if not dir_build_check
dirBuild = __dirname + '/build'
fs.mkdirSync dirBuild if not fs.existsSync(dirBuild)
return yes
if not dir_documents_check
console.log 'MagiX: [ERR] Cannot find the "documents" directory.'.red
return no
return yes
compileFile = (name, dir, next, notification)->
console.log ('MagiX: Processing ' + name + ' ..').magenta
fs.readFile dir + '/' + name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
contentCopy = data
file = {}
if name isnt 'App'
if /(Extends )\w+[ ]*\n/.test(contentCopy)
contentCopy = contentCopy.replace /(Extends )\w+[ ]*\n/, (match) ->
file.type = match.replace('Extends ', '')
file.type = file.type.replace(/[ ]*\n/, '')
return ''
if /(Kind )([-a-zA-Z0-9])*\n/.test(contentCopy)
contentCopy = params.contentCopy.replace /(Kind )([-a-zA-Z0-9])*\n/, (match) ->
file.kind = match.replace('Kind ', '')
file.kind = file.kind.replace(/\n/, '')
return ''
if /(Element )([-a-zA-Z0-9])*\n/.test(contentCopy)
contentCopy = contentCopy.replace /(Element )([-a-zA-Z0-9])*\n/, (match) ->
file.element = match.replace('Element ', '')
file.element = file.element.replace(/\n/, '')
return ''
# We signal the code that we are about to add a class wrapper around the code
addClass = true
# We indent the code
contentCopy = indent(contentCopy, 2)
classes = ['Page', 'View', 'Text', 'Button', 'Link', 'CheckBox', 'Dropdown', 'RadioButton', 'Image', 'List', 'ListItem', 'TextInput', 'SpeechRecognition', 'Say', 'FileInput', 'Player', 'Slider', 'ProgressBar', 'Canvas', 'WebView']
# If extend framework, inject class with init
if classes.indexOf(file.type) > -1
# Create a class that extends another one
classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += 'constructor: (options) ->\n\t\t\
super\n\
' + contentCopy + '\n\t\t\
if not @_didAppear and @parent and @didAppear\n\t\t\t\
@_didAppear = true\n\t\t\t\
@didAppear(@__options)'
else
# Create an empty class
if file.type is 'None'
classFile = "class #{name}\n\t"
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += "constructor: (options) ->\n\t\t\
super\n\
#{contentCopy}"
else
# Create an empty class
classFile = 'class ' + name + ' extends ' + file.type + '\n\t'
classFile += '_kind : "' + file.kind + '"\n\t' if file.kind
classFile += '_elementType : "'+ file.element + '"\n\t' if file.element
classFile += 'constructor: (options) ->\n\t\t\
super\n\
' + contentCopy + '\n\t\t\
if not @_didAppear and @parent and @didAppear\n\t\t\t\
@_didAppear = true\n\t\t\t\
@didAppear(@__options)'
else if /(class)\s+\w+\s+(extends)\s+\w+/.test(contentCopy)
file.element = contentCopy.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
classFile = contentCopy
else
classFile = contentCopy
else
classFile = contentCopy
# Convert CS to JS
converted = null
try
converted = CoffeeScript.compile(classFile, 'bare': true)
catch err
convert_error = err
# Define paths
dirBuild = dir.replace('documents', 'build')
nextStep = ->
filePathBuild = dirBuild + '/' + name + '.js'
if converted
if name is 'App'
convertedFinal = Generate.appRunJS indent(converted, 1)
else
convertedFinal = converted
fs.writeFile filePathBuild, convertedFinal, (err) ->
#console.log err if err
console.log 'MagiX: ↳ success'.green
next() if next
else
lines_info = String(convert_error).replace('[stdin]:', '').split(':')
error = capitalizeFirstLetter "#{convert_error.message} at line #{lines_info[0]} column #{lines_info[1]}"
console.log "MagiX: ↳ #{error}".red
# Show user notification when watching changes
if notification
notifier = require('node-notifier')
path = require('path')
notifier.notify {
title: 'MagiX | Error on ' + name
message: error
icon: path.join(__dirname, 'images/icon.png')
sound: no
wait: no
}, (err, response) ->
# Response is response from notification
return
if not fs.existsSync(dirBuild)
mkdirp dirBuild, (err) ->
if err
console.error err
else
nextStep()
else
nextStep()
buildAutoImport = (dir)->
autoImport = []
catalog = []
# documents = reorderFiles documents
documents = []
# CREATE DOCUMENT ARRAY WITH FILE EXTENDS
# LOOP THROUGH DOCUMENTS AND REORDER
walker = walk.walk(dir + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
if stat.name.endsWith('.coffee')
doc = {}
doc.name = stat.name.split('/')
doc.name = doc.name[doc.name.length-1]
doc.name = doc.name.replace('.coffee', '')
fs.readFile root + '/' + doc.name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
if data[0] isnt '!'
root = root.substring(root.indexOf("/documents") + 1)
path = '/' + root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
if path isnt '/build/App.js'
if /(Element )([-a-zA-Z0-9])*\n/.test(data)
doc.extends = data.match(/(Element )([-a-zA-Z0-9])*\n/)[0].replace('Element ', '')
doc.extends = doc.extends.replace(/\n/, '')
else if /(class)\s+\w+\s+(extends)\s+\w+/.test(data)
doc.extends = data.match(/(class)\s+\w+\s+(extends)\s+\w+/)[0].replace(/(class)\s+\w+\s+(extends)\s+/, '')
else
doc.extends = null
doc.path = path
doc.id = makeID()
documents.push doc
next()
else if stat.name.endsWith('.png') or stat.name.endsWith('.svg') or stat.name.endsWith('.jpg') or stat.name.endsWith('.jpeg') or stat.name.endsWith('.gif') or stat.name.endsWith('.webm') or stat.name.endsWith('.ogg') or stat.name.endsWith('.mpeg') or stat.name.endsWith('.mp3') or stat.name.endsWith('.wav') or stat.name.endsWith('.webm') or stat.name.endsWith('.mp4') or stat.name.endsWith('.ogg')
catalog.push (root.substring(root.indexOf("/documents") + 1)).replace('documents/', '') + '/' + stat.name
next()
else
next()
return
walker.on 'end', ->
documents = reorderFiles documents
for file in documents
autoImport.push file.path
autoImport.push '/build/App.js'
indexJS = Generate.indexJS(JSON.stringify(autoImport))
fs.writeFile dir + '/build/index.js', indexJS, (err) ->
return console.log(err) if err
fs.writeFile dir + '/build/catalog.js', Generate.catalog(catalog), (err) ->
return console.log(err) if err
return
buildProduction = (dir)->
dir = process.cwd() if not dir
#if dir is '.'
dirName = dir.split('/')
dirName = dirName[dirName.length-1]
files = []
folder = path.dirname(dir) + '/' + dirName
paths_to_remove = []
magixJSON = fs.readFileSync(folder + '/package.json', 'utf8')
config = JSON.parse(magixJSON)
if not config.name
console.log 'MagiX: [ERR] Invalid JSON project file, name missing.'.red
return
prodFolder = folder + "/../magix-#{config.name}-production"
# Generate scriptID
scriptID = makeID()
# fs.copy folder, prodFolder, (err) ->
# return console.error err if err
if not fs.existsSync(prodFolder)
console.log 'MagiX: Cloning working project..'.magenta
fs.mkdirSync prodFolder
# Clean build
if fs.existsSync(prodFolder + '/build')
deleteFolderRecursive prodFolder + '/build'
# Copy project folder to production dir
fs.copy folder, prodFolder, (err) ->
return console.error err if err
console.log ('MagiX: Production path: ' + prodFolder).green
fs.writeFile prodFolder + '/build/index.js', Generate.indexJS(JSON.stringify('/build/' + scriptID + '.js')), (err) ->
return console.log(err) if err
minify()
return
return
minify = ->
walker = walk.walk(prodFolder + '/documents', followLinks: false)
walker.on 'file', (root, stat, next) ->
name = stat.name.split('/')
name = name[name.length-1]
# REMOVE DS STORE ON MAC OS
if stat.name.endsWith('.DS_Store')
fs.unlinkSync root+'/'+stat.name
# PUSH PATHS TO REMOVE LATER ON..
if stat.name.endsWith('.coffee') or stat.name.endsWith('.js') or stat.name.endsWith('.css')
name = name.replace('.coffee', '').replace('.js', '').replace('.css', '')
if name isnt 'App'
paths_to_remove.push root+'/'+stat.name
# IF COFFEE, PREPARE FOR MINIFING
if stat.name.endsWith('.coffee')
name = name.replace('.coffee', '')
fs.readFile root + '/' + name + '.coffee', 'utf8', (err, data) ->
return console.log(err) if err
path = root.replace('documents', 'build') + '/' + stat.name.replace('coffee', 'js')
if data[0] isnt '!'
if path isnt prodFolder + '/build/App.js'
files.push path
next()
else
next()
else
uglified = uglify.minify([path]).code
fs.writeFile path, uglified, 'utf8', (err) ->
return console.log(err) if err
next()
else
next()
return
walker.on 'end', ->
files.push prodFolder + '/build/App.js'
# Minify files
uglified = '/* Made with MagiX (magixjs.com) and a smile :) */ ' + uglify.minify(files).code
console.log 'MagiX: Minify script..'.magenta
appPath = prodFolder + '/build/' + scriptID + '.js'
fs.writeFile appPath, uglified, (err) ->
console.log 'MagiX: Cleaning..'.magenta
cleaning ->
console.log "MagiX: Done: project built for production.".green
return
cleaning = (cb)->
# ADD BUILD FOLDER PATHS TO DOCUMENTS PATH
paths_to_remove = paths_to_remove.concat(files)
# CLEAN BUILD AND DOCUMENTS FOLDER
try
for item in paths_to_remove
if fs.existsSync(item)
fs.unlinkSync item
catch e
console.log e
cleanEmptyFoldersRecursively = (folder) ->
fs = require('fs')
path = require('path')
isDir = fs.statSync(folder).isDirectory()
if !isDir
return
files = fs.readdirSync(folder)
if files.length > 0
files.forEach (file) ->
fullPath = path.join(folder, file)
cleanEmptyFoldersRecursively fullPath
return
# re-evaluate files; after deleting subfolder
# we may have parent folder empty now
files = fs.readdirSync(folder)
if files.length == 0
console.log ('MagiX: removing: '+folder).magenta
fs.rmdirSync folder
return
return
cleanEmptyFoldersRecursively prodFolder + '/documents/'
cleanEmptyFoldersRecursively prodFolder + '/build/'
cb() if cb
##############################################################
# PROGRAMS
program
.command('about', {isDefault: yes})
.description('About magiX.')
.action about
program
.command('create [name]')
.description('Create a new project.')
.action create
program
.command('launch [dir] [port] [env]')
.description('Launch a local server to help you code an magix project.')
.action launch
# Maybe for later
###
program
.command('forever start [dir] [port]')
.description('Launch a local server that runs continuously.')
.action foreverStart
program
.command('forever stop [dir] [port]')
.description('Launch a local server that runs continuously.')
.action foreverStop
###
program
.command('build [dir] [env]')
.description('Build a project.')
.action build
program
.command('clean [dir]')
.description('Clear the build of a project.')
.action clean
program
.command('watch [dir]')
.description('Observe change on a project and compile on the fly.')
.action watch
# program
# .command('install [name] [dir]')
# .description('Add module to your project.')
# .action install
program.parse process.argv
|
[
{
"context": "sted output\n\n###\nmodule.exports = [\n { \n id: \"8b94d8444351171180fa01b24e302bc215f21474\",\n authorName: \"Bee Wilkerson\",\n # relative",
"end": 420,
"score": 0.5744317770004272,
"start": 380,
"tag": "KEY",
"value": "8b94d8444351171180fa01b24e302bc215f21474"
},
{
"context": "51171180fa01b24e302bc215f21474\",\n authorName: \"Bee Wilkerson\",\n # relativeDate will always be wrong, don't ",
"end": 453,
"score": 0.9997793436050415,
"start": 440,
"tag": "NAME",
"value": "Bee Wilkerson"
},
{
"context": "nesAdded: 1,\n linesDeleted: 1 \n },{ \n id: \"73a007fed9c0aa562a6acf6cfb7ae019d82f677d\",\n auth",
"end": 704,
"score": 0.6936323046684265,
"start": 703,
"tag": "KEY",
"value": "7"
},
{
"context": "esAdded: 1,\n linesDeleted: 1 \n },{ \n id: \"73a007fed9c0aa562a6acf6cfb7ae019d82f677d\",\n authorName: \"Bee Wilkerson\",\n authorDate",
"end": 743,
"score": 0.6177029609680176,
"start": 704,
"tag": "PASSWORD",
"value": "3a007fed9c0aa562a6acf6cfb7ae019d82f677d"
},
{
"context": "c0aa562a6acf6cfb7ae019d82f677d\",\n authorName: \"Bee Wilkerson\",\n authorDate: 1453041603,\n message: \"4th o",
"end": 776,
"score": 0.9998323321342468,
"start": 763,
"tag": "NAME",
"value": "Bee Wilkerson"
},
{
"context": "nesAdded: 1,\n linesDeleted: 3 \n },{ \n id: \"b275decc27a27fcfcf653d50c79d705e1cec0c20\",\n auth",
"end": 937,
"score": 0.572832465171814,
"start": 936,
"tag": "KEY",
"value": "b"
},
{
"context": "esAdded: 1,\n linesDeleted: 3 \n },{ \n id: \"b275decc27a27fcfcf653d50c79d705e1cec0c20\",\n authorNam",
"end": 942,
"score": 0.3802569508552551,
"start": 937,
"tag": "PASSWORD",
"value": "275de"
},
{
"context": ": 1,\n linesDeleted: 3 \n },{ \n id: \"b275decc27a27fcfcf653d50c79d705e1cec0c20\",\n authorName: \"Bee",
"end": 949,
"score": 0.37423592805862427,
"start": 944,
"tag": "PASSWORD",
"value": "27a27"
},
{
"context": " linesDeleted: 3 \n },{ \n id: \"b275decc27a27fcfcf653d50c79d705e1cec0c20\",\n authorName: \"Bee Wilkerson\",\n authorDate",
"end": 976,
"score": 0.42886024713516235,
"start": 952,
"tag": "PASSWORD",
"value": "cf653d50c79d705e1cec0c20"
},
{
"context": "a27fcfcf653d50c79d705e1cec0c20\",\n authorName: \"Bee Wilkerson\",\n authorDate: 1453041536,\n message: \"3rd o",
"end": 1009,
"score": 0.9998024106025696,
"start": 996,
"tag": "NAME",
"value": "Bee Wilkerson"
},
{
"context": "nesAdded: 2,\n linesDeleted: 0 \n },{ \n id: \"04f65dcf5b6d7da5bad9dcc6a9fba52acb3e548f\",\n authorName: \"Bee Wilkerson\",\n authorDate",
"end": 1209,
"score": 0.7559107542037964,
"start": 1169,
"tag": "KEY",
"value": "04f65dcf5b6d7da5bad9dcc6a9fba52acb3e548f"
},
{
"context": "6d7da5bad9dcc6a9fba52acb3e548f\",\n authorName: \"Bee Wilkerson\",\n authorDate: 1453041463,\n message: \"2nd o",
"end": 1242,
"score": 0.9998487234115601,
"start": 1229,
"tag": "NAME",
"value": "Bee Wilkerson"
},
{
"context": "nesAdded: 6,\n linesDeleted: 1 \n },{ \n id: \"404744f451bfbac84598c19cd20506af87b2060d\",\n authorName: \"Bee Wilkerson\",\n authorDate",
"end": 1442,
"score": 0.7374041676521301,
"start": 1402,
"tag": "KEY",
"value": "404744f451bfbac84598c19cd20506af87b2060d"
},
{
"context": "bfbac84598c19cd20506af87b2060d\",\n authorName: \"Bee Wilkerson\",\n authorDate: 1453041370,\n message: \"broug",
"end": 1475,
"score": 0.9998443722724915,
"start": 1462,
"tag": "NAME",
"value": "Bee Wilkerson"
}
] | packages/git-time-machine/node_modules/git-log-utils/test/data/expectedCommitsForFile.coffee | jarednipper/atom-config | 1 | ###
this is the expected data for the first five commits of test-data/fiveCommits.txt.
Only the keys in these objects are tested against the actual first five commits read
from git log
this should remain static, but if you need to redo, use bin/gitLogJson.coffee test/lib/fiveCommits.txt
and replace array below with pasted output
###
module.exports = [
{
id: "8b94d8444351171180fa01b24e302bc215f21474",
authorName: "Bee Wilkerson",
# relativeDate will always be wrong, don't test it
# relativeDate: "2 hours ago",
authorDate: 1453041651,
message: "5th of 5 commits test",
body: "",
hash: "8b94d84",
linesAdded: 1,
linesDeleted: 1
},{
id: "73a007fed9c0aa562a6acf6cfb7ae019d82f677d",
authorName: "Bee Wilkerson",
authorDate: 1453041603,
message: "4th of 5 commits test",
body: "",
hash: "73a007f",
linesAdded: 1,
linesDeleted: 3
},{
id: "b275decc27a27fcfcf653d50c79d705e1cec0c20",
authorName: "Bee Wilkerson",
authorDate: 1453041536,
message: "3rd of 5 commits test",
body: "",
hash: "b275dec",
linesAdded: 2,
linesDeleted: 0
},{
id: "04f65dcf5b6d7da5bad9dcc6a9fba52acb3e548f",
authorName: "Bee Wilkerson",
authorDate: 1453041463,
message: "2nd of 5 commits test",
body: "",
hash: "04f65dc",
linesAdded: 6,
linesDeleted: 1
},{
id: "404744f451bfbac84598c19cd20506af87b2060d",
authorName: "Bee Wilkerson",
authorDate: 1453041370,
message: "brought over git-utils and test from git-time-machine. also start of 5 commits test.",
body: "",
hash: "404744f",
linesAdded: 5,
linesDeleted: 0
}
] | 197977 | ###
this is the expected data for the first five commits of test-data/fiveCommits.txt.
Only the keys in these objects are tested against the actual first five commits read
from git log
this should remain static, but if you need to redo, use bin/gitLogJson.coffee test/lib/fiveCommits.txt
and replace array below with pasted output
###
module.exports = [
{
id: "<KEY>",
authorName: "<NAME>",
# relativeDate will always be wrong, don't test it
# relativeDate: "2 hours ago",
authorDate: 1453041651,
message: "5th of 5 commits test",
body: "",
hash: "8b94d84",
linesAdded: 1,
linesDeleted: 1
},{
id: "<KEY> <PASSWORD>",
authorName: "<NAME>",
authorDate: 1453041603,
message: "4th of 5 commits test",
body: "",
hash: "73a007f",
linesAdded: 1,
linesDeleted: 3
},{
id: "<KEY> <PASSWORD>cc<PASSWORD>fcf<PASSWORD>",
authorName: "<NAME>",
authorDate: 1453041536,
message: "3rd of 5 commits test",
body: "",
hash: "b275dec",
linesAdded: 2,
linesDeleted: 0
},{
id: "<KEY>",
authorName: "<NAME>",
authorDate: 1453041463,
message: "2nd of 5 commits test",
body: "",
hash: "04f65dc",
linesAdded: 6,
linesDeleted: 1
},{
id: "<KEY>",
authorName: "<NAME>",
authorDate: 1453041370,
message: "brought over git-utils and test from git-time-machine. also start of 5 commits test.",
body: "",
hash: "404744f",
linesAdded: 5,
linesDeleted: 0
}
] | true | ###
this is the expected data for the first five commits of test-data/fiveCommits.txt.
Only the keys in these objects are tested against the actual first five commits read
from git log
this should remain static, but if you need to redo, use bin/gitLogJson.coffee test/lib/fiveCommits.txt
and replace array below with pasted output
###
module.exports = [
{
id: "PI:KEY:<KEY>END_PI",
authorName: "PI:NAME:<NAME>END_PI",
# relativeDate will always be wrong, don't test it
# relativeDate: "2 hours ago",
authorDate: 1453041651,
message: "5th of 5 commits test",
body: "",
hash: "8b94d84",
linesAdded: 1,
linesDeleted: 1
},{
id: "PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI",
authorName: "PI:NAME:<NAME>END_PI",
authorDate: 1453041603,
message: "4th of 5 commits test",
body: "",
hash: "73a007f",
linesAdded: 1,
linesDeleted: 3
},{
id: "PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PIccPI:PASSWORD:<PASSWORD>END_PIfcfPI:PASSWORD:<PASSWORD>END_PI",
authorName: "PI:NAME:<NAME>END_PI",
authorDate: 1453041536,
message: "3rd of 5 commits test",
body: "",
hash: "b275dec",
linesAdded: 2,
linesDeleted: 0
},{
id: "PI:KEY:<KEY>END_PI",
authorName: "PI:NAME:<NAME>END_PI",
authorDate: 1453041463,
message: "2nd of 5 commits test",
body: "",
hash: "04f65dc",
linesAdded: 6,
linesDeleted: 1
},{
id: "PI:KEY:<KEY>END_PI",
authorName: "PI:NAME:<NAME>END_PI",
authorDate: 1453041370,
message: "brought over git-utils and test from git-time-machine. also start of 5 commits test.",
body: "",
hash: "404744f",
linesAdded: 5,
linesDeleted: 0
}
] |
[
{
"context": " <div>\n Server Password: <input type='password' id='password'/>\n </div>\n <div>\n ",
"end": 2030,
"score": 0.9960342645645142,
"start": 2022,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "div>\n Application Password: <input type='password' id='applicationPassword'/>\n </div>\n ",
"end": 2225,
"score": 0.995659589767456,
"start": 2217,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "UrlWithCredentials: =>\n username = Cookie.get(\"username\")\n password = Cookie.get(\"password\")\n \"#{Ta",
"end": 2626,
"score": 0.5220986604690552,
"start": 2618,
"tag": "USERNAME",
"value": "username"
},
{
"context": "rnamePassword: =>\n Cookie.set \"username\", @$('#username').val()\n Cookie.set \"password\", @$('#password'",
"end": 2847,
"score": 0.5382568836212158,
"start": 2839,
"tag": "USERNAME",
"value": "username"
},
{
"context": "render()\n\n login: =>\n @username = Cookie.get(\"username\")\n @password = Cookie.get(\"password\")\n\n unl",
"end": 3261,
"score": 0.963641881942749,
"start": 3253,
"tag": "USERNAME",
"value": "username"
},
{
"context": "ookie.get(\"username\")\n @password = Cookie.get(\"password\")\n\n unless @username and @password\n retur",
"end": 3300,
"score": 0.8466565012931824,
"start": 3292,
"tag": "PASSWORD",
"value": "password"
}
] | views/ServerView.coffee | Coconut-Data/tamarind | 0 | Backbone = require 'backbone'
Passphrase = require 'xkcd-passphrase'
crypto = require('crypto')
{ CognitoIdentityClient } = require("@aws-sdk/client-cognito-identity")
{ fromCognitoIdentityPool } = require("@aws-sdk/credential-provider-cognito-identity")
{DynamoDBClient,ScanCommand,PutItemCommand, CreateTableCommand, DescribeTableCommand} = require("@aws-sdk/client-dynamodb")
{ marshall, unmarshall } = require("@aws-sdk/util-dynamodb")
class ServerView extends Backbone.View
render: =>
await @login()
.catch =>
@renderLoginForm()
Promise.resolve()
.then (databaseList) =>
@$el.html "
<style>
li {
padding-top: 1em;
}
li a{
font-size: 1em;
}
</style>
<iframe class='help' style='display:none; float:right' width='420' height='315' src='https://www.youtube.com/embed/eMxO_U-i2Hc'></iframe>
<h1>Select a #{if @isDynamoDB then "Gateway" else "database"}:</h1>
#{
if @isDynamoDB
(for gateway in databaseList
"<li style='height:50px;'><a href='#gateway/#{Tamarind.serverName}/#{gateway}'>#{gateway}</a></li>"
).join("")
else
(for database in databaseList
continue unless _(database).isString()
continue if database.startsWith("_")
continue if database.match(/backup/)
continue if database.startsWith("plugin")
"<li><a href='#database/#{Tamarind.serverName}/#{database}'>#{database}</a></li>"
).join("")
}
<h1>or select a configured result:</h1>
"
$("#title").html "
<a href='#'>#{Tamarind.serverName}</a>
"
renderLoginForm: =>
@$el.html "
<h1>#{Tamarind.serverName}</h1>
<div style='margin-left:100px; margin-top:100px; id='usernamePassword'>
<div>
Server Username: <input id='username'/>
</div>
<div>
Server Password: <input type='password' id='password'/>
</div>
<div>
Application Username: <input id='applicationUsername'/>
</div>
<div>
Application Password: <input type='password' id='applicationPassword'/>
</div>
<button id='login'>Login</button>
</div>
"
events: =>
"click #login": "updateUsernamePassword"
"click #newDatabase": "newDatabase"
"click #newGateway": "newGateway"
"click #daily-button": "updateTasks"
"click #five-minutes-button": "updateTasks"
getServerUrlWithCredentials: =>
username = Cookie.get("username")
password = Cookie.get("password")
"#{Tamarind.knownDatabaseServers[Tamarind.serverName]}".replace(/:\/\//, "://#{username}:#{password}@")
updateUsernamePassword: =>
Cookie.set "username", @$('#username').val()
Cookie.set "password", @$('#password').val()
Cookie.set "applicationUsername", @$('#applicationUsername').val()
Cookie.set "applicationPassword", @$('#applicationPassword').val()
if Tamarind.targetUrl
targetUrl = Tamarind.targetUrl
Tamarind.targetUrl = null
return router.navigate targetUrl, trigger:true
@render()
login: =>
@username = Cookie.get("username")
@password = Cookie.get("password")
unless @username and @password
return Promise.reject()
@fetchDatabaseList()
fetchDatabaseList: =>
new Promise (resolve,reject) =>
if Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId? # DynamoDB
@isDynamoDB = true
region = Tamarind.knownDatabaseServers[Tamarind.serverName].region
identityPoolId = Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId
@dynamoDBClient = new DynamoDBClient(
region: region
credentials: fromCognitoIdentityPool(
client: new CognitoIdentityClient({region})
identityPoolId: identityPoolId
)
)
gatewayConfigurations = await @dynamoDBClient.send(
new ScanCommand(
TableName: "Configurations"
)
)
Tamarind.gateways = {}
for item in gatewayConfigurations.Items
unmarshalledItem = unmarshall(item)
Tamarind.gateways[unmarshalledItem.gatewayName] = unmarshalledItem
resolve(gatewayName for gatewayName,details of Tamarind.gateways)
else
@isDynamoDB = false
fetch "#{Tamarind.knownDatabaseServers[Tamarind.serverName]}/_all_dbs",
method: 'GET'
credentials: 'include'
headers:
'content-type': 'application/json'
authorization: "Basic #{btoa("#{@username}:#{@password}")}"
.catch (error) =>
reject(error)
.then (response) =>
if response.status is 401
reject(response.statusText)
else
result = await response.json()
resolve(result)
module.exports = ServerView
| 58919 | Backbone = require 'backbone'
Passphrase = require 'xkcd-passphrase'
crypto = require('crypto')
{ CognitoIdentityClient } = require("@aws-sdk/client-cognito-identity")
{ fromCognitoIdentityPool } = require("@aws-sdk/credential-provider-cognito-identity")
{DynamoDBClient,ScanCommand,PutItemCommand, CreateTableCommand, DescribeTableCommand} = require("@aws-sdk/client-dynamodb")
{ marshall, unmarshall } = require("@aws-sdk/util-dynamodb")
class ServerView extends Backbone.View
render: =>
await @login()
.catch =>
@renderLoginForm()
Promise.resolve()
.then (databaseList) =>
@$el.html "
<style>
li {
padding-top: 1em;
}
li a{
font-size: 1em;
}
</style>
<iframe class='help' style='display:none; float:right' width='420' height='315' src='https://www.youtube.com/embed/eMxO_U-i2Hc'></iframe>
<h1>Select a #{if @isDynamoDB then "Gateway" else "database"}:</h1>
#{
if @isDynamoDB
(for gateway in databaseList
"<li style='height:50px;'><a href='#gateway/#{Tamarind.serverName}/#{gateway}'>#{gateway}</a></li>"
).join("")
else
(for database in databaseList
continue unless _(database).isString()
continue if database.startsWith("_")
continue if database.match(/backup/)
continue if database.startsWith("plugin")
"<li><a href='#database/#{Tamarind.serverName}/#{database}'>#{database}</a></li>"
).join("")
}
<h1>or select a configured result:</h1>
"
$("#title").html "
<a href='#'>#{Tamarind.serverName}</a>
"
renderLoginForm: =>
@$el.html "
<h1>#{Tamarind.serverName}</h1>
<div style='margin-left:100px; margin-top:100px; id='usernamePassword'>
<div>
Server Username: <input id='username'/>
</div>
<div>
Server Password: <input type='<PASSWORD>' id='password'/>
</div>
<div>
Application Username: <input id='applicationUsername'/>
</div>
<div>
Application Password: <input type='<PASSWORD>' id='applicationPassword'/>
</div>
<button id='login'>Login</button>
</div>
"
events: =>
"click #login": "updateUsernamePassword"
"click #newDatabase": "newDatabase"
"click #newGateway": "newGateway"
"click #daily-button": "updateTasks"
"click #five-minutes-button": "updateTasks"
getServerUrlWithCredentials: =>
username = Cookie.get("username")
password = Cookie.get("password")
"#{Tamarind.knownDatabaseServers[Tamarind.serverName]}".replace(/:\/\//, "://#{username}:#{password}@")
updateUsernamePassword: =>
Cookie.set "username", @$('#username').val()
Cookie.set "password", @$('#password').val()
Cookie.set "applicationUsername", @$('#applicationUsername').val()
Cookie.set "applicationPassword", @$('#applicationPassword').val()
if Tamarind.targetUrl
targetUrl = Tamarind.targetUrl
Tamarind.targetUrl = null
return router.navigate targetUrl, trigger:true
@render()
login: =>
@username = Cookie.get("username")
@password = Cookie.get("<PASSWORD>")
unless @username and @password
return Promise.reject()
@fetchDatabaseList()
fetchDatabaseList: =>
new Promise (resolve,reject) =>
if Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId? # DynamoDB
@isDynamoDB = true
region = Tamarind.knownDatabaseServers[Tamarind.serverName].region
identityPoolId = Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId
@dynamoDBClient = new DynamoDBClient(
region: region
credentials: fromCognitoIdentityPool(
client: new CognitoIdentityClient({region})
identityPoolId: identityPoolId
)
)
gatewayConfigurations = await @dynamoDBClient.send(
new ScanCommand(
TableName: "Configurations"
)
)
Tamarind.gateways = {}
for item in gatewayConfigurations.Items
unmarshalledItem = unmarshall(item)
Tamarind.gateways[unmarshalledItem.gatewayName] = unmarshalledItem
resolve(gatewayName for gatewayName,details of Tamarind.gateways)
else
@isDynamoDB = false
fetch "#{Tamarind.knownDatabaseServers[Tamarind.serverName]}/_all_dbs",
method: 'GET'
credentials: 'include'
headers:
'content-type': 'application/json'
authorization: "Basic #{btoa("#{@username}:#{@password}")}"
.catch (error) =>
reject(error)
.then (response) =>
if response.status is 401
reject(response.statusText)
else
result = await response.json()
resolve(result)
module.exports = ServerView
| true | Backbone = require 'backbone'
Passphrase = require 'xkcd-passphrase'
crypto = require('crypto')
{ CognitoIdentityClient } = require("@aws-sdk/client-cognito-identity")
{ fromCognitoIdentityPool } = require("@aws-sdk/credential-provider-cognito-identity")
{DynamoDBClient,ScanCommand,PutItemCommand, CreateTableCommand, DescribeTableCommand} = require("@aws-sdk/client-dynamodb")
{ marshall, unmarshall } = require("@aws-sdk/util-dynamodb")
class ServerView extends Backbone.View
render: =>
await @login()
.catch =>
@renderLoginForm()
Promise.resolve()
.then (databaseList) =>
@$el.html "
<style>
li {
padding-top: 1em;
}
li a{
font-size: 1em;
}
</style>
<iframe class='help' style='display:none; float:right' width='420' height='315' src='https://www.youtube.com/embed/eMxO_U-i2Hc'></iframe>
<h1>Select a #{if @isDynamoDB then "Gateway" else "database"}:</h1>
#{
if @isDynamoDB
(for gateway in databaseList
"<li style='height:50px;'><a href='#gateway/#{Tamarind.serverName}/#{gateway}'>#{gateway}</a></li>"
).join("")
else
(for database in databaseList
continue unless _(database).isString()
continue if database.startsWith("_")
continue if database.match(/backup/)
continue if database.startsWith("plugin")
"<li><a href='#database/#{Tamarind.serverName}/#{database}'>#{database}</a></li>"
).join("")
}
<h1>or select a configured result:</h1>
"
$("#title").html "
<a href='#'>#{Tamarind.serverName}</a>
"
renderLoginForm: =>
@$el.html "
<h1>#{Tamarind.serverName}</h1>
<div style='margin-left:100px; margin-top:100px; id='usernamePassword'>
<div>
Server Username: <input id='username'/>
</div>
<div>
Server Password: <input type='PI:PASSWORD:<PASSWORD>END_PI' id='password'/>
</div>
<div>
Application Username: <input id='applicationUsername'/>
</div>
<div>
Application Password: <input type='PI:PASSWORD:<PASSWORD>END_PI' id='applicationPassword'/>
</div>
<button id='login'>Login</button>
</div>
"
events: =>
"click #login": "updateUsernamePassword"
"click #newDatabase": "newDatabase"
"click #newGateway": "newGateway"
"click #daily-button": "updateTasks"
"click #five-minutes-button": "updateTasks"
getServerUrlWithCredentials: =>
username = Cookie.get("username")
password = Cookie.get("password")
"#{Tamarind.knownDatabaseServers[Tamarind.serverName]}".replace(/:\/\//, "://#{username}:#{password}@")
updateUsernamePassword: =>
Cookie.set "username", @$('#username').val()
Cookie.set "password", @$('#password').val()
Cookie.set "applicationUsername", @$('#applicationUsername').val()
Cookie.set "applicationPassword", @$('#applicationPassword').val()
if Tamarind.targetUrl
targetUrl = Tamarind.targetUrl
Tamarind.targetUrl = null
return router.navigate targetUrl, trigger:true
@render()
login: =>
@username = Cookie.get("username")
@password = Cookie.get("PI:PASSWORD:<PASSWORD>END_PI")
unless @username and @password
return Promise.reject()
@fetchDatabaseList()
fetchDatabaseList: =>
new Promise (resolve,reject) =>
if Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId? # DynamoDB
@isDynamoDB = true
region = Tamarind.knownDatabaseServers[Tamarind.serverName].region
identityPoolId = Tamarind.knownDatabaseServers[Tamarind.serverName].IdentityPoolId
@dynamoDBClient = new DynamoDBClient(
region: region
credentials: fromCognitoIdentityPool(
client: new CognitoIdentityClient({region})
identityPoolId: identityPoolId
)
)
gatewayConfigurations = await @dynamoDBClient.send(
new ScanCommand(
TableName: "Configurations"
)
)
Tamarind.gateways = {}
for item in gatewayConfigurations.Items
unmarshalledItem = unmarshall(item)
Tamarind.gateways[unmarshalledItem.gatewayName] = unmarshalledItem
resolve(gatewayName for gatewayName,details of Tamarind.gateways)
else
@isDynamoDB = false
fetch "#{Tamarind.knownDatabaseServers[Tamarind.serverName]}/_all_dbs",
method: 'GET'
credentials: 'include'
headers:
'content-type': 'application/json'
authorization: "Basic #{btoa("#{@username}:#{@password}")}"
.catch (error) =>
reject(error)
.then (response) =>
if response.status is 401
reject(response.statusText)
else
result = await response.json()
resolve(result)
module.exports = ServerView
|
[
{
"context": "dynamic love to your reaver and wash duo\n# Author: Robbie Saunders http://eibbors.com/[/p/reavetard]\n# =============",
"end": 208,
"score": 0.9998704791069031,
"start": 193,
"tag": "NAME",
"value": "Robbie Saunders"
}
] | src/reavetard.coffee | eibbors/reavetard | 1 | # Reavetard - Reaver WPS (+Wash) extension scripts
# reavetard.coffee :: Module that brings all the little pieces together to make
# sweet, dynamic love to your reaver and wash duo
# Author: Robbie Saunders http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
rtard = require 'commander'
cli = require './cli'
db = require './db'
wash = require './wash'
{ReaverQueueManager} = require './reaver'
config = require './config'
# `Command` handlers - functions used to initiate/control reavetard
# ----------------------------------------------------------------------
# Spawns a wash survey (technically a scan, by default)
runWashScan = (iface, scan = true) =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
iface ?= config.DEFAULT_INTERFACE
w = new wash(config.WASH_DEFAULT_ARGS(iface, scan))
# Clean wash up in case we missed another chance to stop it
# These processes go absolutely bonkers with your resources once there's
# around three or more of the bastards left open. hah
process.on 'exit', =>
w.stop()
# After messing around with different options, this seemed simple & intuitive enough
cli._c.foreground(250)
rtard.prompt ' *** Press enter to move on to next step ***', =>
w.stop()
process.nextTick -> reviewTargets.apply(null, [stations])
# Handle access points found by our washing machine
w.on 'ap', (station) ->
rdb.checkSurvey station.bssid, (err, rows) ->
if not err and rows.length > 0
device =
name: rows[0].device_name
manufacturer: rows[0].manufacturer
model: rows[0].model_name
number: rows[0].model_number
for k,v of device
if v isnt ''
station.device = device
rdb.checkHistory station.bssid, (err, rows) ->
if err then console.error errd
if rows.length > 0
# Copy history data to station object
(station[k] ?= v) for k,v of rows[0]
# Load session data (if available)
station.session = rdb.loadSession station.bssid
# If history -or- session specifies completion
if station.attempts >= 11000 or station.session.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category
# Hopefully you won't be seeing any of these!
w.on 'error', ->
console.error "ERROR: #{arguments}"
# Let the mayhem begin
w.start()
cli.cwrite('magenta', ' Wash scan has been started, now waiting for AP data...\n\n')
# Function called when running the rdbq command. It pulls -every- available
# row out of the history/survey tables and any associated session files.
runRDBQuery = () =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
rdb.getHistory (err, history) ->
if err then throw err
rdb.getSurvey (err, survey) ->
if err then throw err
joined = {}
# Start by indexing the history data by bssid in new object, joined
for row in history
joined[row.bssid] = row
# Merge the survey data with history data or index the new station
for row in survey
joined[row.bssid] ?= {}
(joined[row.bssid][k] = v) for k,v of row
joined[row.bssid].locked = joined[row.bssid].locked is 1
# Finally, load session data, categorize the station, and print confirmation
for bssid, station of joined
station.session = rdb.loadSession(bssid)
if station.attempts?
if station.attempts is 11000 or station.session?.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category, true
rtard.prompt '\n (Press enter to continue to target review)', ->
process.nextTick -> reviewTargets.apply(null, [stations])
# Present ansi-formatted tables containing scan/query results, from which user can select from
reviewTargets = (stations, reprompt=false) ->
if not reprompt
cli.clear()
indexed = cli.targetReviewTable stations
cli.reviewSelectors()
cli._c.foreground(255)
else
console.log ' Your input did not include any valid selections, please try again...'
rtard.prompt ' Please enter the #/selector for each target that you wish to select: ', (input) =>
cli._c.down(1).erase('line')
selections = input.match(/-?\d+/g)
selected = []
for station in indexed
if '0' in selections or "#{station.tableIndex}" in selections
selected.push station
else
switch station.category
when 'C'
if '-1' in selections then selected.push station
when 'W'
if '-2' in selections then selected.push station
when 'N'
if '-3' in selections then selected.push station
cli.clear().title(true).cwrite(250, " You have selected the following #{selected.length} station(s): \n ")
isfirst = true
for sel in selected
if not isfirst then cli.cwrite(250, ', ')
else isfirst = false
cli.cwrite(cli.STATION_COLORS[sel.category ? 'N'], "#{sel.essid}")
cli.cdwrite('bright', 'blue', '\n\n What would you like reavetard to do with these station(s)?\n')
.cdwrite('reset', 245, ' -----------------------------------------------------------------------\n').targetActions()
cli._c.foreground('blue')
actionPrompt(selected)
# Prompt user for desired action, given a list of targets, and perform
# action or defer it to another function
actionPrompt = (selected) =>
rtard.prompt '\n Please enter one of the letters in ()\'s, or entire title, of your desired action: ', (choice) =>
switch choice
when 'a', 'attack'
process.nextTick -> startAttack.apply(null, [selected])
when 'j', 'json' # Output station data in JSON (useful when scripting reavetard yourself)
cli.cwrite 250, ' Would you like to include the session file key data?\n'
cli.cwrite 'yellow', ' Doing so can add 11k partial pins for every station with session data!\n'
cli._c.foreground 250
rtard.confirm " What'll it be, champ? (y)ay or (n)ay: ", (showPins) ->
if not showPins
for s in selected when s.session?
s.session.Pins = 'removed'
console.log JSON.stringify selected
process.exit()
when 'u', 'usage'
for s in selected
config.REAVER_DEFAULT_ARGS
console.log "reaver -i mon0 -b #{s.bssid} #{if s.channel then '-c ' + s.channel} -vv -a -N -S"
process.exit()
when 'x', 'exit'
console.log 'goodbye.'
process.exit()
else
console.log 'You didn\'t enter one of the available letters/words. Try again.\n'
process.nextTick -> actionPrompt selected
startAttack = (selected) ->
atkQueue = new ReaverQueueManager(selected, rtard.interface)
atkQueue.on 'stopped', (reason) =>
if reason is 'paused'
rtard.prompt ' *** Reavetard is paused, press enter to resume ***', ->
process.nextTick atkQueue.start
else if reason isnt 'killed'
if atkQueue.priority?.length? >= 1 or atkQueue.secondary?.length? >= 1
process.nextTick atkQueue.start
else
attackReview atkQueue.finished
attackPrompt = =>
cli._c.display('hidden')
rtard.prompt ': ', (cmd) ->
switch cmd
when 'h', 'help'
cli.attackCommands()
when 'n', 'next'
atkQueue.stop('skipped')
when 'p', 'pause'
atkQueue.stop('paused')
when 'x', 'exit'
atkQueue.stop('killed')
process.exit()
else
process.nextTick attackPrompt
cli._c.display('reset').attackPrompt
atkQueue.start()
process.on 'exit', =>
atkQueue.stop('killed')
pinterval = setInterval (=>
if atkQueue.reaver? and atkQueue.active?
if atkQueue.prevHealth?
[pact, pstat, pmets] = atkQueue.prevHealth
if pact.bssid is atkQueue.active.bssid
if not pstat.associated and not atkQueue.reaver.status.associated then atkQueue.stop('idle')
if pmets.totalChecked is atkQueue.reaver.metrics.totalChecked then atkQueue.stop('idle')
# store a snapshot of the current attack queue for next time around
atkQueue.prevHealth = [ atkQueue.active, atkQueue.reaver.status, atkQueue.reaver.metrics ]
else if atkQueue.priority.length is 0 and atkQueue.secondary.length is 0
attackReview atkQueue.finished
clearInterval()), config.HEALTH_CHECK_INTERVAL
# For now just prints out the JSON.stringified array of stations
attackReview = (fin) ->
for s in fin
if s.session? then s.session.Pins = 'removed'
if s.success then cli.cwrite 'green', JSON.stringify(s)
else cli.cwrite 'red', JSON.stringify(s)
process.exit()
# Commander.js stuff to support CLI commands & parse options
# ----------------------------------------------------------------------
parseOptions = (args) ->
rtard # Universal configuration / options
.version('0.1.0')
.option('-i, --interface <iface>', "Choose WLAN interface [#{config.DEFAULT_INTERFACE}]", config.DEFAULT_INTERFACE)
.option('-r, --reaver-path <path>', "Set path to your reaver.db and session files [#{config.REAVER_DEFAULT_PATH}]", config.REAVER_DEFAULT_PATH)
.option('-d, --rdb-file <filename>', "Set the filename of your reaver database [#{config.REAVER_DEFAULT_DBFILE}]", config.REAVER_DEFAULT_DBFILE)
.option('-D, --no-rdb-access', 'Do not attempt to access reaver\'s database')
rtard # Comman" definition for `scan`
.command('scan [silent]')
.description('Spawn a new wash process to generate a list of nearby targets')
.action (silent=false) ->
cli.clear().title().cwrite('blue', ' Scan command chosen. Initializing wash survey...\n')
if silent
cli.cwrite 170, ' SILENT MODE ENABLED - Will not send probes to access points\n'
scan = false
else scan = true
process.nextTick ->
runWashScan(rtard.interface ? undefined, scan)
rtard # Command definition for `rdbq` (reaver database query)
.command('rdbq')
.description('Pull all past targets from database and/or session files')
.action ->
if not rtard.rdbAccess then throw('Cannot query a database without accessing it! Try removing -D option')
cli.clear().title().cwrite('blue', ' Reaver DB Query command chosen, Initializing query... \n\n')
process.nextTick runRDBQuery
rtard # Command definition for `crack`
.command('attack <bssid1,b2,b3...>')
.description('Initiate reaver cracking session on one or more targets')
.action (bssids) ->
cli.clear().title().cwrite('blue', ' Attack command chosen. The following BSSIDs were provided: \n')
stations = { bssid } for bssid in bssids.split(',')
if not Array.isArray stations then stations = [stations]
cli.cwrite(250, " #{JSON.stringify(stations)}\n").cwrite('magenta', " Initializing new attack queue on #{rtard.interface}...\n")
process.nextTick ->
startAttack(stations)
# execute user's custom setup function before parsing arguments:
if typeof config.CUSTOM_SETUP is 'function'
config.CUSTOM_SETUP (okay, err) ->
if okay then rtard.parse args
else throw err
else
# assume user does not want to run extra setup procedure and parse args
rtard.parse args
# If this is the main module, then parse options, otherwise export commands
if module is require.main then parseOptions(process.argv)
else module.exports = { parseOptions, startAttack, reviewTargets, runRDBQuery, runWashScan } | 192680 | # Reavetard - Reaver WPS (+Wash) extension scripts
# reavetard.coffee :: Module that brings all the little pieces together to make
# sweet, dynamic love to your reaver and wash duo
# Author: <NAME> http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
rtard = require 'commander'
cli = require './cli'
db = require './db'
wash = require './wash'
{ReaverQueueManager} = require './reaver'
config = require './config'
# `Command` handlers - functions used to initiate/control reavetard
# ----------------------------------------------------------------------
# Spawns a wash survey (technically a scan, by default)
runWashScan = (iface, scan = true) =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
iface ?= config.DEFAULT_INTERFACE
w = new wash(config.WASH_DEFAULT_ARGS(iface, scan))
# Clean wash up in case we missed another chance to stop it
# These processes go absolutely bonkers with your resources once there's
# around three or more of the bastards left open. hah
process.on 'exit', =>
w.stop()
# After messing around with different options, this seemed simple & intuitive enough
cli._c.foreground(250)
rtard.prompt ' *** Press enter to move on to next step ***', =>
w.stop()
process.nextTick -> reviewTargets.apply(null, [stations])
# Handle access points found by our washing machine
w.on 'ap', (station) ->
rdb.checkSurvey station.bssid, (err, rows) ->
if not err and rows.length > 0
device =
name: rows[0].device_name
manufacturer: rows[0].manufacturer
model: rows[0].model_name
number: rows[0].model_number
for k,v of device
if v isnt ''
station.device = device
rdb.checkHistory station.bssid, (err, rows) ->
if err then console.error errd
if rows.length > 0
# Copy history data to station object
(station[k] ?= v) for k,v of rows[0]
# Load session data (if available)
station.session = rdb.loadSession station.bssid
# If history -or- session specifies completion
if station.attempts >= 11000 or station.session.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category
# Hopefully you won't be seeing any of these!
w.on 'error', ->
console.error "ERROR: #{arguments}"
# Let the mayhem begin
w.start()
cli.cwrite('magenta', ' Wash scan has been started, now waiting for AP data...\n\n')
# Function called when running the rdbq command. It pulls -every- available
# row out of the history/survey tables and any associated session files.
runRDBQuery = () =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
rdb.getHistory (err, history) ->
if err then throw err
rdb.getSurvey (err, survey) ->
if err then throw err
joined = {}
# Start by indexing the history data by bssid in new object, joined
for row in history
joined[row.bssid] = row
# Merge the survey data with history data or index the new station
for row in survey
joined[row.bssid] ?= {}
(joined[row.bssid][k] = v) for k,v of row
joined[row.bssid].locked = joined[row.bssid].locked is 1
# Finally, load session data, categorize the station, and print confirmation
for bssid, station of joined
station.session = rdb.loadSession(bssid)
if station.attempts?
if station.attempts is 11000 or station.session?.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category, true
rtard.prompt '\n (Press enter to continue to target review)', ->
process.nextTick -> reviewTargets.apply(null, [stations])
# Present ansi-formatted tables containing scan/query results, from which user can select from
reviewTargets = (stations, reprompt=false) ->
if not reprompt
cli.clear()
indexed = cli.targetReviewTable stations
cli.reviewSelectors()
cli._c.foreground(255)
else
console.log ' Your input did not include any valid selections, please try again...'
rtard.prompt ' Please enter the #/selector for each target that you wish to select: ', (input) =>
cli._c.down(1).erase('line')
selections = input.match(/-?\d+/g)
selected = []
for station in indexed
if '0' in selections or "#{station.tableIndex}" in selections
selected.push station
else
switch station.category
when 'C'
if '-1' in selections then selected.push station
when 'W'
if '-2' in selections then selected.push station
when 'N'
if '-3' in selections then selected.push station
cli.clear().title(true).cwrite(250, " You have selected the following #{selected.length} station(s): \n ")
isfirst = true
for sel in selected
if not isfirst then cli.cwrite(250, ', ')
else isfirst = false
cli.cwrite(cli.STATION_COLORS[sel.category ? 'N'], "#{sel.essid}")
cli.cdwrite('bright', 'blue', '\n\n What would you like reavetard to do with these station(s)?\n')
.cdwrite('reset', 245, ' -----------------------------------------------------------------------\n').targetActions()
cli._c.foreground('blue')
actionPrompt(selected)
# Prompt user for desired action, given a list of targets, and perform
# action or defer it to another function
actionPrompt = (selected) =>
rtard.prompt '\n Please enter one of the letters in ()\'s, or entire title, of your desired action: ', (choice) =>
switch choice
when 'a', 'attack'
process.nextTick -> startAttack.apply(null, [selected])
when 'j', 'json' # Output station data in JSON (useful when scripting reavetard yourself)
cli.cwrite 250, ' Would you like to include the session file key data?\n'
cli.cwrite 'yellow', ' Doing so can add 11k partial pins for every station with session data!\n'
cli._c.foreground 250
rtard.confirm " What'll it be, champ? (y)ay or (n)ay: ", (showPins) ->
if not showPins
for s in selected when s.session?
s.session.Pins = 'removed'
console.log JSON.stringify selected
process.exit()
when 'u', 'usage'
for s in selected
config.REAVER_DEFAULT_ARGS
console.log "reaver -i mon0 -b #{s.bssid} #{if s.channel then '-c ' + s.channel} -vv -a -N -S"
process.exit()
when 'x', 'exit'
console.log 'goodbye.'
process.exit()
else
console.log 'You didn\'t enter one of the available letters/words. Try again.\n'
process.nextTick -> actionPrompt selected
startAttack = (selected) ->
atkQueue = new ReaverQueueManager(selected, rtard.interface)
atkQueue.on 'stopped', (reason) =>
if reason is 'paused'
rtard.prompt ' *** Reavetard is paused, press enter to resume ***', ->
process.nextTick atkQueue.start
else if reason isnt 'killed'
if atkQueue.priority?.length? >= 1 or atkQueue.secondary?.length? >= 1
process.nextTick atkQueue.start
else
attackReview atkQueue.finished
attackPrompt = =>
cli._c.display('hidden')
rtard.prompt ': ', (cmd) ->
switch cmd
when 'h', 'help'
cli.attackCommands()
when 'n', 'next'
atkQueue.stop('skipped')
when 'p', 'pause'
atkQueue.stop('paused')
when 'x', 'exit'
atkQueue.stop('killed')
process.exit()
else
process.nextTick attackPrompt
cli._c.display('reset').attackPrompt
atkQueue.start()
process.on 'exit', =>
atkQueue.stop('killed')
pinterval = setInterval (=>
if atkQueue.reaver? and atkQueue.active?
if atkQueue.prevHealth?
[pact, pstat, pmets] = atkQueue.prevHealth
if pact.bssid is atkQueue.active.bssid
if not pstat.associated and not atkQueue.reaver.status.associated then atkQueue.stop('idle')
if pmets.totalChecked is atkQueue.reaver.metrics.totalChecked then atkQueue.stop('idle')
# store a snapshot of the current attack queue for next time around
atkQueue.prevHealth = [ atkQueue.active, atkQueue.reaver.status, atkQueue.reaver.metrics ]
else if atkQueue.priority.length is 0 and atkQueue.secondary.length is 0
attackReview atkQueue.finished
clearInterval()), config.HEALTH_CHECK_INTERVAL
# For now just prints out the JSON.stringified array of stations
attackReview = (fin) ->
for s in fin
if s.session? then s.session.Pins = 'removed'
if s.success then cli.cwrite 'green', JSON.stringify(s)
else cli.cwrite 'red', JSON.stringify(s)
process.exit()
# Commander.js stuff to support CLI commands & parse options
# ----------------------------------------------------------------------
parseOptions = (args) ->
rtard # Universal configuration / options
.version('0.1.0')
.option('-i, --interface <iface>', "Choose WLAN interface [#{config.DEFAULT_INTERFACE}]", config.DEFAULT_INTERFACE)
.option('-r, --reaver-path <path>', "Set path to your reaver.db and session files [#{config.REAVER_DEFAULT_PATH}]", config.REAVER_DEFAULT_PATH)
.option('-d, --rdb-file <filename>', "Set the filename of your reaver database [#{config.REAVER_DEFAULT_DBFILE}]", config.REAVER_DEFAULT_DBFILE)
.option('-D, --no-rdb-access', 'Do not attempt to access reaver\'s database')
rtard # Comman" definition for `scan`
.command('scan [silent]')
.description('Spawn a new wash process to generate a list of nearby targets')
.action (silent=false) ->
cli.clear().title().cwrite('blue', ' Scan command chosen. Initializing wash survey...\n')
if silent
cli.cwrite 170, ' SILENT MODE ENABLED - Will not send probes to access points\n'
scan = false
else scan = true
process.nextTick ->
runWashScan(rtard.interface ? undefined, scan)
rtard # Command definition for `rdbq` (reaver database query)
.command('rdbq')
.description('Pull all past targets from database and/or session files')
.action ->
if not rtard.rdbAccess then throw('Cannot query a database without accessing it! Try removing -D option')
cli.clear().title().cwrite('blue', ' Reaver DB Query command chosen, Initializing query... \n\n')
process.nextTick runRDBQuery
rtard # Command definition for `crack`
.command('attack <bssid1,b2,b3...>')
.description('Initiate reaver cracking session on one or more targets')
.action (bssids) ->
cli.clear().title().cwrite('blue', ' Attack command chosen. The following BSSIDs were provided: \n')
stations = { bssid } for bssid in bssids.split(',')
if not Array.isArray stations then stations = [stations]
cli.cwrite(250, " #{JSON.stringify(stations)}\n").cwrite('magenta', " Initializing new attack queue on #{rtard.interface}...\n")
process.nextTick ->
startAttack(stations)
# execute user's custom setup function before parsing arguments:
if typeof config.CUSTOM_SETUP is 'function'
config.CUSTOM_SETUP (okay, err) ->
if okay then rtard.parse args
else throw err
else
# assume user does not want to run extra setup procedure and parse args
rtard.parse args
# If this is the main module, then parse options, otherwise export commands
if module is require.main then parseOptions(process.argv)
else module.exports = { parseOptions, startAttack, reviewTargets, runRDBQuery, runWashScan } | true | # Reavetard - Reaver WPS (+Wash) extension scripts
# reavetard.coffee :: Module that brings all the little pieces together to make
# sweet, dynamic love to your reaver and wash duo
# Author: PI:NAME:<NAME>END_PI http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
rtard = require 'commander'
cli = require './cli'
db = require './db'
wash = require './wash'
{ReaverQueueManager} = require './reaver'
config = require './config'
# `Command` handlers - functions used to initiate/control reavetard
# ----------------------------------------------------------------------
# Spawns a wash survey (technically a scan, by default)
runWashScan = (iface, scan = true) =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
iface ?= config.DEFAULT_INTERFACE
w = new wash(config.WASH_DEFAULT_ARGS(iface, scan))
# Clean wash up in case we missed another chance to stop it
# These processes go absolutely bonkers with your resources once there's
# around three or more of the bastards left open. hah
process.on 'exit', =>
w.stop()
# After messing around with different options, this seemed simple & intuitive enough
cli._c.foreground(250)
rtard.prompt ' *** Press enter to move on to next step ***', =>
w.stop()
process.nextTick -> reviewTargets.apply(null, [stations])
# Handle access points found by our washing machine
w.on 'ap', (station) ->
rdb.checkSurvey station.bssid, (err, rows) ->
if not err and rows.length > 0
device =
name: rows[0].device_name
manufacturer: rows[0].manufacturer
model: rows[0].model_name
number: rows[0].model_number
for k,v of device
if v isnt ''
station.device = device
rdb.checkHistory station.bssid, (err, rows) ->
if err then console.error errd
if rows.length > 0
# Copy history data to station object
(station[k] ?= v) for k,v of rows[0]
# Load session data (if available)
station.session = rdb.loadSession station.bssid
# If history -or- session specifies completion
if station.attempts >= 11000 or station.session.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category
# Hopefully you won't be seeing any of these!
w.on 'error', ->
console.error "ERROR: #{arguments}"
# Let the mayhem begin
w.start()
cli.cwrite('magenta', ' Wash scan has been started, now waiting for AP data...\n\n')
# Function called when running the rdbq command. It pulls -every- available
# row out of the history/survey tables and any associated session files.
runRDBQuery = () =>
stations = # Used to categorize results for later review
complete: []
inProgress: []
noHistory: []
rdb = new db(rtard.reaverPath ? config.REAVER_DEFAULT_PATH, rtard.rdbFile ? config.REAVER_DEFAULT_DBFILE)
rdb.getHistory (err, history) ->
if err then throw err
rdb.getSurvey (err, survey) ->
if err then throw err
joined = {}
# Start by indexing the history data by bssid in new object, joined
for row in history
joined[row.bssid] = row
# Merge the survey data with history data or index the new station
for row in survey
joined[row.bssid] ?= {}
(joined[row.bssid][k] = v) for k,v of row
joined[row.bssid].locked = joined[row.bssid].locked is 1
# Finally, load session data, categorize the station, and print confirmation
for bssid, station of joined
station.session = rdb.loadSession(bssid)
if station.attempts?
if station.attempts is 11000 or station.session?.phase is 2
stations.complete.push station
station.category = 'C'
else
stations.inProgress.push station
station.category = 'W'
else
stations.noHistory.push station
station.category = 'N'
cli.washHit station, station.category, true
rtard.prompt '\n (Press enter to continue to target review)', ->
process.nextTick -> reviewTargets.apply(null, [stations])
# Present ansi-formatted tables containing scan/query results, from which user can select from
reviewTargets = (stations, reprompt=false) ->
if not reprompt
cli.clear()
indexed = cli.targetReviewTable stations
cli.reviewSelectors()
cli._c.foreground(255)
else
console.log ' Your input did not include any valid selections, please try again...'
rtard.prompt ' Please enter the #/selector for each target that you wish to select: ', (input) =>
cli._c.down(1).erase('line')
selections = input.match(/-?\d+/g)
selected = []
for station in indexed
if '0' in selections or "#{station.tableIndex}" in selections
selected.push station
else
switch station.category
when 'C'
if '-1' in selections then selected.push station
when 'W'
if '-2' in selections then selected.push station
when 'N'
if '-3' in selections then selected.push station
cli.clear().title(true).cwrite(250, " You have selected the following #{selected.length} station(s): \n ")
isfirst = true
for sel in selected
if not isfirst then cli.cwrite(250, ', ')
else isfirst = false
cli.cwrite(cli.STATION_COLORS[sel.category ? 'N'], "#{sel.essid}")
cli.cdwrite('bright', 'blue', '\n\n What would you like reavetard to do with these station(s)?\n')
.cdwrite('reset', 245, ' -----------------------------------------------------------------------\n').targetActions()
cli._c.foreground('blue')
actionPrompt(selected)
# Prompt user for desired action, given a list of targets, and perform
# action or defer it to another function
actionPrompt = (selected) =>
rtard.prompt '\n Please enter one of the letters in ()\'s, or entire title, of your desired action: ', (choice) =>
switch choice
when 'a', 'attack'
process.nextTick -> startAttack.apply(null, [selected])
when 'j', 'json' # Output station data in JSON (useful when scripting reavetard yourself)
cli.cwrite 250, ' Would you like to include the session file key data?\n'
cli.cwrite 'yellow', ' Doing so can add 11k partial pins for every station with session data!\n'
cli._c.foreground 250
rtard.confirm " What'll it be, champ? (y)ay or (n)ay: ", (showPins) ->
if not showPins
for s in selected when s.session?
s.session.Pins = 'removed'
console.log JSON.stringify selected
process.exit()
when 'u', 'usage'
for s in selected
config.REAVER_DEFAULT_ARGS
console.log "reaver -i mon0 -b #{s.bssid} #{if s.channel then '-c ' + s.channel} -vv -a -N -S"
process.exit()
when 'x', 'exit'
console.log 'goodbye.'
process.exit()
else
console.log 'You didn\'t enter one of the available letters/words. Try again.\n'
process.nextTick -> actionPrompt selected
startAttack = (selected) ->
atkQueue = new ReaverQueueManager(selected, rtard.interface)
atkQueue.on 'stopped', (reason) =>
if reason is 'paused'
rtard.prompt ' *** Reavetard is paused, press enter to resume ***', ->
process.nextTick atkQueue.start
else if reason isnt 'killed'
if atkQueue.priority?.length? >= 1 or atkQueue.secondary?.length? >= 1
process.nextTick atkQueue.start
else
attackReview atkQueue.finished
attackPrompt = =>
cli._c.display('hidden')
rtard.prompt ': ', (cmd) ->
switch cmd
when 'h', 'help'
cli.attackCommands()
when 'n', 'next'
atkQueue.stop('skipped')
when 'p', 'pause'
atkQueue.stop('paused')
when 'x', 'exit'
atkQueue.stop('killed')
process.exit()
else
process.nextTick attackPrompt
cli._c.display('reset').attackPrompt
atkQueue.start()
process.on 'exit', =>
atkQueue.stop('killed')
pinterval = setInterval (=>
if atkQueue.reaver? and atkQueue.active?
if atkQueue.prevHealth?
[pact, pstat, pmets] = atkQueue.prevHealth
if pact.bssid is atkQueue.active.bssid
if not pstat.associated and not atkQueue.reaver.status.associated then atkQueue.stop('idle')
if pmets.totalChecked is atkQueue.reaver.metrics.totalChecked then atkQueue.stop('idle')
# store a snapshot of the current attack queue for next time around
atkQueue.prevHealth = [ atkQueue.active, atkQueue.reaver.status, atkQueue.reaver.metrics ]
else if atkQueue.priority.length is 0 and atkQueue.secondary.length is 0
attackReview atkQueue.finished
clearInterval()), config.HEALTH_CHECK_INTERVAL
# For now just prints out the JSON.stringified array of stations
attackReview = (fin) ->
for s in fin
if s.session? then s.session.Pins = 'removed'
if s.success then cli.cwrite 'green', JSON.stringify(s)
else cli.cwrite 'red', JSON.stringify(s)
process.exit()
# Commander.js stuff to support CLI commands & parse options
# ----------------------------------------------------------------------
parseOptions = (args) ->
rtard # Universal configuration / options
.version('0.1.0')
.option('-i, --interface <iface>', "Choose WLAN interface [#{config.DEFAULT_INTERFACE}]", config.DEFAULT_INTERFACE)
.option('-r, --reaver-path <path>', "Set path to your reaver.db and session files [#{config.REAVER_DEFAULT_PATH}]", config.REAVER_DEFAULT_PATH)
.option('-d, --rdb-file <filename>', "Set the filename of your reaver database [#{config.REAVER_DEFAULT_DBFILE}]", config.REAVER_DEFAULT_DBFILE)
.option('-D, --no-rdb-access', 'Do not attempt to access reaver\'s database')
rtard # Comman" definition for `scan`
.command('scan [silent]')
.description('Spawn a new wash process to generate a list of nearby targets')
.action (silent=false) ->
cli.clear().title().cwrite('blue', ' Scan command chosen. Initializing wash survey...\n')
if silent
cli.cwrite 170, ' SILENT MODE ENABLED - Will not send probes to access points\n'
scan = false
else scan = true
process.nextTick ->
runWashScan(rtard.interface ? undefined, scan)
rtard # Command definition for `rdbq` (reaver database query)
.command('rdbq')
.description('Pull all past targets from database and/or session files')
.action ->
if not rtard.rdbAccess then throw('Cannot query a database without accessing it! Try removing -D option')
cli.clear().title().cwrite('blue', ' Reaver DB Query command chosen, Initializing query... \n\n')
process.nextTick runRDBQuery
rtard # Command definition for `crack`
.command('attack <bssid1,b2,b3...>')
.description('Initiate reaver cracking session on one or more targets')
.action (bssids) ->
cli.clear().title().cwrite('blue', ' Attack command chosen. The following BSSIDs were provided: \n')
stations = { bssid } for bssid in bssids.split(',')
if not Array.isArray stations then stations = [stations]
cli.cwrite(250, " #{JSON.stringify(stations)}\n").cwrite('magenta', " Initializing new attack queue on #{rtard.interface}...\n")
process.nextTick ->
startAttack(stations)
# execute user's custom setup function before parsing arguments:
if typeof config.CUSTOM_SETUP is 'function'
config.CUSTOM_SETUP (okay, err) ->
if okay then rtard.parse args
else throw err
else
# assume user does not want to run extra setup procedure and parse args
rtard.parse args
# If this is the main module, then parse options, otherwise export commands
if module is require.main then parseOptions(process.argv)
else module.exports = { parseOptions, startAttack, reviewTargets, runRDBQuery, runWashScan } |
[
{
"context": "name = process.env.HUBOT_NS_API_EMAIL\n password = process.env.HUBOT_NS_API_PASSWORD\n auth = \"Basic \" + new Buff",
"end": 1603,
"score": 0.7538696527481079,
"start": 1592,
"tag": "PASSWORD",
"value": "process.env"
},
{
"context": "s.env.HUBOT_NS_API_EMAIL\n password = process.env.HUBOT_NS_API_PASSWORD\n auth = \"Basic \" + new Buffer(us",
"end": 1609,
"score": 0.6933481693267822,
"start": 1604,
"tag": "PASSWORD",
"value": "HUBOT"
}
] | src/scripts/train.coffee | mcollina/hubot-scripts | 1 | # A way to interact with the NS (Dutch Railways) API
#
# To configure, add HUBOT_NS_API_EMAIL and HUBOT_NS_API_PASSWORD to your Heroku config with "heroku config:add"
#
# hubot train disruptions <station> - Retrieve the list of disruptions near <station>.
# Please note: <station> can be a station code (e.g. 'asd')
# or (part of) a station name (e.g. 'Amsterdam Centraal')
#
xml2js = require 'xml2js'
disruptionApiUrl = 'http://webservices.ns.nl/ns-api-storingen'
disruptionPageRoot = 'http://www.ns.nl/storingen/index.form#'
module.exports = (robot) ->
robot.respond /train disruptions (.*)/i, (msg) ->
station = msg.match[1]
station.replace(/^\s+|\s+$/g, "")
findDisruptions msg, station, (list) ->
if list.Ongepland == undefined || list.Gepland == undefined
msg.send "Sorry, that didn't work. Perhaps the NS API is down or your credentials are wrong?"
return
#
# Unplanned disruptions
#
if list.Ongepland[0].Storing == undefined
msg.send "There are no unplanned disruptions around '#{station}'"
else
sendDisruptions list.Ongepland[0].Storing, msg, false
#
# Planned disruptions
#
if list.Gepland[0].Storing == undefined
msg.send "There are no planned maintenance disruptions around '#{station}'"
else
sendDisruptions list.Gepland[0].Storing, msg, true
findDisruptions = (msg, station, callback) ->
url = disruptionApiUrl
username = process.env.HUBOT_NS_API_EMAIL
password = process.env.HUBOT_NS_API_PASSWORD
auth = "Basic " + new Buffer(username + ':' + password).toString('base64')
parser = new xml2js.Parser({explicitArray: true})
msg.http(url)
.header('Authorization', auth)
.query(station: station, actual: false, unplanned: false)
.get() (err, res, body) ->
parser.parseString body, (err, result) ->
callback result
sendDisruptions = (disruptions, msg, planned) ->
for disruption in disruptions
if planned
type = ''
urlInfix = 'werkzaamheden-'
else
type = ':warning:'
urlInfix = ''
output = [
type,
disruption.Traject[0],
"(#{disruption.Reden[0]}).",
"More info: #{disruptionPageRoot}#{urlInfix}#{disruption.id[0]}"
]
msg.send output.join(' ')
| 147899 | # A way to interact with the NS (Dutch Railways) API
#
# To configure, add HUBOT_NS_API_EMAIL and HUBOT_NS_API_PASSWORD to your Heroku config with "heroku config:add"
#
# hubot train disruptions <station> - Retrieve the list of disruptions near <station>.
# Please note: <station> can be a station code (e.g. 'asd')
# or (part of) a station name (e.g. 'Amsterdam Centraal')
#
xml2js = require 'xml2js'
disruptionApiUrl = 'http://webservices.ns.nl/ns-api-storingen'
disruptionPageRoot = 'http://www.ns.nl/storingen/index.form#'
module.exports = (robot) ->
robot.respond /train disruptions (.*)/i, (msg) ->
station = msg.match[1]
station.replace(/^\s+|\s+$/g, "")
findDisruptions msg, station, (list) ->
if list.Ongepland == undefined || list.Gepland == undefined
msg.send "Sorry, that didn't work. Perhaps the NS API is down or your credentials are wrong?"
return
#
# Unplanned disruptions
#
if list.Ongepland[0].Storing == undefined
msg.send "There are no unplanned disruptions around '#{station}'"
else
sendDisruptions list.Ongepland[0].Storing, msg, false
#
# Planned disruptions
#
if list.Gepland[0].Storing == undefined
msg.send "There are no planned maintenance disruptions around '#{station}'"
else
sendDisruptions list.Gepland[0].Storing, msg, true
findDisruptions = (msg, station, callback) ->
url = disruptionApiUrl
username = process.env.HUBOT_NS_API_EMAIL
password = <PASSWORD>.<PASSWORD>_NS_API_PASSWORD
auth = "Basic " + new Buffer(username + ':' + password).toString('base64')
parser = new xml2js.Parser({explicitArray: true})
msg.http(url)
.header('Authorization', auth)
.query(station: station, actual: false, unplanned: false)
.get() (err, res, body) ->
parser.parseString body, (err, result) ->
callback result
sendDisruptions = (disruptions, msg, planned) ->
for disruption in disruptions
if planned
type = ''
urlInfix = 'werkzaamheden-'
else
type = ':warning:'
urlInfix = ''
output = [
type,
disruption.Traject[0],
"(#{disruption.Reden[0]}).",
"More info: #{disruptionPageRoot}#{urlInfix}#{disruption.id[0]}"
]
msg.send output.join(' ')
| true | # A way to interact with the NS (Dutch Railways) API
#
# To configure, add HUBOT_NS_API_EMAIL and HUBOT_NS_API_PASSWORD to your Heroku config with "heroku config:add"
#
# hubot train disruptions <station> - Retrieve the list of disruptions near <station>.
# Please note: <station> can be a station code (e.g. 'asd')
# or (part of) a station name (e.g. 'Amsterdam Centraal')
#
xml2js = require 'xml2js'
disruptionApiUrl = 'http://webservices.ns.nl/ns-api-storingen'
disruptionPageRoot = 'http://www.ns.nl/storingen/index.form#'
module.exports = (robot) ->
robot.respond /train disruptions (.*)/i, (msg) ->
station = msg.match[1]
station.replace(/^\s+|\s+$/g, "")
findDisruptions msg, station, (list) ->
if list.Ongepland == undefined || list.Gepland == undefined
msg.send "Sorry, that didn't work. Perhaps the NS API is down or your credentials are wrong?"
return
#
# Unplanned disruptions
#
if list.Ongepland[0].Storing == undefined
msg.send "There are no unplanned disruptions around '#{station}'"
else
sendDisruptions list.Ongepland[0].Storing, msg, false
#
# Planned disruptions
#
if list.Gepland[0].Storing == undefined
msg.send "There are no planned maintenance disruptions around '#{station}'"
else
sendDisruptions list.Gepland[0].Storing, msg, true
findDisruptions = (msg, station, callback) ->
url = disruptionApiUrl
username = process.env.HUBOT_NS_API_EMAIL
password = PI:PASSWORD:<PASSWORD>END_PI.PI:PASSWORD:<PASSWORD>END_PI_NS_API_PASSWORD
auth = "Basic " + new Buffer(username + ':' + password).toString('base64')
parser = new xml2js.Parser({explicitArray: true})
msg.http(url)
.header('Authorization', auth)
.query(station: station, actual: false, unplanned: false)
.get() (err, res, body) ->
parser.parseString body, (err, result) ->
callback result
sendDisruptions = (disruptions, msg, planned) ->
for disruption in disruptions
if planned
type = ''
urlInfix = 'werkzaamheden-'
else
type = ':warning:'
urlInfix = ''
output = [
type,
disruption.Traject[0],
"(#{disruption.Reden[0]}).",
"More info: #{disruptionPageRoot}#{urlInfix}#{disruption.id[0]}"
]
msg.send output.join(' ')
|
[
{
"context": "@isShowModal = false\n @data = [\n { name: 'AAA', amount: 2000 }\n { name: 'BBB', amount: 10 ",
"end": 159,
"score": 0.9515673518180847,
"start": 156,
"tag": "NAME",
"value": "AAA"
},
{
"context": " { name: 'AAA', amount: 2000 }\n { name: 'BBB', amount: 10 }\n { name: 'CCC', amount: 10000",
"end": 195,
"score": 0.8577706217765808,
"start": 192,
"tag": "NAME",
"value": "BBB"
},
{
"context": "\n { name: 'BBB', amount: 10 }\n { name: 'CCC', amount: 10000 }\n ]\n\n addData: (item) ->\n ",
"end": 229,
"score": 0.9716529250144958,
"start": 226,
"tag": "NAME",
"value": "CCC"
}
] | src/extra/senior/SeniorModel.coffee | khirayama/hansonJavaScript | 10 | MicroModel = microModule.import('MicroModel')
class SeniorModel extends MicroModel
constructor: ->
@isShowModal = false
@data = [
{ name: 'AAA', amount: 2000 }
{ name: 'BBB', amount: 10 }
{ name: 'CCC', amount: 10000 }
]
addData: (item) ->
@data.push(item)
@set('data', @data)
microModule.export(SeniorModel)
| 16888 | MicroModel = microModule.import('MicroModel')
class SeniorModel extends MicroModel
constructor: ->
@isShowModal = false
@data = [
{ name: '<NAME>', amount: 2000 }
{ name: '<NAME>', amount: 10 }
{ name: '<NAME>', amount: 10000 }
]
addData: (item) ->
@data.push(item)
@set('data', @data)
microModule.export(SeniorModel)
| true | MicroModel = microModule.import('MicroModel')
class SeniorModel extends MicroModel
constructor: ->
@isShowModal = false
@data = [
{ name: 'PI:NAME:<NAME>END_PI', amount: 2000 }
{ name: 'PI:NAME:<NAME>END_PI', amount: 10 }
{ name: 'PI:NAME:<NAME>END_PI', amount: 10000 }
]
addData: (item) ->
@data.push(item)
@set('data', @data)
microModule.export(SeniorModel)
|
[
{
"context": "#########################\n#\n#\tMooqita\n# Created by Markus on 20/8/2017.\n#\n#################################",
"end": 87,
"score": 0.9995903968811035,
"start": 81,
"tag": "NAME",
"value": "Markus"
}
] | server/publications/user.coffee | agottschalk10/worklearn | 0 | #######################################################
#
# Mooqita
# Created by Markus on 20/8/2017.
#
#######################################################
#######################################################
Meteor.publish "my_profile", () ->
user_id = this.userId
filter =
owner_id: user_id
fields = visible_fields Profiles, user_id, filter
crs = Profiles.find filter, fields
log_publication "Profile", crs, filter, {}, "profiles", user_id
return crs
#######################################################
# Resumes
#######################################################
#######################################################
Meteor.publish "user_resumes", (user_id) ->
if user_id
check user_id, String
if not Roles.userIsInRole this.userId, "admin"
if this.userId != user_id
filter =
owner_id: user_id
profile = Profiles.findOne filter
if profile.locale
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
self = this
prepare_resume = (user) ->
resume = {}
filter =
owner_id: user._id
profile = Profiles.findOne filter
if profile
resume.name = get_profile_name profile, false, false
resume.owner_id = profile._id
resume.self_description = profile.resume
resume.avatar = get_avatar profile
solution_filter =
published: true
owner_id: user._id
#in_portfolio: true
solution_list = []
solution_cursor = Solutions.find solution_filter
solution_cursor.forEach (s) ->
solution = {}
solution.reviews = []
solution.solution = if !s.confidential then s.content else null
challenge = Challenges.findOne(s.challenge_id)
if challenge
solution.challenge = if !challenge.confidential then challenge.content else null
solution.challenge_title = challenge.title
filter =
owner_id: challenge.owner_id
profile = Profiles.findOne filter
if profile
solution.challenge_owner_avatar = get_avatar profile
abs_rating = 0
num_ratings = 0
review_filter =
solution_id: s._id
review_cursor = Reviews.find(review_filter)
review_cursor.forEach (r) ->
review = {}
filter =
parent_id: r._id
feedback = Feedback.findOne filter
filter =
owner_id: r.owner_id
profile = Profiles.findOne filter
if feedback.published
review.feedback = {}
review.feedback.content = feedback.content
review.feedback.rating = feedback.rating
review.rating = r.rating
abs_rating += parseInt(r.rating)
num_ratings += 1
if profile
review.name = get_profile_name profile ,false ,false
review.avatar = get_avatar profile
review.review = r.content
solution.reviews.push(review)
if abs_rating
avg = Math.round(abs_rating / num_ratings, 1)
solution.average = avg
solution_list.push(solution)
resume.solutions = solution_list
self.added("user_resumes", user._id, resume)
filter =
_id: user_id
crs = Meteor.users.find(filter)
crs.forEach(prepare_resume)
log_publication "UserResumes", crs, filter, {}, "credits", user_id
self.ready()
#######################################################
Meteor.publish "user_summary", (user_id, challenge_id) ->
check user_id, String
check challenge_id, String
if user_id
if not Roles.userIsInRole this.userId, "challenge_designer"
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
##########################################
# Initialize user summary through users
# database object
##########################################
mod =
fields:
emails: 1
user = Meteor.users.findOne user_id, mod
##########################################
# Same fields for solution review feedback
##########################################
mod =
fields:
rating: 1
content: 1
material: 1
##########################################
# Find Solutions
##########################################
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
solutions = Solutions.find filter, mod
##########################################
# Find relevant Feedback and Reviews
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
rev_given = Reviews.find filter, mod
fed_given = Feedback.find filter, mod
filter =
requester_id: user_id
challenge_id: challenge_id
rev_received = Reviews.find filter, mod
fed_received = Feedback.find filter, mod
##########################################
#
# Calculate statistics
#
##########################################
##########################################
# Solutions
##########################################
material = 0
length = 0
count = solutions.count()
solutions.forEach (entry) ->
if entry.content
length += entry.content.split(" ").length
if entry.material
material += 1
user.solutions_count = count
user.solutions_average_length = length / count
user.solutions_average_material = material / count
##########################################
# Given Reviews
##########################################
user = calc_statistics user, rev_given, "reviews_given"
user = calc_statistics user, rev_received, "reviews_received"
user = calc_statistics user, fed_given, "feedback_given"
user = calc_statistics user, fed_received, "feedback_received"
msg = "UserSummaries for: " + get_profile_name_by_user_id user_id, true
log_publication msg, null, {},
{}, "user_summary", this.userId
this.added "user_summaries", user_id, user
this.ready() | 91985 | #######################################################
#
# Mooqita
# Created by <NAME> on 20/8/2017.
#
#######################################################
#######################################################
Meteor.publish "my_profile", () ->
user_id = this.userId
filter =
owner_id: user_id
fields = visible_fields Profiles, user_id, filter
crs = Profiles.find filter, fields
log_publication "Profile", crs, filter, {}, "profiles", user_id
return crs
#######################################################
# Resumes
#######################################################
#######################################################
Meteor.publish "user_resumes", (user_id) ->
if user_id
check user_id, String
if not Roles.userIsInRole this.userId, "admin"
if this.userId != user_id
filter =
owner_id: user_id
profile = Profiles.findOne filter
if profile.locale
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
self = this
prepare_resume = (user) ->
resume = {}
filter =
owner_id: user._id
profile = Profiles.findOne filter
if profile
resume.name = get_profile_name profile, false, false
resume.owner_id = profile._id
resume.self_description = profile.resume
resume.avatar = get_avatar profile
solution_filter =
published: true
owner_id: user._id
#in_portfolio: true
solution_list = []
solution_cursor = Solutions.find solution_filter
solution_cursor.forEach (s) ->
solution = {}
solution.reviews = []
solution.solution = if !s.confidential then s.content else null
challenge = Challenges.findOne(s.challenge_id)
if challenge
solution.challenge = if !challenge.confidential then challenge.content else null
solution.challenge_title = challenge.title
filter =
owner_id: challenge.owner_id
profile = Profiles.findOne filter
if profile
solution.challenge_owner_avatar = get_avatar profile
abs_rating = 0
num_ratings = 0
review_filter =
solution_id: s._id
review_cursor = Reviews.find(review_filter)
review_cursor.forEach (r) ->
review = {}
filter =
parent_id: r._id
feedback = Feedback.findOne filter
filter =
owner_id: r.owner_id
profile = Profiles.findOne filter
if feedback.published
review.feedback = {}
review.feedback.content = feedback.content
review.feedback.rating = feedback.rating
review.rating = r.rating
abs_rating += parseInt(r.rating)
num_ratings += 1
if profile
review.name = get_profile_name profile ,false ,false
review.avatar = get_avatar profile
review.review = r.content
solution.reviews.push(review)
if abs_rating
avg = Math.round(abs_rating / num_ratings, 1)
solution.average = avg
solution_list.push(solution)
resume.solutions = solution_list
self.added("user_resumes", user._id, resume)
filter =
_id: user_id
crs = Meteor.users.find(filter)
crs.forEach(prepare_resume)
log_publication "UserResumes", crs, filter, {}, "credits", user_id
self.ready()
#######################################################
Meteor.publish "user_summary", (user_id, challenge_id) ->
check user_id, String
check challenge_id, String
if user_id
if not Roles.userIsInRole this.userId, "challenge_designer"
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
##########################################
# Initialize user summary through users
# database object
##########################################
mod =
fields:
emails: 1
user = Meteor.users.findOne user_id, mod
##########################################
# Same fields for solution review feedback
##########################################
mod =
fields:
rating: 1
content: 1
material: 1
##########################################
# Find Solutions
##########################################
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
solutions = Solutions.find filter, mod
##########################################
# Find relevant Feedback and Reviews
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
rev_given = Reviews.find filter, mod
fed_given = Feedback.find filter, mod
filter =
requester_id: user_id
challenge_id: challenge_id
rev_received = Reviews.find filter, mod
fed_received = Feedback.find filter, mod
##########################################
#
# Calculate statistics
#
##########################################
##########################################
# Solutions
##########################################
material = 0
length = 0
count = solutions.count()
solutions.forEach (entry) ->
if entry.content
length += entry.content.split(" ").length
if entry.material
material += 1
user.solutions_count = count
user.solutions_average_length = length / count
user.solutions_average_material = material / count
##########################################
# Given Reviews
##########################################
user = calc_statistics user, rev_given, "reviews_given"
user = calc_statistics user, rev_received, "reviews_received"
user = calc_statistics user, fed_given, "feedback_given"
user = calc_statistics user, fed_received, "feedback_received"
msg = "UserSummaries for: " + get_profile_name_by_user_id user_id, true
log_publication msg, null, {},
{}, "user_summary", this.userId
this.added "user_summaries", user_id, user
this.ready() | true | #######################################################
#
# Mooqita
# Created by PI:NAME:<NAME>END_PI on 20/8/2017.
#
#######################################################
#######################################################
Meteor.publish "my_profile", () ->
user_id = this.userId
filter =
owner_id: user_id
fields = visible_fields Profiles, user_id, filter
crs = Profiles.find filter, fields
log_publication "Profile", crs, filter, {}, "profiles", user_id
return crs
#######################################################
# Resumes
#######################################################
#######################################################
Meteor.publish "user_resumes", (user_id) ->
if user_id
check user_id, String
if not Roles.userIsInRole this.userId, "admin"
if this.userId != user_id
filter =
owner_id: user_id
profile = Profiles.findOne filter
if profile.locale
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
self = this
prepare_resume = (user) ->
resume = {}
filter =
owner_id: user._id
profile = Profiles.findOne filter
if profile
resume.name = get_profile_name profile, false, false
resume.owner_id = profile._id
resume.self_description = profile.resume
resume.avatar = get_avatar profile
solution_filter =
published: true
owner_id: user._id
#in_portfolio: true
solution_list = []
solution_cursor = Solutions.find solution_filter
solution_cursor.forEach (s) ->
solution = {}
solution.reviews = []
solution.solution = if !s.confidential then s.content else null
challenge = Challenges.findOne(s.challenge_id)
if challenge
solution.challenge = if !challenge.confidential then challenge.content else null
solution.challenge_title = challenge.title
filter =
owner_id: challenge.owner_id
profile = Profiles.findOne filter
if profile
solution.challenge_owner_avatar = get_avatar profile
abs_rating = 0
num_ratings = 0
review_filter =
solution_id: s._id
review_cursor = Reviews.find(review_filter)
review_cursor.forEach (r) ->
review = {}
filter =
parent_id: r._id
feedback = Feedback.findOne filter
filter =
owner_id: r.owner_id
profile = Profiles.findOne filter
if feedback.published
review.feedback = {}
review.feedback.content = feedback.content
review.feedback.rating = feedback.rating
review.rating = r.rating
abs_rating += parseInt(r.rating)
num_ratings += 1
if profile
review.name = get_profile_name profile ,false ,false
review.avatar = get_avatar profile
review.review = r.content
solution.reviews.push(review)
if abs_rating
avg = Math.round(abs_rating / num_ratings, 1)
solution.average = avg
solution_list.push(solution)
resume.solutions = solution_list
self.added("user_resumes", user._id, resume)
filter =
_id: user_id
crs = Meteor.users.find(filter)
crs.forEach(prepare_resume)
log_publication "UserResumes", crs, filter, {}, "credits", user_id
self.ready()
#######################################################
Meteor.publish "user_summary", (user_id, challenge_id) ->
check user_id, String
check challenge_id, String
if user_id
if not Roles.userIsInRole this.userId, "challenge_designer"
throw new Meteor.Error("Not permitted.")
if !user_id
user_id = this.userId
if !user_id
throw new Meteor.Error("Not permitted.")
##########################################
# Initialize user summary through users
# database object
##########################################
mod =
fields:
emails: 1
user = Meteor.users.findOne user_id, mod
##########################################
# Same fields for solution review feedback
##########################################
mod =
fields:
rating: 1
content: 1
material: 1
##########################################
# Find Solutions
##########################################
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
solutions = Solutions.find filter, mod
##########################################
# Find relevant Feedback and Reviews
##########################################
filter =
owner_id: user_id
challenge_id: challenge_id
rev_given = Reviews.find filter, mod
fed_given = Feedback.find filter, mod
filter =
requester_id: user_id
challenge_id: challenge_id
rev_received = Reviews.find filter, mod
fed_received = Feedback.find filter, mod
##########################################
#
# Calculate statistics
#
##########################################
##########################################
# Solutions
##########################################
material = 0
length = 0
count = solutions.count()
solutions.forEach (entry) ->
if entry.content
length += entry.content.split(" ").length
if entry.material
material += 1
user.solutions_count = count
user.solutions_average_length = length / count
user.solutions_average_material = material / count
##########################################
# Given Reviews
##########################################
user = calc_statistics user, rev_given, "reviews_given"
user = calc_statistics user, rev_received, "reviews_received"
user = calc_statistics user, fed_given, "feedback_given"
user = calc_statistics user, fed_received, "feedback_received"
msg = "UserSummaries for: " + get_profile_name_by_user_id user_id, true
log_publication msg, null, {},
{}, "user_summary", this.userId
this.added "user_summaries", user_id, user
this.ready() |
[
{
"context": "# @author mr.doob / http://mrdoob.com/\n# @author supereggbert / htt",
"end": 17,
"score": 0.8818013668060303,
"start": 10,
"tag": "USERNAME",
"value": "mr.doob"
},
{
"context": "# @author mr.doob / http://mrdoob.com/\n# @author supereggbert / http://www.paulbrunt.co.uk/\n# @author julianwa ",
"end": 61,
"score": 0.9985305666923523,
"start": 49,
"tag": "USERNAME",
"value": "supereggbert"
},
{
"context": "pereggbert / http://www.paulbrunt.co.uk/\n# @author julianwa / https://github.com/julianwa\n# @author aladjev.a",
"end": 110,
"score": 0.9995981454849243,
"start": 102,
"tag": "USERNAME",
"value": "julianwa"
},
{
"context": "nt.co.uk/\n# @author julianwa / https://github.com/julianwa\n# @author aladjev.andrew@gmail.com\n\n#= require ne",
"end": 140,
"score": 0.9996678233146667,
"start": 132,
"tag": "USERNAME",
"value": "julianwa"
},
{
"context": "r julianwa / https://github.com/julianwa\n# @author aladjev.andrew@gmail.com\n\n#= require new_src/core/matrix_4\n#= require new_",
"end": 175,
"score": 0.9999238848686218,
"start": 151,
"tag": "EMAIL",
"value": "aladjev.andrew@gmail.com"
}
] | source/javascripts/new_src/core/projector.coffee | andrew-aladev/three.js | 0 | # @author mr.doob / http://mrdoob.com/
# @author supereggbert / http://www.paulbrunt.co.uk/
# @author julianwa / https://github.com/julianwa
# @author aladjev.andrew@gmail.com
#= require new_src/core/matrix_4
#= require new_src/core/vector_3
#= require new_src/core/vector_4
#= require new_src/core/frustum
#= require new_src/core/ray
class Projector
constructor: ->
@_object = undefined
@_objectCount = undefined
@_objectPool = []
@_vertex = undefined
@_vertexCount = undefined
@_vertexPool = []
@_face = undefined
@_face3Count = undefined
@_face3Pool = []
@_face4Count = undefined
@_face4Pool = []
@_line = undefined
@_lineCount = undefined
@_linePool = []
@_particle = undefined
@_particleCount = undefined
@_particlePool = []
@_renderData =
objects: []
sprites: []
lights: []
elements: []
@_vector3 = new THREE.Vector3()
@_vector4 = new THREE.Vector4()
@_projScreenMatrix = new THREE.Matrix4()
@_projScreenobjectMatrixWorld = new THREE.Matrix4()
@_frustum = new THREE.Frustum()
@_clippedVertex1PositionScreen = new THREE.Vector4()
@_clippedVertex2PositionScreen = new THREE.Vector4()
@_face3VertexNormals = undefined
getNextObjectInPool: ->
object = @_objectPool[@_objectCount] = @_objectPool[@_objectCount] or new THREE.RenderableObject()
@_objectCount++
object
getNextVertexInPool: ->
vertex = @_vertexPool[@_vertexCount] = @_vertexPool[@_vertexCount] or new THREE.RenderableVertex()
@_vertexCount++
vertex
getNextFace3InPool: ->
face = @_face3Pool[@_face3Count] = @_face3Pool[@_face3Count] or new THREE.RenderableFace3()
@_face3Count++
face
getNextFace4InPool: ->
face = @_face4Pool[@_face4Count] = @_face4Pool[@_face4Count] or new THREE.RenderableFace4()
@_face4Count++
face
getNextLineInPool: ->
line = @_linePool[@_lineCount] = @_linePool[@_lineCount] or new THREE.RenderableLine()
@_lineCount++
line
getNextParticleInPool: ->
particle = @_particlePool[@_particleCount] = @_particlePool[@_particleCount] or new THREE.RenderableParticle()
@_particleCount++
particle
@painterSort: (a, b) ->
b.z - a.z
clipLine: (s1, s2) ->
# Calculate the boundary coordinate of each vertex for the near and far clip planes,
# Z = -1 and Z = +1, respectively.
alpha1 = 0
alpha2 = 1
bc1near = s1.z + s1.w
bc2near = s2.z + s2.w
bc1far = -s1.z + s1.w
bc2far = -s2.z + s2.w
if bc1near >= 0 and bc2near >= 0 and bc1far >= 0 and bc2far >= 0
# Both vertices lie entirely within all clip planes.
true
else if (bc1near < 0 and bc2near < 0) or (bc1far < 0 and bc2far < 0)
# Both vertices lie entirely outside one of the clip planes.
false
else
# The line segment spans at least one clip plane.
if bc1near < 0
# v1 lies outside the near plane, v2 inside
alpha1 = Math.max(alpha1, bc1near / (bc1near - bc2near))
else if bc2near < 0
# v2 lies outside the near plane, v1 inside
alpha2 = Math.min(alpha2, bc1near / (bc1near - bc2near))
if bc1far < 0
# v1 lies outside the far plane, v2 inside
alpha1 = Math.max(alpha1, bc1far / (bc1far - bc2far))
else if bc2far < 0
# v2 lies outside the far plane, v2 inside
alpha2 = Math.min(alpha2, bc1far / (bc1far - bc2far))
if alpha2 < alpha1
# The line segment spans two boundaries, but is outside both of them.
# (This can't happen when we're only clipping against just near/far but good
# to leave the check here for future usage if other clip planes are added.)
false
else
# Update the s1 and s2 vertices to match the clipped line segment.
s1.lerpSelf s2, alpha1
s2.lerpSelf s1, 1 - alpha2
true
projectVector: (vector, camera) ->
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_projScreenMatrix.multiplyVector3 vector
vector
unprojectVector: (vector, camera) ->
camera.projectionMatrixInverse.getInverse camera.projectionMatrix
@_projScreenMatrix.multiply camera.matrixWorld, camera.projectionMatrixInverse
@_projScreenMatrix.multiplyVector3 vector
vector
pickingRay: (vector, camera) ->
# set two vectors with opposing z values
end = undefined
ray = undefined
t = undefined
vector.z = -1.0
end = new THREE.Vector3(vector.x, vector.y, 1.0)
@unprojectVector vector, camera
@unprojectVector end, camera
# find direction from vector to end
end.subSelf(vector).normalize()
new THREE.Ray(vector, end)
projectGraph: (root, sort) ->
@_objectCount = 0
@_renderData.objects.length = 0
@_renderData.sprites.length = 0
@_renderData.lights.length = 0
projectObject = (object) ->
return if object.visible is false
if (object instanceof THREE.Mesh or object instanceof THREE.Line) and (object.frustumCulled is false or @_frustum.contains(object))
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.objects.push @_object
else if object instanceof THREE.Sprite or object instanceof THREE.Particle
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.sprites.push @_object
else @_renderData.lights.push object if object instanceof THREE.Light
c = 0
cl = object.children.length
while c < cl
projectObject object.children[c]
c++
projectObject root
sort and @_renderData.objects.sort(@painterSort)
@_renderData
projectScene: (scene, camera, sort) ->
near = camera.near
far = camera.far
visible = false
o = undefined
ol = undefined
v = undefined
vl = undefined
f = undefined
fl = undefined
n = undefined
nl = undefined
c = undefined
cl = undefined
u = undefined
ul = undefined
object = undefined
objectMatrixWorld = undefined
objectMatrixWorldRotation = undefined
geometry = undefined
geometryMaterials = undefined
vertices = undefined
vertex = undefined
vertexPositionScreen = undefined
faces = undefined
face = undefined
faceVertexNormals = undefined
normal = undefined
faceVertexUvs = undefined
uvs = undefined
v1 = undefined
v2 = undefined
v3 = undefined
v4 = undefined
@_face3Count = 0
@_face4Count = 0
@_lineCount = 0
@_particleCount = 0
@_renderData.elements.length = 0
if camera.parent is undefined
console.warn "DEPRECATED: Camera hasn't been added to a Scene. Adding it..."
scene.add camera
scene.updateMatrixWorld()
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_frustum.setFromMatrix @_projScreenMatrix
@_renderData = @projectGraph(scene, false)
o = 0
ol = @_renderData.objects.length
while o < ol
object = @_renderData.objects[o].object
objectMatrixWorld = object.matrixWorld
@_vertexCount = 0
if object instanceof THREE.Mesh
geometry = object.geometry
geometryMaterials = object.geometry.materials
vertices = geometry.vertices
faces = geometry.faces
faceVertexUvs = geometry.faceVertexUvs
objectMatrixWorldRotation = object.matrixRotationWorld.extractRotation(objectMatrixWorld)
v = 0
vl = vertices.length
while v < vl
@_vertex = getNextVertexInPool()
@_vertex.positionWorld.copy vertices[v]
objectMatrixWorld.multiplyVector3 @_vertex.positionWorld
@_vertex.positionScreen.copy @_vertex.positionWorld
@_projScreenMatrix.multiplyVector4 @_vertex.positionScreen
@_vertex.positionScreen.x /= @_vertex.positionScreen.w
@_vertex.positionScreen.y /= @_vertex.positionScreen.w
@_vertex.visible = @_vertex.positionScreen.z > near and @_vertex.positionScreen.z < far
v++
f = 0
fl = faces.length
while f < fl
face = faces[f]
if face instanceof THREE.Face3
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
if v1.visible and v2.visible and v3.visible
visible = (v3.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v3.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace3InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
else
continue
else
continue
else if face instanceof THREE.Face4
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
v4 = @_vertexPool[face.d]
if v1.visible and v2.visible and v3.visible and v4.visible
visible = (v4.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v4.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0 or (v2.positionScreen.x - v3.positionScreen.x) * (v4.positionScreen.y - v3.positionScreen.y) - (v2.positionScreen.y - v3.positionScreen.y) * (v4.positionScreen.x - v3.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace4InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
@_face.v4.copy v4
else
continue
else
continue
@_face.normalWorld.copy face.normal
@_face.normalWorld.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 @_face.normalWorld
@_face.centroidWorld.copy face.centroid
objectMatrixWorld.multiplyVector3 @_face.centroidWorld
@_face.centroidScreen.copy @_face.centroidWorld
@_projScreenMatrix.multiplyVector3 @_face.centroidScreen
faceVertexNormals = face.vertexNormals
n = 0
nl = faceVertexNormals.length
while n < nl
normal = @_face.vertexNormalsWorld[n]
normal.copy faceVertexNormals[n]
normal.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 normal
n++
c = 0
cl = faceVertexUvs.length
while c < cl
uvs = faceVertexUvs[c][f]
continue unless uvs
u = 0
ul = uvs.length
while u < ul
@_face.uvs[c][u] = uvs[u]
u++
c++
@_face.material = object.material
@_face.faceMaterial = (if face.materialIndex isnt null then geometryMaterials[face.materialIndex] else null)
@_face.z = @_face.centroidScreen.z
@_renderData.elements.push @_face
f++
else if object instanceof THREE.Line
@_projScreenobjectMatrixWorld.multiply @_projScreenMatrix, objectMatrixWorld
vertices = object.geometry.vertices
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[0]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
# Handle LineStrip and LinePieces
step = (if object.type is THREE.LinePieces then 2 else 1)
v = 1
vl = vertices.length
while v < vl
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[v]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
continue if (v + 1) % step > 0
v2 = @_vertexPool[@_vertexCount - 2]
@_clippedVertex1PositionScreen.copy v1.positionScreen
@_clippedVertex2PositionScreen.copy v2.positionScreen
if clipLine(@_clippedVertex1PositionScreen, @_clippedVertex2PositionScreen)
# Perform the perspective divide
@_clippedVertex1PositionScreen.multiplyScalar 1 / @_clippedVertex1PositionScreen.w
@_clippedVertex2PositionScreen.multiplyScalar 1 / @_clippedVertex2PositionScreen.w
@_line = getNextLineInPool()
@_line.v1.positionScreen.copy @_clippedVertex1PositionScreen
@_line.v2.positionScreen.copy @_clippedVertex2PositionScreen
@_line.z = Math.max(@_clippedVertex1PositionScreen.z, @_clippedVertex2PositionScreen.z)
@_line.material = object.material
@_renderData.elements.push @_line
v++
o++
o = 0
ol = @_renderData.sprites.length
while o < ol
object = @_renderData.sprites[o].object
objectMatrixWorld = object.matrixWorld
if object instanceof THREE.Particle
@_vector4.set objectMatrixWorld.elements[12], objectMatrixWorld.elements[13], objectMatrixWorld.elements[14], 1
@_projScreenMatrix.multiplyVector4 @_vector4
@_vector4.z /= @_vector4.w
if @_vector4.z > 0 and @_vector4.z < 1
@_particle = getNextParticleInPool()
@_particle.x = @_vector4.x / @_vector4.w
@_particle.y = @_vector4.y / @_vector4.w
@_particle.z = @_vector4.z
@_particle.rotation = object.rotation.z
@_particle.scale.x = object.scale.x * Math.abs(@_particle.x - (@_vector4.x + camera.projectionMatrix.elements[0]) / (@_vector4.w + camera.projectionMatrix.elements[12]))
@_particle.scale.y = object.scale.y * Math.abs(@_particle.y - (@_vector4.y + camera.projectionMatrix.elements[5]) / (@_vector4.w + camera.projectionMatrix.elements[13]))
@_particle.material = object.material
@_renderData.elements.push @_particle
o++
sort and @_renderData.elements.sort(@painterSort)
@_renderData
namespace "THREE", (exports) ->
exports.Projector = Projector | 38828 | # @author mr.doob / http://mrdoob.com/
# @author supereggbert / http://www.paulbrunt.co.uk/
# @author julianwa / https://github.com/julianwa
# @author <EMAIL>
#= require new_src/core/matrix_4
#= require new_src/core/vector_3
#= require new_src/core/vector_4
#= require new_src/core/frustum
#= require new_src/core/ray
class Projector
constructor: ->
@_object = undefined
@_objectCount = undefined
@_objectPool = []
@_vertex = undefined
@_vertexCount = undefined
@_vertexPool = []
@_face = undefined
@_face3Count = undefined
@_face3Pool = []
@_face4Count = undefined
@_face4Pool = []
@_line = undefined
@_lineCount = undefined
@_linePool = []
@_particle = undefined
@_particleCount = undefined
@_particlePool = []
@_renderData =
objects: []
sprites: []
lights: []
elements: []
@_vector3 = new THREE.Vector3()
@_vector4 = new THREE.Vector4()
@_projScreenMatrix = new THREE.Matrix4()
@_projScreenobjectMatrixWorld = new THREE.Matrix4()
@_frustum = new THREE.Frustum()
@_clippedVertex1PositionScreen = new THREE.Vector4()
@_clippedVertex2PositionScreen = new THREE.Vector4()
@_face3VertexNormals = undefined
getNextObjectInPool: ->
object = @_objectPool[@_objectCount] = @_objectPool[@_objectCount] or new THREE.RenderableObject()
@_objectCount++
object
getNextVertexInPool: ->
vertex = @_vertexPool[@_vertexCount] = @_vertexPool[@_vertexCount] or new THREE.RenderableVertex()
@_vertexCount++
vertex
getNextFace3InPool: ->
face = @_face3Pool[@_face3Count] = @_face3Pool[@_face3Count] or new THREE.RenderableFace3()
@_face3Count++
face
getNextFace4InPool: ->
face = @_face4Pool[@_face4Count] = @_face4Pool[@_face4Count] or new THREE.RenderableFace4()
@_face4Count++
face
getNextLineInPool: ->
line = @_linePool[@_lineCount] = @_linePool[@_lineCount] or new THREE.RenderableLine()
@_lineCount++
line
getNextParticleInPool: ->
particle = @_particlePool[@_particleCount] = @_particlePool[@_particleCount] or new THREE.RenderableParticle()
@_particleCount++
particle
@painterSort: (a, b) ->
b.z - a.z
clipLine: (s1, s2) ->
# Calculate the boundary coordinate of each vertex for the near and far clip planes,
# Z = -1 and Z = +1, respectively.
alpha1 = 0
alpha2 = 1
bc1near = s1.z + s1.w
bc2near = s2.z + s2.w
bc1far = -s1.z + s1.w
bc2far = -s2.z + s2.w
if bc1near >= 0 and bc2near >= 0 and bc1far >= 0 and bc2far >= 0
# Both vertices lie entirely within all clip planes.
true
else if (bc1near < 0 and bc2near < 0) or (bc1far < 0 and bc2far < 0)
# Both vertices lie entirely outside one of the clip planes.
false
else
# The line segment spans at least one clip plane.
if bc1near < 0
# v1 lies outside the near plane, v2 inside
alpha1 = Math.max(alpha1, bc1near / (bc1near - bc2near))
else if bc2near < 0
# v2 lies outside the near plane, v1 inside
alpha2 = Math.min(alpha2, bc1near / (bc1near - bc2near))
if bc1far < 0
# v1 lies outside the far plane, v2 inside
alpha1 = Math.max(alpha1, bc1far / (bc1far - bc2far))
else if bc2far < 0
# v2 lies outside the far plane, v2 inside
alpha2 = Math.min(alpha2, bc1far / (bc1far - bc2far))
if alpha2 < alpha1
# The line segment spans two boundaries, but is outside both of them.
# (This can't happen when we're only clipping against just near/far but good
# to leave the check here for future usage if other clip planes are added.)
false
else
# Update the s1 and s2 vertices to match the clipped line segment.
s1.lerpSelf s2, alpha1
s2.lerpSelf s1, 1 - alpha2
true
projectVector: (vector, camera) ->
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_projScreenMatrix.multiplyVector3 vector
vector
unprojectVector: (vector, camera) ->
camera.projectionMatrixInverse.getInverse camera.projectionMatrix
@_projScreenMatrix.multiply camera.matrixWorld, camera.projectionMatrixInverse
@_projScreenMatrix.multiplyVector3 vector
vector
pickingRay: (vector, camera) ->
# set two vectors with opposing z values
end = undefined
ray = undefined
t = undefined
vector.z = -1.0
end = new THREE.Vector3(vector.x, vector.y, 1.0)
@unprojectVector vector, camera
@unprojectVector end, camera
# find direction from vector to end
end.subSelf(vector).normalize()
new THREE.Ray(vector, end)
projectGraph: (root, sort) ->
@_objectCount = 0
@_renderData.objects.length = 0
@_renderData.sprites.length = 0
@_renderData.lights.length = 0
projectObject = (object) ->
return if object.visible is false
if (object instanceof THREE.Mesh or object instanceof THREE.Line) and (object.frustumCulled is false or @_frustum.contains(object))
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.objects.push @_object
else if object instanceof THREE.Sprite or object instanceof THREE.Particle
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.sprites.push @_object
else @_renderData.lights.push object if object instanceof THREE.Light
c = 0
cl = object.children.length
while c < cl
projectObject object.children[c]
c++
projectObject root
sort and @_renderData.objects.sort(@painterSort)
@_renderData
projectScene: (scene, camera, sort) ->
near = camera.near
far = camera.far
visible = false
o = undefined
ol = undefined
v = undefined
vl = undefined
f = undefined
fl = undefined
n = undefined
nl = undefined
c = undefined
cl = undefined
u = undefined
ul = undefined
object = undefined
objectMatrixWorld = undefined
objectMatrixWorldRotation = undefined
geometry = undefined
geometryMaterials = undefined
vertices = undefined
vertex = undefined
vertexPositionScreen = undefined
faces = undefined
face = undefined
faceVertexNormals = undefined
normal = undefined
faceVertexUvs = undefined
uvs = undefined
v1 = undefined
v2 = undefined
v3 = undefined
v4 = undefined
@_face3Count = 0
@_face4Count = 0
@_lineCount = 0
@_particleCount = 0
@_renderData.elements.length = 0
if camera.parent is undefined
console.warn "DEPRECATED: Camera hasn't been added to a Scene. Adding it..."
scene.add camera
scene.updateMatrixWorld()
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_frustum.setFromMatrix @_projScreenMatrix
@_renderData = @projectGraph(scene, false)
o = 0
ol = @_renderData.objects.length
while o < ol
object = @_renderData.objects[o].object
objectMatrixWorld = object.matrixWorld
@_vertexCount = 0
if object instanceof THREE.Mesh
geometry = object.geometry
geometryMaterials = object.geometry.materials
vertices = geometry.vertices
faces = geometry.faces
faceVertexUvs = geometry.faceVertexUvs
objectMatrixWorldRotation = object.matrixRotationWorld.extractRotation(objectMatrixWorld)
v = 0
vl = vertices.length
while v < vl
@_vertex = getNextVertexInPool()
@_vertex.positionWorld.copy vertices[v]
objectMatrixWorld.multiplyVector3 @_vertex.positionWorld
@_vertex.positionScreen.copy @_vertex.positionWorld
@_projScreenMatrix.multiplyVector4 @_vertex.positionScreen
@_vertex.positionScreen.x /= @_vertex.positionScreen.w
@_vertex.positionScreen.y /= @_vertex.positionScreen.w
@_vertex.visible = @_vertex.positionScreen.z > near and @_vertex.positionScreen.z < far
v++
f = 0
fl = faces.length
while f < fl
face = faces[f]
if face instanceof THREE.Face3
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
if v1.visible and v2.visible and v3.visible
visible = (v3.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v3.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace3InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
else
continue
else
continue
else if face instanceof THREE.Face4
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
v4 = @_vertexPool[face.d]
if v1.visible and v2.visible and v3.visible and v4.visible
visible = (v4.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v4.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0 or (v2.positionScreen.x - v3.positionScreen.x) * (v4.positionScreen.y - v3.positionScreen.y) - (v2.positionScreen.y - v3.positionScreen.y) * (v4.positionScreen.x - v3.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace4InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
@_face.v4.copy v4
else
continue
else
continue
@_face.normalWorld.copy face.normal
@_face.normalWorld.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 @_face.normalWorld
@_face.centroidWorld.copy face.centroid
objectMatrixWorld.multiplyVector3 @_face.centroidWorld
@_face.centroidScreen.copy @_face.centroidWorld
@_projScreenMatrix.multiplyVector3 @_face.centroidScreen
faceVertexNormals = face.vertexNormals
n = 0
nl = faceVertexNormals.length
while n < nl
normal = @_face.vertexNormalsWorld[n]
normal.copy faceVertexNormals[n]
normal.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 normal
n++
c = 0
cl = faceVertexUvs.length
while c < cl
uvs = faceVertexUvs[c][f]
continue unless uvs
u = 0
ul = uvs.length
while u < ul
@_face.uvs[c][u] = uvs[u]
u++
c++
@_face.material = object.material
@_face.faceMaterial = (if face.materialIndex isnt null then geometryMaterials[face.materialIndex] else null)
@_face.z = @_face.centroidScreen.z
@_renderData.elements.push @_face
f++
else if object instanceof THREE.Line
@_projScreenobjectMatrixWorld.multiply @_projScreenMatrix, objectMatrixWorld
vertices = object.geometry.vertices
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[0]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
# Handle LineStrip and LinePieces
step = (if object.type is THREE.LinePieces then 2 else 1)
v = 1
vl = vertices.length
while v < vl
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[v]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
continue if (v + 1) % step > 0
v2 = @_vertexPool[@_vertexCount - 2]
@_clippedVertex1PositionScreen.copy v1.positionScreen
@_clippedVertex2PositionScreen.copy v2.positionScreen
if clipLine(@_clippedVertex1PositionScreen, @_clippedVertex2PositionScreen)
# Perform the perspective divide
@_clippedVertex1PositionScreen.multiplyScalar 1 / @_clippedVertex1PositionScreen.w
@_clippedVertex2PositionScreen.multiplyScalar 1 / @_clippedVertex2PositionScreen.w
@_line = getNextLineInPool()
@_line.v1.positionScreen.copy @_clippedVertex1PositionScreen
@_line.v2.positionScreen.copy @_clippedVertex2PositionScreen
@_line.z = Math.max(@_clippedVertex1PositionScreen.z, @_clippedVertex2PositionScreen.z)
@_line.material = object.material
@_renderData.elements.push @_line
v++
o++
o = 0
ol = @_renderData.sprites.length
while o < ol
object = @_renderData.sprites[o].object
objectMatrixWorld = object.matrixWorld
if object instanceof THREE.Particle
@_vector4.set objectMatrixWorld.elements[12], objectMatrixWorld.elements[13], objectMatrixWorld.elements[14], 1
@_projScreenMatrix.multiplyVector4 @_vector4
@_vector4.z /= @_vector4.w
if @_vector4.z > 0 and @_vector4.z < 1
@_particle = getNextParticleInPool()
@_particle.x = @_vector4.x / @_vector4.w
@_particle.y = @_vector4.y / @_vector4.w
@_particle.z = @_vector4.z
@_particle.rotation = object.rotation.z
@_particle.scale.x = object.scale.x * Math.abs(@_particle.x - (@_vector4.x + camera.projectionMatrix.elements[0]) / (@_vector4.w + camera.projectionMatrix.elements[12]))
@_particle.scale.y = object.scale.y * Math.abs(@_particle.y - (@_vector4.y + camera.projectionMatrix.elements[5]) / (@_vector4.w + camera.projectionMatrix.elements[13]))
@_particle.material = object.material
@_renderData.elements.push @_particle
o++
sort and @_renderData.elements.sort(@painterSort)
@_renderData
namespace "THREE", (exports) ->
exports.Projector = Projector | true | # @author mr.doob / http://mrdoob.com/
# @author supereggbert / http://www.paulbrunt.co.uk/
# @author julianwa / https://github.com/julianwa
# @author PI:EMAIL:<EMAIL>END_PI
#= require new_src/core/matrix_4
#= require new_src/core/vector_3
#= require new_src/core/vector_4
#= require new_src/core/frustum
#= require new_src/core/ray
class Projector
constructor: ->
@_object = undefined
@_objectCount = undefined
@_objectPool = []
@_vertex = undefined
@_vertexCount = undefined
@_vertexPool = []
@_face = undefined
@_face3Count = undefined
@_face3Pool = []
@_face4Count = undefined
@_face4Pool = []
@_line = undefined
@_lineCount = undefined
@_linePool = []
@_particle = undefined
@_particleCount = undefined
@_particlePool = []
@_renderData =
objects: []
sprites: []
lights: []
elements: []
@_vector3 = new THREE.Vector3()
@_vector4 = new THREE.Vector4()
@_projScreenMatrix = new THREE.Matrix4()
@_projScreenobjectMatrixWorld = new THREE.Matrix4()
@_frustum = new THREE.Frustum()
@_clippedVertex1PositionScreen = new THREE.Vector4()
@_clippedVertex2PositionScreen = new THREE.Vector4()
@_face3VertexNormals = undefined
getNextObjectInPool: ->
object = @_objectPool[@_objectCount] = @_objectPool[@_objectCount] or new THREE.RenderableObject()
@_objectCount++
object
getNextVertexInPool: ->
vertex = @_vertexPool[@_vertexCount] = @_vertexPool[@_vertexCount] or new THREE.RenderableVertex()
@_vertexCount++
vertex
getNextFace3InPool: ->
face = @_face3Pool[@_face3Count] = @_face3Pool[@_face3Count] or new THREE.RenderableFace3()
@_face3Count++
face
getNextFace4InPool: ->
face = @_face4Pool[@_face4Count] = @_face4Pool[@_face4Count] or new THREE.RenderableFace4()
@_face4Count++
face
getNextLineInPool: ->
line = @_linePool[@_lineCount] = @_linePool[@_lineCount] or new THREE.RenderableLine()
@_lineCount++
line
getNextParticleInPool: ->
particle = @_particlePool[@_particleCount] = @_particlePool[@_particleCount] or new THREE.RenderableParticle()
@_particleCount++
particle
@painterSort: (a, b) ->
b.z - a.z
clipLine: (s1, s2) ->
# Calculate the boundary coordinate of each vertex for the near and far clip planes,
# Z = -1 and Z = +1, respectively.
alpha1 = 0
alpha2 = 1
bc1near = s1.z + s1.w
bc2near = s2.z + s2.w
bc1far = -s1.z + s1.w
bc2far = -s2.z + s2.w
if bc1near >= 0 and bc2near >= 0 and bc1far >= 0 and bc2far >= 0
# Both vertices lie entirely within all clip planes.
true
else if (bc1near < 0 and bc2near < 0) or (bc1far < 0 and bc2far < 0)
# Both vertices lie entirely outside one of the clip planes.
false
else
# The line segment spans at least one clip plane.
if bc1near < 0
# v1 lies outside the near plane, v2 inside
alpha1 = Math.max(alpha1, bc1near / (bc1near - bc2near))
else if bc2near < 0
# v2 lies outside the near plane, v1 inside
alpha2 = Math.min(alpha2, bc1near / (bc1near - bc2near))
if bc1far < 0
# v1 lies outside the far plane, v2 inside
alpha1 = Math.max(alpha1, bc1far / (bc1far - bc2far))
else if bc2far < 0
# v2 lies outside the far plane, v2 inside
alpha2 = Math.min(alpha2, bc1far / (bc1far - bc2far))
if alpha2 < alpha1
# The line segment spans two boundaries, but is outside both of them.
# (This can't happen when we're only clipping against just near/far but good
# to leave the check here for future usage if other clip planes are added.)
false
else
# Update the s1 and s2 vertices to match the clipped line segment.
s1.lerpSelf s2, alpha1
s2.lerpSelf s1, 1 - alpha2
true
projectVector: (vector, camera) ->
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_projScreenMatrix.multiplyVector3 vector
vector
unprojectVector: (vector, camera) ->
camera.projectionMatrixInverse.getInverse camera.projectionMatrix
@_projScreenMatrix.multiply camera.matrixWorld, camera.projectionMatrixInverse
@_projScreenMatrix.multiplyVector3 vector
vector
pickingRay: (vector, camera) ->
# set two vectors with opposing z values
end = undefined
ray = undefined
t = undefined
vector.z = -1.0
end = new THREE.Vector3(vector.x, vector.y, 1.0)
@unprojectVector vector, camera
@unprojectVector end, camera
# find direction from vector to end
end.subSelf(vector).normalize()
new THREE.Ray(vector, end)
projectGraph: (root, sort) ->
@_objectCount = 0
@_renderData.objects.length = 0
@_renderData.sprites.length = 0
@_renderData.lights.length = 0
projectObject = (object) ->
return if object.visible is false
if (object instanceof THREE.Mesh or object instanceof THREE.Line) and (object.frustumCulled is false or @_frustum.contains(object))
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.objects.push @_object
else if object instanceof THREE.Sprite or object instanceof THREE.Particle
@_vector3.copy object.matrixWorld.getPosition()
@_projScreenMatrix.multiplyVector3 @_vector3
@_object = getNextObjectInPool()
@_object.object = object
@_object.z = @_vector3.z
@_renderData.sprites.push @_object
else @_renderData.lights.push object if object instanceof THREE.Light
c = 0
cl = object.children.length
while c < cl
projectObject object.children[c]
c++
projectObject root
sort and @_renderData.objects.sort(@painterSort)
@_renderData
projectScene: (scene, camera, sort) ->
near = camera.near
far = camera.far
visible = false
o = undefined
ol = undefined
v = undefined
vl = undefined
f = undefined
fl = undefined
n = undefined
nl = undefined
c = undefined
cl = undefined
u = undefined
ul = undefined
object = undefined
objectMatrixWorld = undefined
objectMatrixWorldRotation = undefined
geometry = undefined
geometryMaterials = undefined
vertices = undefined
vertex = undefined
vertexPositionScreen = undefined
faces = undefined
face = undefined
faceVertexNormals = undefined
normal = undefined
faceVertexUvs = undefined
uvs = undefined
v1 = undefined
v2 = undefined
v3 = undefined
v4 = undefined
@_face3Count = 0
@_face4Count = 0
@_lineCount = 0
@_particleCount = 0
@_renderData.elements.length = 0
if camera.parent is undefined
console.warn "DEPRECATED: Camera hasn't been added to a Scene. Adding it..."
scene.add camera
scene.updateMatrixWorld()
camera.matrixWorldInverse.getInverse camera.matrixWorld
@_projScreenMatrix.multiply camera.projectionMatrix, camera.matrixWorldInverse
@_frustum.setFromMatrix @_projScreenMatrix
@_renderData = @projectGraph(scene, false)
o = 0
ol = @_renderData.objects.length
while o < ol
object = @_renderData.objects[o].object
objectMatrixWorld = object.matrixWorld
@_vertexCount = 0
if object instanceof THREE.Mesh
geometry = object.geometry
geometryMaterials = object.geometry.materials
vertices = geometry.vertices
faces = geometry.faces
faceVertexUvs = geometry.faceVertexUvs
objectMatrixWorldRotation = object.matrixRotationWorld.extractRotation(objectMatrixWorld)
v = 0
vl = vertices.length
while v < vl
@_vertex = getNextVertexInPool()
@_vertex.positionWorld.copy vertices[v]
objectMatrixWorld.multiplyVector3 @_vertex.positionWorld
@_vertex.positionScreen.copy @_vertex.positionWorld
@_projScreenMatrix.multiplyVector4 @_vertex.positionScreen
@_vertex.positionScreen.x /= @_vertex.positionScreen.w
@_vertex.positionScreen.y /= @_vertex.positionScreen.w
@_vertex.visible = @_vertex.positionScreen.z > near and @_vertex.positionScreen.z < far
v++
f = 0
fl = faces.length
while f < fl
face = faces[f]
if face instanceof THREE.Face3
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
if v1.visible and v2.visible and v3.visible
visible = (v3.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v3.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace3InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
else
continue
else
continue
else if face instanceof THREE.Face4
v1 = @_vertexPool[face.a]
v2 = @_vertexPool[face.b]
v3 = @_vertexPool[face.c]
v4 = @_vertexPool[face.d]
if v1.visible and v2.visible and v3.visible and v4.visible
visible = (v4.positionScreen.x - v1.positionScreen.x) * (v2.positionScreen.y - v1.positionScreen.y) - (v4.positionScreen.y - v1.positionScreen.y) * (v2.positionScreen.x - v1.positionScreen.x) < 0 or (v2.positionScreen.x - v3.positionScreen.x) * (v4.positionScreen.y - v3.positionScreen.y) - (v2.positionScreen.y - v3.positionScreen.y) * (v4.positionScreen.x - v3.positionScreen.x) < 0
if object.doubleSided or visible isnt object.flipSided
@_face = getNextFace4InPool()
@_face.v1.copy v1
@_face.v2.copy v2
@_face.v3.copy v3
@_face.v4.copy v4
else
continue
else
continue
@_face.normalWorld.copy face.normal
@_face.normalWorld.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 @_face.normalWorld
@_face.centroidWorld.copy face.centroid
objectMatrixWorld.multiplyVector3 @_face.centroidWorld
@_face.centroidScreen.copy @_face.centroidWorld
@_projScreenMatrix.multiplyVector3 @_face.centroidScreen
faceVertexNormals = face.vertexNormals
n = 0
nl = faceVertexNormals.length
while n < nl
normal = @_face.vertexNormalsWorld[n]
normal.copy faceVertexNormals[n]
normal.negate() if not visible and (object.flipSided or object.doubleSided)
objectMatrixWorldRotation.multiplyVector3 normal
n++
c = 0
cl = faceVertexUvs.length
while c < cl
uvs = faceVertexUvs[c][f]
continue unless uvs
u = 0
ul = uvs.length
while u < ul
@_face.uvs[c][u] = uvs[u]
u++
c++
@_face.material = object.material
@_face.faceMaterial = (if face.materialIndex isnt null then geometryMaterials[face.materialIndex] else null)
@_face.z = @_face.centroidScreen.z
@_renderData.elements.push @_face
f++
else if object instanceof THREE.Line
@_projScreenobjectMatrixWorld.multiply @_projScreenMatrix, objectMatrixWorld
vertices = object.geometry.vertices
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[0]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
# Handle LineStrip and LinePieces
step = (if object.type is THREE.LinePieces then 2 else 1)
v = 1
vl = vertices.length
while v < vl
v1 = getNextVertexInPool()
v1.positionScreen.copy vertices[v]
@_projScreenobjectMatrixWorld.multiplyVector4 v1.positionScreen
continue if (v + 1) % step > 0
v2 = @_vertexPool[@_vertexCount - 2]
@_clippedVertex1PositionScreen.copy v1.positionScreen
@_clippedVertex2PositionScreen.copy v2.positionScreen
if clipLine(@_clippedVertex1PositionScreen, @_clippedVertex2PositionScreen)
# Perform the perspective divide
@_clippedVertex1PositionScreen.multiplyScalar 1 / @_clippedVertex1PositionScreen.w
@_clippedVertex2PositionScreen.multiplyScalar 1 / @_clippedVertex2PositionScreen.w
@_line = getNextLineInPool()
@_line.v1.positionScreen.copy @_clippedVertex1PositionScreen
@_line.v2.positionScreen.copy @_clippedVertex2PositionScreen
@_line.z = Math.max(@_clippedVertex1PositionScreen.z, @_clippedVertex2PositionScreen.z)
@_line.material = object.material
@_renderData.elements.push @_line
v++
o++
o = 0
ol = @_renderData.sprites.length
while o < ol
object = @_renderData.sprites[o].object
objectMatrixWorld = object.matrixWorld
if object instanceof THREE.Particle
@_vector4.set objectMatrixWorld.elements[12], objectMatrixWorld.elements[13], objectMatrixWorld.elements[14], 1
@_projScreenMatrix.multiplyVector4 @_vector4
@_vector4.z /= @_vector4.w
if @_vector4.z > 0 and @_vector4.z < 1
@_particle = getNextParticleInPool()
@_particle.x = @_vector4.x / @_vector4.w
@_particle.y = @_vector4.y / @_vector4.w
@_particle.z = @_vector4.z
@_particle.rotation = object.rotation.z
@_particle.scale.x = object.scale.x * Math.abs(@_particle.x - (@_vector4.x + camera.projectionMatrix.elements[0]) / (@_vector4.w + camera.projectionMatrix.elements[12]))
@_particle.scale.y = object.scale.y * Math.abs(@_particle.y - (@_vector4.y + camera.projectionMatrix.elements[5]) / (@_vector4.w + camera.projectionMatrix.elements[13]))
@_particle.material = object.material
@_renderData.elements.push @_particle
o++
sort and @_renderData.elements.sort(@painterSort)
@_renderData
namespace "THREE", (exports) ->
exports.Projector = Projector |
[
{
"context": "onid, action: 'Validate', userid: cred.user, pass: cred.password}}, (err,resp,body) ->\n cred.loggedin=false\n ",
"end": 943,
"score": 0.9987649917602539,
"start": 930,
"tag": "PASSWORD",
"value": "cred.password"
},
{
"context": "\nnew Promise (next) ->\n Promp.get [\n name: 'username'\n {name: 'password',hidden: true}\n ], (er",
"end": 3213,
"score": 0.9960950016975403,
"start": 3205,
"tag": "USERNAME",
"value": "username"
},
{
"context": "\n cred.user=val.username\n cred.password=val.password\n next()\n return\n.then -> getSessionID()",
"end": 3361,
"score": 0.9977532029151917,
"start": 3349,
"tag": "PASSWORD",
"value": "val.password"
}
] | index.coffee | 1upon0/nodejs-iitd-proxy-login | 1 | Prompt = require 'prompt'
Promise = require 'promise'
Request = require 'request'
cred =
url:'https://proxy22.iitd.ernet.in/cgi-bin/proxy.cgi'
loggedin: false
requestTimeout: 3000
retryTimeout: 2000
refreshTimeout: 120000
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
getSessionID = -> new Promise retry = (next) ->
console.log "Obtaining Session ID"
Request {url:cred.url,timeout:cred.requestTimeout}, (error, response, body) ->
if error
console.log error,"\nRetrying in 2s..."
setTimeout (-> retry next), cred.retryTimeout
else
cred.sessionid=(/"sessionid".+?value="([^"]+?)">/i.exec body)[1]
console.log "Session ID:"+cred.sessionid
next()
return
return
login = ->new Promise retry= (next,fail) ->
console.log "Logging in..."
Request {url: cred.url, timeout: cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Validate', userid: cred.user, pass: cred.password}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nretrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Login successful. Press any key to logout"
cred.loggedin=true
next()
else if body.match /Either your userid and/i
console.log "Invalid credentials! Aborting"
fail()
else if logged_user=(/([\w]+?) already logged in/i.exec body)
logged_user=logged_user[1]
console.log "Already logged in as #{logged_user}. retrying in 2s..."
setTimeout (-> logout().then -> retry next, fail), cred.retryTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(next).catch(fail)
else
console.log "Unknown response. Retrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
return
return
refresh = ->
unless cred.loggedin
return
console.log "Refreshing..."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Refresh'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Success. Press any key to logout"
cred.loggedin=true
setTimeout refresh, cred.refreshTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(refresh)
return
return
logout = -> new Promise retry =(next)->
console.log "Logging out.."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'logout'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->retry next), cred.retryTimeout
else if body.match /logged out from/i
console.log "Success"
next()
else if body.match /Session expired/i
console.log "Session expired."
next()
return
return
Prompt.start()
new Promise (next) ->
Promp.get [
name: 'username'
{name: 'password',hidden: true}
], (er, val) ->
console.log(er,val);
cred.user=val.username
cred.password=val.password
next()
return
.then -> getSessionID()
.then -> login()
.then -> refresh()
###
process.stdin.setRawMode true
process.stdin.setEncoding 'utf8'
process.stdin.resume()
process.stdin.on 'data', (chunk) ->
#chunk = process.stdin.read()
if(!chunk?)
return
console.log chunk.toString()
key = chunk.toString()[0]
if cred.loggedin
logout().then -> console.log "Press q to login again"
else if cred.password? and key is 'q'
getSessionID().then ->login().then(refresh)
###
do keepBusy= ->
setTimeout keepBusy,10000 | 7008 | Prompt = require 'prompt'
Promise = require 'promise'
Request = require 'request'
cred =
url:'https://proxy22.iitd.ernet.in/cgi-bin/proxy.cgi'
loggedin: false
requestTimeout: 3000
retryTimeout: 2000
refreshTimeout: 120000
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
getSessionID = -> new Promise retry = (next) ->
console.log "Obtaining Session ID"
Request {url:cred.url,timeout:cred.requestTimeout}, (error, response, body) ->
if error
console.log error,"\nRetrying in 2s..."
setTimeout (-> retry next), cred.retryTimeout
else
cred.sessionid=(/"sessionid".+?value="([^"]+?)">/i.exec body)[1]
console.log "Session ID:"+cred.sessionid
next()
return
return
login = ->new Promise retry= (next,fail) ->
console.log "Logging in..."
Request {url: cred.url, timeout: cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Validate', userid: cred.user, pass: <PASSWORD>}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nretrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Login successful. Press any key to logout"
cred.loggedin=true
next()
else if body.match /Either your userid and/i
console.log "Invalid credentials! Aborting"
fail()
else if logged_user=(/([\w]+?) already logged in/i.exec body)
logged_user=logged_user[1]
console.log "Already logged in as #{logged_user}. retrying in 2s..."
setTimeout (-> logout().then -> retry next, fail), cred.retryTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(next).catch(fail)
else
console.log "Unknown response. Retrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
return
return
refresh = ->
unless cred.loggedin
return
console.log "Refreshing..."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Refresh'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Success. Press any key to logout"
cred.loggedin=true
setTimeout refresh, cred.refreshTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(refresh)
return
return
logout = -> new Promise retry =(next)->
console.log "Logging out.."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'logout'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->retry next), cred.retryTimeout
else if body.match /logged out from/i
console.log "Success"
next()
else if body.match /Session expired/i
console.log "Session expired."
next()
return
return
Prompt.start()
new Promise (next) ->
Promp.get [
name: 'username'
{name: 'password',hidden: true}
], (er, val) ->
console.log(er,val);
cred.user=val.username
cred.password=<PASSWORD>
next()
return
.then -> getSessionID()
.then -> login()
.then -> refresh()
###
process.stdin.setRawMode true
process.stdin.setEncoding 'utf8'
process.stdin.resume()
process.stdin.on 'data', (chunk) ->
#chunk = process.stdin.read()
if(!chunk?)
return
console.log chunk.toString()
key = chunk.toString()[0]
if cred.loggedin
logout().then -> console.log "Press q to login again"
else if cred.password? and key is 'q'
getSessionID().then ->login().then(refresh)
###
do keepBusy= ->
setTimeout keepBusy,10000 | true | Prompt = require 'prompt'
Promise = require 'promise'
Request = require 'request'
cred =
url:'https://proxy22.iitd.ernet.in/cgi-bin/proxy.cgi'
loggedin: false
requestTimeout: 3000
retryTimeout: 2000
refreshTimeout: 120000
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
getSessionID = -> new Promise retry = (next) ->
console.log "Obtaining Session ID"
Request {url:cred.url,timeout:cred.requestTimeout}, (error, response, body) ->
if error
console.log error,"\nRetrying in 2s..."
setTimeout (-> retry next), cred.retryTimeout
else
cred.sessionid=(/"sessionid".+?value="([^"]+?)">/i.exec body)[1]
console.log "Session ID:"+cred.sessionid
next()
return
return
login = ->new Promise retry= (next,fail) ->
console.log "Logging in..."
Request {url: cred.url, timeout: cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Validate', userid: cred.user, pass: PI:PASSWORD:<PASSWORD>END_PI}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nretrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Login successful. Press any key to logout"
cred.loggedin=true
next()
else if body.match /Either your userid and/i
console.log "Invalid credentials! Aborting"
fail()
else if logged_user=(/([\w]+?) already logged in/i.exec body)
logged_user=logged_user[1]
console.log "Already logged in as #{logged_user}. retrying in 2s..."
setTimeout (-> logout().then -> retry next, fail), cred.retryTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(next).catch(fail)
else
console.log "Unknown response. Retrying in 2s..."
setTimeout (-> login next, fail), cred.retryTimeout
return
return
refresh = ->
unless cred.loggedin
return
console.log "Refreshing..."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'Refresh'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->login next, fail), cred.retryTimeout
else if body.match /you are logged in successfully/i
console.log "Success. Press any key to logout"
cred.loggedin=true
setTimeout refresh, cred.refreshTimeout
else if body.match /Session expired/i
console.log "Session expired."
getSessionID().then ->login().then(refresh)
return
return
logout = -> new Promise retry =(next)->
console.log "Logging out.."
Request {url:cred.url,timeout:cred.requestTimeout, form: {sessionid: cred.sessionid, action: 'logout'}}, (err,resp,body) ->
cred.loggedin=false
if err
console.log err,"\nRetrying in 2s..."
setTimeout (->retry next), cred.retryTimeout
else if body.match /logged out from/i
console.log "Success"
next()
else if body.match /Session expired/i
console.log "Session expired."
next()
return
return
Prompt.start()
new Promise (next) ->
Promp.get [
name: 'username'
{name: 'password',hidden: true}
], (er, val) ->
console.log(er,val);
cred.user=val.username
cred.password=PI:PASSWORD:<PASSWORD>END_PI
next()
return
.then -> getSessionID()
.then -> login()
.then -> refresh()
###
process.stdin.setRawMode true
process.stdin.setEncoding 'utf8'
process.stdin.resume()
process.stdin.on 'data', (chunk) ->
#chunk = process.stdin.read()
if(!chunk?)
return
console.log chunk.toString()
key = chunk.toString()[0]
if cred.loggedin
logout().then -> console.log "Press q to login again"
else if cred.password? and key is 'q'
getSessionID().then ->login().then(refresh)
###
do keepBusy= ->
setTimeout keepBusy,10000 |
[
{
"context": "eate_account\n 'screen_name': sn\n 'password': pw\n 'email': email\n 'name': sn\n , (err, data,",
"end": 643,
"score": 0.999187707901001,
"start": 641,
"tag": "PASSWORD",
"value": "pw"
}
] | app.coffee | ky0615/create-twitter-account | 4 | require 'colors'
path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
config = yaml.safeLoad fs.readFileSync path.join __dirname, 'config.yml'
unless config
console.error 'I can not find the config file. Please copy the example file.'.red.underline
console.error ' hint:'
console.error ' $ cp config.yml.example config.yml'
console.error ' $ vim config.yml'
return
Twitter = require './lib/Twitter'
tw = new Twitter config
sn = config.Twitter.account.screen_name
email = config.Twitter.account.email
pw = config.Twitter.account.password
tw.create_account
'screen_name': sn
'password': pw
'email': email
'name': sn
, (err, data, res)->
console.log err
| 176789 | require 'colors'
path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
config = yaml.safeLoad fs.readFileSync path.join __dirname, 'config.yml'
unless config
console.error 'I can not find the config file. Please copy the example file.'.red.underline
console.error ' hint:'
console.error ' $ cp config.yml.example config.yml'
console.error ' $ vim config.yml'
return
Twitter = require './lib/Twitter'
tw = new Twitter config
sn = config.Twitter.account.screen_name
email = config.Twitter.account.email
pw = config.Twitter.account.password
tw.create_account
'screen_name': sn
'password': <PASSWORD>
'email': email
'name': sn
, (err, data, res)->
console.log err
| true | require 'colors'
path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
config = yaml.safeLoad fs.readFileSync path.join __dirname, 'config.yml'
unless config
console.error 'I can not find the config file. Please copy the example file.'.red.underline
console.error ' hint:'
console.error ' $ cp config.yml.example config.yml'
console.error ' $ vim config.yml'
return
Twitter = require './lib/Twitter'
tw = new Twitter config
sn = config.Twitter.account.screen_name
email = config.Twitter.account.email
pw = config.Twitter.account.password
tw.create_account
'screen_name': sn
'password': PI:PASSWORD:<PASSWORD>END_PI
'email': email
'name': sn
, (err, data, res)->
console.log err
|
[
{
"context": " backbone-orm.js 0.7.14\n Copyright (c) 2013-2016 Vidigami\n License: MIT (http://www.opensource.org/license",
"end": 63,
"score": 0.9998732209205627,
"start": 55,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": "ses/mit-license.php)\n Source: https://github.com/vidigami/backbone-orm\n Dependencies: Backbone.js and Unde",
"end": 169,
"score": 0.9883697032928467,
"start": 161,
"tag": "USERNAME",
"value": "vidigami"
}
] | src/relations/relation.coffee | dk-dev/backbone-orm | 54 | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 Vidigami
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
_ = require 'underscore'
Backbone = require 'backbone'
BackboneORM = require '../core'
Queue = require '../lib/queue'
Utils = require '../lib/utils'
# @nodoc
module.exports = class Relation
# hasJoinTable: -> return !!@join_table or (@reverse_relation and !!@reverse_relation.join_table)
# isManyToMany: -> return @type is 'hasMany' and @reverse_relation and @reverse_relation.type is 'hasMany'
isEmbedded: -> return !!(@embed or (@reverse_relation and @reverse_relation.embed))
isVirtual: -> return !!(@virtual or (@reverse_relation and @reverse_relation.virtual))
findOrGenerateJoinTable: ->
# already exists
return join_table if join_table = (@join_table or @reverse_relation.join_table)
return @model_type.schema().generateJoinTable(@)
_findOrGenerateReverseRelation: ->
model_type = @model_type
reverse_model_type = @reverse_model_type
reverse_model_type.sync = model_type.createSync(reverse_model_type) unless _.isFunction(reverse_model_type.schema) # convert to relational
reverse_relation = reverse_model_type.relation(@as)
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, false)) unless reverse_relation # singular
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, true)) unless reverse_relation # plural
if not reverse_relation and (@type isnt 'belongsTo')
reverse_relation = reverse_model_type.schema().generateBelongsTo(model_type)
reverse_relation.reverse_relation = @ if reverse_relation and not reverse_relation.reverse_relation
return reverse_relation
_saveRelated: (model, related_models, callback) ->
return callback() if @embed or not @reverse_relation or (@type is 'belongsTo') # no foriegn key, no save required
return callback() if @isVirtual() # skip virtual attributes
@cursor(model, @key).toJSON (err, json) =>
return callback(err) if err
json = (if json then [json] else []) unless _.isArray(json) # a One relation
queue = new Queue(1)
related_ids = _.pluck(related_models, 'id')
changes = _.groupBy(json, (test) => if _.contains(related_ids, test.id) then 'kept' else 'removed')
added_ids = if changes.kept then _.difference(related_ids, (test.id for test in changes.kept)) else related_ids
# destroy removed
if changes.removed
if @join_table
queue.defer (callback) =>
query = {}
query[@join_key] = model.id
query[@reverse_relation.join_key] = {$in: (related_json[@reverse_model_type::idAttribute] for related_json in changes.removed)}
@join_table.destroy query, callback
else
queue.defer (callback) =>
Utils.each changes.removed, ((related_json, callback) =>
related_json[@reverse_relation.foreign_key] = null
Utils.modelJSONSave(related_json, @reverse_model_type, callback)
), callback
# create new
if added_ids.length
if @join_table
queue.defer (callback) =>
Utils.each added_ids, ((related_id, callback) =>
attributes = {}
attributes[@foreign_key] = model.id
attributes[@reverse_relation.foreign_key] = related_id
# console.log "Creating join for: #{@model_type.model_name} join: #{JSONUtils.stringify(attributes)}"
join = new @join_table(attributes)
join.save callback
), callback
else
# add new, if they have changed
queue.defer (callback) =>
Utils.each added_ids, ((added_id, callback) =>
related_model = _.find(related_models, (test) -> test.id is added_id)
return callback() unless @reverse_relation._hasChanged(related_model) # related has not changed
related_model.save (err, saved_model) =>
cache.set(saved_model.id, saved_model) if not err and cache = @reverse_model_type.cache
callback(err)
), callback
queue.await callback
| 18669 | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 <NAME>
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
_ = require 'underscore'
Backbone = require 'backbone'
BackboneORM = require '../core'
Queue = require '../lib/queue'
Utils = require '../lib/utils'
# @nodoc
module.exports = class Relation
# hasJoinTable: -> return !!@join_table or (@reverse_relation and !!@reverse_relation.join_table)
# isManyToMany: -> return @type is 'hasMany' and @reverse_relation and @reverse_relation.type is 'hasMany'
isEmbedded: -> return !!(@embed or (@reverse_relation and @reverse_relation.embed))
isVirtual: -> return !!(@virtual or (@reverse_relation and @reverse_relation.virtual))
findOrGenerateJoinTable: ->
# already exists
return join_table if join_table = (@join_table or @reverse_relation.join_table)
return @model_type.schema().generateJoinTable(@)
_findOrGenerateReverseRelation: ->
model_type = @model_type
reverse_model_type = @reverse_model_type
reverse_model_type.sync = model_type.createSync(reverse_model_type) unless _.isFunction(reverse_model_type.schema) # convert to relational
reverse_relation = reverse_model_type.relation(@as)
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, false)) unless reverse_relation # singular
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, true)) unless reverse_relation # plural
if not reverse_relation and (@type isnt 'belongsTo')
reverse_relation = reverse_model_type.schema().generateBelongsTo(model_type)
reverse_relation.reverse_relation = @ if reverse_relation and not reverse_relation.reverse_relation
return reverse_relation
_saveRelated: (model, related_models, callback) ->
return callback() if @embed or not @reverse_relation or (@type is 'belongsTo') # no foriegn key, no save required
return callback() if @isVirtual() # skip virtual attributes
@cursor(model, @key).toJSON (err, json) =>
return callback(err) if err
json = (if json then [json] else []) unless _.isArray(json) # a One relation
queue = new Queue(1)
related_ids = _.pluck(related_models, 'id')
changes = _.groupBy(json, (test) => if _.contains(related_ids, test.id) then 'kept' else 'removed')
added_ids = if changes.kept then _.difference(related_ids, (test.id for test in changes.kept)) else related_ids
# destroy removed
if changes.removed
if @join_table
queue.defer (callback) =>
query = {}
query[@join_key] = model.id
query[@reverse_relation.join_key] = {$in: (related_json[@reverse_model_type::idAttribute] for related_json in changes.removed)}
@join_table.destroy query, callback
else
queue.defer (callback) =>
Utils.each changes.removed, ((related_json, callback) =>
related_json[@reverse_relation.foreign_key] = null
Utils.modelJSONSave(related_json, @reverse_model_type, callback)
), callback
# create new
if added_ids.length
if @join_table
queue.defer (callback) =>
Utils.each added_ids, ((related_id, callback) =>
attributes = {}
attributes[@foreign_key] = model.id
attributes[@reverse_relation.foreign_key] = related_id
# console.log "Creating join for: #{@model_type.model_name} join: #{JSONUtils.stringify(attributes)}"
join = new @join_table(attributes)
join.save callback
), callback
else
# add new, if they have changed
queue.defer (callback) =>
Utils.each added_ids, ((added_id, callback) =>
related_model = _.find(related_models, (test) -> test.id is added_id)
return callback() unless @reverse_relation._hasChanged(related_model) # related has not changed
related_model.save (err, saved_model) =>
cache.set(saved_model.id, saved_model) if not err and cache = @reverse_model_type.cache
callback(err)
), callback
queue.await callback
| true | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 PI:NAME:<NAME>END_PI
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
_ = require 'underscore'
Backbone = require 'backbone'
BackboneORM = require '../core'
Queue = require '../lib/queue'
Utils = require '../lib/utils'
# @nodoc
module.exports = class Relation
# hasJoinTable: -> return !!@join_table or (@reverse_relation and !!@reverse_relation.join_table)
# isManyToMany: -> return @type is 'hasMany' and @reverse_relation and @reverse_relation.type is 'hasMany'
isEmbedded: -> return !!(@embed or (@reverse_relation and @reverse_relation.embed))
isVirtual: -> return !!(@virtual or (@reverse_relation and @reverse_relation.virtual))
findOrGenerateJoinTable: ->
# already exists
return join_table if join_table = (@join_table or @reverse_relation.join_table)
return @model_type.schema().generateJoinTable(@)
_findOrGenerateReverseRelation: ->
model_type = @model_type
reverse_model_type = @reverse_model_type
reverse_model_type.sync = model_type.createSync(reverse_model_type) unless _.isFunction(reverse_model_type.schema) # convert to relational
reverse_relation = reverse_model_type.relation(@as)
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, false)) unless reverse_relation # singular
reverse_relation = reverse_model_type.relation(BackboneORM.naming_conventions.attribute(model_type.model_name, true)) unless reverse_relation # plural
if not reverse_relation and (@type isnt 'belongsTo')
reverse_relation = reverse_model_type.schema().generateBelongsTo(model_type)
reverse_relation.reverse_relation = @ if reverse_relation and not reverse_relation.reverse_relation
return reverse_relation
_saveRelated: (model, related_models, callback) ->
return callback() if @embed or not @reverse_relation or (@type is 'belongsTo') # no foriegn key, no save required
return callback() if @isVirtual() # skip virtual attributes
@cursor(model, @key).toJSON (err, json) =>
return callback(err) if err
json = (if json then [json] else []) unless _.isArray(json) # a One relation
queue = new Queue(1)
related_ids = _.pluck(related_models, 'id')
changes = _.groupBy(json, (test) => if _.contains(related_ids, test.id) then 'kept' else 'removed')
added_ids = if changes.kept then _.difference(related_ids, (test.id for test in changes.kept)) else related_ids
# destroy removed
if changes.removed
if @join_table
queue.defer (callback) =>
query = {}
query[@join_key] = model.id
query[@reverse_relation.join_key] = {$in: (related_json[@reverse_model_type::idAttribute] for related_json in changes.removed)}
@join_table.destroy query, callback
else
queue.defer (callback) =>
Utils.each changes.removed, ((related_json, callback) =>
related_json[@reverse_relation.foreign_key] = null
Utils.modelJSONSave(related_json, @reverse_model_type, callback)
), callback
# create new
if added_ids.length
if @join_table
queue.defer (callback) =>
Utils.each added_ids, ((related_id, callback) =>
attributes = {}
attributes[@foreign_key] = model.id
attributes[@reverse_relation.foreign_key] = related_id
# console.log "Creating join for: #{@model_type.model_name} join: #{JSONUtils.stringify(attributes)}"
join = new @join_table(attributes)
join.save callback
), callback
else
# add new, if they have changed
queue.defer (callback) =>
Utils.each added_ids, ((added_id, callback) =>
related_model = _.find(related_models, (test) -> test.id is added_id)
return callback() unless @reverse_relation._hasChanged(related_model) # related has not changed
related_model.save (err, saved_model) =>
cache.set(saved_model.id, saved_model) if not err and cache = @reverse_model_type.cache
callback(err)
), callback
queue.await callback
|
[
{
"context": "configuration fields that\n\tKaffee uses.\n\n @author Fabian M. <mail.fabianm@gmail.com>\n###\nclass Configuration",
"end": 107,
"score": 0.9993400573730469,
"start": 99,
"tag": "NAME",
"value": "Fabian M"
},
{
"context": "n fields that\n\tKaffee uses.\n\n @author Fabian M. <mail.fabianm@gmail.com>\n###\nclass Configuration\n\t@NAME: \"Kaffee\"\n\t@VERSI",
"end": 132,
"score": 0.999934732913971,
"start": 110,
"tag": "EMAIL",
"value": "mail.fabianm@gmail.com"
}
] | kaffee/src/main/kaffee/configuration.coffee | fabianm/kaffee | 1 | ###
The Configuration class contains serveral configuration fields that
Kaffee uses.
@author Fabian M. <mail.fabianm@gmail.com>
###
class Configuration
@NAME: "Kaffee"
@VERSION: "0.3.3"
###
Default filename of the project configuration file.
###
@DEFAULT_PROJECT_CONFIG_FILE: "package.json"
###
The super project configuration object.
###
@SUPER_PROJECT_CONFIG:
version: "0.0.1"
dependencies: []
kaffee:
###
Default Kaffee plugins.
###
plugins: {}
###
The default directory structure.
###
structure:
"src" : "./src/main"
"bin" : "./lib/main"
"src-test" : "./src/test"
"bin-test" : "./lib/test"
###
Default lifecycles.
###
lifecycles:
"compile" : []
"install" : []
"test" : []
"deploy" : []
"package" : []
"clean" : ["kaffee-plugin:clean"]
###
Parent project.
###
parent: ""
###
Child projects of this project.
###
modules: []
module.exports = Configuration;
| 5833 | ###
The Configuration class contains serveral configuration fields that
Kaffee uses.
@author <NAME>. <<EMAIL>>
###
class Configuration
@NAME: "Kaffee"
@VERSION: "0.3.3"
###
Default filename of the project configuration file.
###
@DEFAULT_PROJECT_CONFIG_FILE: "package.json"
###
The super project configuration object.
###
@SUPER_PROJECT_CONFIG:
version: "0.0.1"
dependencies: []
kaffee:
###
Default Kaffee plugins.
###
plugins: {}
###
The default directory structure.
###
structure:
"src" : "./src/main"
"bin" : "./lib/main"
"src-test" : "./src/test"
"bin-test" : "./lib/test"
###
Default lifecycles.
###
lifecycles:
"compile" : []
"install" : []
"test" : []
"deploy" : []
"package" : []
"clean" : ["kaffee-plugin:clean"]
###
Parent project.
###
parent: ""
###
Child projects of this project.
###
modules: []
module.exports = Configuration;
| true | ###
The Configuration class contains serveral configuration fields that
Kaffee uses.
@author PI:NAME:<NAME>END_PI. <PI:EMAIL:<EMAIL>END_PI>
###
class Configuration
@NAME: "Kaffee"
@VERSION: "0.3.3"
###
Default filename of the project configuration file.
###
@DEFAULT_PROJECT_CONFIG_FILE: "package.json"
###
The super project configuration object.
###
@SUPER_PROJECT_CONFIG:
version: "0.0.1"
dependencies: []
kaffee:
###
Default Kaffee plugins.
###
plugins: {}
###
The default directory structure.
###
structure:
"src" : "./src/main"
"bin" : "./lib/main"
"src-test" : "./src/test"
"bin-test" : "./lib/test"
###
Default lifecycles.
###
lifecycles:
"compile" : []
"install" : []
"test" : []
"deploy" : []
"package" : []
"clean" : ["kaffee-plugin:clean"]
###
Parent project.
###
parent: ""
###
Child projects of this project.
###
modules: []
module.exports = Configuration;
|
[
{
"context": "#################\n# Copyright (C) 2014-2016 by Vaughn Iverson\n# meteor-file-job-sample-app is free software",
"end": 124,
"score": 0.9997797012329102,
"start": 110,
"tag": "NAME",
"value": "Vaughn Iverson"
}
] | sample.coffee | dev-alex-alex2006hw/mpointlook | 0 | ############################################################################
# Copyright (C) 2014-2016 by Vaughn Iverson
# meteor-file-job-sample-app is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Both client and server
# Default collection name is 'fs'
myData = new FileCollection('images', {
resumable: true, # Enable the resumable.js compatible chunked file upload interface
http: [
{ method: 'get', path: '/id/:_id', lookup: (params, query) -> return { _id: params._id }}
]}
)
myJobs = new JobCollection 'queue',
idGeneration: 'MONGO'
transform: (d) ->
try
res = new Job myJobs, d
catch e
res = d
return res
############################################################
# Client-only code
############################################################
if Meteor.isClient
dataLookup =
myData: myData
myJobs: myJobs
imageTypes =
'image/jpeg': true
'image/png': true
'image/gif': true
'image/tiff': true
renderLayout = (template, data = {}, domNode = $('body').get(0)) ->
Blaze.renderWithData Template[template], data, domNode
FlowLayout.setRoot 'body'
FlowRouter.route '/',
triggersEnter: [
(context, redirect) ->
redirect '/gallery'
]
action: () ->
throw new Error "this should not get called"
FlowRouter.route '/gallery',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'gallery'
content: 'gallery'
contentData:
collection: 'myData'
FlowRouter.route '/files',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'files'
content: 'fileTable'
contentData:
collection: 'myData'
FlowRouter.route '/jobs',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'jobs'
content: 'jobTable'
contentData:
collection: 'myJobs'
Meteor.startup () ->
################################
# Setup resumable.js in the UI
# Prevent default drop behavior (loading a file) outside of the drop zone
window.addEventListener 'dragover', ((e) -> e.preventDefault()), false
window.addEventListener 'drop', ((e) -> e.preventDefault()), false
# When a file is added
myData.resumable.on 'fileAdded', (file) ->
if imageTypes[file.file.type]
# Keep track of its progress reactivaly in a session variable
Session.set file.uniqueIdentifier, 0
# Create a new file in the file collection to upload to
myData.insert({
_id: file.uniqueIdentifier # This is the ID resumable will use
filename: file.fileName
contentType: file.file.type
},
(err, _id) ->
if err
console.warn "File creation failed!", err
return
# Once the file exists on the server, start uploading
myData.resumable.upload()
)
# Update the upload progress session variable
myData.resumable.on 'fileProgress', (file) ->
Session.set file.uniqueIdentifier, Math.floor(100*file.progress())
# Finish the upload progress in the session variable
myData.resumable.on 'fileSuccess', (file) ->
Session.set file.uniqueIdentifier, undefined
# More robust error handling needed!
myData.resumable.on 'fileError', (file) ->
console.warn "Error uploading", file.uniqueIdentifier
Session.set file.uniqueIdentifier, undefined
# Set up an autorun to keep the X-Auth-Token cookie up-to-date and
# to update the subscription when the userId changes.
Tracker.autorun () ->
userId = Meteor.userId()
Meteor.subscribe 'allData', userId
Meteor.subscribe 'allJobs', userId
$.cookie 'X-Auth-Token', Accounts._storedLoginToken(), { path: '/'}
#####################
# UI template helpers
shorten = (name, w = 16) ->
w += w % 4
w = (w-4)/2
if name.length > 2*w
name[0..w] + '…' + name[-w-1..-1]
else
name
shortFilename = (w = 16) ->
shorten this.filename, w
Template.registerHelper 'data', () ->
dataLookup[this.collection]
Template.top.helpers
loginToken: () ->
Meteor.userId()
Accounts._storedLoginToken()
userId: () ->
Meteor.userId()
Template.nav.helpers
active: (pill) ->
return "active" if pill is this.page
Template.fileTable.helpers
dataEntries: () ->
# Reactively populate the table
this.find({}, {sort:{filename: 1}})
owner: () ->
this.metadata?._auth?.owner
id: () ->
"#{this._id}"
shortFilename: shortFilename
uploadStatus: () ->
percent = Session.get "#{this._id}"
unless percent?
"Processing..."
else
"Uploading..."
formattedLength: () ->
numeral(this.length).format('0.0b')
uploadProgress: () ->
percent = Session.get "#{this._id}"
Template.fileTable.events
# Wire up the event to remove a file by clicking the `X`
'click .del-file': (e, t) ->
# If there's an active upload, cancel it
coll = dataLookup[t.data.collection]
if Session.get "#{this._id}"
console.warn "Cancelling active upload to remove file! #{this._id}"
coll.resumable.removeFile(coll.resumable.getFromUniqueIdentifier "#{this._id}")
# Management of thumbnails happens on the server!
if this.metadata.thumbOf?
coll.remove this.metadata.thumbOf
else
coll.remove this._id
Template.gallery.helpers
dataEntries: () ->
# Reactively populate the table
this.find({'metadata.thumbOf': {$exists: false}}, {sort:{filename: 1}})
id: () ->
"#{this._id}"
thumb: () ->
unless this.metadata?.thumbComplete
null
else
"#{this.metadata.thumb}"
isImage: () ->
imageTypes[this.contentType]?
shortFilename: shortFilename
altMessage: () ->
if this.length is 0
"Uploading..."
else
"Processing thumbnail..."
Template.gallery.rendered = () ->
# This assigns a file drop zone to the "file table"
dataLookup[this.data.collection].resumable.assignDrop $(".#{dataLookup[this.data.collection].root}DropZone")
Template.fileControls.events
'click .remove-files': (e, t) ->
this.find({ 'metadata.thumbOf': {$exists: false} }).forEach ((d) -> this.remove(d._id)), this
Template.jobTable.helpers
jobEntries: () ->
# Reactively populate the table
this.find({})
Template.jobEntry.rendered = () ->
this.$('.button-column').tooltip
selector: 'button[data-toggle=tooltip]'
delay:
show: 500
hide: 100
Template.jobEntry.events
'click .cancel-job': (e, t) ->
console.log "Cancelling job: #{this._id}"
job = Template.currentData()
job.cancel() if job
'click .remove-job': (e, t) ->
console.log "Removing job: #{this._id}"
job = Template.currentData()
job.remove() if job
'click .restart-job': (e, t) ->
console.log "Restarting job: #{this._id}"
job = Template.currentData()
job.restart() if job
'click .rerun-job': (e, t) ->
console.log "Rerunning job: #{this._id}"
job = Template.currentData()
job.rerun({ wait: 15000 }) if job
'click .pause-job': (e, t) ->
console.log "Pausing job: #{this._id}"
job = Template.currentData()
job.pause() if job
'click .resume-job': (e, t) ->
console.log "Resuming job: #{this._id}"
job = Template.currentData()
job.resume() if job
isInfinity = (val) ->
if val > Job.forever - 7199254740935
"∞"
else
val
Template.jobEntry.helpers
numDepends: () ->
this.depends?.length
numResolved: () ->
this.resolved?.length
jobId: () ->
this._id.valueOf()
statusBG: () ->
{
waiting: 'primary'
ready: 'info'
paused: 'default'
running: 'default'
cancelled: 'warning'
failed: 'danger'
completed: 'success'
}[this.status]
numRepeats: () -> isInfinity this.repeats
numRetries: () -> isInfinity this.retries
runAt: () ->
Session.get 'date'
moment(this.after).fromNow()
lastUpdated: () ->
Session.get 'date'
moment(this.updated).fromNow()
futurePast: () ->
Session.get 'date'
if this.after > new Date()
"text-danger"
else
"text-success"
running: () ->
if Template.instance().view.isRendered
# This code destroys Bootstrap tooltips on existing buttons that may be
# about to disappear. This is done here because by the time the template
# autorun function runs, the button may already be out of the DOM, but
# a "ghost" tooltip for that button can remain visible.
Template.instance().$("button[data-toggle=tooltip]").tooltip('destroy')
this.status is 'running'
cancellable: () ->
this.status in Job.jobStatusCancellable
removable: () ->
this.status in Job.jobStatusRemovable
restartable: () ->
this.status in Job.jobStatusRestartable
rerunable: () ->
this.status is 'completed'
pausable: () ->
this.status in Job.jobStatusPausable
resumable: () ->
this.status is 'paused'
Template.jobControls.events
'click .clear-completed': (e, t) ->
console.log "clear completed"
ids = t.data.find({ status: 'completed' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "clearing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
'click .pause-queue': (e, t) ->
if $(e.target).hasClass 'active'
console.log "resume queue"
ids = t.data.find({ status: 'paused' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "resuming: #{ids.length} jobs"
t.data.resumeJobs(ids) if ids.length > 0
else
console.log "pause queue"
ids = t.data.find({ status: { $in: Job.jobStatusPausable }}, { fields: { _id: 1 }}).map (d) -> d._id console.log "pausing: #{ids.length} jobs"
t.data.pauseJobs(ids) if ids.length > 0
'click .stop-queue': (e, t) ->
unless $(e.target).hasClass 'active'
console.log "stop queue"
t.data.stopJobs()
else
console.log "restart queue"
t.data.stopJobs(0)
'click .cancel-queue': (e, t) ->
console.log "cancel all"
ids = t.data.find({ status: { $in: Job.jobStatusCancellable } }).map (d) -> d._id
console.log "cancelling: #{ids.length} jobs"
t.data.cancelJobs(ids) if ids.length > 0
'click .restart-queue': (e, t) ->
console.log "restart all"
ids = t.data.find({ status: { $in: Job.jobStatusRestartable } }).map (d) -> d._id
console.log "restarting: #{ids.length} jobs"
t.data.restartJobs(ids, (e, r) -> console.log("Restart returned", r)) if ids.length > 0
'click .remove-queue': (e, t) ->
console.log "remove all"
ids = t.data.find({ status: { $in: Job.jobStatusRemovable } }).map (d) -> d._id
console.log "removing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
############################################################
# Server-only code
############################################################
if Meteor.isServer
gm = Meteor.npmRequire 'gm'
exec = Meteor.npmRequire('child_process').exec
myJobs.setLogStream process.stdout
myJobs.promote 2500
Meteor.startup () ->
myJobs.startJobServer()
Meteor.publish 'allJobs', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myJobs.find({ 'data.owner': this.userId })
else
return []
# Only publish files owned by this userId, and ignore temp file chunks used by resumable
Meteor.publish 'allData', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myData.find({ 'metadata._Resumable': { $exists: false }, 'metadata._auth.owner': this.userId })
else
return []
# Don't allow users to modify the user docs
Meteor.users.deny({update: () -> true })
# Only allow job owners to manage or rerun jobs
myJobs.allow
manager: (userId, method, params) ->
ids = params[0]
unless typeof ids is 'object' and ids instanceof Array
ids = [ ids ]
numIds = ids.length
numMatches = myJobs.find({ _id: { $in: ids }, 'data.owner': userId }).count()
return numMatches is numIds
jobRerun: (userId, method, params) ->
id = params[0]
numMatches = myJobs.find({ _id: id, 'data.owner': userId }).count()
return numMatches is 1
stopJobs: (userId, method, params) ->
return userId?
# Allow rules for security. Without these, no writes would be allowed by default
myData.allow
insert: (userId, file) ->
# Assign the proper owner when a file is created
file.metadata = file.metadata ? {}
file.metadata._auth =
owner: userId
true
remove: (userId, file) ->
# Only owners can delete
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
read: (userId, file) ->
# Only owners can GET file data
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
write: (userId, file, fields) -> # This is for the HTTP REST interfaces PUT/POST
# All client file metadata updates are denied, implement Methods for that...
# Only owners can upload a file
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
# Create a job to make a thumbnail for each newly uploaded image
addedFileJob = (file) ->
# Don't make new jobs for files which already have them in process...
# findAndModify is atomic, so in a multi-server environment,
# only one server can succeed and go on to create a job.
# Too bad Meteor has no built-in atomic DB update...
myData.rawCollection().findAndModify(
{ _id: new MongoInternals.NpmModule.ObjectID(file._id.toHexString()), 'metadata._Job': {$exists: false}},
[],
{ $set: { 'metadata._Job': null }},
{ w: 1 },
Meteor.bindEnvironment (err, doc) ->
if err
return console.error "Error locking file document in job creation: ", err
if doc # This is null if update above didn't succeed
outputFileId = myData.insert
filename: "tn_#{file.filename}.png"
contentType: 'image/png'
metadata: file.metadata
job = new Job myJobs, 'makeThumb',
owner: file.metadata._auth.owner
# These Id values are used by the worker to read and write the correct files for this job.
inputFileId: file._id
outputFileId: outputFileId
if jobId = job.delay(5000).retry({ wait: 20000, retries: 5 }).save()
myData.update({ _id: file._id }, { $set: { 'metadata._Job': jobId, 'metadata.thumb': outputFileId }})
myData.update({ _id: outputFileId }, { $set: { 'metadata._Job': jobId, 'metadata.thumbOf': file._id }})
else
console.error "Error saving new job for file #{file._id}"
)
# If a removed file has an associated cancellable job, cancel it.
removedFileJob = (file) ->
if file.metadata?._Job
if job = myJobs.findOne({_id: file.metadata._Job, status: { $in: myJobs.jobStatusCancellable }},{ fields: { log: 0 }})
console.log "Cancelling the job for the removed file!", job._id
job.cancel (err, res) ->
myData.remove
_id: job.data.outputFileId
if file.metadata?.thumb?
thumb = myData.remove { _id: file.metadata.thumb }
# When a file's data changes, call the appropriate functions
# for the removal of the old file and addition of the new.
changedFileJob = (oldFile, newFile) ->
if oldFile.md5 isnt newFile.md5
if oldFile.metadata._Job?
# Only call if this file has a job outstanding
removedFileJob oldFile
addedFileJob newFile
# Watch for changes to uploaded image files
fileObserve = myData.find(
md5:
$ne: 'd41d8cd98f00b204e9800998ecf8427e' # md5 sum for zero length file
'metadata._Resumable':
$exists: false
'metadata.thumbOf':
$exists: false
).observe(
added: addedFileJob
changed: changedFileJob
removed: removedFileJob
)
worker = (job, cb) ->
exec 'gm version', Meteor.bindEnvironment (err) ->
if err
console.warn 'Graphicsmagick is not installed!\n', err
job.fail "Error running graphicsmagick: #{err}", { fatal: true }
return cb()
job.log "Beginning work on thumbnail image: #{job.data.inputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
inStream = myData.findOneStream { _id: job.data.inputFileId }
unless inStream
job.fail 'Input file not found', { fatal: true }
return cb()
job.progress 20, 100
gm(inStream)
.resize(150,150)
.stream 'png', Meteor.bindEnvironment (err, stdout, stderr) ->
stderr.pipe process.stderr
if err
job.fail "Error running graphicsmagick: #{err}"
return cb()
else
outStream = myData.upsertStream { _id: job.data.outputFileId }, {}, (err, file) ->
if err
job.fail "#{err}"
else if file.length is 0
job.fail 'Empty output from graphicsmagick!'
else
job.progress 80, 100
myData.update { _id: job.data.inputFileId }, { $set: 'metadata.thumbComplete': true }
job.log "Finished work on thumbnail image: #{job.data.outputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
job.done(file)
return cb()
unless outStream
job.fail 'Output file not found'
return cb()
stdout.pipe(outStream)
workers = myJobs.processJobs 'makeThumb', { concurrency: 2, prefetch: 2, pollInterval: 1000000000 }, worker
myJobs.find({ type: 'makeThumb', status: 'ready' })
.observe
added: (doc) ->
workers.trigger()
| 153957 | ############################################################################
# Copyright (C) 2014-2016 by <NAME>
# meteor-file-job-sample-app is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Both client and server
# Default collection name is 'fs'
myData = new FileCollection('images', {
resumable: true, # Enable the resumable.js compatible chunked file upload interface
http: [
{ method: 'get', path: '/id/:_id', lookup: (params, query) -> return { _id: params._id }}
]}
)
myJobs = new JobCollection 'queue',
idGeneration: 'MONGO'
transform: (d) ->
try
res = new Job myJobs, d
catch e
res = d
return res
############################################################
# Client-only code
############################################################
if Meteor.isClient
dataLookup =
myData: myData
myJobs: myJobs
imageTypes =
'image/jpeg': true
'image/png': true
'image/gif': true
'image/tiff': true
renderLayout = (template, data = {}, domNode = $('body').get(0)) ->
Blaze.renderWithData Template[template], data, domNode
FlowLayout.setRoot 'body'
FlowRouter.route '/',
triggersEnter: [
(context, redirect) ->
redirect '/gallery'
]
action: () ->
throw new Error "this should not get called"
FlowRouter.route '/gallery',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'gallery'
content: 'gallery'
contentData:
collection: 'myData'
FlowRouter.route '/files',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'files'
content: 'fileTable'
contentData:
collection: 'myData'
FlowRouter.route '/jobs',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'jobs'
content: 'jobTable'
contentData:
collection: 'myJobs'
Meteor.startup () ->
################################
# Setup resumable.js in the UI
# Prevent default drop behavior (loading a file) outside of the drop zone
window.addEventListener 'dragover', ((e) -> e.preventDefault()), false
window.addEventListener 'drop', ((e) -> e.preventDefault()), false
# When a file is added
myData.resumable.on 'fileAdded', (file) ->
if imageTypes[file.file.type]
# Keep track of its progress reactivaly in a session variable
Session.set file.uniqueIdentifier, 0
# Create a new file in the file collection to upload to
myData.insert({
_id: file.uniqueIdentifier # This is the ID resumable will use
filename: file.fileName
contentType: file.file.type
},
(err, _id) ->
if err
console.warn "File creation failed!", err
return
# Once the file exists on the server, start uploading
myData.resumable.upload()
)
# Update the upload progress session variable
myData.resumable.on 'fileProgress', (file) ->
Session.set file.uniqueIdentifier, Math.floor(100*file.progress())
# Finish the upload progress in the session variable
myData.resumable.on 'fileSuccess', (file) ->
Session.set file.uniqueIdentifier, undefined
# More robust error handling needed!
myData.resumable.on 'fileError', (file) ->
console.warn "Error uploading", file.uniqueIdentifier
Session.set file.uniqueIdentifier, undefined
# Set up an autorun to keep the X-Auth-Token cookie up-to-date and
# to update the subscription when the userId changes.
Tracker.autorun () ->
userId = Meteor.userId()
Meteor.subscribe 'allData', userId
Meteor.subscribe 'allJobs', userId
$.cookie 'X-Auth-Token', Accounts._storedLoginToken(), { path: '/'}
#####################
# UI template helpers
shorten = (name, w = 16) ->
w += w % 4
w = (w-4)/2
if name.length > 2*w
name[0..w] + '…' + name[-w-1..-1]
else
name
shortFilename = (w = 16) ->
shorten this.filename, w
Template.registerHelper 'data', () ->
dataLookup[this.collection]
Template.top.helpers
loginToken: () ->
Meteor.userId()
Accounts._storedLoginToken()
userId: () ->
Meteor.userId()
Template.nav.helpers
active: (pill) ->
return "active" if pill is this.page
Template.fileTable.helpers
dataEntries: () ->
# Reactively populate the table
this.find({}, {sort:{filename: 1}})
owner: () ->
this.metadata?._auth?.owner
id: () ->
"#{this._id}"
shortFilename: shortFilename
uploadStatus: () ->
percent = Session.get "#{this._id}"
unless percent?
"Processing..."
else
"Uploading..."
formattedLength: () ->
numeral(this.length).format('0.0b')
uploadProgress: () ->
percent = Session.get "#{this._id}"
Template.fileTable.events
# Wire up the event to remove a file by clicking the `X`
'click .del-file': (e, t) ->
# If there's an active upload, cancel it
coll = dataLookup[t.data.collection]
if Session.get "#{this._id}"
console.warn "Cancelling active upload to remove file! #{this._id}"
coll.resumable.removeFile(coll.resumable.getFromUniqueIdentifier "#{this._id}")
# Management of thumbnails happens on the server!
if this.metadata.thumbOf?
coll.remove this.metadata.thumbOf
else
coll.remove this._id
Template.gallery.helpers
dataEntries: () ->
# Reactively populate the table
this.find({'metadata.thumbOf': {$exists: false}}, {sort:{filename: 1}})
id: () ->
"#{this._id}"
thumb: () ->
unless this.metadata?.thumbComplete
null
else
"#{this.metadata.thumb}"
isImage: () ->
imageTypes[this.contentType]?
shortFilename: shortFilename
altMessage: () ->
if this.length is 0
"Uploading..."
else
"Processing thumbnail..."
Template.gallery.rendered = () ->
# This assigns a file drop zone to the "file table"
dataLookup[this.data.collection].resumable.assignDrop $(".#{dataLookup[this.data.collection].root}DropZone")
Template.fileControls.events
'click .remove-files': (e, t) ->
this.find({ 'metadata.thumbOf': {$exists: false} }).forEach ((d) -> this.remove(d._id)), this
Template.jobTable.helpers
jobEntries: () ->
# Reactively populate the table
this.find({})
Template.jobEntry.rendered = () ->
this.$('.button-column').tooltip
selector: 'button[data-toggle=tooltip]'
delay:
show: 500
hide: 100
Template.jobEntry.events
'click .cancel-job': (e, t) ->
console.log "Cancelling job: #{this._id}"
job = Template.currentData()
job.cancel() if job
'click .remove-job': (e, t) ->
console.log "Removing job: #{this._id}"
job = Template.currentData()
job.remove() if job
'click .restart-job': (e, t) ->
console.log "Restarting job: #{this._id}"
job = Template.currentData()
job.restart() if job
'click .rerun-job': (e, t) ->
console.log "Rerunning job: #{this._id}"
job = Template.currentData()
job.rerun({ wait: 15000 }) if job
'click .pause-job': (e, t) ->
console.log "Pausing job: #{this._id}"
job = Template.currentData()
job.pause() if job
'click .resume-job': (e, t) ->
console.log "Resuming job: #{this._id}"
job = Template.currentData()
job.resume() if job
isInfinity = (val) ->
if val > Job.forever - 7199254740935
"∞"
else
val
Template.jobEntry.helpers
numDepends: () ->
this.depends?.length
numResolved: () ->
this.resolved?.length
jobId: () ->
this._id.valueOf()
statusBG: () ->
{
waiting: 'primary'
ready: 'info'
paused: 'default'
running: 'default'
cancelled: 'warning'
failed: 'danger'
completed: 'success'
}[this.status]
numRepeats: () -> isInfinity this.repeats
numRetries: () -> isInfinity this.retries
runAt: () ->
Session.get 'date'
moment(this.after).fromNow()
lastUpdated: () ->
Session.get 'date'
moment(this.updated).fromNow()
futurePast: () ->
Session.get 'date'
if this.after > new Date()
"text-danger"
else
"text-success"
running: () ->
if Template.instance().view.isRendered
# This code destroys Bootstrap tooltips on existing buttons that may be
# about to disappear. This is done here because by the time the template
# autorun function runs, the button may already be out of the DOM, but
# a "ghost" tooltip for that button can remain visible.
Template.instance().$("button[data-toggle=tooltip]").tooltip('destroy')
this.status is 'running'
cancellable: () ->
this.status in Job.jobStatusCancellable
removable: () ->
this.status in Job.jobStatusRemovable
restartable: () ->
this.status in Job.jobStatusRestartable
rerunable: () ->
this.status is 'completed'
pausable: () ->
this.status in Job.jobStatusPausable
resumable: () ->
this.status is 'paused'
Template.jobControls.events
'click .clear-completed': (e, t) ->
console.log "clear completed"
ids = t.data.find({ status: 'completed' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "clearing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
'click .pause-queue': (e, t) ->
if $(e.target).hasClass 'active'
console.log "resume queue"
ids = t.data.find({ status: 'paused' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "resuming: #{ids.length} jobs"
t.data.resumeJobs(ids) if ids.length > 0
else
console.log "pause queue"
ids = t.data.find({ status: { $in: Job.jobStatusPausable }}, { fields: { _id: 1 }}).map (d) -> d._id console.log "pausing: #{ids.length} jobs"
t.data.pauseJobs(ids) if ids.length > 0
'click .stop-queue': (e, t) ->
unless $(e.target).hasClass 'active'
console.log "stop queue"
t.data.stopJobs()
else
console.log "restart queue"
t.data.stopJobs(0)
'click .cancel-queue': (e, t) ->
console.log "cancel all"
ids = t.data.find({ status: { $in: Job.jobStatusCancellable } }).map (d) -> d._id
console.log "cancelling: #{ids.length} jobs"
t.data.cancelJobs(ids) if ids.length > 0
'click .restart-queue': (e, t) ->
console.log "restart all"
ids = t.data.find({ status: { $in: Job.jobStatusRestartable } }).map (d) -> d._id
console.log "restarting: #{ids.length} jobs"
t.data.restartJobs(ids, (e, r) -> console.log("Restart returned", r)) if ids.length > 0
'click .remove-queue': (e, t) ->
console.log "remove all"
ids = t.data.find({ status: { $in: Job.jobStatusRemovable } }).map (d) -> d._id
console.log "removing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
############################################################
# Server-only code
############################################################
if Meteor.isServer
gm = Meteor.npmRequire 'gm'
exec = Meteor.npmRequire('child_process').exec
myJobs.setLogStream process.stdout
myJobs.promote 2500
Meteor.startup () ->
myJobs.startJobServer()
Meteor.publish 'allJobs', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myJobs.find({ 'data.owner': this.userId })
else
return []
# Only publish files owned by this userId, and ignore temp file chunks used by resumable
Meteor.publish 'allData', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myData.find({ 'metadata._Resumable': { $exists: false }, 'metadata._auth.owner': this.userId })
else
return []
# Don't allow users to modify the user docs
Meteor.users.deny({update: () -> true })
# Only allow job owners to manage or rerun jobs
myJobs.allow
manager: (userId, method, params) ->
ids = params[0]
unless typeof ids is 'object' and ids instanceof Array
ids = [ ids ]
numIds = ids.length
numMatches = myJobs.find({ _id: { $in: ids }, 'data.owner': userId }).count()
return numMatches is numIds
jobRerun: (userId, method, params) ->
id = params[0]
numMatches = myJobs.find({ _id: id, 'data.owner': userId }).count()
return numMatches is 1
stopJobs: (userId, method, params) ->
return userId?
# Allow rules for security. Without these, no writes would be allowed by default
myData.allow
insert: (userId, file) ->
# Assign the proper owner when a file is created
file.metadata = file.metadata ? {}
file.metadata._auth =
owner: userId
true
remove: (userId, file) ->
# Only owners can delete
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
read: (userId, file) ->
# Only owners can GET file data
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
write: (userId, file, fields) -> # This is for the HTTP REST interfaces PUT/POST
# All client file metadata updates are denied, implement Methods for that...
# Only owners can upload a file
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
# Create a job to make a thumbnail for each newly uploaded image
addedFileJob = (file) ->
# Don't make new jobs for files which already have them in process...
# findAndModify is atomic, so in a multi-server environment,
# only one server can succeed and go on to create a job.
# Too bad Meteor has no built-in atomic DB update...
myData.rawCollection().findAndModify(
{ _id: new MongoInternals.NpmModule.ObjectID(file._id.toHexString()), 'metadata._Job': {$exists: false}},
[],
{ $set: { 'metadata._Job': null }},
{ w: 1 },
Meteor.bindEnvironment (err, doc) ->
if err
return console.error "Error locking file document in job creation: ", err
if doc # This is null if update above didn't succeed
outputFileId = myData.insert
filename: "tn_#{file.filename}.png"
contentType: 'image/png'
metadata: file.metadata
job = new Job myJobs, 'makeThumb',
owner: file.metadata._auth.owner
# These Id values are used by the worker to read and write the correct files for this job.
inputFileId: file._id
outputFileId: outputFileId
if jobId = job.delay(5000).retry({ wait: 20000, retries: 5 }).save()
myData.update({ _id: file._id }, { $set: { 'metadata._Job': jobId, 'metadata.thumb': outputFileId }})
myData.update({ _id: outputFileId }, { $set: { 'metadata._Job': jobId, 'metadata.thumbOf': file._id }})
else
console.error "Error saving new job for file #{file._id}"
)
# If a removed file has an associated cancellable job, cancel it.
removedFileJob = (file) ->
if file.metadata?._Job
if job = myJobs.findOne({_id: file.metadata._Job, status: { $in: myJobs.jobStatusCancellable }},{ fields: { log: 0 }})
console.log "Cancelling the job for the removed file!", job._id
job.cancel (err, res) ->
myData.remove
_id: job.data.outputFileId
if file.metadata?.thumb?
thumb = myData.remove { _id: file.metadata.thumb }
# When a file's data changes, call the appropriate functions
# for the removal of the old file and addition of the new.
changedFileJob = (oldFile, newFile) ->
if oldFile.md5 isnt newFile.md5
if oldFile.metadata._Job?
# Only call if this file has a job outstanding
removedFileJob oldFile
addedFileJob newFile
# Watch for changes to uploaded image files
fileObserve = myData.find(
md5:
$ne: 'd41d8cd98f00b204e9800998ecf8427e' # md5 sum for zero length file
'metadata._Resumable':
$exists: false
'metadata.thumbOf':
$exists: false
).observe(
added: addedFileJob
changed: changedFileJob
removed: removedFileJob
)
worker = (job, cb) ->
exec 'gm version', Meteor.bindEnvironment (err) ->
if err
console.warn 'Graphicsmagick is not installed!\n', err
job.fail "Error running graphicsmagick: #{err}", { fatal: true }
return cb()
job.log "Beginning work on thumbnail image: #{job.data.inputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
inStream = myData.findOneStream { _id: job.data.inputFileId }
unless inStream
job.fail 'Input file not found', { fatal: true }
return cb()
job.progress 20, 100
gm(inStream)
.resize(150,150)
.stream 'png', Meteor.bindEnvironment (err, stdout, stderr) ->
stderr.pipe process.stderr
if err
job.fail "Error running graphicsmagick: #{err}"
return cb()
else
outStream = myData.upsertStream { _id: job.data.outputFileId }, {}, (err, file) ->
if err
job.fail "#{err}"
else if file.length is 0
job.fail 'Empty output from graphicsmagick!'
else
job.progress 80, 100
myData.update { _id: job.data.inputFileId }, { $set: 'metadata.thumbComplete': true }
job.log "Finished work on thumbnail image: #{job.data.outputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
job.done(file)
return cb()
unless outStream
job.fail 'Output file not found'
return cb()
stdout.pipe(outStream)
workers = myJobs.processJobs 'makeThumb', { concurrency: 2, prefetch: 2, pollInterval: 1000000000 }, worker
myJobs.find({ type: 'makeThumb', status: 'ready' })
.observe
added: (doc) ->
workers.trigger()
| true | ############################################################################
# Copyright (C) 2014-2016 by PI:NAME:<NAME>END_PI
# meteor-file-job-sample-app is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Both client and server
# Default collection name is 'fs'
myData = new FileCollection('images', {
resumable: true, # Enable the resumable.js compatible chunked file upload interface
http: [
{ method: 'get', path: '/id/:_id', lookup: (params, query) -> return { _id: params._id }}
]}
)
myJobs = new JobCollection 'queue',
idGeneration: 'MONGO'
transform: (d) ->
try
res = new Job myJobs, d
catch e
res = d
return res
############################################################
# Client-only code
############################################################
if Meteor.isClient
dataLookup =
myData: myData
myJobs: myJobs
imageTypes =
'image/jpeg': true
'image/png': true
'image/gif': true
'image/tiff': true
renderLayout = (template, data = {}, domNode = $('body').get(0)) ->
Blaze.renderWithData Template[template], data, domNode
FlowLayout.setRoot 'body'
FlowRouter.route '/',
triggersEnter: [
(context, redirect) ->
redirect '/gallery'
]
action: () ->
throw new Error "this should not get called"
FlowRouter.route '/gallery',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'gallery'
content: 'gallery'
contentData:
collection: 'myData'
FlowRouter.route '/files',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'files'
content: 'fileTable'
contentData:
collection: 'myData'
FlowRouter.route '/jobs',
action: () ->
FlowLayout.render 'master',
nav: 'nav'
navData:
page: 'jobs'
content: 'jobTable'
contentData:
collection: 'myJobs'
Meteor.startup () ->
################################
# Setup resumable.js in the UI
# Prevent default drop behavior (loading a file) outside of the drop zone
window.addEventListener 'dragover', ((e) -> e.preventDefault()), false
window.addEventListener 'drop', ((e) -> e.preventDefault()), false
# When a file is added
myData.resumable.on 'fileAdded', (file) ->
if imageTypes[file.file.type]
# Keep track of its progress reactivaly in a session variable
Session.set file.uniqueIdentifier, 0
# Create a new file in the file collection to upload to
myData.insert({
_id: file.uniqueIdentifier # This is the ID resumable will use
filename: file.fileName
contentType: file.file.type
},
(err, _id) ->
if err
console.warn "File creation failed!", err
return
# Once the file exists on the server, start uploading
myData.resumable.upload()
)
# Update the upload progress session variable
myData.resumable.on 'fileProgress', (file) ->
Session.set file.uniqueIdentifier, Math.floor(100*file.progress())
# Finish the upload progress in the session variable
myData.resumable.on 'fileSuccess', (file) ->
Session.set file.uniqueIdentifier, undefined
# More robust error handling needed!
myData.resumable.on 'fileError', (file) ->
console.warn "Error uploading", file.uniqueIdentifier
Session.set file.uniqueIdentifier, undefined
# Set up an autorun to keep the X-Auth-Token cookie up-to-date and
# to update the subscription when the userId changes.
Tracker.autorun () ->
userId = Meteor.userId()
Meteor.subscribe 'allData', userId
Meteor.subscribe 'allJobs', userId
$.cookie 'X-Auth-Token', Accounts._storedLoginToken(), { path: '/'}
#####################
# UI template helpers
shorten = (name, w = 16) ->
w += w % 4
w = (w-4)/2
if name.length > 2*w
name[0..w] + '…' + name[-w-1..-1]
else
name
shortFilename = (w = 16) ->
shorten this.filename, w
Template.registerHelper 'data', () ->
dataLookup[this.collection]
Template.top.helpers
loginToken: () ->
Meteor.userId()
Accounts._storedLoginToken()
userId: () ->
Meteor.userId()
Template.nav.helpers
active: (pill) ->
return "active" if pill is this.page
Template.fileTable.helpers
dataEntries: () ->
# Reactively populate the table
this.find({}, {sort:{filename: 1}})
owner: () ->
this.metadata?._auth?.owner
id: () ->
"#{this._id}"
shortFilename: shortFilename
uploadStatus: () ->
percent = Session.get "#{this._id}"
unless percent?
"Processing..."
else
"Uploading..."
formattedLength: () ->
numeral(this.length).format('0.0b')
uploadProgress: () ->
percent = Session.get "#{this._id}"
Template.fileTable.events
# Wire up the event to remove a file by clicking the `X`
'click .del-file': (e, t) ->
# If there's an active upload, cancel it
coll = dataLookup[t.data.collection]
if Session.get "#{this._id}"
console.warn "Cancelling active upload to remove file! #{this._id}"
coll.resumable.removeFile(coll.resumable.getFromUniqueIdentifier "#{this._id}")
# Management of thumbnails happens on the server!
if this.metadata.thumbOf?
coll.remove this.metadata.thumbOf
else
coll.remove this._id
Template.gallery.helpers
dataEntries: () ->
# Reactively populate the table
this.find({'metadata.thumbOf': {$exists: false}}, {sort:{filename: 1}})
id: () ->
"#{this._id}"
thumb: () ->
unless this.metadata?.thumbComplete
null
else
"#{this.metadata.thumb}"
isImage: () ->
imageTypes[this.contentType]?
shortFilename: shortFilename
altMessage: () ->
if this.length is 0
"Uploading..."
else
"Processing thumbnail..."
Template.gallery.rendered = () ->
# This assigns a file drop zone to the "file table"
dataLookup[this.data.collection].resumable.assignDrop $(".#{dataLookup[this.data.collection].root}DropZone")
Template.fileControls.events
'click .remove-files': (e, t) ->
this.find({ 'metadata.thumbOf': {$exists: false} }).forEach ((d) -> this.remove(d._id)), this
Template.jobTable.helpers
jobEntries: () ->
# Reactively populate the table
this.find({})
Template.jobEntry.rendered = () ->
this.$('.button-column').tooltip
selector: 'button[data-toggle=tooltip]'
delay:
show: 500
hide: 100
Template.jobEntry.events
'click .cancel-job': (e, t) ->
console.log "Cancelling job: #{this._id}"
job = Template.currentData()
job.cancel() if job
'click .remove-job': (e, t) ->
console.log "Removing job: #{this._id}"
job = Template.currentData()
job.remove() if job
'click .restart-job': (e, t) ->
console.log "Restarting job: #{this._id}"
job = Template.currentData()
job.restart() if job
'click .rerun-job': (e, t) ->
console.log "Rerunning job: #{this._id}"
job = Template.currentData()
job.rerun({ wait: 15000 }) if job
'click .pause-job': (e, t) ->
console.log "Pausing job: #{this._id}"
job = Template.currentData()
job.pause() if job
'click .resume-job': (e, t) ->
console.log "Resuming job: #{this._id}"
job = Template.currentData()
job.resume() if job
isInfinity = (val) ->
if val > Job.forever - 7199254740935
"∞"
else
val
Template.jobEntry.helpers
numDepends: () ->
this.depends?.length
numResolved: () ->
this.resolved?.length
jobId: () ->
this._id.valueOf()
statusBG: () ->
{
waiting: 'primary'
ready: 'info'
paused: 'default'
running: 'default'
cancelled: 'warning'
failed: 'danger'
completed: 'success'
}[this.status]
numRepeats: () -> isInfinity this.repeats
numRetries: () -> isInfinity this.retries
runAt: () ->
Session.get 'date'
moment(this.after).fromNow()
lastUpdated: () ->
Session.get 'date'
moment(this.updated).fromNow()
futurePast: () ->
Session.get 'date'
if this.after > new Date()
"text-danger"
else
"text-success"
running: () ->
if Template.instance().view.isRendered
# This code destroys Bootstrap tooltips on existing buttons that may be
# about to disappear. This is done here because by the time the template
# autorun function runs, the button may already be out of the DOM, but
# a "ghost" tooltip for that button can remain visible.
Template.instance().$("button[data-toggle=tooltip]").tooltip('destroy')
this.status is 'running'
cancellable: () ->
this.status in Job.jobStatusCancellable
removable: () ->
this.status in Job.jobStatusRemovable
restartable: () ->
this.status in Job.jobStatusRestartable
rerunable: () ->
this.status is 'completed'
pausable: () ->
this.status in Job.jobStatusPausable
resumable: () ->
this.status is 'paused'
Template.jobControls.events
'click .clear-completed': (e, t) ->
console.log "clear completed"
ids = t.data.find({ status: 'completed' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "clearing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
'click .pause-queue': (e, t) ->
if $(e.target).hasClass 'active'
console.log "resume queue"
ids = t.data.find({ status: 'paused' },{ fields: { _id: 1 }}).map (d) -> d._id
console.log "resuming: #{ids.length} jobs"
t.data.resumeJobs(ids) if ids.length > 0
else
console.log "pause queue"
ids = t.data.find({ status: { $in: Job.jobStatusPausable }}, { fields: { _id: 1 }}).map (d) -> d._id console.log "pausing: #{ids.length} jobs"
t.data.pauseJobs(ids) if ids.length > 0
'click .stop-queue': (e, t) ->
unless $(e.target).hasClass 'active'
console.log "stop queue"
t.data.stopJobs()
else
console.log "restart queue"
t.data.stopJobs(0)
'click .cancel-queue': (e, t) ->
console.log "cancel all"
ids = t.data.find({ status: { $in: Job.jobStatusCancellable } }).map (d) -> d._id
console.log "cancelling: #{ids.length} jobs"
t.data.cancelJobs(ids) if ids.length > 0
'click .restart-queue': (e, t) ->
console.log "restart all"
ids = t.data.find({ status: { $in: Job.jobStatusRestartable } }).map (d) -> d._id
console.log "restarting: #{ids.length} jobs"
t.data.restartJobs(ids, (e, r) -> console.log("Restart returned", r)) if ids.length > 0
'click .remove-queue': (e, t) ->
console.log "remove all"
ids = t.data.find({ status: { $in: Job.jobStatusRemovable } }).map (d) -> d._id
console.log "removing: #{ids.length} jobs"
t.data.removeJobs(ids) if ids.length > 0
############################################################
# Server-only code
############################################################
if Meteor.isServer
gm = Meteor.npmRequire 'gm'
exec = Meteor.npmRequire('child_process').exec
myJobs.setLogStream process.stdout
myJobs.promote 2500
Meteor.startup () ->
myJobs.startJobServer()
Meteor.publish 'allJobs', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myJobs.find({ 'data.owner': this.userId })
else
return []
# Only publish files owned by this userId, and ignore temp file chunks used by resumable
Meteor.publish 'allData', (clientUserId) ->
# This prevents a race condition on the client between Meteor.userId() and subscriptions to this publish
# See: https://stackoverflow.com/questions/24445404/how-to-prevent-a-client-reactive-race-between-meteor-userid-and-a-subscription/24460877#24460877
if this.userId is clientUserId
return myData.find({ 'metadata._Resumable': { $exists: false }, 'metadata._auth.owner': this.userId })
else
return []
# Don't allow users to modify the user docs
Meteor.users.deny({update: () -> true })
# Only allow job owners to manage or rerun jobs
myJobs.allow
manager: (userId, method, params) ->
ids = params[0]
unless typeof ids is 'object' and ids instanceof Array
ids = [ ids ]
numIds = ids.length
numMatches = myJobs.find({ _id: { $in: ids }, 'data.owner': userId }).count()
return numMatches is numIds
jobRerun: (userId, method, params) ->
id = params[0]
numMatches = myJobs.find({ _id: id, 'data.owner': userId }).count()
return numMatches is 1
stopJobs: (userId, method, params) ->
return userId?
# Allow rules for security. Without these, no writes would be allowed by default
myData.allow
insert: (userId, file) ->
# Assign the proper owner when a file is created
file.metadata = file.metadata ? {}
file.metadata._auth =
owner: userId
true
remove: (userId, file) ->
# Only owners can delete
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
read: (userId, file) ->
# Only owners can GET file data
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
write: (userId, file, fields) -> # This is for the HTTP REST interfaces PUT/POST
# All client file metadata updates are denied, implement Methods for that...
# Only owners can upload a file
if file.metadata?._auth?.owner and userId isnt file.metadata._auth.owner
return false
true
# Create a job to make a thumbnail for each newly uploaded image
addedFileJob = (file) ->
# Don't make new jobs for files which already have them in process...
# findAndModify is atomic, so in a multi-server environment,
# only one server can succeed and go on to create a job.
# Too bad Meteor has no built-in atomic DB update...
myData.rawCollection().findAndModify(
{ _id: new MongoInternals.NpmModule.ObjectID(file._id.toHexString()), 'metadata._Job': {$exists: false}},
[],
{ $set: { 'metadata._Job': null }},
{ w: 1 },
Meteor.bindEnvironment (err, doc) ->
if err
return console.error "Error locking file document in job creation: ", err
if doc # This is null if update above didn't succeed
outputFileId = myData.insert
filename: "tn_#{file.filename}.png"
contentType: 'image/png'
metadata: file.metadata
job = new Job myJobs, 'makeThumb',
owner: file.metadata._auth.owner
# These Id values are used by the worker to read and write the correct files for this job.
inputFileId: file._id
outputFileId: outputFileId
if jobId = job.delay(5000).retry({ wait: 20000, retries: 5 }).save()
myData.update({ _id: file._id }, { $set: { 'metadata._Job': jobId, 'metadata.thumb': outputFileId }})
myData.update({ _id: outputFileId }, { $set: { 'metadata._Job': jobId, 'metadata.thumbOf': file._id }})
else
console.error "Error saving new job for file #{file._id}"
)
# If a removed file has an associated cancellable job, cancel it.
removedFileJob = (file) ->
if file.metadata?._Job
if job = myJobs.findOne({_id: file.metadata._Job, status: { $in: myJobs.jobStatusCancellable }},{ fields: { log: 0 }})
console.log "Cancelling the job for the removed file!", job._id
job.cancel (err, res) ->
myData.remove
_id: job.data.outputFileId
if file.metadata?.thumb?
thumb = myData.remove { _id: file.metadata.thumb }
# When a file's data changes, call the appropriate functions
# for the removal of the old file and addition of the new.
changedFileJob = (oldFile, newFile) ->
if oldFile.md5 isnt newFile.md5
if oldFile.metadata._Job?
# Only call if this file has a job outstanding
removedFileJob oldFile
addedFileJob newFile
# Watch for changes to uploaded image files
fileObserve = myData.find(
md5:
$ne: 'd41d8cd98f00b204e9800998ecf8427e' # md5 sum for zero length file
'metadata._Resumable':
$exists: false
'metadata.thumbOf':
$exists: false
).observe(
added: addedFileJob
changed: changedFileJob
removed: removedFileJob
)
worker = (job, cb) ->
exec 'gm version', Meteor.bindEnvironment (err) ->
if err
console.warn 'Graphicsmagick is not installed!\n', err
job.fail "Error running graphicsmagick: #{err}", { fatal: true }
return cb()
job.log "Beginning work on thumbnail image: #{job.data.inputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
inStream = myData.findOneStream { _id: job.data.inputFileId }
unless inStream
job.fail 'Input file not found', { fatal: true }
return cb()
job.progress 20, 100
gm(inStream)
.resize(150,150)
.stream 'png', Meteor.bindEnvironment (err, stdout, stderr) ->
stderr.pipe process.stderr
if err
job.fail "Error running graphicsmagick: #{err}"
return cb()
else
outStream = myData.upsertStream { _id: job.data.outputFileId }, {}, (err, file) ->
if err
job.fail "#{err}"
else if file.length is 0
job.fail 'Empty output from graphicsmagick!'
else
job.progress 80, 100
myData.update { _id: job.data.inputFileId }, { $set: 'metadata.thumbComplete': true }
job.log "Finished work on thumbnail image: #{job.data.outputFileId.toHexString()}",
level: 'info'
data:
input: job.data.inputFileId
output: job.data.outputFileId
echo: true
job.done(file)
return cb()
unless outStream
job.fail 'Output file not found'
return cb()
stdout.pipe(outStream)
workers = myJobs.processJobs 'makeThumb', { concurrency: 2, prefetch: 2, pollInterval: 1000000000 }, worker
myJobs.find({ type: 'makeThumb', status: 'ready' })
.observe
added: (doc) ->
workers.trigger()
|
[
{
"context": "Algorithm API for JavaScript\n# https://github.com/kzokm/ga.js\n#\n# Copyright (c) 2014 OKAMURA, Kazuhide\n#\n",
"end": 69,
"score": 0.9996821880340576,
"start": 64,
"tag": "USERNAME",
"value": "kzokm"
},
{
"context": "ps://github.com/kzokm/ga.js\n#\n# Copyright (c) 2014 OKAMURA, Kazuhide\n#\n# This software is released under the",
"end": 106,
"score": 0.9990578889846802,
"start": 99,
"tag": "NAME",
"value": "OKAMURA"
},
{
"context": "ub.com/kzokm/ga.js\n#\n# Copyright (c) 2014 OKAMURA, Kazuhide\n#\n# This software is released under the MIT Licen",
"end": 116,
"score": 0.9877702593803406,
"start": 108,
"tag": "NAME",
"value": "Kazuhide"
}
] | lib/resolver.coffee | kzokm/ga.js | 4 | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 OKAMURA, Kazuhide
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{EventEmitter} = require 'events'
class Resolver extends EventEmitter
constructor: (reproduct, config = {})->
if typeof reproduct == 'function'
config.reproduct = reproduct
else
config = reproduct ? {}
config.reproduct ?= @reproduct
@config = config
resolve: (population, config = {}, callback_on_result)->
if typeof config == 'function'
callback_on_result = config
config = {}
for key of @config
config[key] ?= @config[key]
unless typeof config.reproduct == 'function'
throw new TypeError "#{config.reproduct} is not a function"
terminates = [].concat config.terminate
.map (fn)->
if typeof fn == 'number'
do (limit = fn)-> (population)->
population.generationNumber >= limit
else
fn
terminates.unshift => !@processing
population.sort()
population.generationNumber = generationNumber = 0
process = =>
if @processing
population = ((config.reproduct.call @, population, config) ? population)
.sort()
population.generationNumber = ++generationNumber
@emit 'reproduct', population, config
if (terminates.some (fn)-> fn.call @, population)
@emit 'terminate', population, config
callback_on_result?.call @, population.best, population, config
else
@processing = setTimeout process, config.intervalMillis
@processing = setTimeout process, config.intervalMillis
@
terminate: ->
@processing = undefined
module.exports = Resolver
| 68960 | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 <NAME>, <NAME>
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{EventEmitter} = require 'events'
class Resolver extends EventEmitter
constructor: (reproduct, config = {})->
if typeof reproduct == 'function'
config.reproduct = reproduct
else
config = reproduct ? {}
config.reproduct ?= @reproduct
@config = config
resolve: (population, config = {}, callback_on_result)->
if typeof config == 'function'
callback_on_result = config
config = {}
for key of @config
config[key] ?= @config[key]
unless typeof config.reproduct == 'function'
throw new TypeError "#{config.reproduct} is not a function"
terminates = [].concat config.terminate
.map (fn)->
if typeof fn == 'number'
do (limit = fn)-> (population)->
population.generationNumber >= limit
else
fn
terminates.unshift => !@processing
population.sort()
population.generationNumber = generationNumber = 0
process = =>
if @processing
population = ((config.reproduct.call @, population, config) ? population)
.sort()
population.generationNumber = ++generationNumber
@emit 'reproduct', population, config
if (terminates.some (fn)-> fn.call @, population)
@emit 'terminate', population, config
callback_on_result?.call @, population.best, population, config
else
@processing = setTimeout process, config.intervalMillis
@processing = setTimeout process, config.intervalMillis
@
terminate: ->
@processing = undefined
module.exports = Resolver
| true | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{EventEmitter} = require 'events'
class Resolver extends EventEmitter
constructor: (reproduct, config = {})->
if typeof reproduct == 'function'
config.reproduct = reproduct
else
config = reproduct ? {}
config.reproduct ?= @reproduct
@config = config
resolve: (population, config = {}, callback_on_result)->
if typeof config == 'function'
callback_on_result = config
config = {}
for key of @config
config[key] ?= @config[key]
unless typeof config.reproduct == 'function'
throw new TypeError "#{config.reproduct} is not a function"
terminates = [].concat config.terminate
.map (fn)->
if typeof fn == 'number'
do (limit = fn)-> (population)->
population.generationNumber >= limit
else
fn
terminates.unshift => !@processing
population.sort()
population.generationNumber = generationNumber = 0
process = =>
if @processing
population = ((config.reproduct.call @, population, config) ? population)
.sort()
population.generationNumber = ++generationNumber
@emit 'reproduct', population, config
if (terminates.some (fn)-> fn.call @, population)
@emit 'terminate', population, config
callback_on_result?.call @, population.best, population, config
else
@processing = setTimeout process, config.intervalMillis
@processing = setTimeout process, config.intervalMillis
@
terminate: ->
@processing = undefined
module.exports = Resolver
|
[
{
"context": "-----------------------------\n# Copyright (c) 2012 Patrick Mueller\n#\n# Licensed under the Apache License, Version 2.",
"end": 117,
"score": 0.9998260736465454,
"start": 102,
"tag": "NAME",
"value": "Patrick Mueller"
}
] | lib/FileSet.coffee | pmuellr/offl-site | 1 | #-------------------------------------------------------------------------------
# Copyright (c) 2012 Patrick Mueller
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
utils = require './utils'
#-------------------------------------------------------------------------------
module.exports = class FileTree
#---------------------------------------------------------------------------
@fromDir: (dir) ->
fileTree = new FileTree(dir)
try
collectTree(dir, "", fileTree.files, fileTree.dirs)
catch e
throw e
return null
fileTree
#---------------------------------------------------------------------------
constructor: (@baseDir) ->
@files = []
@dirs = []
#---------------------------------------------------------------------------
relFiles: () ->
@files.slice()
#---------------------------------------------------------------------------
fullFiles: () ->
for file in @files
path.join(@baseDir, file)
#---------------------------------------------------------------------------
relDirs: () ->
@dirs.slice()
#---------------------------------------------------------------------------
fullDirs: () ->
for dir in @dirs
path.join(@baseDir, dir)
#---------------------------------------------------------------------------
dump: () ->
console.log "base directory: #{@baseDir}"
for file in @relFiles()
console.log " #{file}"
#-------------------------------------------------------------------------------
collectTree = (dir, prefix, files, dirs) ->
entries = fs.readdirSync(dir)
for entry in entries
fullName = path.join(dir, entry)
relName = path.join(prefix, entry)
stat = fs.lstatSync(fullName)
if stat.isSymbolicLink()
utils.logVerbose "ignoring symlink: #{fullName}"
continue
files.push(relName) if stat.isFile()
dirs.push(relName) if stat.isDirectory()
if stat.isDirectory()
collectTree(fullName, relName, files, dirs)
| 179982 | #-------------------------------------------------------------------------------
# Copyright (c) 2012 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
utils = require './utils'
#-------------------------------------------------------------------------------
module.exports = class FileTree
#---------------------------------------------------------------------------
@fromDir: (dir) ->
fileTree = new FileTree(dir)
try
collectTree(dir, "", fileTree.files, fileTree.dirs)
catch e
throw e
return null
fileTree
#---------------------------------------------------------------------------
constructor: (@baseDir) ->
@files = []
@dirs = []
#---------------------------------------------------------------------------
relFiles: () ->
@files.slice()
#---------------------------------------------------------------------------
fullFiles: () ->
for file in @files
path.join(@baseDir, file)
#---------------------------------------------------------------------------
relDirs: () ->
@dirs.slice()
#---------------------------------------------------------------------------
fullDirs: () ->
for dir in @dirs
path.join(@baseDir, dir)
#---------------------------------------------------------------------------
dump: () ->
console.log "base directory: #{@baseDir}"
for file in @relFiles()
console.log " #{file}"
#-------------------------------------------------------------------------------
collectTree = (dir, prefix, files, dirs) ->
entries = fs.readdirSync(dir)
for entry in entries
fullName = path.join(dir, entry)
relName = path.join(prefix, entry)
stat = fs.lstatSync(fullName)
if stat.isSymbolicLink()
utils.logVerbose "ignoring symlink: #{fullName}"
continue
files.push(relName) if stat.isFile()
dirs.push(relName) if stat.isDirectory()
if stat.isDirectory()
collectTree(fullName, relName, files, dirs)
| true | #-------------------------------------------------------------------------------
# Copyright (c) 2012 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
utils = require './utils'
#-------------------------------------------------------------------------------
module.exports = class FileTree
#---------------------------------------------------------------------------
@fromDir: (dir) ->
fileTree = new FileTree(dir)
try
collectTree(dir, "", fileTree.files, fileTree.dirs)
catch e
throw e
return null
fileTree
#---------------------------------------------------------------------------
constructor: (@baseDir) ->
@files = []
@dirs = []
#---------------------------------------------------------------------------
relFiles: () ->
@files.slice()
#---------------------------------------------------------------------------
fullFiles: () ->
for file in @files
path.join(@baseDir, file)
#---------------------------------------------------------------------------
relDirs: () ->
@dirs.slice()
#---------------------------------------------------------------------------
fullDirs: () ->
for dir in @dirs
path.join(@baseDir, dir)
#---------------------------------------------------------------------------
dump: () ->
console.log "base directory: #{@baseDir}"
for file in @relFiles()
console.log " #{file}"
#-------------------------------------------------------------------------------
collectTree = (dir, prefix, files, dirs) ->
entries = fs.readdirSync(dir)
for entry in entries
fullName = path.join(dir, entry)
relName = path.join(prefix, entry)
stat = fs.lstatSync(fullName)
if stat.isSymbolicLink()
utils.logVerbose "ignoring symlink: #{fullName}"
continue
files.push(relName) if stat.isFile()
dirs.push(relName) if stat.isDirectory()
if stat.isDirectory()
collectTree(fullName, relName, files, dirs)
|
[
{
"context": "er destructuring from arrays and objects\n# @author Alex LaFroscia\n###\n'use strict'\n\n#------------------------------",
"end": 90,
"score": 0.9998181462287903,
"start": 76,
"tag": "NAME",
"value": "Alex LaFroscia"
}
] | src/tests/rules/prefer-destructuring.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Prefer destructuring from arrays and objects
# @author Alex LaFroscia
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/prefer-destructuring'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'prefer-destructuring', rule,
valid: [
'[foo] = array'
'{ foo } = object'
'foo'
,
# Ensure that the default behavior does not require desturcturing when renaming
code: 'foo = object.bar'
options: [object: yes]
,
code: 'foo = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: '{ bar: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: '{ [bar]: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: 'foo = array[0]'
options: [array: no]
,
code: 'foo = object.foo'
options: [object: no]
,
code: "foo = object['foo']"
options: [object: no]
,
'({ foo } = object)'
,
# Fix #8654
code: 'foo = array[0]'
options: [{array: no}, {enforceForRenamedProperties: yes}]
,
'[foo] = array'
'foo += array[0]'
'foo += bar.foo'
'''
class Foo extends Bar
@foo: ->
foo = super.foo
'''
'foo = bar[foo]'
'foo = bar[foo]'
'foo = object?.foo'
"foo = object?['foo']"
]
invalid: [
code: 'foo = array[0]'
errors: [message: 'Use array destructuring.']
,
code: 'foo = object.foo'
errors: [message: 'Use object destructuring.']
,
code: 'foobar = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: "foo = object['foo']"
errors: [message: 'Use object destructuring.']
,
code: 'foo = array[0]'
options: [{array: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use array destructuring.']
,
code: 'foo = array[0]'
options: [array: yes]
errors: [message: 'Use array destructuring.']
,
code: '''
class Foo extends Bar
@foo: -> bar = super.foo.bar
'''
errors: [message: 'Use object destructuring.']
]
| 83714 | ###*
# @fileoverview Prefer destructuring from arrays and objects
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/prefer-destructuring'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'prefer-destructuring', rule,
valid: [
'[foo] = array'
'{ foo } = object'
'foo'
,
# Ensure that the default behavior does not require desturcturing when renaming
code: 'foo = object.bar'
options: [object: yes]
,
code: 'foo = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: '{ bar: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: '{ [bar]: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: 'foo = array[0]'
options: [array: no]
,
code: 'foo = object.foo'
options: [object: no]
,
code: "foo = object['foo']"
options: [object: no]
,
'({ foo } = object)'
,
# Fix #8654
code: 'foo = array[0]'
options: [{array: no}, {enforceForRenamedProperties: yes}]
,
'[foo] = array'
'foo += array[0]'
'foo += bar.foo'
'''
class Foo extends Bar
@foo: ->
foo = super.foo
'''
'foo = bar[foo]'
'foo = bar[foo]'
'foo = object?.foo'
"foo = object?['foo']"
]
invalid: [
code: 'foo = array[0]'
errors: [message: 'Use array destructuring.']
,
code: 'foo = object.foo'
errors: [message: 'Use object destructuring.']
,
code: 'foobar = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: "foo = object['foo']"
errors: [message: 'Use object destructuring.']
,
code: 'foo = array[0]'
options: [{array: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use array destructuring.']
,
code: 'foo = array[0]'
options: [array: yes]
errors: [message: 'Use array destructuring.']
,
code: '''
class Foo extends Bar
@foo: -> bar = super.foo.bar
'''
errors: [message: 'Use object destructuring.']
]
| true | ###*
# @fileoverview Prefer destructuring from arrays and objects
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/prefer-destructuring'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'prefer-destructuring', rule,
valid: [
'[foo] = array'
'{ foo } = object'
'foo'
,
# Ensure that the default behavior does not require desturcturing when renaming
code: 'foo = object.bar'
options: [object: yes]
,
code: 'foo = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: no}]
,
code: '{ bar: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: '{ [bar]: foo } = object'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
,
code: 'foo = array[0]'
options: [array: no]
,
code: 'foo = object.foo'
options: [object: no]
,
code: "foo = object['foo']"
options: [object: no]
,
'({ foo } = object)'
,
# Fix #8654
code: 'foo = array[0]'
options: [{array: no}, {enforceForRenamedProperties: yes}]
,
'[foo] = array'
'foo += array[0]'
'foo += bar.foo'
'''
class Foo extends Bar
@foo: ->
foo = super.foo
'''
'foo = bar[foo]'
'foo = bar[foo]'
'foo = object?.foo'
"foo = object?['foo']"
]
invalid: [
code: 'foo = array[0]'
errors: [message: 'Use array destructuring.']
,
code: 'foo = object.foo'
errors: [message: 'Use object destructuring.']
,
code: 'foobar = object.bar'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: 'foo = object[bar]'
options: [{object: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use object destructuring.']
,
code: "foo = object['foo']"
errors: [message: 'Use object destructuring.']
,
code: 'foo = array[0]'
options: [{array: yes}, {enforceForRenamedProperties: yes}]
errors: [message: 'Use array destructuring.']
,
code: 'foo = array[0]'
options: [array: yes]
errors: [message: 'Use array destructuring.']
,
code: '''
class Foo extends Bar
@foo: -> bar = super.foo.bar
'''
errors: [message: 'Use object destructuring.']
]
|
[
{
"context": " thing: 'foo',\n\n q: 'bar',\n\n name: 'Katzdale',\n\n remoteQueryChangeCount: 0,\n\n filter",
"end": 1720,
"score": 0.9994839429855347,
"start": 1712,
"tag": "NAME",
"value": "Katzdale"
},
{
"context": "undController.get('filter') -> {\"q\":\"bar\",\"name\":\"Katzdale\"}\n // HashboundController.get('page') -> {\"off",
"end": 2508,
"score": 0.9887338876724243,
"start": 2500,
"tag": "NAME",
"value": "Katzdale"
},
{
"context": "ffset\":60,\"limit\":30},\"filter\":{\"q\":\"bar\",\"name\":\"Katzdale\"}}\n\n @class HashBindingsMixin\n###\n\nHashBindingsM",
"end": 2710,
"score": 0.9899144768714905,
"start": 2702,
"tag": "NAME",
"value": "Katzdale"
}
] | addon/mixins/hash-bindings.coffee | realityendshere/ella-burn-ins | 1 | `import Ember from 'ember'`
###
@module emberella
@submodule emberella-mixins
###
get = Ember.get
keys = Object.keys
typeOf = Ember.typeOf
computed = Ember.computed
observer = Ember.observer
mixin = Ember.mixin
run = Ember.run
debounce = run.debounce
###
`HashBindingsMixin` combines attributes in the host object into
computed hashes.
To use this mixin, define an array of properties to compute in a
`hashBindings` array.
For each hash binding, define another array of properties to observe and
assemble into computed properties. For example, if
`hashBindings: ['filter']`, then `filterBindings: ['q', 'name']` will define
the `filter` property on the parent object that produces a hash that looks
like `{'q': 'value of q', 'name': 'value of name'}.
You can optionally define change handlers that do something after the
computed hash updates. For example, a `filterDidChange` method would be
called each time the `q` or `name` property changes.
If the change handler is computationally complex, you can delay calling
the change handling method for some number of ms after the last in a series
of changes. For example, setting `filterDelay: 500` on the parent object
would cause `filterDidChange` to be called once, 500ms after a series of
rapid changes to the `q` or `name` properties. This uses debounce.
@example
HashboundController = Ember.Controller.extend(HashBindingsMixin, {
hashBindings: ['filter', 'page', 'remoteQuery'],
filterBindings: ['q', 'name'],
pageBindings: ['offset', 'limit'],
remoteQueryBindings: ['thing', 'page', 'filter'],
offset: 60,
limit: 30,
thing: 'foo',
q: 'bar',
name: 'Katzdale',
remoteQueryChangeCount: 0,
filterJSON: Ember.computed('filter', function () {
return JSON.stringify(this.get('filter'));
}),
pageJSON: Ember.computed('page', function () {
return JSON.stringify(this.get('page'));
}),
remoteQueryJSON: Ember.computed('remoteQuery', function () {
return JSON.stringify(this.get('remoteQuery'));
}),
// Callback to handle changes to the computed `remoteQuery`
remoteQueryDidChange: function remoteQueryDidChange() {
return this.incrementProperty('remoteQueryChangeCount');
},
// Wait 500ms after the last change to call `remoteQueryDidChange`
remoteQueryDelay: 500
});
// HashboundController.get('filter') -> {"q":"bar","name":"Katzdale"}
// HashboundController.get('page') -> {"offset":60,"limit":30}
// HashboundController.get('remoteQuery') -> {"thing":"foo","page":{"offset":60,"limit":30},"filter":{"q":"bar","name":"Katzdale"}}
@class HashBindingsMixin
###
HashBindingsMixin = Ember.Mixin.create
###
Make `hashBindings` property concatenated instead of replaced by
inheritance chain.
@property concatenatedProperties
@type Array
@default ['hashBindings']
@final
###
concatenatedProperties: ['hashBindings']
###
Setup bindings to watch the properties named in the `hashBindings`
attribute of this object.
@method applyHashBindings
@chainable
###
applyHashBindings: ->
hashBindings = @hashBindings
return unless hashBindings and typeOf(hashBindings) is 'array'
for hashBind in hashBindings when typeOf(bindings = get(@, hashBind + 'Bindings')) is 'array'
lookup = {}
bindComputed = null
bindings.forEach (binding) ->
[property, param] = binding.split(':')
lookup[(param or property)] = property
@_attachComputedProperty hashBind, lookup
@_attachHashChangeHandler hashBind
@
_attachComputedProperty: (name, lookup) ->
params = keys(lookup)
properties = params.map (param) -> lookup[param]
# create computed property
bindComputed = computed
get: =>
result = {}
params.forEach (param) =>
val = get(@, lookup[param])
result[param] = val if val
result
bindComputed.property.apply(bindComputed, properties)
# define query computed properties
remix = {}
remix[name] = bindComputed
mixin(@, remix)
get(@, name)
@
_attachHashChangeHandler: (name) ->
handlerName = name + 'DidChange'
handler = get(@, handlerName)
return @ if typeOf(handler) isnt 'function'
handlerFn = =>
delay = parseInt(get(@, name + 'Delay'), 10)
if typeOf(delay) is 'number' and delay > 0
debounce(@, get(@, handlerName), get(@, name), delay)
else
run(@, get(@, handlerName), get(@, name))
remix = {}
remix['__' + handlerName] = observer(name, handlerFn)
mixin(@, remix)
@
initRemoteQueryBindings: Ember.on('init', ->
@applyHashBindings()
)
`export default HashBindingsMixin`
| 48932 | `import Ember from 'ember'`
###
@module emberella
@submodule emberella-mixins
###
get = Ember.get
keys = Object.keys
typeOf = Ember.typeOf
computed = Ember.computed
observer = Ember.observer
mixin = Ember.mixin
run = Ember.run
debounce = run.debounce
###
`HashBindingsMixin` combines attributes in the host object into
computed hashes.
To use this mixin, define an array of properties to compute in a
`hashBindings` array.
For each hash binding, define another array of properties to observe and
assemble into computed properties. For example, if
`hashBindings: ['filter']`, then `filterBindings: ['q', 'name']` will define
the `filter` property on the parent object that produces a hash that looks
like `{'q': 'value of q', 'name': 'value of name'}.
You can optionally define change handlers that do something after the
computed hash updates. For example, a `filterDidChange` method would be
called each time the `q` or `name` property changes.
If the change handler is computationally complex, you can delay calling
the change handling method for some number of ms after the last in a series
of changes. For example, setting `filterDelay: 500` on the parent object
would cause `filterDidChange` to be called once, 500ms after a series of
rapid changes to the `q` or `name` properties. This uses debounce.
@example
HashboundController = Ember.Controller.extend(HashBindingsMixin, {
hashBindings: ['filter', 'page', 'remoteQuery'],
filterBindings: ['q', 'name'],
pageBindings: ['offset', 'limit'],
remoteQueryBindings: ['thing', 'page', 'filter'],
offset: 60,
limit: 30,
thing: 'foo',
q: 'bar',
name: '<NAME>',
remoteQueryChangeCount: 0,
filterJSON: Ember.computed('filter', function () {
return JSON.stringify(this.get('filter'));
}),
pageJSON: Ember.computed('page', function () {
return JSON.stringify(this.get('page'));
}),
remoteQueryJSON: Ember.computed('remoteQuery', function () {
return JSON.stringify(this.get('remoteQuery'));
}),
// Callback to handle changes to the computed `remoteQuery`
remoteQueryDidChange: function remoteQueryDidChange() {
return this.incrementProperty('remoteQueryChangeCount');
},
// Wait 500ms after the last change to call `remoteQueryDidChange`
remoteQueryDelay: 500
});
// HashboundController.get('filter') -> {"q":"bar","name":"<NAME>"}
// HashboundController.get('page') -> {"offset":60,"limit":30}
// HashboundController.get('remoteQuery') -> {"thing":"foo","page":{"offset":60,"limit":30},"filter":{"q":"bar","name":"<NAME>"}}
@class HashBindingsMixin
###
HashBindingsMixin = Ember.Mixin.create
###
Make `hashBindings` property concatenated instead of replaced by
inheritance chain.
@property concatenatedProperties
@type Array
@default ['hashBindings']
@final
###
concatenatedProperties: ['hashBindings']
###
Setup bindings to watch the properties named in the `hashBindings`
attribute of this object.
@method applyHashBindings
@chainable
###
applyHashBindings: ->
hashBindings = @hashBindings
return unless hashBindings and typeOf(hashBindings) is 'array'
for hashBind in hashBindings when typeOf(bindings = get(@, hashBind + 'Bindings')) is 'array'
lookup = {}
bindComputed = null
bindings.forEach (binding) ->
[property, param] = binding.split(':')
lookup[(param or property)] = property
@_attachComputedProperty hashBind, lookup
@_attachHashChangeHandler hashBind
@
_attachComputedProperty: (name, lookup) ->
params = keys(lookup)
properties = params.map (param) -> lookup[param]
# create computed property
bindComputed = computed
get: =>
result = {}
params.forEach (param) =>
val = get(@, lookup[param])
result[param] = val if val
result
bindComputed.property.apply(bindComputed, properties)
# define query computed properties
remix = {}
remix[name] = bindComputed
mixin(@, remix)
get(@, name)
@
_attachHashChangeHandler: (name) ->
handlerName = name + 'DidChange'
handler = get(@, handlerName)
return @ if typeOf(handler) isnt 'function'
handlerFn = =>
delay = parseInt(get(@, name + 'Delay'), 10)
if typeOf(delay) is 'number' and delay > 0
debounce(@, get(@, handlerName), get(@, name), delay)
else
run(@, get(@, handlerName), get(@, name))
remix = {}
remix['__' + handlerName] = observer(name, handlerFn)
mixin(@, remix)
@
initRemoteQueryBindings: Ember.on('init', ->
@applyHashBindings()
)
`export default HashBindingsMixin`
| true | `import Ember from 'ember'`
###
@module emberella
@submodule emberella-mixins
###
get = Ember.get
keys = Object.keys
typeOf = Ember.typeOf
computed = Ember.computed
observer = Ember.observer
mixin = Ember.mixin
run = Ember.run
debounce = run.debounce
###
`HashBindingsMixin` combines attributes in the host object into
computed hashes.
To use this mixin, define an array of properties to compute in a
`hashBindings` array.
For each hash binding, define another array of properties to observe and
assemble into computed properties. For example, if
`hashBindings: ['filter']`, then `filterBindings: ['q', 'name']` will define
the `filter` property on the parent object that produces a hash that looks
like `{'q': 'value of q', 'name': 'value of name'}.
You can optionally define change handlers that do something after the
computed hash updates. For example, a `filterDidChange` method would be
called each time the `q` or `name` property changes.
If the change handler is computationally complex, you can delay calling
the change handling method for some number of ms after the last in a series
of changes. For example, setting `filterDelay: 500` on the parent object
would cause `filterDidChange` to be called once, 500ms after a series of
rapid changes to the `q` or `name` properties. This uses debounce.
@example
HashboundController = Ember.Controller.extend(HashBindingsMixin, {
hashBindings: ['filter', 'page', 'remoteQuery'],
filterBindings: ['q', 'name'],
pageBindings: ['offset', 'limit'],
remoteQueryBindings: ['thing', 'page', 'filter'],
offset: 60,
limit: 30,
thing: 'foo',
q: 'bar',
name: 'PI:NAME:<NAME>END_PI',
remoteQueryChangeCount: 0,
filterJSON: Ember.computed('filter', function () {
return JSON.stringify(this.get('filter'));
}),
pageJSON: Ember.computed('page', function () {
return JSON.stringify(this.get('page'));
}),
remoteQueryJSON: Ember.computed('remoteQuery', function () {
return JSON.stringify(this.get('remoteQuery'));
}),
// Callback to handle changes to the computed `remoteQuery`
remoteQueryDidChange: function remoteQueryDidChange() {
return this.incrementProperty('remoteQueryChangeCount');
},
// Wait 500ms after the last change to call `remoteQueryDidChange`
remoteQueryDelay: 500
});
// HashboundController.get('filter') -> {"q":"bar","name":"PI:NAME:<NAME>END_PI"}
// HashboundController.get('page') -> {"offset":60,"limit":30}
// HashboundController.get('remoteQuery') -> {"thing":"foo","page":{"offset":60,"limit":30},"filter":{"q":"bar","name":"PI:NAME:<NAME>END_PI"}}
@class HashBindingsMixin
###
HashBindingsMixin = Ember.Mixin.create
###
Make `hashBindings` property concatenated instead of replaced by
inheritance chain.
@property concatenatedProperties
@type Array
@default ['hashBindings']
@final
###
concatenatedProperties: ['hashBindings']
###
Setup bindings to watch the properties named in the `hashBindings`
attribute of this object.
@method applyHashBindings
@chainable
###
applyHashBindings: ->
hashBindings = @hashBindings
return unless hashBindings and typeOf(hashBindings) is 'array'
for hashBind in hashBindings when typeOf(bindings = get(@, hashBind + 'Bindings')) is 'array'
lookup = {}
bindComputed = null
bindings.forEach (binding) ->
[property, param] = binding.split(':')
lookup[(param or property)] = property
@_attachComputedProperty hashBind, lookup
@_attachHashChangeHandler hashBind
@
_attachComputedProperty: (name, lookup) ->
params = keys(lookup)
properties = params.map (param) -> lookup[param]
# create computed property
bindComputed = computed
get: =>
result = {}
params.forEach (param) =>
val = get(@, lookup[param])
result[param] = val if val
result
bindComputed.property.apply(bindComputed, properties)
# define query computed properties
remix = {}
remix[name] = bindComputed
mixin(@, remix)
get(@, name)
@
_attachHashChangeHandler: (name) ->
handlerName = name + 'DidChange'
handler = get(@, handlerName)
return @ if typeOf(handler) isnt 'function'
handlerFn = =>
delay = parseInt(get(@, name + 'Delay'), 10)
if typeOf(delay) is 'number' and delay > 0
debounce(@, get(@, handlerName), get(@, name), delay)
else
run(@, get(@, handlerName), get(@, name))
remix = {}
remix['__' + handlerName] = observer(name, handlerFn)
mixin(@, remix)
@
initRemoteQueryBindings: Ember.on('init', ->
@applyHashBindings()
)
`export default HashBindingsMixin`
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission ",
"end": 18,
"score": 0.9660224914550781,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-path.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
isWindows = process.platform is "win32"
f = __filename
assert.equal path.basename(f), "test-path.js"
assert.equal path.basename(f, ".js"), "test-path"
assert.equal path.basename(""), ""
assert.equal path.basename("/dir/basename.ext"), "basename.ext"
assert.equal path.basename("/basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext/"), "basename.ext"
assert.equal path.basename("basename.ext//"), "basename.ext"
# On Windows a backslash acts as a path separator.
assert.equal path.win32.basename("\\dir\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\\\"), "basename.ext"
# On unix a backslash is just treated as any other character.
assert.equal path.posix.basename("\\dir\\basename.ext"), "\\dir\\basename.ext"
assert.equal path.posix.basename("\\basename.ext"), "\\basename.ext"
assert.equal path.posix.basename("basename.ext"), "basename.ext"
assert.equal path.posix.basename("basename.ext\\"), "basename.ext\\"
assert.equal path.posix.basename("basename.ext\\\\"), "basename.ext\\\\"
# POSIX filenames may include control characters
# c.f. http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html
unless isWindows
controlCharFilename = "Icon" + String.fromCharCode(13)
assert.equal path.basename("/a/b/" + controlCharFilename), controlCharFilename
assert.equal path.extname(f), ".js"
assert.equal path.dirname(f).substr(-11), (if isWindows then "test\\simple" else "test/simple")
assert.equal path.dirname("/a/b/"), "/a"
assert.equal path.dirname("/a/b"), "/a"
assert.equal path.dirname("/a"), "/"
assert.equal path.dirname(""), "."
assert.equal path.dirname("/"), "/"
assert.equal path.dirname("////"), "/"
assert.equal path.win32.dirname("c:\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\bar"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\baz"), "c:\\foo\\bar"
assert.equal path.win32.dirname("\\"), "\\"
assert.equal path.win32.dirname("\\foo"), "\\"
assert.equal path.win32.dirname("\\foo\\"), "\\"
assert.equal path.win32.dirname("\\foo\\bar"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\baz"), "\\foo\\bar"
assert.equal path.win32.dirname("c:"), "c:"
assert.equal path.win32.dirname("c:foo"), "c:"
assert.equal path.win32.dirname("c:foo\\"), "c:"
assert.equal path.win32.dirname("c:foo\\bar"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\baz"), "c:foo\\bar"
assert.equal path.win32.dirname("\\\\unc\\share"), "\\\\unc\\share"
assert.equal path.win32.dirname("\\\\unc\\share\\foo"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\baz"), "\\\\unc\\share\\foo\\bar"
assert.equal path.extname(""), ""
assert.equal path.extname("/path/to/file"), ""
assert.equal path.extname("/path/to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file"), ""
assert.equal path.extname("/path.to/.file"), ""
assert.equal path.extname("/path.to/.file.ext"), ".ext"
assert.equal path.extname("/path/to/f.ext"), ".ext"
assert.equal path.extname("/path/to/..ext"), ".ext"
assert.equal path.extname("file"), ""
assert.equal path.extname("file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname("/file"), ""
assert.equal path.extname("/file.ext"), ".ext"
assert.equal path.extname("/.file"), ""
assert.equal path.extname("/.file.ext"), ".ext"
assert.equal path.extname(".path/file.ext"), ".ext"
assert.equal path.extname("file.ext.ext"), ".ext"
assert.equal path.extname("file."), "."
assert.equal path.extname("."), ""
assert.equal path.extname("./"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file."), "."
assert.equal path.extname(".file.."), "."
assert.equal path.extname(".."), ""
assert.equal path.extname("../"), ""
assert.equal path.extname("..file.ext"), ".ext"
assert.equal path.extname("..file"), ".file"
assert.equal path.extname("..file."), "."
assert.equal path.extname("..file.."), "."
assert.equal path.extname("..."), "."
assert.equal path.extname("...ext"), ".ext"
assert.equal path.extname("...."), "."
assert.equal path.extname("file.ext/"), ".ext"
assert.equal path.extname("file.ext//"), ".ext"
assert.equal path.extname("file/"), ""
assert.equal path.extname("file//"), ""
assert.equal path.extname("file./"), "."
assert.equal path.extname("file.//"), "."
# On windows, backspace is a path separator.
assert.equal path.win32.extname(".\\"), ""
assert.equal path.win32.extname("..\\"), ""
assert.equal path.win32.extname("file.ext\\"), ".ext"
assert.equal path.win32.extname("file.ext\\\\"), ".ext"
assert.equal path.win32.extname("file\\"), ""
assert.equal path.win32.extname("file\\\\"), ""
assert.equal path.win32.extname("file.\\"), "."
assert.equal path.win32.extname("file.\\\\"), "."
# On unix, backspace is a valid name component like any other character.
assert.equal path.posix.extname(".\\"), ""
assert.equal path.posix.extname("..\\"), ".\\"
assert.equal path.posix.extname("file.ext\\"), ".ext\\"
assert.equal path.posix.extname("file.ext\\\\"), ".ext\\\\"
assert.equal path.posix.extname("file\\"), ""
assert.equal path.posix.extname("file\\\\"), ""
assert.equal path.posix.extname("file.\\"), ".\\"
assert.equal path.posix.extname("file.\\\\"), ".\\\\"
# path.join tests
failures = []
# arguments result
joinTests = [
[
[
"."
"x/b"
".."
"/b/c.js"
]
"x/b/c.js"
]
[
[
"/."
"x/b"
".."
"/b/c.js"
]
"/x/b/c.js"
]
[
[
"/foo"
"../../../bar"
]
"/bar"
]
[
[
"foo"
"../../../bar"
]
"../../bar"
]
[
[
"foo/"
"../../../bar"
]
"../../bar"
]
[
[
"foo/x"
"../../../bar"
]
"../bar"
]
[
[
"foo/x"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"."
"bar"
]
"foo/x/bar"
]
[
["./"]
"./"
]
[
[
"."
"./"
]
"./"
]
[
[
"."
"."
"."
]
"."
]
[
[
"."
"./"
"."
]
"."
]
[
[
"."
"/./"
"."
]
"."
]
[
[
"."
"/////./"
"."
]
"."
]
[
["."]
"."
]
[
[
""
"."
]
"."
]
[
[
""
"foo"
]
"foo"
]
[
[
"foo"
"/bar"
]
"foo/bar"
]
[
[
""
"/foo"
]
"/foo"
]
[
[
""
""
"/foo"
]
"/foo"
]
[
[
""
""
"foo"
]
"foo"
]
[
[
"foo"
""
]
"foo"
]
[
[
"foo/"
""
]
"foo/"
]
[
[
"foo"
""
"/bar"
]
"foo/bar"
]
[
[
"./"
".."
"/foo"
]
"../foo"
]
[
[
"./"
".."
".."
"/foo"
]
"../../foo"
]
[
[
"."
".."
".."
"/foo"
]
"../../foo"
]
[
[
""
".."
".."
"/foo"
]
"../../foo"
]
[
["/"]
"/"
]
[
[
"/"
"."
]
"/"
]
[
[
"/"
".."
]
"/"
]
[
[
"/"
".."
".."
]
"/"
]
[
[""]
"."
]
[
[
""
""
]
"."
]
[
[" /foo"]
" /foo"
]
[
[
" "
"foo"
]
" /foo"
]
[
[
" "
"."
]
" "
]
[
[
" "
"/"
]
" /"
]
[
[
" "
""
]
" "
]
[
[
"/"
"foo"
]
"/foo"
]
[
[
"/"
"/foo"
]
"/foo"
]
[
[
"/"
"//foo"
]
"/foo"
]
[
[
"/"
""
"/foo"
]
"/foo"
]
[
[
""
"/"
"foo"
]
"/foo"
]
[
[
""
"/"
"/foo"
]
"/foo"
]
]
# Windows-specific join tests
if isWindows
joinTests = joinTests.concat([ # UNC path expected
[
["//foo/bar"]
"//foo/bar/"
]
[
["\\/foo/bar"]
"//foo/bar/"
]
[
["\\\\foo/bar"]
"//foo/bar/"
]
# UNC path expected - server and share separate
[
[
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
"//foo"
"/bar"
]
"//foo/bar/"
]
# UNC path expected - questionable
[
[
"//foo"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"/bar"
]
"//foo/bar/"
]
# UNC path expected - even more questionable
[
[
""
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"/bar"
]
"//foo/bar/"
]
# No UNC path expected (no double slash in first component)
[
[
"\\"
"foo/bar"
]
"/foo/bar"
]
[
[
"\\"
"/foo/bar"
]
"/foo/bar"
]
[
[
""
"/"
"/foo/bar"
]
"/foo/bar"
]
# No UNC path expected (no non-slashes in first component - questionable)
[
[
"//"
"foo/bar"
]
"/foo/bar"
]
[
[
"//"
"/foo/bar"
]
"/foo/bar"
]
[
[
"\\\\"
"/"
"/foo/bar"
]
"/foo/bar"
]
[
["//"]
"/"
]
# No UNC path expected (share name missing - questionable).
[
["//foo"]
"/foo"
]
[
["//foo/"]
"/foo/"
]
[
[
"//foo"
"/"
]
"/foo/"
]
[
[
"//foo"
""
"/"
]
"/foo/"
]
# No UNC path expected (too many leading slashes - questionable)
[
["///foo/bar"]
"/foo/bar"
]
[
[
"////foo"
"bar"
]
"/foo/bar"
]
[
["\\\\\\/foo/bar"]
"/foo/bar"
]
# Drive-relative vs drive-absolute paths. This merely describes the
# status quo, rather than being obviously right
[
["c:"]
"c:."
]
[
["c:."]
"c:."
]
[
[
"c:"
""
]
"c:."
]
[
[
""
"c:"
]
"c:."
]
[
[
"c:."
"/"
]
"c:./"
]
[
[
"c:."
"file"
]
"c:file"
]
[
[
"c:"
"/"
]
"c:/"
]
[
[
"c:"
"file"
]
"c:/file"
]
])
# Run the join tests.
joinTests.forEach (test) ->
actual = path.join.apply(path, test[0])
expected = (if isWindows then test[1].replace(/\//g, "\\") else test[1])
message = "path.join(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
joinThrowTests = [
true
false
7
null
{
{}
}
`undefined`
[]
NaN
]
joinThrowTests.forEach (test) ->
assert.throws (->
path.join test
return
), TypeError
assert.throws (->
path.resolve test
return
), TypeError
return
# path normalize tests
assert.equal path.win32.normalize("./fixtures///b/../b/c.js"), "fixtures\\b\\c.js"
assert.equal path.win32.normalize("/foo/../../../bar"), "\\bar"
assert.equal path.win32.normalize("a//b//../b"), "a\\b"
assert.equal path.win32.normalize("a//b//./c"), "a\\b\\c"
assert.equal path.win32.normalize("a//b//."), "a\\b"
assert.equal path.win32.normalize("//server/share/dir/file.ext"), "\\\\server\\share\\dir\\file.ext"
assert.equal path.posix.normalize("./fixtures///b/../b/c.js"), "fixtures/b/c.js"
assert.equal path.posix.normalize("/foo/../../../bar"), "/bar"
assert.equal path.posix.normalize("a//b//../b"), "a/b"
assert.equal path.posix.normalize("a//b//./c"), "a/b/c"
assert.equal path.posix.normalize("a//b//."), "a/b"
# path.resolve tests
if isWindows
# windows
# arguments result
resolveTests = [
[
[
"c:/blah\\blah"
"d:/games"
"c:../a"
]
"c:\\blah\\a"
]
[
[
"c:/ignore"
"d:\\a/b\\c/d"
"\\e.exe"
]
"d:\\e.exe"
]
[
[
"c:/ignore"
"c:/some/file"
]
"c:\\some\\file"
]
[
[
"d:/ignore"
"d:some/dir//"
]
"d:\\ignore\\some\\dir"
]
[
["."]
process.cwd()
]
[
[
"//server/share"
".."
"relative\\"
]
"\\\\server\\share\\relative"
]
[
[
"c:/"
"//"
]
"c:\\"
]
[
[
"c:/"
"//dir"
]
"c:\\dir"
]
[
[
"c:/"
"//server/share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"//server//share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"///some//dir"
]
"c:\\some\\dir"
]
]
else
# Posix
# arguments result
resolveTests = [
[
[
"/var/lib"
"../"
"file/"
]
"/var/file"
]
[
[
"/var/lib"
"/../"
"file/"
]
"/file"
]
[
[
"a/b/c/"
"../../.."
]
process.cwd()
]
[
["."]
process.cwd()
]
[
[
"/some/dir"
"."
"/absolute/"
]
"/absolute"
]
]
failures = []
resolveTests.forEach (test) ->
actual = path.resolve.apply(path, test[0])
expected = test[1]
message = "path.resolve(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
# path.isAbsolute tests
assert.equal path.win32.isAbsolute("//server/file"), true
assert.equal path.win32.isAbsolute("\\\\server\\file"), true
assert.equal path.win32.isAbsolute("C:/Users/"), true
assert.equal path.win32.isAbsolute("C:\\Users\\"), true
assert.equal path.win32.isAbsolute("C:cwd/another"), false
assert.equal path.win32.isAbsolute("C:cwd\\another"), false
assert.equal path.win32.isAbsolute("directory/directory"), false
assert.equal path.win32.isAbsolute("directory\\directory"), false
assert.equal path.posix.isAbsolute("/home/foo"), true
assert.equal path.posix.isAbsolute("/home/foo/.."), true
assert.equal path.posix.isAbsolute("bar/"), false
assert.equal path.posix.isAbsolute("./baz"), false
# path.relative tests
if isWindows
# windows
# arguments result
relativeTests = [
[
"c:/blah\\blah"
"d:/games"
"d:\\games"
]
[
"c:/aaaa/bbbb"
"c:/aaaa"
".."
]
[
"c:/aaaa/bbbb"
"c:/cccc"
"..\\..\\cccc"
]
[
"c:/aaaa/bbbb"
"c:/aaaa/bbbb"
""
]
[
"c:/aaaa/bbbb"
"c:/aaaa/cccc"
"..\\cccc"
]
[
"c:/aaaa/"
"c:/aaaa/cccc"
"cccc"
]
[
"c:/"
"c:\\aaaa\\bbbb"
"aaaa\\bbbb"
]
[
"c:/aaaa/bbbb"
"d:\\"
"d:\\"
]
]
else
# posix
# arguments result
relativeTests = [
[
"/var/lib"
"/var"
".."
]
[
"/var/lib"
"/bin"
"../../bin"
]
[
"/var/lib"
"/var/lib"
""
]
[
"/var/lib"
"/var/apache"
"../apache"
]
[
"/var/"
"/var/lib"
"lib"
]
[
"/"
"/var/lib"
"var/lib"
]
]
failures = []
relativeTests.forEach (test) ->
actual = path.relative(test[0], test[1])
expected = test[2]
message = "path.relative(" + test.slice(0, 2).map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
assert.equal failures.length, 0, failures.join("")
# windows
assert.equal path.win32.sep, "\\"
# posix
assert.equal path.posix.sep, "/"
# path.delimiter tests
# windows
assert.equal path.win32.delimiter, ";"
# posix
assert.equal path.posix.delimiter, ":"
if isWindows
assert.deepEqual path, path.win32, "should be win32 path module"
else
assert.deepEqual path, path.posix, "should be posix path module"
| 220924 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
isWindows = process.platform is "win32"
f = __filename
assert.equal path.basename(f), "test-path.js"
assert.equal path.basename(f, ".js"), "test-path"
assert.equal path.basename(""), ""
assert.equal path.basename("/dir/basename.ext"), "basename.ext"
assert.equal path.basename("/basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext/"), "basename.ext"
assert.equal path.basename("basename.ext//"), "basename.ext"
# On Windows a backslash acts as a path separator.
assert.equal path.win32.basename("\\dir\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\\\"), "basename.ext"
# On unix a backslash is just treated as any other character.
assert.equal path.posix.basename("\\dir\\basename.ext"), "\\dir\\basename.ext"
assert.equal path.posix.basename("\\basename.ext"), "\\basename.ext"
assert.equal path.posix.basename("basename.ext"), "basename.ext"
assert.equal path.posix.basename("basename.ext\\"), "basename.ext\\"
assert.equal path.posix.basename("basename.ext\\\\"), "basename.ext\\\\"
# POSIX filenames may include control characters
# c.f. http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html
unless isWindows
controlCharFilename = "Icon" + String.fromCharCode(13)
assert.equal path.basename("/a/b/" + controlCharFilename), controlCharFilename
assert.equal path.extname(f), ".js"
assert.equal path.dirname(f).substr(-11), (if isWindows then "test\\simple" else "test/simple")
assert.equal path.dirname("/a/b/"), "/a"
assert.equal path.dirname("/a/b"), "/a"
assert.equal path.dirname("/a"), "/"
assert.equal path.dirname(""), "."
assert.equal path.dirname("/"), "/"
assert.equal path.dirname("////"), "/"
assert.equal path.win32.dirname("c:\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\bar"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\baz"), "c:\\foo\\bar"
assert.equal path.win32.dirname("\\"), "\\"
assert.equal path.win32.dirname("\\foo"), "\\"
assert.equal path.win32.dirname("\\foo\\"), "\\"
assert.equal path.win32.dirname("\\foo\\bar"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\baz"), "\\foo\\bar"
assert.equal path.win32.dirname("c:"), "c:"
assert.equal path.win32.dirname("c:foo"), "c:"
assert.equal path.win32.dirname("c:foo\\"), "c:"
assert.equal path.win32.dirname("c:foo\\bar"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\baz"), "c:foo\\bar"
assert.equal path.win32.dirname("\\\\unc\\share"), "\\\\unc\\share"
assert.equal path.win32.dirname("\\\\unc\\share\\foo"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\baz"), "\\\\unc\\share\\foo\\bar"
assert.equal path.extname(""), ""
assert.equal path.extname("/path/to/file"), ""
assert.equal path.extname("/path/to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file"), ""
assert.equal path.extname("/path.to/.file"), ""
assert.equal path.extname("/path.to/.file.ext"), ".ext"
assert.equal path.extname("/path/to/f.ext"), ".ext"
assert.equal path.extname("/path/to/..ext"), ".ext"
assert.equal path.extname("file"), ""
assert.equal path.extname("file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname("/file"), ""
assert.equal path.extname("/file.ext"), ".ext"
assert.equal path.extname("/.file"), ""
assert.equal path.extname("/.file.ext"), ".ext"
assert.equal path.extname(".path/file.ext"), ".ext"
assert.equal path.extname("file.ext.ext"), ".ext"
assert.equal path.extname("file."), "."
assert.equal path.extname("."), ""
assert.equal path.extname("./"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file."), "."
assert.equal path.extname(".file.."), "."
assert.equal path.extname(".."), ""
assert.equal path.extname("../"), ""
assert.equal path.extname("..file.ext"), ".ext"
assert.equal path.extname("..file"), ".file"
assert.equal path.extname("..file."), "."
assert.equal path.extname("..file.."), "."
assert.equal path.extname("..."), "."
assert.equal path.extname("...ext"), ".ext"
assert.equal path.extname("...."), "."
assert.equal path.extname("file.ext/"), ".ext"
assert.equal path.extname("file.ext//"), ".ext"
assert.equal path.extname("file/"), ""
assert.equal path.extname("file//"), ""
assert.equal path.extname("file./"), "."
assert.equal path.extname("file.//"), "."
# On windows, backspace is a path separator.
assert.equal path.win32.extname(".\\"), ""
assert.equal path.win32.extname("..\\"), ""
assert.equal path.win32.extname("file.ext\\"), ".ext"
assert.equal path.win32.extname("file.ext\\\\"), ".ext"
assert.equal path.win32.extname("file\\"), ""
assert.equal path.win32.extname("file\\\\"), ""
assert.equal path.win32.extname("file.\\"), "."
assert.equal path.win32.extname("file.\\\\"), "."
# On unix, backspace is a valid name component like any other character.
assert.equal path.posix.extname(".\\"), ""
assert.equal path.posix.extname("..\\"), ".\\"
assert.equal path.posix.extname("file.ext\\"), ".ext\\"
assert.equal path.posix.extname("file.ext\\\\"), ".ext\\\\"
assert.equal path.posix.extname("file\\"), ""
assert.equal path.posix.extname("file\\\\"), ""
assert.equal path.posix.extname("file.\\"), ".\\"
assert.equal path.posix.extname("file.\\\\"), ".\\\\"
# path.join tests
failures = []
# arguments result
joinTests = [
[
[
"."
"x/b"
".."
"/b/c.js"
]
"x/b/c.js"
]
[
[
"/."
"x/b"
".."
"/b/c.js"
]
"/x/b/c.js"
]
[
[
"/foo"
"../../../bar"
]
"/bar"
]
[
[
"foo"
"../../../bar"
]
"../../bar"
]
[
[
"foo/"
"../../../bar"
]
"../../bar"
]
[
[
"foo/x"
"../../../bar"
]
"../bar"
]
[
[
"foo/x"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"."
"bar"
]
"foo/x/bar"
]
[
["./"]
"./"
]
[
[
"."
"./"
]
"./"
]
[
[
"."
"."
"."
]
"."
]
[
[
"."
"./"
"."
]
"."
]
[
[
"."
"/./"
"."
]
"."
]
[
[
"."
"/////./"
"."
]
"."
]
[
["."]
"."
]
[
[
""
"."
]
"."
]
[
[
""
"foo"
]
"foo"
]
[
[
"foo"
"/bar"
]
"foo/bar"
]
[
[
""
"/foo"
]
"/foo"
]
[
[
""
""
"/foo"
]
"/foo"
]
[
[
""
""
"foo"
]
"foo"
]
[
[
"foo"
""
]
"foo"
]
[
[
"foo/"
""
]
"foo/"
]
[
[
"foo"
""
"/bar"
]
"foo/bar"
]
[
[
"./"
".."
"/foo"
]
"../foo"
]
[
[
"./"
".."
".."
"/foo"
]
"../../foo"
]
[
[
"."
".."
".."
"/foo"
]
"../../foo"
]
[
[
""
".."
".."
"/foo"
]
"../../foo"
]
[
["/"]
"/"
]
[
[
"/"
"."
]
"/"
]
[
[
"/"
".."
]
"/"
]
[
[
"/"
".."
".."
]
"/"
]
[
[""]
"."
]
[
[
""
""
]
"."
]
[
[" /foo"]
" /foo"
]
[
[
" "
"foo"
]
" /foo"
]
[
[
" "
"."
]
" "
]
[
[
" "
"/"
]
" /"
]
[
[
" "
""
]
" "
]
[
[
"/"
"foo"
]
"/foo"
]
[
[
"/"
"/foo"
]
"/foo"
]
[
[
"/"
"//foo"
]
"/foo"
]
[
[
"/"
""
"/foo"
]
"/foo"
]
[
[
""
"/"
"foo"
]
"/foo"
]
[
[
""
"/"
"/foo"
]
"/foo"
]
]
# Windows-specific join tests
if isWindows
joinTests = joinTests.concat([ # UNC path expected
[
["//foo/bar"]
"//foo/bar/"
]
[
["\\/foo/bar"]
"//foo/bar/"
]
[
["\\\\foo/bar"]
"//foo/bar/"
]
# UNC path expected - server and share separate
[
[
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
"//foo"
"/bar"
]
"//foo/bar/"
]
# UNC path expected - questionable
[
[
"//foo"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"/bar"
]
"//foo/bar/"
]
# UNC path expected - even more questionable
[
[
""
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"/bar"
]
"//foo/bar/"
]
# No UNC path expected (no double slash in first component)
[
[
"\\"
"foo/bar"
]
"/foo/bar"
]
[
[
"\\"
"/foo/bar"
]
"/foo/bar"
]
[
[
""
"/"
"/foo/bar"
]
"/foo/bar"
]
# No UNC path expected (no non-slashes in first component - questionable)
[
[
"//"
"foo/bar"
]
"/foo/bar"
]
[
[
"//"
"/foo/bar"
]
"/foo/bar"
]
[
[
"\\\\"
"/"
"/foo/bar"
]
"/foo/bar"
]
[
["//"]
"/"
]
# No UNC path expected (share name missing - questionable).
[
["//foo"]
"/foo"
]
[
["//foo/"]
"/foo/"
]
[
[
"//foo"
"/"
]
"/foo/"
]
[
[
"//foo"
""
"/"
]
"/foo/"
]
# No UNC path expected (too many leading slashes - questionable)
[
["///foo/bar"]
"/foo/bar"
]
[
[
"////foo"
"bar"
]
"/foo/bar"
]
[
["\\\\\\/foo/bar"]
"/foo/bar"
]
# Drive-relative vs drive-absolute paths. This merely describes the
# status quo, rather than being obviously right
[
["c:"]
"c:."
]
[
["c:."]
"c:."
]
[
[
"c:"
""
]
"c:."
]
[
[
""
"c:"
]
"c:."
]
[
[
"c:."
"/"
]
"c:./"
]
[
[
"c:."
"file"
]
"c:file"
]
[
[
"c:"
"/"
]
"c:/"
]
[
[
"c:"
"file"
]
"c:/file"
]
])
# Run the join tests.
joinTests.forEach (test) ->
actual = path.join.apply(path, test[0])
expected = (if isWindows then test[1].replace(/\//g, "\\") else test[1])
message = "path.join(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
joinThrowTests = [
true
false
7
null
{
{}
}
`undefined`
[]
NaN
]
joinThrowTests.forEach (test) ->
assert.throws (->
path.join test
return
), TypeError
assert.throws (->
path.resolve test
return
), TypeError
return
# path normalize tests
assert.equal path.win32.normalize("./fixtures///b/../b/c.js"), "fixtures\\b\\c.js"
assert.equal path.win32.normalize("/foo/../../../bar"), "\\bar"
assert.equal path.win32.normalize("a//b//../b"), "a\\b"
assert.equal path.win32.normalize("a//b//./c"), "a\\b\\c"
assert.equal path.win32.normalize("a//b//."), "a\\b"
assert.equal path.win32.normalize("//server/share/dir/file.ext"), "\\\\server\\share\\dir\\file.ext"
assert.equal path.posix.normalize("./fixtures///b/../b/c.js"), "fixtures/b/c.js"
assert.equal path.posix.normalize("/foo/../../../bar"), "/bar"
assert.equal path.posix.normalize("a//b//../b"), "a/b"
assert.equal path.posix.normalize("a//b//./c"), "a/b/c"
assert.equal path.posix.normalize("a//b//."), "a/b"
# path.resolve tests
if isWindows
# windows
# arguments result
resolveTests = [
[
[
"c:/blah\\blah"
"d:/games"
"c:../a"
]
"c:\\blah\\a"
]
[
[
"c:/ignore"
"d:\\a/b\\c/d"
"\\e.exe"
]
"d:\\e.exe"
]
[
[
"c:/ignore"
"c:/some/file"
]
"c:\\some\\file"
]
[
[
"d:/ignore"
"d:some/dir//"
]
"d:\\ignore\\some\\dir"
]
[
["."]
process.cwd()
]
[
[
"//server/share"
".."
"relative\\"
]
"\\\\server\\share\\relative"
]
[
[
"c:/"
"//"
]
"c:\\"
]
[
[
"c:/"
"//dir"
]
"c:\\dir"
]
[
[
"c:/"
"//server/share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"//server//share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"///some//dir"
]
"c:\\some\\dir"
]
]
else
# Posix
# arguments result
resolveTests = [
[
[
"/var/lib"
"../"
"file/"
]
"/var/file"
]
[
[
"/var/lib"
"/../"
"file/"
]
"/file"
]
[
[
"a/b/c/"
"../../.."
]
process.cwd()
]
[
["."]
process.cwd()
]
[
[
"/some/dir"
"."
"/absolute/"
]
"/absolute"
]
]
failures = []
resolveTests.forEach (test) ->
actual = path.resolve.apply(path, test[0])
expected = test[1]
message = "path.resolve(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
# path.isAbsolute tests
assert.equal path.win32.isAbsolute("//server/file"), true
assert.equal path.win32.isAbsolute("\\\\server\\file"), true
assert.equal path.win32.isAbsolute("C:/Users/"), true
assert.equal path.win32.isAbsolute("C:\\Users\\"), true
assert.equal path.win32.isAbsolute("C:cwd/another"), false
assert.equal path.win32.isAbsolute("C:cwd\\another"), false
assert.equal path.win32.isAbsolute("directory/directory"), false
assert.equal path.win32.isAbsolute("directory\\directory"), false
assert.equal path.posix.isAbsolute("/home/foo"), true
assert.equal path.posix.isAbsolute("/home/foo/.."), true
assert.equal path.posix.isAbsolute("bar/"), false
assert.equal path.posix.isAbsolute("./baz"), false
# path.relative tests
if isWindows
# windows
# arguments result
relativeTests = [
[
"c:/blah\\blah"
"d:/games"
"d:\\games"
]
[
"c:/aaaa/bbbb"
"c:/aaaa"
".."
]
[
"c:/aaaa/bbbb"
"c:/cccc"
"..\\..\\cccc"
]
[
"c:/aaaa/bbbb"
"c:/aaaa/bbbb"
""
]
[
"c:/aaaa/bbbb"
"c:/aaaa/cccc"
"..\\cccc"
]
[
"c:/aaaa/"
"c:/aaaa/cccc"
"cccc"
]
[
"c:/"
"c:\\aaaa\\bbbb"
"aaaa\\bbbb"
]
[
"c:/aaaa/bbbb"
"d:\\"
"d:\\"
]
]
else
# posix
# arguments result
relativeTests = [
[
"/var/lib"
"/var"
".."
]
[
"/var/lib"
"/bin"
"../../bin"
]
[
"/var/lib"
"/var/lib"
""
]
[
"/var/lib"
"/var/apache"
"../apache"
]
[
"/var/"
"/var/lib"
"lib"
]
[
"/"
"/var/lib"
"var/lib"
]
]
failures = []
relativeTests.forEach (test) ->
actual = path.relative(test[0], test[1])
expected = test[2]
message = "path.relative(" + test.slice(0, 2).map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
assert.equal failures.length, 0, failures.join("")
# windows
assert.equal path.win32.sep, "\\"
# posix
assert.equal path.posix.sep, "/"
# path.delimiter tests
# windows
assert.equal path.win32.delimiter, ";"
# posix
assert.equal path.posix.delimiter, ":"
if isWindows
assert.deepEqual path, path.win32, "should be win32 path module"
else
assert.deepEqual path, path.posix, "should be posix path module"
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
isWindows = process.platform is "win32"
f = __filename
assert.equal path.basename(f), "test-path.js"
assert.equal path.basename(f, ".js"), "test-path"
assert.equal path.basename(""), ""
assert.equal path.basename("/dir/basename.ext"), "basename.ext"
assert.equal path.basename("/basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext"), "basename.ext"
assert.equal path.basename("basename.ext/"), "basename.ext"
assert.equal path.basename("basename.ext//"), "basename.ext"
# On Windows a backslash acts as a path separator.
assert.equal path.win32.basename("\\dir\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("\\basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\"), "basename.ext"
assert.equal path.win32.basename("basename.ext\\\\"), "basename.ext"
# On unix a backslash is just treated as any other character.
assert.equal path.posix.basename("\\dir\\basename.ext"), "\\dir\\basename.ext"
assert.equal path.posix.basename("\\basename.ext"), "\\basename.ext"
assert.equal path.posix.basename("basename.ext"), "basename.ext"
assert.equal path.posix.basename("basename.ext\\"), "basename.ext\\"
assert.equal path.posix.basename("basename.ext\\\\"), "basename.ext\\\\"
# POSIX filenames may include control characters
# c.f. http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html
unless isWindows
controlCharFilename = "Icon" + String.fromCharCode(13)
assert.equal path.basename("/a/b/" + controlCharFilename), controlCharFilename
assert.equal path.extname(f), ".js"
assert.equal path.dirname(f).substr(-11), (if isWindows then "test\\simple" else "test/simple")
assert.equal path.dirname("/a/b/"), "/a"
assert.equal path.dirname("/a/b"), "/a"
assert.equal path.dirname("/a"), "/"
assert.equal path.dirname(""), "."
assert.equal path.dirname("/"), "/"
assert.equal path.dirname("////"), "/"
assert.equal path.win32.dirname("c:\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\"), "c:\\"
assert.equal path.win32.dirname("c:\\foo\\bar"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\"), "c:\\foo"
assert.equal path.win32.dirname("c:\\foo\\bar\\baz"), "c:\\foo\\bar"
assert.equal path.win32.dirname("\\"), "\\"
assert.equal path.win32.dirname("\\foo"), "\\"
assert.equal path.win32.dirname("\\foo\\"), "\\"
assert.equal path.win32.dirname("\\foo\\bar"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\"), "\\foo"
assert.equal path.win32.dirname("\\foo\\bar\\baz"), "\\foo\\bar"
assert.equal path.win32.dirname("c:"), "c:"
assert.equal path.win32.dirname("c:foo"), "c:"
assert.equal path.win32.dirname("c:foo\\"), "c:"
assert.equal path.win32.dirname("c:foo\\bar"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\"), "c:foo"
assert.equal path.win32.dirname("c:foo\\bar\\baz"), "c:foo\\bar"
assert.equal path.win32.dirname("\\\\unc\\share"), "\\\\unc\\share"
assert.equal path.win32.dirname("\\\\unc\\share\\foo"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\"), "\\\\unc\\share\\"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\"), "\\\\unc\\share\\foo"
assert.equal path.win32.dirname("\\\\unc\\share\\foo\\bar\\baz"), "\\\\unc\\share\\foo\\bar"
assert.equal path.extname(""), ""
assert.equal path.extname("/path/to/file"), ""
assert.equal path.extname("/path/to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file.ext"), ".ext"
assert.equal path.extname("/path.to/file"), ""
assert.equal path.extname("/path.to/.file"), ""
assert.equal path.extname("/path.to/.file.ext"), ".ext"
assert.equal path.extname("/path/to/f.ext"), ".ext"
assert.equal path.extname("/path/to/..ext"), ".ext"
assert.equal path.extname("file"), ""
assert.equal path.extname("file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname("/file"), ""
assert.equal path.extname("/file.ext"), ".ext"
assert.equal path.extname("/.file"), ""
assert.equal path.extname("/.file.ext"), ".ext"
assert.equal path.extname(".path/file.ext"), ".ext"
assert.equal path.extname("file.ext.ext"), ".ext"
assert.equal path.extname("file."), "."
assert.equal path.extname("."), ""
assert.equal path.extname("./"), ""
assert.equal path.extname(".file.ext"), ".ext"
assert.equal path.extname(".file"), ""
assert.equal path.extname(".file."), "."
assert.equal path.extname(".file.."), "."
assert.equal path.extname(".."), ""
assert.equal path.extname("../"), ""
assert.equal path.extname("..file.ext"), ".ext"
assert.equal path.extname("..file"), ".file"
assert.equal path.extname("..file."), "."
assert.equal path.extname("..file.."), "."
assert.equal path.extname("..."), "."
assert.equal path.extname("...ext"), ".ext"
assert.equal path.extname("...."), "."
assert.equal path.extname("file.ext/"), ".ext"
assert.equal path.extname("file.ext//"), ".ext"
assert.equal path.extname("file/"), ""
assert.equal path.extname("file//"), ""
assert.equal path.extname("file./"), "."
assert.equal path.extname("file.//"), "."
# On windows, backspace is a path separator.
assert.equal path.win32.extname(".\\"), ""
assert.equal path.win32.extname("..\\"), ""
assert.equal path.win32.extname("file.ext\\"), ".ext"
assert.equal path.win32.extname("file.ext\\\\"), ".ext"
assert.equal path.win32.extname("file\\"), ""
assert.equal path.win32.extname("file\\\\"), ""
assert.equal path.win32.extname("file.\\"), "."
assert.equal path.win32.extname("file.\\\\"), "."
# On unix, backspace is a valid name component like any other character.
assert.equal path.posix.extname(".\\"), ""
assert.equal path.posix.extname("..\\"), ".\\"
assert.equal path.posix.extname("file.ext\\"), ".ext\\"
assert.equal path.posix.extname("file.ext\\\\"), ".ext\\\\"
assert.equal path.posix.extname("file\\"), ""
assert.equal path.posix.extname("file\\\\"), ""
assert.equal path.posix.extname("file.\\"), ".\\"
assert.equal path.posix.extname("file.\\\\"), ".\\\\"
# path.join tests
failures = []
# arguments result
joinTests = [
[
[
"."
"x/b"
".."
"/b/c.js"
]
"x/b/c.js"
]
[
[
"/."
"x/b"
".."
"/b/c.js"
]
"/x/b/c.js"
]
[
[
"/foo"
"../../../bar"
]
"/bar"
]
[
[
"foo"
"../../../bar"
]
"../../bar"
]
[
[
"foo/"
"../../../bar"
]
"../../bar"
]
[
[
"foo/x"
"../../../bar"
]
"../bar"
]
[
[
"foo/x"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"./bar"
]
"foo/x/bar"
]
[
[
"foo/x/"
"."
"bar"
]
"foo/x/bar"
]
[
["./"]
"./"
]
[
[
"."
"./"
]
"./"
]
[
[
"."
"."
"."
]
"."
]
[
[
"."
"./"
"."
]
"."
]
[
[
"."
"/./"
"."
]
"."
]
[
[
"."
"/////./"
"."
]
"."
]
[
["."]
"."
]
[
[
""
"."
]
"."
]
[
[
""
"foo"
]
"foo"
]
[
[
"foo"
"/bar"
]
"foo/bar"
]
[
[
""
"/foo"
]
"/foo"
]
[
[
""
""
"/foo"
]
"/foo"
]
[
[
""
""
"foo"
]
"foo"
]
[
[
"foo"
""
]
"foo"
]
[
[
"foo/"
""
]
"foo/"
]
[
[
"foo"
""
"/bar"
]
"foo/bar"
]
[
[
"./"
".."
"/foo"
]
"../foo"
]
[
[
"./"
".."
".."
"/foo"
]
"../../foo"
]
[
[
"."
".."
".."
"/foo"
]
"../../foo"
]
[
[
""
".."
".."
"/foo"
]
"../../foo"
]
[
["/"]
"/"
]
[
[
"/"
"."
]
"/"
]
[
[
"/"
".."
]
"/"
]
[
[
"/"
".."
".."
]
"/"
]
[
[""]
"."
]
[
[
""
""
]
"."
]
[
[" /foo"]
" /foo"
]
[
[
" "
"foo"
]
" /foo"
]
[
[
" "
"."
]
" "
]
[
[
" "
"/"
]
" /"
]
[
[
" "
""
]
" "
]
[
[
"/"
"foo"
]
"/foo"
]
[
[
"/"
"/foo"
]
"/foo"
]
[
[
"/"
"//foo"
]
"/foo"
]
[
[
"/"
""
"/foo"
]
"/foo"
]
[
[
""
"/"
"foo"
]
"/foo"
]
[
[
""
"/"
"/foo"
]
"/foo"
]
]
# Windows-specific join tests
if isWindows
joinTests = joinTests.concat([ # UNC path expected
[
["//foo/bar"]
"//foo/bar/"
]
[
["\\/foo/bar"]
"//foo/bar/"
]
[
["\\\\foo/bar"]
"//foo/bar/"
]
# UNC path expected - server and share separate
[
[
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
"//foo"
"/bar"
]
"//foo/bar/"
]
# UNC path expected - questionable
[
[
"//foo"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"bar"
]
"//foo/bar/"
]
[
[
"//foo/"
""
"/bar"
]
"//foo/bar/"
]
# UNC path expected - even more questionable
[
[
""
"//foo"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"bar"
]
"//foo/bar/"
]
[
[
""
"//foo/"
"/bar"
]
"//foo/bar/"
]
# No UNC path expected (no double slash in first component)
[
[
"\\"
"foo/bar"
]
"/foo/bar"
]
[
[
"\\"
"/foo/bar"
]
"/foo/bar"
]
[
[
""
"/"
"/foo/bar"
]
"/foo/bar"
]
# No UNC path expected (no non-slashes in first component - questionable)
[
[
"//"
"foo/bar"
]
"/foo/bar"
]
[
[
"//"
"/foo/bar"
]
"/foo/bar"
]
[
[
"\\\\"
"/"
"/foo/bar"
]
"/foo/bar"
]
[
["//"]
"/"
]
# No UNC path expected (share name missing - questionable).
[
["//foo"]
"/foo"
]
[
["//foo/"]
"/foo/"
]
[
[
"//foo"
"/"
]
"/foo/"
]
[
[
"//foo"
""
"/"
]
"/foo/"
]
# No UNC path expected (too many leading slashes - questionable)
[
["///foo/bar"]
"/foo/bar"
]
[
[
"////foo"
"bar"
]
"/foo/bar"
]
[
["\\\\\\/foo/bar"]
"/foo/bar"
]
# Drive-relative vs drive-absolute paths. This merely describes the
# status quo, rather than being obviously right
[
["c:"]
"c:."
]
[
["c:."]
"c:."
]
[
[
"c:"
""
]
"c:."
]
[
[
""
"c:"
]
"c:."
]
[
[
"c:."
"/"
]
"c:./"
]
[
[
"c:."
"file"
]
"c:file"
]
[
[
"c:"
"/"
]
"c:/"
]
[
[
"c:"
"file"
]
"c:/file"
]
])
# Run the join tests.
joinTests.forEach (test) ->
actual = path.join.apply(path, test[0])
expected = (if isWindows then test[1].replace(/\//g, "\\") else test[1])
message = "path.join(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
joinThrowTests = [
true
false
7
null
{
{}
}
`undefined`
[]
NaN
]
joinThrowTests.forEach (test) ->
assert.throws (->
path.join test
return
), TypeError
assert.throws (->
path.resolve test
return
), TypeError
return
# path normalize tests
assert.equal path.win32.normalize("./fixtures///b/../b/c.js"), "fixtures\\b\\c.js"
assert.equal path.win32.normalize("/foo/../../../bar"), "\\bar"
assert.equal path.win32.normalize("a//b//../b"), "a\\b"
assert.equal path.win32.normalize("a//b//./c"), "a\\b\\c"
assert.equal path.win32.normalize("a//b//."), "a\\b"
assert.equal path.win32.normalize("//server/share/dir/file.ext"), "\\\\server\\share\\dir\\file.ext"
assert.equal path.posix.normalize("./fixtures///b/../b/c.js"), "fixtures/b/c.js"
assert.equal path.posix.normalize("/foo/../../../bar"), "/bar"
assert.equal path.posix.normalize("a//b//../b"), "a/b"
assert.equal path.posix.normalize("a//b//./c"), "a/b/c"
assert.equal path.posix.normalize("a//b//."), "a/b"
# path.resolve tests
if isWindows
# windows
# arguments result
resolveTests = [
[
[
"c:/blah\\blah"
"d:/games"
"c:../a"
]
"c:\\blah\\a"
]
[
[
"c:/ignore"
"d:\\a/b\\c/d"
"\\e.exe"
]
"d:\\e.exe"
]
[
[
"c:/ignore"
"c:/some/file"
]
"c:\\some\\file"
]
[
[
"d:/ignore"
"d:some/dir//"
]
"d:\\ignore\\some\\dir"
]
[
["."]
process.cwd()
]
[
[
"//server/share"
".."
"relative\\"
]
"\\\\server\\share\\relative"
]
[
[
"c:/"
"//"
]
"c:\\"
]
[
[
"c:/"
"//dir"
]
"c:\\dir"
]
[
[
"c:/"
"//server/share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"//server//share"
]
"\\\\server\\share\\"
]
[
[
"c:/"
"///some//dir"
]
"c:\\some\\dir"
]
]
else
# Posix
# arguments result
resolveTests = [
[
[
"/var/lib"
"../"
"file/"
]
"/var/file"
]
[
[
"/var/lib"
"/../"
"file/"
]
"/file"
]
[
[
"a/b/c/"
"../../.."
]
process.cwd()
]
[
["."]
process.cwd()
]
[
[
"/some/dir"
"."
"/absolute/"
]
"/absolute"
]
]
failures = []
resolveTests.forEach (test) ->
actual = path.resolve.apply(path, test[0])
expected = test[1]
message = "path.resolve(" + test[0].map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
# assert.equal(actual, expected, message);
assert.equal failures.length, 0, failures.join("")
# path.isAbsolute tests
assert.equal path.win32.isAbsolute("//server/file"), true
assert.equal path.win32.isAbsolute("\\\\server\\file"), true
assert.equal path.win32.isAbsolute("C:/Users/"), true
assert.equal path.win32.isAbsolute("C:\\Users\\"), true
assert.equal path.win32.isAbsolute("C:cwd/another"), false
assert.equal path.win32.isAbsolute("C:cwd\\another"), false
assert.equal path.win32.isAbsolute("directory/directory"), false
assert.equal path.win32.isAbsolute("directory\\directory"), false
assert.equal path.posix.isAbsolute("/home/foo"), true
assert.equal path.posix.isAbsolute("/home/foo/.."), true
assert.equal path.posix.isAbsolute("bar/"), false
assert.equal path.posix.isAbsolute("./baz"), false
# path.relative tests
if isWindows
# windows
# arguments result
relativeTests = [
[
"c:/blah\\blah"
"d:/games"
"d:\\games"
]
[
"c:/aaaa/bbbb"
"c:/aaaa"
".."
]
[
"c:/aaaa/bbbb"
"c:/cccc"
"..\\..\\cccc"
]
[
"c:/aaaa/bbbb"
"c:/aaaa/bbbb"
""
]
[
"c:/aaaa/bbbb"
"c:/aaaa/cccc"
"..\\cccc"
]
[
"c:/aaaa/"
"c:/aaaa/cccc"
"cccc"
]
[
"c:/"
"c:\\aaaa\\bbbb"
"aaaa\\bbbb"
]
[
"c:/aaaa/bbbb"
"d:\\"
"d:\\"
]
]
else
# posix
# arguments result
relativeTests = [
[
"/var/lib"
"/var"
".."
]
[
"/var/lib"
"/bin"
"../../bin"
]
[
"/var/lib"
"/var/lib"
""
]
[
"/var/lib"
"/var/apache"
"../apache"
]
[
"/var/"
"/var/lib"
"lib"
]
[
"/"
"/var/lib"
"var/lib"
]
]
failures = []
relativeTests.forEach (test) ->
actual = path.relative(test[0], test[1])
expected = test[2]
message = "path.relative(" + test.slice(0, 2).map(JSON.stringify).join(",") + ")" + "\n expect=" + JSON.stringify(expected) + "\n actual=" + JSON.stringify(actual)
failures.push "\n" + message if actual isnt expected
return
assert.equal failures.length, 0, failures.join("")
# windows
assert.equal path.win32.sep, "\\"
# posix
assert.equal path.posix.sep, "/"
# path.delimiter tests
# windows
assert.equal path.win32.delimiter, ";"
# posix
assert.equal path.posix.delimiter, ":"
if isWindows
assert.deepEqual path, path.win32, "should be win32 path module"
else
assert.deepEqual path, path.posix, "should be posix path module"
|
[
{
"context": "#Language: Dutch\n#Translators: willemx\n\nnl =\n\n add: \"toevoegen\"\n and: \"en\"\n back: \"te",
"end": 38,
"score": 0.9971036314964294,
"start": 31,
"tag": "USERNAME",
"value": "willemx"
},
{
"context": "n\"\n and: \"en\"\n back: \"terug\"\n changePassword: \"Wachtwoord wijzigen\"\n choosePassword: \"Wachtwoord kiezen\"\n clickAgr",
"end": 131,
"score": 0.9972209930419922,
"start": 112,
"tag": "PASSWORD",
"value": "Wachtwoord wijzigen"
},
{
"context": "assword: \"Wachtwoord wijzigen\"\n choosePassword: \"Wachtwoord kiezen\"\n clickAgree: \"Door te registreren accepteert u ",
"end": 169,
"score": 0.9950218796730042,
"start": 152,
"tag": "PASSWORD",
"value": "Wachtwoord kiezen"
},
{
"context": "teAccount: \"Account aanmaken\"\n currentPassword: \"Huidig wachtwoord\"\n dontHaveAnAccount: \"Nog geen account?\"\n email",
"end": 325,
"score": 0.9988369941711426,
"start": 308,
"tag": "PASSWORD",
"value": "Huidig wachtwoord"
},
{
"context": "\n emailResetLink: \"Verzenden\"\n forgotPassword: \"Wachtwoord vergeten?\"\n ifYouAlreadyHaveAnAccount: \"Heeft u al een ac",
"end": 484,
"score": 0.992165744304657,
"start": 465,
"tag": "PASSWORD",
"value": "Wachtwoord vergeten"
},
{
"context": "ccount: \"Heeft u al een account?\"\n newPassword: \"Nieuw wachtwoord\"\n newPasswordAgain: \"Nieuw wachtwoord (herhalen)",
"end": 574,
"score": 0.9982390403747559,
"start": 558,
"tag": "PASSWORD",
"value": "Nieuw wachtwoord"
},
{
"context": "Password: \"Nieuw wachtwoord\"\n newPasswordAgain: \"Nieuw wachtwoord (herhalen)\"\n optional: \"Optioneel\"\n OR: \"OF\"\n ",
"end": 613,
"score": 0.8306926488876343,
"start": 597,
"tag": "PASSWORD",
"value": "Nieuw wachtwoord"
},
{
"context": "\"\n optional: \"Optioneel\"\n OR: \"OF\"\n password: \"Wachtwoord\"\n passwordAgain: \"Wachtwoord (herhalen)\"\n priva",
"end": 684,
"score": 0.9952282309532166,
"start": 674,
"tag": "PASSWORD",
"value": "Wachtwoord"
},
{
"context": ": \"OF\"\n password: \"Wachtwoord\"\n passwordAgain: \"Wachtwoord (herhalen)\"\n privacyPolicy: \"privacyverklaring\"\n remove: \"v",
"end": 724,
"score": 0.8908247351646423,
"start": 704,
"tag": "PASSWORD",
"value": "Wachtwoord (herhalen"
},
{
"context": "ng\"\n remove: \"verwijderen\"\n resetYourPassword: \"Wachtwoord resetten\"\n setPassword: \"Wachtwoord instellen\"\n sign: \"A",
"end": 829,
"score": 0.9980857372283936,
"start": 810,
"tag": "PASSWORD",
"value": "Wachtwoord resetten"
},
{
"context": "urPassword: \"Wachtwoord resetten\"\n setPassword: \"Wachtwoord instellen\"\n sign: \"Aanmelden\"\n signIn: \"Aanmelden\"\n sign",
"end": 867,
"score": 0.9972221851348877,
"start": 847,
"tag": "PASSWORD",
"value": "Wachtwoord instellen"
},
{
"context": "rms: \"gebruiksvoorwaarden\"\n updateYourPassword: \"Wachtwoord veranderen\"\n username: \"Gebruikersnaam\"\n usernameOrEmail: ",
"end": 1146,
"score": 0.9985074400901794,
"start": 1125,
"tag": "PASSWORD",
"value": "Wachtwoord veranderen"
},
{
"context": "ourPassword: \"Wachtwoord veranderen\"\n username: \"Gebruikersnaam\"\n usernameOrEmail: \"Gebruikersnaam of e-mailadre",
"end": 1175,
"score": 0.9995160102844238,
"start": 1161,
"tag": "USERNAME",
"value": "Gebruikersnaam"
},
{
"context": "\n username: \"Gebruikersnaam\"\n usernameOrEmail: \"Gebruikersnaam of e-mailadres\"\n with: \"met\"\n\n\n inf",
"end": 1199,
"score": 0.9202255606651306,
"start": 1197,
"tag": "USERNAME",
"value": "Ge"
},
{
"context": "fied: \"E-mail geverifieerd\"\n PasswordChanged: \"Wachtwoord gewijzigd\"\n PasswordReset: \"Wachtwoord gereset\"\n\n\n erro",
"end": 1369,
"score": 0.9919464588165283,
"start": 1349,
"tag": "PASSWORD",
"value": "Wachtwoord gewijzigd"
},
{
"context": "anged: \"Wachtwoord gewijzigd\"\n PasswordReset: \"Wachtwoord gereset\"\n\n\n error:\n emailRequired: \"E-mailadres is ve",
"end": 1409,
"score": 0.9359817504882812,
"start": 1391,
"tag": "PASSWORD",
"value": "Wachtwoord gereset"
},
{
"context": "-- accounts-Password\n \"Incorrect password\": \"Onjuist wachtwoord\"\n \"Invalid email\": \"Ongeldig e-mailadres\"\n ",
"end": 3318,
"score": 0.9992321133613586,
"start": 3300,
"tag": "PASSWORD",
"value": "Onjuist wachtwoord"
}
] | t9n/nl.coffee | coWorkr-InSights/meteor-accounts-t9n | 80 | #Language: Dutch
#Translators: willemx
nl =
add: "toevoegen"
and: "en"
back: "terug"
changePassword: "Wachtwoord wijzigen"
choosePassword: "Wachtwoord kiezen"
clickAgree: "Door te registreren accepteert u onze"
configure: "Configureer"
createAccount: "Account aanmaken"
currentPassword: "Huidig wachtwoord"
dontHaveAnAccount: "Nog geen account?"
email: "E-mail"
emailAddress: "E-mailadres"
emailResetLink: "Verzenden"
forgotPassword: "Wachtwoord vergeten?"
ifYouAlreadyHaveAnAccount: "Heeft u al een account?"
newPassword: "Nieuw wachtwoord"
newPasswordAgain: "Nieuw wachtwoord (herhalen)"
optional: "Optioneel"
OR: "OF"
password: "Wachtwoord"
passwordAgain: "Wachtwoord (herhalen)"
privacyPolicy: "privacyverklaring"
remove: "verwijderen"
resetYourPassword: "Wachtwoord resetten"
setPassword: "Wachtwoord instellen"
sign: "Aanmelden"
signIn: "Aanmelden"
signin: "Aanmelden"
signOut: "Afmelden"
signUp: "Registreren"
signupCode: "Registratiecode"
signUpWithYourEmailAddress: "Met e-mailadres registreren"
terms: "gebruiksvoorwaarden"
updateYourPassword: "Wachtwoord veranderen"
username: "Gebruikersnaam"
usernameOrEmail: "Gebruikersnaam of e-mailadres"
with: "met"
info:
emailSent: "E-mail verzonden"
emailVerified: "E-mail geverifieerd"
PasswordChanged: "Wachtwoord gewijzigd"
PasswordReset: "Wachtwoord gereset"
error:
emailRequired: "E-mailadres is verplicht"
minChar: "Wachtwoord moet tenminste 7 tekens lang zijn."
pwdsDontMatch: "Wachtwoorden zijn niet gelijk."
pwOneDigit: "Wachtwoord moet tenminste 1 cijfer bevatten."
pwOneLetter: "Wachtwoord moet tenminste 1 letter bevatten."
signInRequired: "U moet aangemeld zijn."
signupCodeIncorrect: "Registratiecode is ongeldig."
signupCodeRequired: "Registratiecode is verplicht."
usernameIsEmail: "Gebruikersnaam is gelijk aan e-mail."
usernameRequired: "Gebruikersnaam is verplicht."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "Dit e-mailadres is al in gebruik."
"Email doesn't match the criteria.": "e-mail voldoet niet aan de voorwaarden."
"Invalid login token": "Ongeldig inlogtoken"
"Login forbidden": "Aanmelding geweigerd"
#"Service " + options.service + " already configured":
"Service unknown": "Sevice onbekend"
"Unrecognized options for login request": "Onbekende optie voor inlogverzoek"
"User validation failed": "Gebruikersvalidatie mislukt"
"Username already exists.": "Gebruikersnaam bestaat al."
"You are not logged in.": "U bent niet ingelogd."
"You've been logged out by the server. Please log in again.": "U bent door de server afgemeld. Meld a.u.b. opnieuw aan."
"Your session has expired. Please log in again.": "Uw sessie is verlopen. Meld a.u.b. opnieuw aan."
#---- accounts-oauth
"No matching login attempt found": "Geen overeenkomstig inlogverzoek gevonden."
#---- accounts-Password-client
"Password is old. Please reset your Password.": "Wachtwoord is verlopen. Reset a.u.b. uw wachtwoord."
#---- accounts-Password
"Incorrect password": "Onjuist wachtwoord"
"Invalid email": "Ongeldig e-mailadres"
"Must be logged in": "U moet aangemeld zijn"
"Need to set a username or email": "Gebruikersnaam of e-mailadres moet ingesteld zijn"
#> "old Password format": "Oude wachtwoord format"
"Password may not be empty": "Wachtwoord mag niet leeg zijn"
"Signups forbidden": "Registratie verboden"
"Token expired": "Token is verlopen"
"Token has invalid email address": "Token heeft ongeldig e-mailadres"
"User has no Password set": "Geen wachtwoord ingesteld voor gebruiker"
"User not found": "Gebruiker niet gevonden"
"Verify email link expired": "Verificatielink is verlopen"
"Verify email link is for unknown address": "Verificatielink is voor onbekend e-mailadres"
#---- match
"Match failed": "Geen match"
#---- Misc...
"Unknown error": "Onbekende fout"
T9n?.map "nl", nl
module?.exports = nl
| 89164 | #Language: Dutch
#Translators: willemx
nl =
add: "toevoegen"
and: "en"
back: "terug"
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Door te registreren accepteert u onze"
configure: "Configureer"
createAccount: "Account aanmaken"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "Nog geen account?"
email: "E-mail"
emailAddress: "E-mailadres"
emailResetLink: "Verzenden"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Heeft u al een account?"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD> (herhalen)"
optional: "Optioneel"
OR: "OF"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD>)"
privacyPolicy: "privacyverklaring"
remove: "verwijderen"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Aanmelden"
signIn: "Aanmelden"
signin: "Aanmelden"
signOut: "Afmelden"
signUp: "Registreren"
signupCode: "Registratiecode"
signUpWithYourEmailAddress: "Met e-mailadres registreren"
terms: "gebruiksvoorwaarden"
updateYourPassword: "<PASSWORD>"
username: "Gebruikersnaam"
usernameOrEmail: "Gebruikersnaam of e-mailadres"
with: "met"
info:
emailSent: "E-mail verzonden"
emailVerified: "E-mail geverifieerd"
PasswordChanged: "<PASSWORD>"
PasswordReset: "<PASSWORD>"
error:
emailRequired: "E-mailadres is verplicht"
minChar: "Wachtwoord moet tenminste 7 tekens lang zijn."
pwdsDontMatch: "Wachtwoorden zijn niet gelijk."
pwOneDigit: "Wachtwoord moet tenminste 1 cijfer bevatten."
pwOneLetter: "Wachtwoord moet tenminste 1 letter bevatten."
signInRequired: "U moet aangemeld zijn."
signupCodeIncorrect: "Registratiecode is ongeldig."
signupCodeRequired: "Registratiecode is verplicht."
usernameIsEmail: "Gebruikersnaam is gelijk aan e-mail."
usernameRequired: "Gebruikersnaam is verplicht."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "Dit e-mailadres is al in gebruik."
"Email doesn't match the criteria.": "e-mail voldoet niet aan de voorwaarden."
"Invalid login token": "Ongeldig inlogtoken"
"Login forbidden": "Aanmelding geweigerd"
#"Service " + options.service + " already configured":
"Service unknown": "Sevice onbekend"
"Unrecognized options for login request": "Onbekende optie voor inlogverzoek"
"User validation failed": "Gebruikersvalidatie mislukt"
"Username already exists.": "Gebruikersnaam bestaat al."
"You are not logged in.": "U bent niet ingelogd."
"You've been logged out by the server. Please log in again.": "U bent door de server afgemeld. Meld a.u.b. opnieuw aan."
"Your session has expired. Please log in again.": "Uw sessie is verlopen. Meld a.u.b. opnieuw aan."
#---- accounts-oauth
"No matching login attempt found": "Geen overeenkomstig inlogverzoek gevonden."
#---- accounts-Password-client
"Password is old. Please reset your Password.": "Wachtwoord is verlopen. Reset a.u.b. uw wachtwoord."
#---- accounts-Password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Ongeldig e-mailadres"
"Must be logged in": "U moet aangemeld zijn"
"Need to set a username or email": "Gebruikersnaam of e-mailadres moet ingesteld zijn"
#> "old Password format": "Oude wachtwoord format"
"Password may not be empty": "Wachtwoord mag niet leeg zijn"
"Signups forbidden": "Registratie verboden"
"Token expired": "Token is verlopen"
"Token has invalid email address": "Token heeft ongeldig e-mailadres"
"User has no Password set": "Geen wachtwoord ingesteld voor gebruiker"
"User not found": "Gebruiker niet gevonden"
"Verify email link expired": "Verificatielink is verlopen"
"Verify email link is for unknown address": "Verificatielink is voor onbekend e-mailadres"
#---- match
"Match failed": "Geen match"
#---- Misc...
"Unknown error": "Onbekende fout"
T9n?.map "nl", nl
module?.exports = nl
| true | #Language: Dutch
#Translators: willemx
nl =
add: "toevoegen"
and: "en"
back: "terug"
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Door te registreren accepteert u onze"
configure: "Configureer"
createAccount: "Account aanmaken"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "Nog geen account?"
email: "E-mail"
emailAddress: "E-mailadres"
emailResetLink: "Verzenden"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Heeft u al een account?"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI (herhalen)"
optional: "Optioneel"
OR: "OF"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI)"
privacyPolicy: "privacyverklaring"
remove: "verwijderen"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Aanmelden"
signIn: "Aanmelden"
signin: "Aanmelden"
signOut: "Afmelden"
signUp: "Registreren"
signupCode: "Registratiecode"
signUpWithYourEmailAddress: "Met e-mailadres registreren"
terms: "gebruiksvoorwaarden"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
username: "Gebruikersnaam"
usernameOrEmail: "Gebruikersnaam of e-mailadres"
with: "met"
info:
emailSent: "E-mail verzonden"
emailVerified: "E-mail geverifieerd"
PasswordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
PasswordReset: "PI:PASSWORD:<PASSWORD>END_PI"
error:
emailRequired: "E-mailadres is verplicht"
minChar: "Wachtwoord moet tenminste 7 tekens lang zijn."
pwdsDontMatch: "Wachtwoorden zijn niet gelijk."
pwOneDigit: "Wachtwoord moet tenminste 1 cijfer bevatten."
pwOneLetter: "Wachtwoord moet tenminste 1 letter bevatten."
signInRequired: "U moet aangemeld zijn."
signupCodeIncorrect: "Registratiecode is ongeldig."
signupCodeRequired: "Registratiecode is verplicht."
usernameIsEmail: "Gebruikersnaam is gelijk aan e-mail."
usernameRequired: "Gebruikersnaam is verplicht."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "Dit e-mailadres is al in gebruik."
"Email doesn't match the criteria.": "e-mail voldoet niet aan de voorwaarden."
"Invalid login token": "Ongeldig inlogtoken"
"Login forbidden": "Aanmelding geweigerd"
#"Service " + options.service + " already configured":
"Service unknown": "Sevice onbekend"
"Unrecognized options for login request": "Onbekende optie voor inlogverzoek"
"User validation failed": "Gebruikersvalidatie mislukt"
"Username already exists.": "Gebruikersnaam bestaat al."
"You are not logged in.": "U bent niet ingelogd."
"You've been logged out by the server. Please log in again.": "U bent door de server afgemeld. Meld a.u.b. opnieuw aan."
"Your session has expired. Please log in again.": "Uw sessie is verlopen. Meld a.u.b. opnieuw aan."
#---- accounts-oauth
"No matching login attempt found": "Geen overeenkomstig inlogverzoek gevonden."
#---- accounts-Password-client
"Password is old. Please reset your Password.": "Wachtwoord is verlopen. Reset a.u.b. uw wachtwoord."
#---- accounts-Password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Ongeldig e-mailadres"
"Must be logged in": "U moet aangemeld zijn"
"Need to set a username or email": "Gebruikersnaam of e-mailadres moet ingesteld zijn"
#> "old Password format": "Oude wachtwoord format"
"Password may not be empty": "Wachtwoord mag niet leeg zijn"
"Signups forbidden": "Registratie verboden"
"Token expired": "Token is verlopen"
"Token has invalid email address": "Token heeft ongeldig e-mailadres"
"User has no Password set": "Geen wachtwoord ingesteld voor gebruiker"
"User not found": "Gebruiker niet gevonden"
"Verify email link expired": "Verificatielink is verlopen"
"Verify email link is for unknown address": "Verificatielink is voor onbekend e-mailadres"
#---- match
"Match failed": "Geen match"
#---- Misc...
"Unknown error": "Onbekende fout"
T9n?.map "nl", nl
module?.exports = nl
|
[
{
"context": "###\n * https://github.com/jkuetemeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg K",
"end": 37,
"score": 0.9976932406425476,
"start": 26,
"tag": "USERNAME",
"value": "jkuetemeier"
},
{
"context": "temeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg Kütemeier\n * Licensed under the MIT license.\n###\n\n# Load al",
"end": 95,
"score": 0.9998851418495178,
"start": 81,
"tag": "NAME",
"value": "Jörg Kütemeier"
}
] | example/gulpfile.coffee | kuetemeier/gulp-tasks-common | 0 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 Jörg Kütemeier
* Licensed under the MIT license.
###
# Load all required libraries.
gulp = require 'gulp'
common = require '..'
common.gulp_common(gulp)
gulp.task 'default', ['jshint']
| 37149 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
# Load all required libraries.
gulp = require 'gulp'
common = require '..'
common.gulp_common(gulp)
gulp.task 'default', ['jshint']
| true | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
# Load all required libraries.
gulp = require 'gulp'
common = require '..'
common.gulp_common(gulp)
gulp.task 'default', ['jshint']
|
[
{
"context": "nt called, amount '+amount\n hunts = [\n \"Met Jan\"\n \"Met Pieter\"\n \"Met de teamlei",
"end": 656,
"score": 0.5913830399513245,
"start": 653,
"tag": "NAME",
"value": "Met"
},
{
"context": "called, amount '+amount\n hunts = [\n \"Met Jan\"\n \"Met Pieter\"\n \"Met de teamleider\"",
"end": 660,
"score": 0.9939066171646118,
"start": 657,
"tag": "NAME",
"value": "Jan"
},
{
"context": "'+amount\n hunts = [\n \"Met Jan\"\n \"Met Pieter\"\n \"Met de teamleider\"\n \"Met ",
"end": 674,
"score": 0.9668794274330139,
"start": 671,
"tag": "NAME",
"value": "Met"
},
{
"context": "mount\n hunts = [\n \"Met Jan\"\n \"Met Pieter\"\n \"Met de teamleider\"\n \"Met Coördin",
"end": 681,
"score": 0.9961449503898621,
"start": 675,
"tag": "NAME",
"value": "Pieter"
},
{
"context": " de teamleider\"\n \"Met Coördinator Projecten Primair Onderwijs \"\n \"Met Coördinator Externe Betrekkingen\"\n",
"end": 763,
"score": 0.9052229523658752,
"start": 746,
"tag": "NAME",
"value": "Primair Onderwijs"
}
] | server.coffee | twenteacademy/TAPhotoHunt | 1 | Timer = require 'timer'
Plugin = require 'plugin'
Db = require 'db'
Event = require 'event'
exports.onInstall = () !->
newHunt(5) # we'll start with 5 subjects
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: verdien punten, jee!"
exports.onUpgrade = !->
# apparently a timer did not fire, correct it
if 0 < Db.shared.get('next') < Plugin.time()
newHunt()
exports.client_newHunt = exports.newHunt = newHunt = (amount = 1, cb = false) !->
return if Db.shared.get('next') is -1
# used to disable my plugins and master instances
log 'newHunt called, amount '+amount
hunts = [
"Met Jan"
"Met Pieter"
"Met de teamleider"
"Met Coördinator Projecten Primair Onderwijs "
"Met Coördinator Externe Betrekkingen"
"Met Coördinator Algemeen Medewerkers"
"Met Coördinator Personeelszaken"
"Met Coördinator Marketing"
"Met Coördinator Internationalisering"
"Met Coördinator Online Leeromgeving"
"Met Coördinator Database"
"Met Coördinator Masterclasses"
"Met Coördinator Bijspijkerkampen"
"Met Coördinator Projecten"
"Met Coördinator Decentrale Projecten"
"Met Coördinator Projecten"
"Met één algemene medewerker"
"Met twee algemene medewerkers"
"Met drie algemene medewerkers"
"Met een PAL"
"Met een workshopbegeleider"
"Met een kamp-deelnemer"
"Bij een Zabuki-middag"
"Bij een beurs"
"Bij een middelbare school"
"Bij een basisschool"
"Met een TA-shirt"
"Op de bank"
"Tijdens een TA-borrel"
"Met een oud-medewerker"
"Bij een masterclass"
"Bij een TA-poster"
]
# remove hunts that have taken place already
if prevHunts = Db.shared.get('tahunts')
for huntId, hunt of prevHunts
continue if !+huntId
if (pos = hunts.indexOf(hunt.subject)) >= 0
hunts.splice pos, 1
# find some Nieuwe Hunts
newHunts = []
while amount-- and hunts.length
sel = Math.floor(Math.random()*hunts.length)
newHunts.push hunts[sel]
hunts.splice sel, 1
if !newHunts.length
log 'no more hunts available'
if cb
cb.reply true
else
log 'selected Nieuwe Hunts: '+JSON.stringify(newHunts)
for newHunt in newHunts
maxId = Db.shared.ref('tahunts').incr 'maxId'
# first referencing hunts, as Db.shared.incr 'tahunts', 'maxId' is buggy
Db.shared.set 'tahunts', maxId,
subject: newHunt
time: 0|(Date.now()*.001)
photos: {}
# schedule the next hunt when there are still hunts left
if hunts.length
tomorrowStart = Math.floor(Plugin.time()/86400)*86400 + 86400
nextTime = tomorrowStart + (10*3600) + Math.floor(Math.random()*(12*3600))
Timer.cancel()
Timer.set (nextTime-Plugin.time())*1000, 'newHunt'
Db.shared.set 'next', nextTime
# we'll only notify when this is about a single Nieuwe Hunt
if newHunts.length is 1
subj = newHunts[0]
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: Maak een foto.. " + subj.charAt(0).toLowerCase() + subj.slice(1)
exports.client_removePhoto = (huntId, photoId, disqualify = false) !->
photos = Db.shared.ref 'tahunts', huntId, 'fotos'
return if !photos.get photoId
thisUserSubmission = Plugin.userId() is photos.get(photoId, 'userId')
name = Plugin.userName(photos.get photoId, 'userId')
possessive = if name.charAt(name.length-1).toLowerCase() is 's' then "'" else "'s"
if disqualify
photos.set photoId, 'disqualified', true
else
photos.remove photoId
# find a new winner if necessary
newWinnerName = null
if Db.shared.get('tahunts', huntId, 'winner') is photoId
smId = (+k for k, v of photos.get() when !v.disqualified)?.sort()[0]
Db.shared.set 'tahunts', huntId, 'winner', smId
if smId
newWinnerName = Plugin.userName(photos.get smId, 'userId')
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: results revised, "+newWinnerName+" won! ("+Db.shared.get('tahunts', huntId, 'subject')+")"
comment = null
if disqualify
comment = "disqualified " + name + possessive + " submission"
else if thisUserSubmission
comment = "retracted submission"
else if !thisUserSubmission
comment = "removed " + name + possessive + " submission"
if comment
if newWinnerName
comment = comment + ", making " + newWinnerName + " the new winner!"
addComment huntId, comment
exports.onPhoto = (info, huntId) !->
huntId = huntId[0]
log 'got photo', JSON.stringify(info), Plugin.userId()
# test whether the user hasn't uploaded a photo in this hunt yet
allPhotos = Db.shared.get 'tahunts', huntId, 'photos'
for k, v of allPhotos
if +v.userId is Plugin.userId()
log "user #{Plugin.userId()} already submitted a photo for hunt "+huntId
return
hunt = Db.shared.ref 'tahunts', huntId
maxId = hunt.incr 'photos', 'maxId'
hunt.set 'photos', maxId, info
if !hunt.get 'winner'
hunt.set 'winner', maxId
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: "+Plugin.userName()+" wint! ("+hunt.get('subject')+")"
else
addComment huntId, "added a runner-up"
addComment = (huntId, comment) !->
comment =
t: 0|Plugin.time()
u: Plugin.userId()
s: true
c: comment
comments = Db.shared.createRef("comments", huntId)
max = comments.incr 'max'
comments.set max, comment
| 41256 | Timer = require 'timer'
Plugin = require 'plugin'
Db = require 'db'
Event = require 'event'
exports.onInstall = () !->
newHunt(5) # we'll start with 5 subjects
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: verdien punten, jee!"
exports.onUpgrade = !->
# apparently a timer did not fire, correct it
if 0 < Db.shared.get('next') < Plugin.time()
newHunt()
exports.client_newHunt = exports.newHunt = newHunt = (amount = 1, cb = false) !->
return if Db.shared.get('next') is -1
# used to disable my plugins and master instances
log 'newHunt called, amount '+amount
hunts = [
"<NAME> <NAME>"
"<NAME> <NAME>"
"Met de teamleider"
"Met Coördinator Projecten <NAME> "
"Met Coördinator Externe Betrekkingen"
"Met Coördinator Algemeen Medewerkers"
"Met Coördinator Personeelszaken"
"Met Coördinator Marketing"
"Met Coördinator Internationalisering"
"Met Coördinator Online Leeromgeving"
"Met Coördinator Database"
"Met Coördinator Masterclasses"
"Met Coördinator Bijspijkerkampen"
"Met Coördinator Projecten"
"Met Coördinator Decentrale Projecten"
"Met Coördinator Projecten"
"Met één algemene medewerker"
"Met twee algemene medewerkers"
"Met drie algemene medewerkers"
"Met een PAL"
"Met een workshopbegeleider"
"Met een kamp-deelnemer"
"Bij een Zabuki-middag"
"Bij een beurs"
"Bij een middelbare school"
"Bij een basisschool"
"Met een TA-shirt"
"Op de bank"
"Tijdens een TA-borrel"
"Met een oud-medewerker"
"Bij een masterclass"
"Bij een TA-poster"
]
# remove hunts that have taken place already
if prevHunts = Db.shared.get('tahunts')
for huntId, hunt of prevHunts
continue if !+huntId
if (pos = hunts.indexOf(hunt.subject)) >= 0
hunts.splice pos, 1
# find some Nieuwe Hunts
newHunts = []
while amount-- and hunts.length
sel = Math.floor(Math.random()*hunts.length)
newHunts.push hunts[sel]
hunts.splice sel, 1
if !newHunts.length
log 'no more hunts available'
if cb
cb.reply true
else
log 'selected Nieuwe Hunts: '+JSON.stringify(newHunts)
for newHunt in newHunts
maxId = Db.shared.ref('tahunts').incr 'maxId'
# first referencing hunts, as Db.shared.incr 'tahunts', 'maxId' is buggy
Db.shared.set 'tahunts', maxId,
subject: newHunt
time: 0|(Date.now()*.001)
photos: {}
# schedule the next hunt when there are still hunts left
if hunts.length
tomorrowStart = Math.floor(Plugin.time()/86400)*86400 + 86400
nextTime = tomorrowStart + (10*3600) + Math.floor(Math.random()*(12*3600))
Timer.cancel()
Timer.set (nextTime-Plugin.time())*1000, 'newHunt'
Db.shared.set 'next', nextTime
# we'll only notify when this is about a single Nieuwe Hunt
if newHunts.length is 1
subj = newHunts[0]
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: Maak een foto.. " + subj.charAt(0).toLowerCase() + subj.slice(1)
exports.client_removePhoto = (huntId, photoId, disqualify = false) !->
photos = Db.shared.ref 'tahunts', huntId, 'fotos'
return if !photos.get photoId
thisUserSubmission = Plugin.userId() is photos.get(photoId, 'userId')
name = Plugin.userName(photos.get photoId, 'userId')
possessive = if name.charAt(name.length-1).toLowerCase() is 's' then "'" else "'s"
if disqualify
photos.set photoId, 'disqualified', true
else
photos.remove photoId
# find a new winner if necessary
newWinnerName = null
if Db.shared.get('tahunts', huntId, 'winner') is photoId
smId = (+k for k, v of photos.get() when !v.disqualified)?.sort()[0]
Db.shared.set 'tahunts', huntId, 'winner', smId
if smId
newWinnerName = Plugin.userName(photos.get smId, 'userId')
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: results revised, "+newWinnerName+" won! ("+Db.shared.get('tahunts', huntId, 'subject')+")"
comment = null
if disqualify
comment = "disqualified " + name + possessive + " submission"
else if thisUserSubmission
comment = "retracted submission"
else if !thisUserSubmission
comment = "removed " + name + possessive + " submission"
if comment
if newWinnerName
comment = comment + ", making " + newWinnerName + " the new winner!"
addComment huntId, comment
exports.onPhoto = (info, huntId) !->
huntId = huntId[0]
log 'got photo', JSON.stringify(info), Plugin.userId()
# test whether the user hasn't uploaded a photo in this hunt yet
allPhotos = Db.shared.get 'tahunts', huntId, 'photos'
for k, v of allPhotos
if +v.userId is Plugin.userId()
log "user #{Plugin.userId()} already submitted a photo for hunt "+huntId
return
hunt = Db.shared.ref 'tahunts', huntId
maxId = hunt.incr 'photos', 'maxId'
hunt.set 'photos', maxId, info
if !hunt.get 'winner'
hunt.set 'winner', maxId
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: "+Plugin.userName()+" wint! ("+hunt.get('subject')+")"
else
addComment huntId, "added a runner-up"
addComment = (huntId, comment) !->
comment =
t: 0|Plugin.time()
u: Plugin.userId()
s: true
c: comment
comments = Db.shared.createRef("comments", huntId)
max = comments.incr 'max'
comments.set max, comment
| true | Timer = require 'timer'
Plugin = require 'plugin'
Db = require 'db'
Event = require 'event'
exports.onInstall = () !->
newHunt(5) # we'll start with 5 subjects
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: verdien punten, jee!"
exports.onUpgrade = !->
# apparently a timer did not fire, correct it
if 0 < Db.shared.get('next') < Plugin.time()
newHunt()
exports.client_newHunt = exports.newHunt = newHunt = (amount = 1, cb = false) !->
return if Db.shared.get('next') is -1
# used to disable my plugins and master instances
log 'newHunt called, amount '+amount
hunts = [
"PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI"
"Met de teamleider"
"Met Coördinator Projecten PI:NAME:<NAME>END_PI "
"Met Coördinator Externe Betrekkingen"
"Met Coördinator Algemeen Medewerkers"
"Met Coördinator Personeelszaken"
"Met Coördinator Marketing"
"Met Coördinator Internationalisering"
"Met Coördinator Online Leeromgeving"
"Met Coördinator Database"
"Met Coördinator Masterclasses"
"Met Coördinator Bijspijkerkampen"
"Met Coördinator Projecten"
"Met Coördinator Decentrale Projecten"
"Met Coördinator Projecten"
"Met één algemene medewerker"
"Met twee algemene medewerkers"
"Met drie algemene medewerkers"
"Met een PAL"
"Met een workshopbegeleider"
"Met een kamp-deelnemer"
"Bij een Zabuki-middag"
"Bij een beurs"
"Bij een middelbare school"
"Bij een basisschool"
"Met een TA-shirt"
"Op de bank"
"Tijdens een TA-borrel"
"Met een oud-medewerker"
"Bij een masterclass"
"Bij een TA-poster"
]
# remove hunts that have taken place already
if prevHunts = Db.shared.get('tahunts')
for huntId, hunt of prevHunts
continue if !+huntId
if (pos = hunts.indexOf(hunt.subject)) >= 0
hunts.splice pos, 1
# find some Nieuwe Hunts
newHunts = []
while amount-- and hunts.length
sel = Math.floor(Math.random()*hunts.length)
newHunts.push hunts[sel]
hunts.splice sel, 1
if !newHunts.length
log 'no more hunts available'
if cb
cb.reply true
else
log 'selected Nieuwe Hunts: '+JSON.stringify(newHunts)
for newHunt in newHunts
maxId = Db.shared.ref('tahunts').incr 'maxId'
# first referencing hunts, as Db.shared.incr 'tahunts', 'maxId' is buggy
Db.shared.set 'tahunts', maxId,
subject: newHunt
time: 0|(Date.now()*.001)
photos: {}
# schedule the next hunt when there are still hunts left
if hunts.length
tomorrowStart = Math.floor(Plugin.time()/86400)*86400 + 86400
nextTime = tomorrowStart + (10*3600) + Math.floor(Math.random()*(12*3600))
Timer.cancel()
Timer.set (nextTime-Plugin.time())*1000, 'newHunt'
Db.shared.set 'next', nextTime
# we'll only notify when this is about a single Nieuwe Hunt
if newHunts.length is 1
subj = newHunts[0]
Event.create
unit: 'tahunts'
text: "Nieuwe TA-fotohunt: Maak een foto.. " + subj.charAt(0).toLowerCase() + subj.slice(1)
exports.client_removePhoto = (huntId, photoId, disqualify = false) !->
photos = Db.shared.ref 'tahunts', huntId, 'fotos'
return if !photos.get photoId
thisUserSubmission = Plugin.userId() is photos.get(photoId, 'userId')
name = Plugin.userName(photos.get photoId, 'userId')
possessive = if name.charAt(name.length-1).toLowerCase() is 's' then "'" else "'s"
if disqualify
photos.set photoId, 'disqualified', true
else
photos.remove photoId
# find a new winner if necessary
newWinnerName = null
if Db.shared.get('tahunts', huntId, 'winner') is photoId
smId = (+k for k, v of photos.get() when !v.disqualified)?.sort()[0]
Db.shared.set 'tahunts', huntId, 'winner', smId
if smId
newWinnerName = Plugin.userName(photos.get smId, 'userId')
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: results revised, "+newWinnerName+" won! ("+Db.shared.get('tahunts', huntId, 'subject')+")"
comment = null
if disqualify
comment = "disqualified " + name + possessive + " submission"
else if thisUserSubmission
comment = "retracted submission"
else if !thisUserSubmission
comment = "removed " + name + possessive + " submission"
if comment
if newWinnerName
comment = comment + ", making " + newWinnerName + " the new winner!"
addComment huntId, comment
exports.onPhoto = (info, huntId) !->
huntId = huntId[0]
log 'got photo', JSON.stringify(info), Plugin.userId()
# test whether the user hasn't uploaded a photo in this hunt yet
allPhotos = Db.shared.get 'tahunts', huntId, 'photos'
for k, v of allPhotos
if +v.userId is Plugin.userId()
log "user #{Plugin.userId()} already submitted a photo for hunt "+huntId
return
hunt = Db.shared.ref 'tahunts', huntId
maxId = hunt.incr 'photos', 'maxId'
hunt.set 'photos', maxId, info
if !hunt.get 'winner'
hunt.set 'winner', maxId
Event.create
unit: 'tahunts'
text: "TA Foto Hunt: "+Plugin.userName()+" wint! ("+hunt.get('subject')+")"
else
addComment huntId, "added a runner-up"
addComment = (huntId, comment) !->
comment =
t: 0|Plugin.time()
u: Plugin.userId()
s: true
c: comment
comments = Db.shared.createRef("comments", huntId)
max = comments.incr 'max'
comments.set max, comment
|
[
{
"context": "reate\n\t\t###\n\t\tmollie.customers.create({\n\t\t\tname: \"Luke Skywalker\",\n\t\t\temail: \"luke@example.org\",\n\t\t\tmetadata: {\n\t\t",
"end": 339,
"score": 0.999895453453064,
"start": 325,
"tag": "NAME",
"value": "Luke Skywalker"
},
{
"context": "rs.create({\n\t\t\tname: \"Luke Skywalker\",\n\t\t\temail: \"luke@example.org\",\n\t\t\tmetadata: {\n\t\t\t\tisJedi: true,\n\t\t\t},\n\t\t}, (cu",
"end": 369,
"score": 0.9999270439147949,
"start": 353,
"tag": "EMAIL",
"value": "luke@example.org"
}
] | src/examples/11-new-customer.coffee | veselinoskih/mollie-test | 0 | ###
Example 11 - How to create a new customer in the Mollie API.
###
mollie = require("./mollie");
fs = require("fs");
class example
constructor: (request, response) ->
###
Customer creation parameters.
See: https://www.mollie.com/en/docs/reference/customers/create
###
mollie.customers.create({
name: "Luke Skywalker",
email: "luke@example.org",
metadata: {
isJedi: true,
},
}, (customer) =>
if (customer.error)
console.error(customer.error);
return response.end();
response.write("<p>New customer created #{customer.id} (#{customer.name}).</p>");
response.end();
);
module.exports = example
| 86047 | ###
Example 11 - How to create a new customer in the Mollie API.
###
mollie = require("./mollie");
fs = require("fs");
class example
constructor: (request, response) ->
###
Customer creation parameters.
See: https://www.mollie.com/en/docs/reference/customers/create
###
mollie.customers.create({
name: "<NAME>",
email: "<EMAIL>",
metadata: {
isJedi: true,
},
}, (customer) =>
if (customer.error)
console.error(customer.error);
return response.end();
response.write("<p>New customer created #{customer.id} (#{customer.name}).</p>");
response.end();
);
module.exports = example
| true | ###
Example 11 - How to create a new customer in the Mollie API.
###
mollie = require("./mollie");
fs = require("fs");
class example
constructor: (request, response) ->
###
Customer creation parameters.
See: https://www.mollie.com/en/docs/reference/customers/create
###
mollie.customers.create({
name: "PI:NAME:<NAME>END_PI",
email: "PI:EMAIL:<EMAIL>END_PI",
metadata: {
isJedi: true,
},
}, (customer) =>
if (customer.error)
console.error(customer.error);
return response.end();
response.write("<p>New customer created #{customer.id} (#{customer.name}).</p>");
response.end();
);
module.exports = example
|
[
{
"context": "@fileoverview Tests for no-children-prop\n# @author Benjamin Stepp\n###\n\n'use strict'\n\n# ----------------------------",
"end": 72,
"score": 0.9998608827590942,
"start": 58,
"tag": "NAME",
"value": "Benjamin Stepp"
}
] | src/tests/rules/no-children-prop.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-children-prop
# @author Benjamin Stepp
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-children-prop'
{RuleTester} = require 'eslint'
path = require 'path'
JSX_ERROR =
'Do not pass children as props. Instead, nest children between the opening and closing tags.'
CREATE_ELEMENT_ERROR =
'Do not pass children as props. Instead, pass them as additional arguments to React.createElement.'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-children-prop', rule,
valid: [
code: '<div />'
,
code: '<div></div>'
,
code: 'React.createElement("div", {})'
,
code: 'React.createElement("div", undefined)'
,
code: '<div className="class-name"></div>'
,
code: 'React.createElement("div", {className: "class-name"})'
,
code: '<div>Children</div>'
,
code: 'React.createElement("div", "Children")'
,
code: 'React.createElement("div", {}, "Children")'
,
code: 'React.createElement("div", undefined, "Children")'
,
code: '<div className="class-name">Children</div>'
,
code: 'React.createElement("div", {className: "class-name"}, "Children")'
,
code: '<div><div /></div>'
,
code: 'React.createElement("div", React.createElement("div"))'
,
code: 'React.createElement("div", {}, React.createElement("div"))'
,
code: 'React.createElement("div", undefined, React.createElement("div"))'
,
code: '<div><div /><div /></div>'
,
code:
'React.createElement("div", React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", {}, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", undefined, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", {}, [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", undefined, [React.createElement("div"), React.createElement("div")])'
,
code: '<MyComponent />'
,
code: 'React.createElement(MyComponent)'
,
code: 'React.createElement(MyComponent, {})'
,
code: 'React.createElement(MyComponent, undefined)'
,
code: '<MyComponent>Children</MyComponent>'
,
code: 'React.createElement(MyComponent, "Children")'
,
code: 'React.createElement(MyComponent, {}, "Children")'
,
code: 'React.createElement(MyComponent, undefined, "Children")'
,
code: '<MyComponent className="class-name"></MyComponent>'
,
code: 'React.createElement(MyComponent, {className: "class-name"})'
,
code: '<MyComponent className="class-name">Children</MyComponent>'
,
code:
'React.createElement(MyComponent, {className: "class-name"}, "Children")'
,
code: '<MyComponent className="class-name" {...props} />'
,
code:
'React.createElement(MyComponent, {className: "class-name", ...props})'
]
invalid: [
code: '<div children="Children" />'
errors: [message: JSX_ERROR]
,
code: '<div children={<div />} />'
errors: [message: JSX_ERROR]
,
code: '<div children={[<div />, <div />]} />'
errors: [message: JSX_ERROR]
,
code: '<div children="Children">Children</div>'
errors: [message: JSX_ERROR]
,
code: 'React.createElement("div", {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: "Children"}, "Children")'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: React.createElement("div")})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code:
'React.createElement("div", {children: [React.createElement("div"), React.createElement("div")]})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent className="class-name" children="Children" />'
errors: [message: JSX_ERROR]
,
code:
'React.createElement(MyComponent, {children: "Children", className: "class-name"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent {...props} children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {...props, children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
]
| 66839 | ###*
# @fileoverview Tests for no-children-prop
# @author <NAME>
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-children-prop'
{RuleTester} = require 'eslint'
path = require 'path'
JSX_ERROR =
'Do not pass children as props. Instead, nest children between the opening and closing tags.'
CREATE_ELEMENT_ERROR =
'Do not pass children as props. Instead, pass them as additional arguments to React.createElement.'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-children-prop', rule,
valid: [
code: '<div />'
,
code: '<div></div>'
,
code: 'React.createElement("div", {})'
,
code: 'React.createElement("div", undefined)'
,
code: '<div className="class-name"></div>'
,
code: 'React.createElement("div", {className: "class-name"})'
,
code: '<div>Children</div>'
,
code: 'React.createElement("div", "Children")'
,
code: 'React.createElement("div", {}, "Children")'
,
code: 'React.createElement("div", undefined, "Children")'
,
code: '<div className="class-name">Children</div>'
,
code: 'React.createElement("div", {className: "class-name"}, "Children")'
,
code: '<div><div /></div>'
,
code: 'React.createElement("div", React.createElement("div"))'
,
code: 'React.createElement("div", {}, React.createElement("div"))'
,
code: 'React.createElement("div", undefined, React.createElement("div"))'
,
code: '<div><div /><div /></div>'
,
code:
'React.createElement("div", React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", {}, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", undefined, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", {}, [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", undefined, [React.createElement("div"), React.createElement("div")])'
,
code: '<MyComponent />'
,
code: 'React.createElement(MyComponent)'
,
code: 'React.createElement(MyComponent, {})'
,
code: 'React.createElement(MyComponent, undefined)'
,
code: '<MyComponent>Children</MyComponent>'
,
code: 'React.createElement(MyComponent, "Children")'
,
code: 'React.createElement(MyComponent, {}, "Children")'
,
code: 'React.createElement(MyComponent, undefined, "Children")'
,
code: '<MyComponent className="class-name"></MyComponent>'
,
code: 'React.createElement(MyComponent, {className: "class-name"})'
,
code: '<MyComponent className="class-name">Children</MyComponent>'
,
code:
'React.createElement(MyComponent, {className: "class-name"}, "Children")'
,
code: '<MyComponent className="class-name" {...props} />'
,
code:
'React.createElement(MyComponent, {className: "class-name", ...props})'
]
invalid: [
code: '<div children="Children" />'
errors: [message: JSX_ERROR]
,
code: '<div children={<div />} />'
errors: [message: JSX_ERROR]
,
code: '<div children={[<div />, <div />]} />'
errors: [message: JSX_ERROR]
,
code: '<div children="Children">Children</div>'
errors: [message: JSX_ERROR]
,
code: 'React.createElement("div", {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: "Children"}, "Children")'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: React.createElement("div")})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code:
'React.createElement("div", {children: [React.createElement("div"), React.createElement("div")]})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent className="class-name" children="Children" />'
errors: [message: JSX_ERROR]
,
code:
'React.createElement(MyComponent, {children: "Children", className: "class-name"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent {...props} children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {...props, children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
]
| true | ###*
# @fileoverview Tests for no-children-prop
# @author PI:NAME:<NAME>END_PI
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-children-prop'
{RuleTester} = require 'eslint'
path = require 'path'
JSX_ERROR =
'Do not pass children as props. Instead, nest children between the opening and closing tags.'
CREATE_ELEMENT_ERROR =
'Do not pass children as props. Instead, pass them as additional arguments to React.createElement.'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-children-prop', rule,
valid: [
code: '<div />'
,
code: '<div></div>'
,
code: 'React.createElement("div", {})'
,
code: 'React.createElement("div", undefined)'
,
code: '<div className="class-name"></div>'
,
code: 'React.createElement("div", {className: "class-name"})'
,
code: '<div>Children</div>'
,
code: 'React.createElement("div", "Children")'
,
code: 'React.createElement("div", {}, "Children")'
,
code: 'React.createElement("div", undefined, "Children")'
,
code: '<div className="class-name">Children</div>'
,
code: 'React.createElement("div", {className: "class-name"}, "Children")'
,
code: '<div><div /></div>'
,
code: 'React.createElement("div", React.createElement("div"))'
,
code: 'React.createElement("div", {}, React.createElement("div"))'
,
code: 'React.createElement("div", undefined, React.createElement("div"))'
,
code: '<div><div /><div /></div>'
,
code:
'React.createElement("div", React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", {}, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", undefined, React.createElement("div"), React.createElement("div"))'
,
code:
'React.createElement("div", [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", {}, [React.createElement("div"), React.createElement("div")])'
,
code:
'React.createElement("div", undefined, [React.createElement("div"), React.createElement("div")])'
,
code: '<MyComponent />'
,
code: 'React.createElement(MyComponent)'
,
code: 'React.createElement(MyComponent, {})'
,
code: 'React.createElement(MyComponent, undefined)'
,
code: '<MyComponent>Children</MyComponent>'
,
code: 'React.createElement(MyComponent, "Children")'
,
code: 'React.createElement(MyComponent, {}, "Children")'
,
code: 'React.createElement(MyComponent, undefined, "Children")'
,
code: '<MyComponent className="class-name"></MyComponent>'
,
code: 'React.createElement(MyComponent, {className: "class-name"})'
,
code: '<MyComponent className="class-name">Children</MyComponent>'
,
code:
'React.createElement(MyComponent, {className: "class-name"}, "Children")'
,
code: '<MyComponent className="class-name" {...props} />'
,
code:
'React.createElement(MyComponent, {className: "class-name", ...props})'
]
invalid: [
code: '<div children="Children" />'
errors: [message: JSX_ERROR]
,
code: '<div children={<div />} />'
errors: [message: JSX_ERROR]
,
code: '<div children={[<div />, <div />]} />'
errors: [message: JSX_ERROR]
,
code: '<div children="Children">Children</div>'
errors: [message: JSX_ERROR]
,
code: 'React.createElement("div", {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: "Children"}, "Children")'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: 'React.createElement("div", {children: React.createElement("div")})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code:
'React.createElement("div", {children: [React.createElement("div"), React.createElement("div")]})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent className="class-name" children="Children" />'
errors: [message: JSX_ERROR]
,
code:
'React.createElement(MyComponent, {children: "Children", className: "class-name"})'
errors: [message: CREATE_ELEMENT_ERROR]
,
code: '<MyComponent {...props} children="Children" />'
errors: [message: JSX_ERROR]
,
code: 'React.createElement(MyComponent, {...props, children: "Children"})'
errors: [message: CREATE_ELEMENT_ERROR]
]
|
[
{
"context": "# Written by Nick Presta (nick@nickpresta.ca)\n# Released under the MIT lic",
"end": 24,
"score": 0.9998839497566223,
"start": 13,
"tag": "NAME",
"value": "Nick Presta"
},
{
"context": "# Written by Nick Presta (nick@nickpresta.ca)\n# Released under the MIT license.\n# See: https:/",
"end": 44,
"score": 0.9999305605888367,
"start": 26,
"tag": "EMAIL",
"value": "nick@nickpresta.ca"
},
{
"context": " under the MIT license.\n# See: https://github.com/NickPresta/phantomjs-render-html\npage = require('webpage').c",
"end": 116,
"score": 0.9994569420814514,
"start": 106,
"tag": "USERNAME",
"value": "NickPresta"
}
] | render.coffee | nickpresta/phantomjs-render-html | 1 | # Written by Nick Presta (nick@nickpresta.ca)
# Released under the MIT license.
# See: https://github.com/NickPresta/phantomjs-render-html
page = require('webpage').create()
system = require 'system'
usage = ->
console.log '''Usage: rasterize.coffee -url URL -paper [paperwidth*paperheight|paperformat] -format pdf -output output [< stdin]
Examples:
phantomjs rasterize.coffee -url http://google.com -output /dev/stdout
phantomjs rasterize.coffee -output /dev/stdout < myfile.html
phantomjs rasterize.coffee -url http://google.com -paper letter -output out.pdf
Argument types:
url: local (file:///foobar.html), internet (http://google.com)
paper: 5in*7.5in, 10cm*20cm, A4, Letter
format: pdf, png, gif, jpeg
output: out.pdf, out.png, /dev/stdout
stdin: a file or string (standard unix redirection)'''
phantom.exit 1
optionParser = ->
if phantom.args.length < 1
usage()
opts = {}
opt = 0
while opt < phantom.args.length and phantom.args[opt][0] is '-'
option = phantom.args[opt]
switch(option)
when '-url'
opts.url = phantom.args[++opt]
when '-paper'
opts.paper = phantom.args[++opt]
when '-format'
opts.format = phantom.args[++opt]
when '-output'
opts.output = phantom.args[++opt]
when '-help'
usage()
else
console.log "Unknown switch: #{phantom.args[opt]}"
usage()
opt++
return opts
render = (status, opts) ->
if status isnt 'success'
console.log 'Unable to load the content!'
phantom.exit 1
else
window.setTimeout (-> page.render(opts.output, {format: opts.format}); phantom.exit()), 1000
opts = optionParser()
unless opts.output
usage()
else
# Argument checking
unless opts.output
console.log 'No output provided'
phantom.exit 1
unless opts.format
opts.format = 'pdf'
if opts.format not in ['png', 'pdf']
console.log 'Invalid output format - should either be png or pdf'
phantom.exit 1
unless opts.paper
opts.paper = 'Letter'
if '*' in opts.paper
size = opts.paper.split '*'
if size.length is 2
# Specified something like 5in*7.5in
page.paperSize = {width: size[0], height: size[1], border: '0'}
else
# Otherwise single value (i.e. Letter)
page.paperSize = {format: opts.paper, orientation: 'portrait', border: '1cm'}
unless opts.url
content = system.stdin.read()
unless content
console.log 'No URL or stdin provided'
phantom.exit 1
page.content = content
# Run the script
if opts.url
page.open opts.url, (status) ->
render status, opts
else if page.content
page.onLoadFinished = (status) -> render status, opts
else
console.log 'No URL or stdin provided'
phantom.exit 1
| 172215 | # Written by <NAME> (<EMAIL>)
# Released under the MIT license.
# See: https://github.com/NickPresta/phantomjs-render-html
page = require('webpage').create()
system = require 'system'
usage = ->
console.log '''Usage: rasterize.coffee -url URL -paper [paperwidth*paperheight|paperformat] -format pdf -output output [< stdin]
Examples:
phantomjs rasterize.coffee -url http://google.com -output /dev/stdout
phantomjs rasterize.coffee -output /dev/stdout < myfile.html
phantomjs rasterize.coffee -url http://google.com -paper letter -output out.pdf
Argument types:
url: local (file:///foobar.html), internet (http://google.com)
paper: 5in*7.5in, 10cm*20cm, A4, Letter
format: pdf, png, gif, jpeg
output: out.pdf, out.png, /dev/stdout
stdin: a file or string (standard unix redirection)'''
phantom.exit 1
optionParser = ->
if phantom.args.length < 1
usage()
opts = {}
opt = 0
while opt < phantom.args.length and phantom.args[opt][0] is '-'
option = phantom.args[opt]
switch(option)
when '-url'
opts.url = phantom.args[++opt]
when '-paper'
opts.paper = phantom.args[++opt]
when '-format'
opts.format = phantom.args[++opt]
when '-output'
opts.output = phantom.args[++opt]
when '-help'
usage()
else
console.log "Unknown switch: #{phantom.args[opt]}"
usage()
opt++
return opts
render = (status, opts) ->
if status isnt 'success'
console.log 'Unable to load the content!'
phantom.exit 1
else
window.setTimeout (-> page.render(opts.output, {format: opts.format}); phantom.exit()), 1000
opts = optionParser()
unless opts.output
usage()
else
# Argument checking
unless opts.output
console.log 'No output provided'
phantom.exit 1
unless opts.format
opts.format = 'pdf'
if opts.format not in ['png', 'pdf']
console.log 'Invalid output format - should either be png or pdf'
phantom.exit 1
unless opts.paper
opts.paper = 'Letter'
if '*' in opts.paper
size = opts.paper.split '*'
if size.length is 2
# Specified something like 5in*7.5in
page.paperSize = {width: size[0], height: size[1], border: '0'}
else
# Otherwise single value (i.e. Letter)
page.paperSize = {format: opts.paper, orientation: 'portrait', border: '1cm'}
unless opts.url
content = system.stdin.read()
unless content
console.log 'No URL or stdin provided'
phantom.exit 1
page.content = content
# Run the script
if opts.url
page.open opts.url, (status) ->
render status, opts
else if page.content
page.onLoadFinished = (status) -> render status, opts
else
console.log 'No URL or stdin provided'
phantom.exit 1
| true | # Written by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# Released under the MIT license.
# See: https://github.com/NickPresta/phantomjs-render-html
page = require('webpage').create()
system = require 'system'
usage = ->
console.log '''Usage: rasterize.coffee -url URL -paper [paperwidth*paperheight|paperformat] -format pdf -output output [< stdin]
Examples:
phantomjs rasterize.coffee -url http://google.com -output /dev/stdout
phantomjs rasterize.coffee -output /dev/stdout < myfile.html
phantomjs rasterize.coffee -url http://google.com -paper letter -output out.pdf
Argument types:
url: local (file:///foobar.html), internet (http://google.com)
paper: 5in*7.5in, 10cm*20cm, A4, Letter
format: pdf, png, gif, jpeg
output: out.pdf, out.png, /dev/stdout
stdin: a file or string (standard unix redirection)'''
phantom.exit 1
optionParser = ->
if phantom.args.length < 1
usage()
opts = {}
opt = 0
while opt < phantom.args.length and phantom.args[opt][0] is '-'
option = phantom.args[opt]
switch(option)
when '-url'
opts.url = phantom.args[++opt]
when '-paper'
opts.paper = phantom.args[++opt]
when '-format'
opts.format = phantom.args[++opt]
when '-output'
opts.output = phantom.args[++opt]
when '-help'
usage()
else
console.log "Unknown switch: #{phantom.args[opt]}"
usage()
opt++
return opts
render = (status, opts) ->
if status isnt 'success'
console.log 'Unable to load the content!'
phantom.exit 1
else
window.setTimeout (-> page.render(opts.output, {format: opts.format}); phantom.exit()), 1000
opts = optionParser()
unless opts.output
usage()
else
# Argument checking
unless opts.output
console.log 'No output provided'
phantom.exit 1
unless opts.format
opts.format = 'pdf'
if opts.format not in ['png', 'pdf']
console.log 'Invalid output format - should either be png or pdf'
phantom.exit 1
unless opts.paper
opts.paper = 'Letter'
if '*' in opts.paper
size = opts.paper.split '*'
if size.length is 2
# Specified something like 5in*7.5in
page.paperSize = {width: size[0], height: size[1], border: '0'}
else
# Otherwise single value (i.e. Letter)
page.paperSize = {format: opts.paper, orientation: 'portrait', border: '1cm'}
unless opts.url
content = system.stdin.read()
unless content
console.log 'No URL or stdin provided'
phantom.exit 1
page.content = content
# Run the script
if opts.url
page.open opts.url, (status) ->
render status, opts
else if page.content
page.onLoadFinished = (status) -> render status, opts
else
console.log 'No URL or stdin provided'
phantom.exit 1
|
[
{
"context": "user =\n name: 'Werner Heisenberg'\n occupation: 'theoretical physicist'\n\ncurrentUs",
"end": 33,
"score": 0.9998682737350464,
"start": 16,
"tag": "NAME",
"value": "Werner Heisenberg"
}
] | documentation/examples/object_spread.coffee | Trott/coffeescript | 8,728 | user =
name: 'Werner Heisenberg'
occupation: 'theoretical physicist'
currentUser = { user..., status: 'Uncertain' }
| 191490 | user =
name: '<NAME>'
occupation: 'theoretical physicist'
currentUser = { user..., status: 'Uncertain' }
| true | user =
name: 'PI:NAME:<NAME>END_PI'
occupation: 'theoretical physicist'
currentUser = { user..., status: 'Uncertain' }
|
[
{
"context": "###\n grunt-deadlink\n https://github.com/lnyarl/grunt-deadlink\n\n Copyright (c) +2013 choi yongja",
"end": 48,
"score": 0.9996417164802551,
"start": 42,
"tag": "USERNAME",
"value": "lnyarl"
},
{
"context": "b.com/lnyarl/grunt-deadlink\n\n Copyright (c) +2013 choi yongjae\n Licensed under the MIT license.\n###\n\nmodule.exp",
"end": 99,
"score": 0.9996429085731506,
"start": 87,
"tag": "NAME",
"value": "choi yongjae"
}
] | src/tasks/deadlink.coffee | gruntjs-updater/grunt-deadlink | 2 | ###
grunt-deadlink
https://github.com/lnyarl/grunt-deadlink
Copyright (c) +2013 choi yongjae
Licensed under the MIT license.
###
module.exports = (grunt) ->
util = (require './util')(grunt)
Logger = (require './logger')(grunt)
Checker = (require './checker')(grunt)
_ = grunt.util._
grunt.registerMultiTask 'deadlink', 'check dead links in files.', ->
done = @async()
fail = (failCount) ->
grunt.fail.warn("Found #{failCount} dead links")
@async()
options = @options
# this expression can changed to recognizing other url format.
# eg. markdown, wiki syntax, html
# markdown is default
filter: (content)->
expressions = [
/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
#/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/((\.?\.?\/?[a-zA-Z\.\-_])+)/g
]
util.searchAllLink expressions, content
maxAttempts : 3
retryDelay : 60000
logToFile : false
logFilename: 'deadlink.log'
logAll : false
files = grunt.file.expand @data.src
filter = @data.filter || options.filter
logger = new Logger options
checker = new Checker options, logger
logger.progress()
util.extractURL files, filter, (filepath, link) ->
logger.increaseLinkCount()
checker.checkDeadlink filepath, link
logger.printResult done, fail
| 163725 | ###
grunt-deadlink
https://github.com/lnyarl/grunt-deadlink
Copyright (c) +2013 <NAME>
Licensed under the MIT license.
###
module.exports = (grunt) ->
util = (require './util')(grunt)
Logger = (require './logger')(grunt)
Checker = (require './checker')(grunt)
_ = grunt.util._
grunt.registerMultiTask 'deadlink', 'check dead links in files.', ->
done = @async()
fail = (failCount) ->
grunt.fail.warn("Found #{failCount} dead links")
@async()
options = @options
# this expression can changed to recognizing other url format.
# eg. markdown, wiki syntax, html
# markdown is default
filter: (content)->
expressions = [
/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
#/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/((\.?\.?\/?[a-zA-Z\.\-_])+)/g
]
util.searchAllLink expressions, content
maxAttempts : 3
retryDelay : 60000
logToFile : false
logFilename: 'deadlink.log'
logAll : false
files = grunt.file.expand @data.src
filter = @data.filter || options.filter
logger = new Logger options
checker = new Checker options, logger
logger.progress()
util.extractURL files, filter, (filepath, link) ->
logger.increaseLinkCount()
checker.checkDeadlink filepath, link
logger.printResult done, fail
| true | ###
grunt-deadlink
https://github.com/lnyarl/grunt-deadlink
Copyright (c) +2013 PI:NAME:<NAME>END_PI
Licensed under the MIT license.
###
module.exports = (grunt) ->
util = (require './util')(grunt)
Logger = (require './logger')(grunt)
Checker = (require './checker')(grunt)
_ = grunt.util._
grunt.registerMultiTask 'deadlink', 'check dead links in files.', ->
done = @async()
fail = (failCount) ->
grunt.fail.warn("Found #{failCount} dead links")
@async()
options = @options
# this expression can changed to recognizing other url format.
# eg. markdown, wiki syntax, html
# markdown is default
filter: (content)->
expressions = [
/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/\[[^\]]*\]\((http[s]?:\/\/[^\) ]+)/g, #[...](<url>)
#/\[[^\]]*\]\s*:\s*(http[s]?:\/\/.*)/g, #[...]: <url>
#/((\.?\.?\/?[a-zA-Z\.\-_])+)/g
]
util.searchAllLink expressions, content
maxAttempts : 3
retryDelay : 60000
logToFile : false
logFilename: 'deadlink.log'
logAll : false
files = grunt.file.expand @data.src
filter = @data.filter || options.filter
logger = new Logger options
checker = new Checker options, logger
logger.progress()
util.extractURL files, filter, (filepath, link) ->
logger.increaseLinkCount()
checker.checkDeadlink filepath, link
logger.printResult done, fail
|
[
{
"context": "#{constants.keygen.expire}\"\n # \"Passphrase: #{passphrase}\"\n # \"%commit\"\n # ]\n # stdin = script.joi",
"end": 2567,
"score": 0.9935034513473511,
"start": 2557,
"tag": "PASSWORD",
"value": "passphrase"
}
] | util/index.iced | taterbase/node-keybase | 10 | triplesec = require 'triplesec'
constants = require '../constants'
crypto = require 'crypto'
{BufferOutStream} = require 'iced-spawn'
{master_ring} = require('gpg-wrapper').keyring
{make_esc} = require 'iced-error'
KeyManager = require('kbpgp').KeyManager
Encryptor = require('triplesec').Encryptor
class StatusParser
constructor : () ->
@_all = []
@_table = []
parse : ({buf}) ->
lines = buf.toString('utf8').split /\r?\n/
for line in lines
words = line.split /\s+/
if words[0] is '[GNUPG:]'
@_all.push words[1...]
@_table[words[1]] = words[2...]
@
lookup : (key) -> @_table[key]
module.exports =
gen_pwh: ({passphrase, salt}, cb) ->
if not (salt instanceof Buffer) then salt = new Buffer salt, 'hex'
enc = new triplesec.Encryptor
key: new Buffer(passphrase, 'utf8'),
version: constants.triplesec.version
extra_keymaterial = constants.pwh.derived_key_bytes + constants.openpgp.derived_key_bytes
await enc.resalt {salt, extra_keymaterial}, defer err, km
unless err?
_pwh = km.extra.slice(0, constants.pwh.derived_key_bytes)
_salt = enc.salt.to_buffer()
_pwh_version = triplesec.CURRENT_VERSION
cb err, _pwh, _salt, _pwh_version
gen_hmac_pwh : ( {passphrase, salt, login_session}, cb) ->
await module.exports.gen_pwh { passphrase, salt }, defer err, pwh
unless err?
hmac_pwh = crypto.createHmac('SHA512', pwh)
.update(new Buffer(login_session, 'base64'))
.digest('hex')
cb err, hmac_pwh
import_from_p3skb : ( {raw, passphrase}, cb ) ->
esc = make_esc cb, "NodeKeybase::import_from_p3skb"
await KeyManager.import_from_p3skb {raw}, esc defer km, warnings
await km.unlock_p3skb {tsenc: new Encryptor(key: new Buffer(passphrase, 'utf8'))}, esc defer()
await km.sign {}, esc defer()
await km.export_pgp_private_to_client {passphrase}, defer err, key_data
cb err, key_data
#gen_key: ({username, passphrase}, cb) ->
# ring = master_ring()
# console.log ring
# host = constants.canonical_host
# email = "#{username}@#{host}"
# script = [
# "%echo generating"
# "Key-Type: RSA"
# "Key-Length: #{constants.keygen.master.bits}"
# "Key-Usage: sign,auth"
# "Subkey-Type: RSA"
# "Subkey-Length: #{constants.keygen.subkey.bits}"
# "Subkey-Usage: encrypt"
# "Name-Real: #{host}/#{username}"
# "Name-Email: #{email}"
# "Expire-date: #{constants.keygen.expire}"
# "Passphrase: #{passphrase}"
# "%commit"
# ]
# stdin = script.join("\n")
# args = [ "--batch", "--gen-key", "--keyid-format", "long", "--status-fd", '2' ]
# stderr = new BufferOutStream()
# await ring.gpg { args, stdin, stderr, secret : true }, defer err, out
# if err?
# console.log "Error: #{stderr.data().toString()}"
# else
# status_parser = (new StatusParser).parse { buf : stderr.data() }
# if (kc = status_parser.lookup('KEY_CREATED'))? and kc.length >= 2
# fingerprint = kc[1]
# key = ring.make_key { fingerprint, secret : true }
# await key.load esc defer()
# else
# err = new Error "Failed to parse output of key generation"
# cb err
| 47136 | triplesec = require 'triplesec'
constants = require '../constants'
crypto = require 'crypto'
{BufferOutStream} = require 'iced-spawn'
{master_ring} = require('gpg-wrapper').keyring
{make_esc} = require 'iced-error'
KeyManager = require('kbpgp').KeyManager
Encryptor = require('triplesec').Encryptor
class StatusParser
constructor : () ->
@_all = []
@_table = []
parse : ({buf}) ->
lines = buf.toString('utf8').split /\r?\n/
for line in lines
words = line.split /\s+/
if words[0] is '[GNUPG:]'
@_all.push words[1...]
@_table[words[1]] = words[2...]
@
lookup : (key) -> @_table[key]
module.exports =
gen_pwh: ({passphrase, salt}, cb) ->
if not (salt instanceof Buffer) then salt = new Buffer salt, 'hex'
enc = new triplesec.Encryptor
key: new Buffer(passphrase, 'utf8'),
version: constants.triplesec.version
extra_keymaterial = constants.pwh.derived_key_bytes + constants.openpgp.derived_key_bytes
await enc.resalt {salt, extra_keymaterial}, defer err, km
unless err?
_pwh = km.extra.slice(0, constants.pwh.derived_key_bytes)
_salt = enc.salt.to_buffer()
_pwh_version = triplesec.CURRENT_VERSION
cb err, _pwh, _salt, _pwh_version
gen_hmac_pwh : ( {passphrase, salt, login_session}, cb) ->
await module.exports.gen_pwh { passphrase, salt }, defer err, pwh
unless err?
hmac_pwh = crypto.createHmac('SHA512', pwh)
.update(new Buffer(login_session, 'base64'))
.digest('hex')
cb err, hmac_pwh
import_from_p3skb : ( {raw, passphrase}, cb ) ->
esc = make_esc cb, "NodeKeybase::import_from_p3skb"
await KeyManager.import_from_p3skb {raw}, esc defer km, warnings
await km.unlock_p3skb {tsenc: new Encryptor(key: new Buffer(passphrase, 'utf8'))}, esc defer()
await km.sign {}, esc defer()
await km.export_pgp_private_to_client {passphrase}, defer err, key_data
cb err, key_data
#gen_key: ({username, passphrase}, cb) ->
# ring = master_ring()
# console.log ring
# host = constants.canonical_host
# email = "#{username}@#{host}"
# script = [
# "%echo generating"
# "Key-Type: RSA"
# "Key-Length: #{constants.keygen.master.bits}"
# "Key-Usage: sign,auth"
# "Subkey-Type: RSA"
# "Subkey-Length: #{constants.keygen.subkey.bits}"
# "Subkey-Usage: encrypt"
# "Name-Real: #{host}/#{username}"
# "Name-Email: #{email}"
# "Expire-date: #{constants.keygen.expire}"
# "Passphrase: #{<PASSWORD>}"
# "%commit"
# ]
# stdin = script.join("\n")
# args = [ "--batch", "--gen-key", "--keyid-format", "long", "--status-fd", '2' ]
# stderr = new BufferOutStream()
# await ring.gpg { args, stdin, stderr, secret : true }, defer err, out
# if err?
# console.log "Error: #{stderr.data().toString()}"
# else
# status_parser = (new StatusParser).parse { buf : stderr.data() }
# if (kc = status_parser.lookup('KEY_CREATED'))? and kc.length >= 2
# fingerprint = kc[1]
# key = ring.make_key { fingerprint, secret : true }
# await key.load esc defer()
# else
# err = new Error "Failed to parse output of key generation"
# cb err
| true | triplesec = require 'triplesec'
constants = require '../constants'
crypto = require 'crypto'
{BufferOutStream} = require 'iced-spawn'
{master_ring} = require('gpg-wrapper').keyring
{make_esc} = require 'iced-error'
KeyManager = require('kbpgp').KeyManager
Encryptor = require('triplesec').Encryptor
class StatusParser
constructor : () ->
@_all = []
@_table = []
parse : ({buf}) ->
lines = buf.toString('utf8').split /\r?\n/
for line in lines
words = line.split /\s+/
if words[0] is '[GNUPG:]'
@_all.push words[1...]
@_table[words[1]] = words[2...]
@
lookup : (key) -> @_table[key]
module.exports =
gen_pwh: ({passphrase, salt}, cb) ->
if not (salt instanceof Buffer) then salt = new Buffer salt, 'hex'
enc = new triplesec.Encryptor
key: new Buffer(passphrase, 'utf8'),
version: constants.triplesec.version
extra_keymaterial = constants.pwh.derived_key_bytes + constants.openpgp.derived_key_bytes
await enc.resalt {salt, extra_keymaterial}, defer err, km
unless err?
_pwh = km.extra.slice(0, constants.pwh.derived_key_bytes)
_salt = enc.salt.to_buffer()
_pwh_version = triplesec.CURRENT_VERSION
cb err, _pwh, _salt, _pwh_version
gen_hmac_pwh : ( {passphrase, salt, login_session}, cb) ->
await module.exports.gen_pwh { passphrase, salt }, defer err, pwh
unless err?
hmac_pwh = crypto.createHmac('SHA512', pwh)
.update(new Buffer(login_session, 'base64'))
.digest('hex')
cb err, hmac_pwh
import_from_p3skb : ( {raw, passphrase}, cb ) ->
esc = make_esc cb, "NodeKeybase::import_from_p3skb"
await KeyManager.import_from_p3skb {raw}, esc defer km, warnings
await km.unlock_p3skb {tsenc: new Encryptor(key: new Buffer(passphrase, 'utf8'))}, esc defer()
await km.sign {}, esc defer()
await km.export_pgp_private_to_client {passphrase}, defer err, key_data
cb err, key_data
#gen_key: ({username, passphrase}, cb) ->
# ring = master_ring()
# console.log ring
# host = constants.canonical_host
# email = "#{username}@#{host}"
# script = [
# "%echo generating"
# "Key-Type: RSA"
# "Key-Length: #{constants.keygen.master.bits}"
# "Key-Usage: sign,auth"
# "Subkey-Type: RSA"
# "Subkey-Length: #{constants.keygen.subkey.bits}"
# "Subkey-Usage: encrypt"
# "Name-Real: #{host}/#{username}"
# "Name-Email: #{email}"
# "Expire-date: #{constants.keygen.expire}"
# "Passphrase: #{PI:PASSWORD:<PASSWORD>END_PI}"
# "%commit"
# ]
# stdin = script.join("\n")
# args = [ "--batch", "--gen-key", "--keyid-format", "long", "--status-fd", '2' ]
# stderr = new BufferOutStream()
# await ring.gpg { args, stdin, stderr, secret : true }, defer err, out
# if err?
# console.log "Error: #{stderr.data().toString()}"
# else
# status_parser = (new StatusParser).parse { buf : stderr.data() }
# if (kc = status_parser.lookup('KEY_CREATED'))? and kc.length >= 2
# fingerprint = kc[1]
# key = ring.make_key { fingerprint, secret : true }
# await key.load esc defer()
# else
# err = new Error "Failed to parse output of key generation"
# cb err
|
[
{
"context": "E_PR_USER\n\n process.env.GH_RELEASE_PR_TOKEN = 'bogus-token'\n process.env.GH_RELEASE_PR_USER = 'foo'\n\n ",
"end": 375,
"score": 0.964513897895813,
"start": 364,
"tag": "KEY",
"value": "bogus-token"
},
{
"context": " @user = @robot.brain.userForId('1', { name: 'jasmine' })\n done()\n @robot.run()\n\n afterEach ->",
"end": 650,
"score": 0.715887188911438,
"start": 643,
"tag": "USERNAME",
"value": "jasmine"
}
] | test/gh-release-pr-test.coffee | banyan/hubot-gh-release-pr | 3 | assert = require 'power-assert'
sinon = require 'sinon'
Hubot = require 'hubot'
TextMessage = Hubot.TextMessage
GitHubApi = require 'github'
describe 'gh-release-pr', ->
beforeEach (done) ->
@ghReleasePrToken = process.env.GH_RELEASE_PR_TOKEN
@ghReleasePrUser = process.env.GH_RELEASE_PR_USER
process.env.GH_RELEASE_PR_TOKEN = 'bogus-token'
process.env.GH_RELEASE_PR_USER = 'foo'
@user = null
@robot = new Hubot.loadBot null, 'mock-adapter', false, "Hubot"
@robot.adapter.on 'connected', =>
require('../src/gh-release-pr')(@robot)
@user = @robot.brain.userForId('1', { name: 'jasmine' })
done()
@robot.run()
afterEach ->
@server.restore()
@robot.shutdown()
process.env.GH_RELEASE_PR_TOKEN = @ghReleasePrToken
process.env.GH_RELEASE_PR_USER = @ghReleasePrUser
xit 'respond to release', (done) ->
@robot.adapter.on 'send', (envelope, strings) ->
console.log "strings: #{strings}"
done()
@robot.receive(new TextMessage(@user, 'hubot release repo production'))
| 86757 | assert = require 'power-assert'
sinon = require 'sinon'
Hubot = require 'hubot'
TextMessage = Hubot.TextMessage
GitHubApi = require 'github'
describe 'gh-release-pr', ->
beforeEach (done) ->
@ghReleasePrToken = process.env.GH_RELEASE_PR_TOKEN
@ghReleasePrUser = process.env.GH_RELEASE_PR_USER
process.env.GH_RELEASE_PR_TOKEN = '<KEY>'
process.env.GH_RELEASE_PR_USER = 'foo'
@user = null
@robot = new Hubot.loadBot null, 'mock-adapter', false, "Hubot"
@robot.adapter.on 'connected', =>
require('../src/gh-release-pr')(@robot)
@user = @robot.brain.userForId('1', { name: 'jasmine' })
done()
@robot.run()
afterEach ->
@server.restore()
@robot.shutdown()
process.env.GH_RELEASE_PR_TOKEN = @ghReleasePrToken
process.env.GH_RELEASE_PR_USER = @ghReleasePrUser
xit 'respond to release', (done) ->
@robot.adapter.on 'send', (envelope, strings) ->
console.log "strings: #{strings}"
done()
@robot.receive(new TextMessage(@user, 'hubot release repo production'))
| true | assert = require 'power-assert'
sinon = require 'sinon'
Hubot = require 'hubot'
TextMessage = Hubot.TextMessage
GitHubApi = require 'github'
describe 'gh-release-pr', ->
beforeEach (done) ->
@ghReleasePrToken = process.env.GH_RELEASE_PR_TOKEN
@ghReleasePrUser = process.env.GH_RELEASE_PR_USER
process.env.GH_RELEASE_PR_TOKEN = 'PI:KEY:<KEY>END_PI'
process.env.GH_RELEASE_PR_USER = 'foo'
@user = null
@robot = new Hubot.loadBot null, 'mock-adapter', false, "Hubot"
@robot.adapter.on 'connected', =>
require('../src/gh-release-pr')(@robot)
@user = @robot.brain.userForId('1', { name: 'jasmine' })
done()
@robot.run()
afterEach ->
@server.restore()
@robot.shutdown()
process.env.GH_RELEASE_PR_TOKEN = @ghReleasePrToken
process.env.GH_RELEASE_PR_USER = @ghReleasePrUser
xit 'respond to release', (done) ->
@robot.adapter.on 'send', (envelope, strings) ->
console.log "strings: #{strings}"
done()
@robot.receive(new TextMessage(@user, 'hubot release repo production'))
|
[
{
"context": "pEqual(subject(subject: {$type: 'Patient', name: 'maud',\n birthDate: {$gt: '1970'}}), 'subject:Patien",
"end": 1735,
"score": 0.9892005920410156,
"start": 1731,
"tag": "NAME",
"value": "maud"
}
] | fhir.js/test/querySpec.coffee | bethanysciences/NokiaFHIR | 24 | fhir = require('../src/middlewares/search')
assert = require('assert')
describe "linearizeParams:", ->
subject = fhir._query
it "simplest", ->
assert.deepEqual(subject(a:1,b:2) , [{param: 'a', value: [1]},{param: 'b',value: [2]}])
it "modifier", ->
assert.deepEqual(subject(a: {$exact: 2}) , [{param: 'a', modifier: ':exact', value: [2]}])
it "operator", ->
assert.deepEqual(subject(a: {$lt: 2}) , [{param: 'a', operator: 'lt', value: [2]}])
it "and", ->
assert.deepEqual(subject(a: {$and: [1, 2]}) , [{param: 'a', value: [1]}, {param: 'a',value: [2]}])
it "compound", ->
assert.deepEqual(subject(a: [1, 2]) , [{param: 'a', value: ['1|2']}])
it "or", ->
assert.deepEqual(subject(a: {$or: [1, 2]}) , [{param: 'a', value: [1,2]}])
it "operator & or", ->
assert.deepEqual(subject(a: {$exact: {$or: [1,2]}}) , [{param: 'a', modifier: ':exact', value: [1,2]}])
it "chained params", ->
assert.deepEqual(subject(subject: {name: {$exact: 'abu'}, birthDate: {$gt: '2000'}}), [
{param: 'subject.name', modifier: ':exact', value: ['abu']}
{param: 'subject.birthDate', operator: 'gt', value: ['2000']}
])
describe "test params builder", ->
subject = fhir.query
it "simple cases", ->
assert.deepEqual(subject(name: 'buka'), 'name=buka')
assert.deepEqual(subject(name: {$exact: 'buka'}), 'name:exact=buka')
assert.deepEqual(subject(birthDate: {$gt: '2011'}), 'birthDate=gt2011')
assert.deepEqual(subject(birthDate: {$gt: '2011', $lt: '2014'}), 'birthDate=gt2011&birthDate=lt2014')
assert.deepEqual(subject('subject.name': {$exact: 'maud'}), 'subject.name:exact=maud')
assert.deepEqual(subject(subject: {$type: 'Patient', name: 'maud',
birthDate: {$gt: '1970'}}), 'subject:Patient.name=maud&subject:Patient.birthDate=gt1970')
assert.deepEqual(subject('uri': 'http://test'), 'uri=http%3A%2F%2Ftest')
it "sort", ->
assert.deepEqual(subject(
$sort: [['name','asc'],['birthDate','desc'], 'vip']),
'_sort:asc=name&_sort:desc=birthDate&_sort=vip')
assert.deepEqual(
subject(subject: "id", questionnaire: "Allergies", _count: 1, $sort: [["authored", "desc"]]),
'subject=id&questionnaire=Allergies&_count=1&_sort:desc=authored')
it "include", ->
assert.deepEqual(subject($include: {Observation: "related.component", Patient: ["link.other", "careProvider"]}),
'_include=Observation.related.component&_include=Patient.link.other&_include=Patient.careProvider')
it "or", ->
assert.deepEqual(subject(name: {$or: ['bill', 'ted']}),
'name=bill%2Cted')
| 126800 | fhir = require('../src/middlewares/search')
assert = require('assert')
describe "linearizeParams:", ->
subject = fhir._query
it "simplest", ->
assert.deepEqual(subject(a:1,b:2) , [{param: 'a', value: [1]},{param: 'b',value: [2]}])
it "modifier", ->
assert.deepEqual(subject(a: {$exact: 2}) , [{param: 'a', modifier: ':exact', value: [2]}])
it "operator", ->
assert.deepEqual(subject(a: {$lt: 2}) , [{param: 'a', operator: 'lt', value: [2]}])
it "and", ->
assert.deepEqual(subject(a: {$and: [1, 2]}) , [{param: 'a', value: [1]}, {param: 'a',value: [2]}])
it "compound", ->
assert.deepEqual(subject(a: [1, 2]) , [{param: 'a', value: ['1|2']}])
it "or", ->
assert.deepEqual(subject(a: {$or: [1, 2]}) , [{param: 'a', value: [1,2]}])
it "operator & or", ->
assert.deepEqual(subject(a: {$exact: {$or: [1,2]}}) , [{param: 'a', modifier: ':exact', value: [1,2]}])
it "chained params", ->
assert.deepEqual(subject(subject: {name: {$exact: 'abu'}, birthDate: {$gt: '2000'}}), [
{param: 'subject.name', modifier: ':exact', value: ['abu']}
{param: 'subject.birthDate', operator: 'gt', value: ['2000']}
])
describe "test params builder", ->
subject = fhir.query
it "simple cases", ->
assert.deepEqual(subject(name: 'buka'), 'name=buka')
assert.deepEqual(subject(name: {$exact: 'buka'}), 'name:exact=buka')
assert.deepEqual(subject(birthDate: {$gt: '2011'}), 'birthDate=gt2011')
assert.deepEqual(subject(birthDate: {$gt: '2011', $lt: '2014'}), 'birthDate=gt2011&birthDate=lt2014')
assert.deepEqual(subject('subject.name': {$exact: 'maud'}), 'subject.name:exact=maud')
assert.deepEqual(subject(subject: {$type: 'Patient', name: '<NAME>',
birthDate: {$gt: '1970'}}), 'subject:Patient.name=maud&subject:Patient.birthDate=gt1970')
assert.deepEqual(subject('uri': 'http://test'), 'uri=http%3A%2F%2Ftest')
it "sort", ->
assert.deepEqual(subject(
$sort: [['name','asc'],['birthDate','desc'], 'vip']),
'_sort:asc=name&_sort:desc=birthDate&_sort=vip')
assert.deepEqual(
subject(subject: "id", questionnaire: "Allergies", _count: 1, $sort: [["authored", "desc"]]),
'subject=id&questionnaire=Allergies&_count=1&_sort:desc=authored')
it "include", ->
assert.deepEqual(subject($include: {Observation: "related.component", Patient: ["link.other", "careProvider"]}),
'_include=Observation.related.component&_include=Patient.link.other&_include=Patient.careProvider')
it "or", ->
assert.deepEqual(subject(name: {$or: ['bill', 'ted']}),
'name=bill%2Cted')
| true | fhir = require('../src/middlewares/search')
assert = require('assert')
describe "linearizeParams:", ->
subject = fhir._query
it "simplest", ->
assert.deepEqual(subject(a:1,b:2) , [{param: 'a', value: [1]},{param: 'b',value: [2]}])
it "modifier", ->
assert.deepEqual(subject(a: {$exact: 2}) , [{param: 'a', modifier: ':exact', value: [2]}])
it "operator", ->
assert.deepEqual(subject(a: {$lt: 2}) , [{param: 'a', operator: 'lt', value: [2]}])
it "and", ->
assert.deepEqual(subject(a: {$and: [1, 2]}) , [{param: 'a', value: [1]}, {param: 'a',value: [2]}])
it "compound", ->
assert.deepEqual(subject(a: [1, 2]) , [{param: 'a', value: ['1|2']}])
it "or", ->
assert.deepEqual(subject(a: {$or: [1, 2]}) , [{param: 'a', value: [1,2]}])
it "operator & or", ->
assert.deepEqual(subject(a: {$exact: {$or: [1,2]}}) , [{param: 'a', modifier: ':exact', value: [1,2]}])
it "chained params", ->
assert.deepEqual(subject(subject: {name: {$exact: 'abu'}, birthDate: {$gt: '2000'}}), [
{param: 'subject.name', modifier: ':exact', value: ['abu']}
{param: 'subject.birthDate', operator: 'gt', value: ['2000']}
])
describe "test params builder", ->
subject = fhir.query
it "simple cases", ->
assert.deepEqual(subject(name: 'buka'), 'name=buka')
assert.deepEqual(subject(name: {$exact: 'buka'}), 'name:exact=buka')
assert.deepEqual(subject(birthDate: {$gt: '2011'}), 'birthDate=gt2011')
assert.deepEqual(subject(birthDate: {$gt: '2011', $lt: '2014'}), 'birthDate=gt2011&birthDate=lt2014')
assert.deepEqual(subject('subject.name': {$exact: 'maud'}), 'subject.name:exact=maud')
assert.deepEqual(subject(subject: {$type: 'Patient', name: 'PI:NAME:<NAME>END_PI',
birthDate: {$gt: '1970'}}), 'subject:Patient.name=maud&subject:Patient.birthDate=gt1970')
assert.deepEqual(subject('uri': 'http://test'), 'uri=http%3A%2F%2Ftest')
it "sort", ->
assert.deepEqual(subject(
$sort: [['name','asc'],['birthDate','desc'], 'vip']),
'_sort:asc=name&_sort:desc=birthDate&_sort=vip')
assert.deepEqual(
subject(subject: "id", questionnaire: "Allergies", _count: 1, $sort: [["authored", "desc"]]),
'subject=id&questionnaire=Allergies&_count=1&_sort:desc=authored')
it "include", ->
assert.deepEqual(subject($include: {Observation: "related.component", Patient: ["link.other", "careProvider"]}),
'_include=Observation.related.component&_include=Patient.link.other&_include=Patient.careProvider')
it "or", ->
assert.deepEqual(subject(name: {$or: ['bill', 'ted']}),
'name=bill%2Cted')
|
[
{
"context": "# ### begin original comment by jashkenas\n# The CoffeeScript parser is generated by [Jison]",
"end": 41,
"score": 0.9989407062530518,
"start": 32,
"tag": "USERNAME",
"value": "jashkenas"
},
{
"context": " parser is generated by [Jison](http://github.com/zaach/jison)\n# from this grammar file. Jison is a botto",
"end": 115,
"score": 0.9996086359024048,
"start": 110,
"tag": "USERNAME",
"value": "zaach"
},
{
"context": "nto `lib/parser.js`.\n# ### end original comment by jashkenas\n\n# The only dependency is on the **Jison.Parser**",
"end": 1122,
"score": 0.9989209175109863,
"start": 1113,
"tag": "USERNAME",
"value": "jashkenas"
},
{
"context": "ake up a line in a body.\n# The comment above is by jashkenas. Should Block be Expression?\n\n# Cup is the equi",
"end": 14561,
"score": 0.6294862627983093,
"start": 14554,
"tag": "USERNAME",
"value": "jashken"
},
{
"context": " line in a body.\n# The comment above is by jashkenas. Should Block be Expression?\n\n# Cup is the equiva",
"end": 14563,
"score": 0.571112334728241,
"start": 14561,
"tag": "NAME",
"value": "as"
},
{
"context": "ivalence of the original grammar.coffee written by jashkenas\n# I repace Line with Cup because Line is too easi",
"end": 14670,
"score": 0.9380532503128052,
"start": 14661,
"tag": "USERNAME",
"value": "jashkenas"
}
] | src/parser.coffee | chaosim/coffee-script | 1 | # ### begin original comment by jashkenas
# The CoffeeScript parser is generated by [Jison](http://github.com/zaach/jison)
# from this grammar file. Jison is a bottom-up parser generator, similar in
# style to [Bison](http://www.gnu.org/software/bison), implemented in JavaScript.
# It can recognize [LALR(1), LR(0), SLR(1), and LR(1)](http://en.wikipedia.org/wiki/LR_grammar)
# type grammars. To create the Jison parser, we list the pattern to match
# on the left-hand side, and the action to take (usually the creation of syntax
# tree nodes) on the right. As the parser runs, it
# shifts tokens from our token stream, from left to right, and
# [attempts to match](http://en.wikipedia.org/wiki/Bottom-up_parsing)
# the token sequence against the grammar below. When a match can be made, it
# reduces into the [nonterminal](http://en.wikipedia.org/wiki/Terminal_and_nonterminal_symbols)
# (the enclosing name at the top), and we proceed from there.
#
# If you run the `cake build:parser` command, Jison constructs a parse table
# from our grammar and saves it into `lib/parser.js`.
# ### end original comment by jashkenas
# The only dependency is on the **Jison.Parser**.
# {Parser} = require 'jison' # the only dependency is not needed any more.
# Jison DSL
# ---------
{identifier, keyword} = require './parserutil'
yy = require '../lib/coffee-script/nodes'
# some global variable used by the parser
text = '' # the text which is being parsed, this could be any sequence, not only strincs.
textLength = 0 # the length of text
cursor = 0 # the current position of parsing, use text[cursor] to get current character in parsed stream
tabWidth = 4 # the tab width, when meet a tab, column += tabWidth
# the current currentIndentWidth, initial value is 0
# when indenting, it increases and recorded and used as parameter of OUTDENT, to verify outdent
currentIndentWidth = 0
# minimal value for one indent
minIndentWidth = 2
# when blockMode is true, parse indent block, otherwise parse a single line block
blockMode = true
atBeginOfLine = true
parseCache = {} # {tag+start: [result, cursor]}, memorized parser result
symbolToTagMap = {} # {symbol: tag}, from rule symbol map to a shorter memo tag, for memory efficeny
tags = {} # {tag: true}, record tags that has been used to avoid conflict
symbolToParentsMap = {} # {symbol:[parent...]}, the map from symbol to their all of parents for left recursive symbols
baseRules = {} # {symbole: rule's function}
# parse @data from @start with the rule function @root
exports.parse = (data, root, options) ->
o = options or {}
if typeof start == 'object' then start = 0; options = start
text = data
textLength = text.length
cursor = o.start or 0
tabWidth = o.tabWidth or 4
parseCache = {}
baseRules = {}
symbolToParentsMap = {}
memoNames = ['Expression', 'Body', 'Cup', 'Block', 'Invocation', 'Value', 'Assignable',
'SimpleAssignable', 'For', 'If', 'Operation']
for symbol in memoNames then setMemoTag(symbol)
addLeftRecursiveParentChildrens(
Expression: ['Invocation', 'Value', 'Operation', 'Invocation', 'Assign', 'While', 'For'],
Value: ['Assignable'],
Assignable: ['SimpleAssignable'],
SimpleAssignable: ['Value', 'Invocation'],
Assign: ['Assignable'],
Invocation: ['Value', 'Invocation'],
For: ['Expression'],
If: ['Expression'],
Operation: ['Expression', 'SimpleAssignable']
)
setRecursiveRules(grammar)
setMemorizeRules(grammar, ['Body', 'Cup', 'Block', 'Statement'])
generateLinenoColumn()
grammar.Root(0)
lineColumnList = []
generateLinenoColumn = () ->
i = 0
lineno = column = 0
while i<textLength
c = text[i]
if c is '\r'
lineColumnList[i++] = [lineno, column]
if text[i] is '\n'
lineColumnList[i++] = [lineno, column]
lineno++; column = 0
else if c is '\n'
lineColumnList[i++] = [lineno, column]
lineno++; column = 0
else if c is '\t'
lineColumnList[i++] = [lineno, column]
column += tabWidth
else
lineColumnList[i++] = [lineno, column]
column++
lineColumnList[i] = [lineno+1, 0]
# some utilities used by the parser
# on succeed any matcher should not return a value which is not null or undefined, except the root symbol.
# set a shorter start part of symbol as the tag used in parseCache
setMemoTag = (symbol) ->
i = 1
while 1
if hasOwnProperty.call(tags, symbol.slice(0, i)) in tags then i++
else break
tag = symbol.slice(0, i)
symbolToTagMap[symbol] = tag
tags[tag] = true
# set the symbols in grammar which memorize their rule's result.
setMemorizeRules = (grammar, symbols) ->
for symbol in symbols
baseRules[symbol] = grammar[symbol]
grammar[symbol] = memorize(symbol)
# set all the symbols in grammar which are left recursive.
setRecursiveRules = (grammar) ->
map = symbolToParentsMap
for symbol of map
baseRules[symbol] = grammar[symbol]
grammar[symbol] = recursive(symbol)
# add direct left recursive parent->children relation for @parentChildrens to global variable symbolToParentsMap
addLeftRecursiveParentChildrens = (parentChildrens...) ->
map = symbolToParentsMap
for parentChildren in parentChildrens
for parent, children of parentChildren
for symbol in children
list = map[symbol] ?= []
if parent isnt symbol and parent not in list then list.push parent
# add left recursive parent->children relation to @symbolToParentsMap for symbols in @recursiveCircles
addLeftRecursiveCircles = (recursiveCircles...) ->
map = symbolToParentsMap
for circle in recursiveCircles
i = 0
length = circle.length
while i<length
if i==length-1 then j = 0 else j = i+1
symbol = circle[i]; parent = circle[j]
list = map[symbol] ?= []
if parent isnt symbol and parent not in list then list.push parent
i++
# make @symbol a left recursive symbol, which means to wrap baseRules[symbol] with recursive,
# when recursiv(symbol)(start) is executed, all of left recursive rules which @symbol depend on will be computed
# and memorized, until no change exists.
recursive = (symbol) ->
map = symbolToParentsMap
tag = symbolToTagMap[symbol]
agenda = [] # dynamic list included all left recursive symbols which depend on @symbol
addParent = (parent) ->
agenda.unshift(parent)
parents = map[parent]
if parents then for parent in parents
if parent not in agenda
agenda.unshift(parent)
addParent(parent)
addParent(symbol)
(start) ->
hash0 = tag+start
m = parseCache[hash0]
if m then cursor = m[1]; return m[0]
while agenda.length # exist any symbols which depend on the changed result?
symbol = agenda.pop()
hash = tag+start
m = parseCache[hash]
if not m then m = parseCache[hash] = [undefined, start]
rule = baseRules[symbol]
changed = false
while 1
if (result = rule(start)) and (result isnt m[0] or cursor isnt m[1])
parseCache[hash] = m = [result, cursor]
changed = true
else break
# if any new result exists, recompute the symbols which may be depend on the new result.
if changed then for parent in map[symbol]
if parent not in agenda then agenda.push parent
m = parseCache[hash0]
cursor = m[1]
m[0]
# memorize result and cursor for @symbol which is not left recursive.
# left recursive should be wrapped by recursive(symbol)!!!
exports.memorize = memorize = (symbol) ->
tag = symbolToTagMap[symbol]
rule = baseRules[symbol]
(start) ->
hash = tag+start
m = parseCache[hash]
if m then cursor = m[1]; m[0]
else
result = rule(start)
parseCache[hash] = [result, cursor]
result
# lookup the memorized result and reached cursor for @symbol at the position of @start
exports.memo = memo = (symbol) ->
tag = symbolToTagMap[symbol]
(start) ->
m = parseCache[tag+start]
if m then cursor = m[1]; m[0]
# compute exps in sequence, return the result of the last one.
# andp and orp are used to compose the matchers
# the effect is the same as by using the Short-circuit evaluation, like below:
# exps[0](start) and exps[2](cursor] ... and exps[exps.length-1](cursor)
andp = (exps...) -> (start) ->
cursor = start
for exp in exps
if not(result = exp(cursor)) then return result
return result
# compute exps in parallel, return the result of the first which is not evaluated to false.
# the effect is the same as by using the Short-circuit evaluation, like below:
# exps[0](start) or exps[2](cursor] ... or exps[exps.length-1](cursor)
orp = (exps...) -> (start) ->
for exp in exps
if result = exp(start) then return result
return result
# applicaton of not operation
# notp is not useful except to compose the matchers.
# It's not unnessary, low effecient and ugly to write "notp(exp)(start)",
# so don't write "notp(exp)(start)", instead "not exp(start)".
notp = (exp) -> (start) -> not exp(start)
# compute the global variable lineno and column Synchronously
# It's important and noteworthy that how to keep lineno and column having the correct value at any time.
# pay attention to that!
# comment: have record in advance, so the code on lineno is removed.
next = () ->
return text[cursor++]
# c = text[cursor++]
# if c is '\r'
# if text[cursor] is '\n' then c = text[cursor++]
# lineno++; column = 0
# else if c is '\n' then lineno++; column = 0
# return c
# change both cursor and column
step = (n=1) -> cursor += n; # column += n
# match one character
char = (c) -> (start) ->
cursor = start
setlinecolumen()
if text[cursor++]==c then cursor = start+1; return c
# match a literal string.
# There should not have newline character("\r\n") in literal string!
# Otherwise the lineno and column will be updated wrongly.
# now the relation is not existed any more because the lineno and column is computed in advance.
literal = (string) -> (start) ->
n = string.length
if text.slice(start, stop = start+n)==string
cursor = stop; true
# cursor = stop; column += n; true
# zero or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces, and the columen is updated in this manner.
# the whitespaces width+1 is returned as result( +1 to avoid 0 as the result, which is a falsic value)!!!
# newline is not included!!!
spaces = (start) ->
n = 0
cursor = start
while 1
c = text[cursor]
cursor++
if c==' ' then n++
else if c=='\t' then n += tabWidth
else break
# column += n
return n+1
# one or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces, and the columen is updated in this manner.
# newline is not included!!!
spaces1 = (start) ->
n = 0
cursor = start
while 1
c = text[cursor]
cursor++
if c==' ' then n++
else if c=='\t' then n += tabWidth
else break
if n
# column += n;
n
# first, match @left, then match @item, at last match @right
# left and right is set to spaces by default.
wrap = (item, left=spaces, right=spaces) -> (start) ->
if left(start) and result = item(cursor) and right(cursor)
return result
# is a letter used in identifer?
# follow word such as return, break, etc.
identifierLetter_ = () ->
c = text[cursor]
c is '$' or c is '_' or 'a'<=c<'z' or 'A'<=c<='Z' or '0'<=c<='9'
# Grammatical rules of Coffeescript
exports.grammar = grammar = cs = {}
# The **Root** is the top-level node in the syntax tree.
cs.Root = (start) ->
skipHeadSpaces_() # skip the spaces at the begin of program, including line comment.
if cursor is textLength then new yy.Block
else
body = cs.Body(cursor)
# and skipTailSpaces(cursor); body # is skipTailSpaces necessary? wait to see.
# skip the spaces at the begin of program, including line comment.
skipHeadSpaces_ = () ->
while c = text[cursor]
if c==' ' or c=='t' or c== '\r' or c== '\n' then cursor++; continue
else if c is '#' and not (text[cursor+1] isnt '#'or text[cursor+2] isnt '#') # should stop at BlockComment
while c = text[cursor++]
if c=='\r' or c=='n' then break
else if c is '\\'
throw new ParseError(cursor, "should not have ';' or '\' at the begin of program.")
else if c is ';' # maybe should allow ; at begin of line?
throw new ParseError(cursor, "should not have ';' or '\' at the begin of program.")
else break
if lineColumnList[cursor][1] isnt 0
throw new ParseError(cursor, "Effective Code Cup of the whole program should start at column 0, the begin of a line.")
# Any list of statements and expressions, separated by line breaks or semicolons.
cs.Body = (start) ->
cups = []
while 1
if cs.CupLeftEdge_() and (cup = cs.Cup(cursor)) and cs.CupRightEdge_()
cups.push cup
else break
# if cups do not include effective code, should have yy.literal('undefined') added.
yy.Block.wrap(cups)
cs.CupLeftEdge_ = () ->
if blockMode
if atBeginOfLine
atBeginOfLine = false
spaces_() and (lineColumnList[cursor][1]==currentIndentWidth)
else spaces_() and (concatenation_() or true) and spaces_()
else
spaces_() and (concatenation_() or true) and spaces_()
cs.CupRightEdge_ = () ->
spaces_() and (semicolon_() or (linecomment_() or newline_()) and atBeginOfLine= true)
# before INDENT, should have skip all of whitespaces, newline, linecomment, and give blockMode a determined value.
cs.INDENT = (start) ->
if not blockMode then true
else if column >= currentIndentWidth+minIndentWidth
currentIndentWidth = column
cs.OUTDENT = (indent) -> (start) ->
if blockMode then column is indent
else column is 0
# skip spaces until meet an end of line, then skip to the begin of next line
# line comment at the tail of the line is skipped too.
spacesUntilEndOfLine = (start) ->
cursor = start
while 1
c = text[cursor]
if c is '\t' or c is ' ' then cursor++; continue
else if c is '#'
if text[cursor+1] is '#' and text[cursor+2] is '#'
new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
cursor++
while 1
c = text[cursor]
if c is '\r' and text[cursor+1] is '\n'
cursor +=2; return true
else if c is '\n' then cursor++; return true
else if c is undefined then return
else cursor++
else break
if c is '\r' and text[cursor+1] is '\n'
cursor +=2; true
else if c is '\n' then cursor++; true
# Block and statements, which make up a line in a body.
# The comment above is by jashkenas. Should Block be Expression?
# Cup is the equivalence of the original grammar.coffee written by jashkenas
# I repace Line with Cup because Line is too easily misunderstood.
# Line in grammar.coffe can be a part of phisical line or a concatenation of multiple physical line,
# even a single compound statement of multiple physical line.
# Cup is a intact logic unit of code piece, same as Line in grammar.coffee.
# The Cup can be big or small, and a Cup can be put in the Body of another bigger Cup.
# So let's have a cup of coffee.
cs.Cup = (start) ->
cs.Statement(start)\
or cs.Expression(start)\
or cs.EmptyCup(start)
# A cs.Cup begin with semicolon, or has only spaces, line comment(which obey indent/unindent
cs.EmptyCup = (start) ->
c = text[start]
c is ';' or c is '\r' or c is '\n'
# Pure statements which cannot be expressions.
cs.Statement = (start) ->
cs.Return(start)\
or cs.BlockComment(start)\
or cs.Break(start)\
or cs.Continue(start)
# Break and Continue is my repacement to STATEMENT in original grammar.coffee
# A return statement from a function body.
cs.Return = (start) ->
if RETURN(start) and spacesConcatLine_()
if exp = cs.Expression(cursor) then new yy.Return exp
else new yy.Return
### ###
###
dfasfdfs
dsfdfsa
asdfdsfa
###
# A block comment
# BlockComment should obey the indent/outdent rule, but line comment don't need, right? -->yes.
# BlockComment will return yy.Comment and generate comment in object javascript code
cs.BlockComment = (start) ->
cursor = start
if text[cursor] is '#'and text[cursor+1] is '#' and text[cursor+2] is '#'
cursor += 3
while 1
c = text[cursor]
if not c then return
if c is '#' and text[cursor+1] is '#' and text[cursor+2] is '#'
cursor += 3
return new yy.Comment text.slice(start+3, cursor-3)
cs.Break = (start) -> if BREAK(start) then new yy.Literal('break')
cs.Continue = (start) -> if CONTINUE(start) then new yy.Literal('continue')
# All the different types of expressions in our language. The basic unit of
# CoffeeScript is the **cs.Expression** -- everything that can be an expression
# is one. Blocks serve as the building blocks of many other grammar, making
# them somewhat circular.
cs.Expression = (start) ->
recValue(start)\
or recOperation(start)\
or recInvocation(start)\
or recAssign(start)\
or recIf(start)\
or recWhile(start)\
or recFor(start)\
or cs.Switch(start)\
or cs.Throw(start)\
or cs.Class(start)\
or cs.Try(start)\
or cs.Code(start) #(param) -> ... or -> ..
recValue = memo('Value')
recOperation = memo('Operation')
recInvocation = memo('Invocation')
recAssign = memo('Assign')
recIf = memo('If')
recWhile = memo('While')
recFor = memo('For')
# An indented block of expressions. Note that the [Rewriter](rewriter.html)
# will convert some postfix forms into blocks for us, by adjusting the token stream.
# as described below, I should consider how to add block of single line which is consisted of mulitple line cup.
cs.Block = (start) ->
# two mode: single line block mode, indent block mode
if n = INDENT(start)
outdent = OUTDENT(n)
if outdent(cursor) then new yy.Block
else if body = cs.Body(cursor) and outdent(cursor) then body
# A literal identifier, a variable name or property.
cs.Identifier = (start) -> cs.IDENTIFIER(start)
# All of our immediate values. Generally these can be passed straight
# through and printed to JavaScript.
cs.Literal = (start) ->
cs.NUMBER(start)\
or cs.STRING(start)\
or cs.JS(start)\
or cs.REGEX(start)\
or cs.DEBUGGER(start)\
or cs.UNDEFINED(start)\
or cs.NULL(start)\
or cs.BOOL(start)
recAssignable = memo('Assignable')
# Assignment of a variable, property, or index to a value.
cs.Assign = (start) ->
if left = recAssignable(start) and assignOp_(cursor)
if exp = cs.Expression(cursor)\
# it's right to use TERMINATOR here? how about meeting a semicolon?
# spaces till newline, or line comment
or cs.TERMINATOR(cursor) and exp = cs.Expression(cursor)\
or n = INDENT(cursor) and exp = cs.Expression and (outdent = OUTDENT(n)) and outdent(cursor)
new yy.Assign left, exp
symbolOperator_ =(op) ->
# : + && || etc.
op = literal_(op)
() ->
spacesConcatLine_() and op() and spacesConcatLine_()
assignOp_ = symbolOperator_('=')
# Assignment when it happens within an object literal. The difference from
# the ordinary **cs.Assign** is that these allow numbers and strings as keys.
cs.AssignObj = (start) ->
if x = cs.BlockComment then return x
if left = cs.ObjAssignable(start)
if wrap(':')
if exp = cs.Expression(cursor)\
or INDENT(cursor) and exp = cs.Expression(cursor) and OUTDENT(cursor)
new yy.Assign LOC(1)(new yy.Value(left)), exp, 'object'
else
new yy.Value left
cs.ObjAssignable = (start) ->
cs.Identifier(start)\
or Number(start)\
or String(start)\
or cs.ThisProperty(start)
# The **cs.Code** node is the function literal. It's defined by an indented block
# of **cs.Block** preceded by a function arrow, with an optional parameter
# list.
cs.Code = (start) ->
if PARAM_START(start) and params = cs.ParamList(cursor) and PARAM_END(cursor) \
and funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
new yy.Code params, body, funcGlyph
else if funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
new yy.Code [], body, funcGlyph
# CoffeeScript has two different symbols for functions. `->` is for ordinary
# functions, and `=>` is for functions bound to the current value of *this*.
cs.FuncGlyph = (start) ->
if wrap('->')(start) then 'func'
else if wrap('=>') then 'boundfunc'
# An optional, trailing comma.
cs.OptComma = (start) ->
spaces(start)
if char(',') then spaces(cursor); [true]
[false]
# The list of parameters that a function accepts can be of any length.
cs.ParamList = (start) ->
if param = cs.Param(start)
result = [param]
while 1
meetComma = cs.OptComma(cursor)
if cs.TERMINATOR(cursor) and param = cs.Param(cursor) then result.push(param)
else if INDENT(cursor)
params = cs.ParamList(cursor)
for p in params then result.push(p)
OUTDENT(cursor)
else if meetComma[0] and param = cs.Param(cursor) then result.push(param)
else break
result
# A single parameter in a function definition can be ordinary, or a splat
# that hoovers up the remaining arguments.
cs.Param = (start) ->
v = cs.ParamVar(start)
if wrap('...')(cursor) then new yy.Param v, null, on
else if wrap('=')(cursor) and exp = cs.Expression(cursor) then new yy.Param v, exp
else new yy.Param v
# Function Parameters
cs.ParamVar = (start) ->
cs.Identifier(start)\
or cs.ThisProperty(start)\
or cs.Array(start)\
or cs.Object(start)
# A splat that occurs outside of a parameter list.
cs.Splat = (start) ->
if exp = cs.Expression(start) and wrap('...')(cursor)
new yy.Splat exp
# Variables and properties that can be assigned to.
cs.SimpleAssignable = (start) ->
if value = recValue(start) and accessor = cs.Accessor(cursor) then value.add accessor
else if caller = recInvocation(start) and accessor = cs.Accessor(cursor)
new yy.Value caller, [].concat accessor
else if thisProp = cs.ThisProperty(start) then thisProp
else if name = cs.Identifier(start) then new yy.Value name
# Everything that can be assigned to.
cs.Assignable = (start) ->
recSimpleAssignable(start)\
or cs.newyyValue(cs.Array)(start)\
or cs.newyyValue(cs.Object)(start)
# The types of things that can be treated as values -- assigned to, invoked
# as functions, indexed into, named as a class, etc.
cs.Value = (start) ->
recAssignable(start)\
or cs.newyyValue(cs.Literal)(start)\
or cs.newyyValue(cs.Parenthetical)(start)\
or cs.newyyValue(cs.Range)(start)\
or cs.This(start)
# The general group of accessors into an object, by property, by prototype
# or by array index or slice.
cs.Accessor = (start) ->
if wrap('.') and id = cs.Identifier(cursor) then new yy.Access id
else if wrap('?.') and id = cs.Identifier(cursor) then new yy.Access id, 'soak'
else if wrap('::') and id = cs.Identifier(cursor)
new[LOC(1)(new yy.Access new yy.Literal('prototype')), LOC(2)(new yy.Access id)]
else if wrap('?::') and id = cs.Identifier(cursor)
[LOC(1)(new yy.Access new yy.Literal('prototype'), 'soak'), LOC(2)(new yy.Access id)]
else if wrap('::') then new Access new cs.Literal 'prototype'
else if index = cs.Index(start) then index
# Indexing into an object or array using bracket notation.
cs.Index = (start) ->
if INDEX_START(start) and val = cs.IndexValue(cursor) and INDEX_END(cursor) then val
if INDEX_SOAK(cursor) and cs.Index(cursor) # id?[1]
yy.extend $2, soak : yes
cs.IndexValue = (start) ->
if value = cs.Expression(start) then new yy.Index value
else if slice = cs.Slice(start) then new yy.Slice slice
# In CoffeeScript, an object literal is simply a list of assignments.
cs.Object = (start) ->
if leftBrace = wrap('{')(start)
spaces(cursor)
if char('}') then new yy.Obj [], leftBrace.generated
else if assigns = cs.AssignList(cursor)\
and cs.OptComma(cursor) and wrap('}')(cursor)
new yy.Obj assigns, leftBrace.generated
# Assignment of properties within an object literal can be separated by
# comma, as in JavaScript, or simply by newline.
cs.AssignList = (start) ->
if assign = cs.AssignObj(start)
result = [assign]
while 1
meetComma = cs.OptComma(cursor)
if cs.TERMINATOR(cursor) and assign = cs.AssignObj(cursor) then result.push(assign)
else if INDENT(cursor)
assigns = cs.AssignList(cursor)
for x in assigns then result.push(x)
OUTDENT(cursor)
else if meetComma[0] and assign = cs.AssignObj(cursor) then result.push(param)
else break
result
# cs.Class definitions have optional bodies of prototype property assignments,
# and optional references to the superclass.
# Shapes handled: `class`, `class Name`, `class Name extends Sup`,
# each with or without an indented body block.
cs.Class = (start) ->
  if CLASS(start)
    # named class
    if name = cs.SimpleAssignable(cursor)
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class name, sup, body
        else new yy.Class name, sup
      else if body = cs.Block(cursor) then new yy.Class name, null, body
      else new yy.Class name
    # anonymous class
    else
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class null, sup, body
        else new yy.Class null, sup
      else if body = cs.Block(cursor) then new yy.Class null, null, body
      else new yy.Class
# Ordinary function invocation, or a chained series of calls.
# NOTE(review): `$1`/`$2`/`$3` below are unconverted Jison action
# placeholders and are undefined in this hand-written parser — the named
# captures (m1, m2, ...) should be used instead. TODO finish conversion.
cs.Invocation = (start) ->
  # left recursive
  if m1 = recValue(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  else if m2 = recInvocation(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  if not m1 and not m2
    # NOTE(review): the bare-`super` case shadows `super args...` — the
    # second branch is unreachable because the first matches SUPER alone.
    if SUPER(start)
      new yy.Call 'super', [new yy.Splat new yy.Literal 'arguments']
    else if SUPER(start) and cs.Arguments(cursor)
      new yy.Call 'super', $2
# An optional existence check on a function.
# NOTE(review): only the last statement's value is returned, so the
# `emptyword` result is discarded — confirm intended semantics.
cs.OptFuncExist = (start) ->
  if emptyword(start) then no
  if FUNC_EXIST(start) then yes
# The list of arguments to a function call.
cs.Arguments = (start) ->
  if CALL_START(start)
    # NOTE(review): `args =` captures the whole and-chain (i.e. OptComma's
    # value), and the success branch yields `args` while the final line
    # returns `result` — these two paths look inconsistent. TODO confirm.
    if args = cs.ArgList(cursor) and cs.OptComma(cursor)
      args
    else result = []
    if CALL_END(cursor) then result
# A reference to the *this* current object.
# NOTE(review): `wrap('')` matches the empty string and so always succeeds;
# presumably `wrap('@')` was intended here and in cs.ThisProperty.
cs.This = (start) ->
  if THIS(start) then new yy.Value new yy.Literal 'this'
  if wrap('')(start) then new yy.Value new yy.Literal 'this'
# A reference to a property on *this*.
# NOTE(review): `$2` is an unconverted Jison placeholder (undefined here).
cs.ThisProperty = (start) ->
  if wrap('')(start) and cs.Identifier(cursor)
    new yy.Value LOC(1)(new yy.Literal('this')), [LOC(2)(new yy.Access($2))], 'this'
# The array literal.
# NOTE(review): `$2` is an unconverted Jison placeholder — the parsed
# ArgList should be captured into a local and used instead.
cs.Array = (start) ->
  if wrap('[')(start)
    if cs.ArgList(cursor) and cs.OptComma(cursor)
      result = new yy.Arr $2
    else result = new yy.Arr []
    if wrap(']')(cursor) then result
# Inclusive and exclusive range dots.
# Fix: '..' was tested before '...'; since literal('..') matches the first
# two dots of '...', the 'exclusive' branch was unreachable. Longest
# literal must be tried first.
cs.RangeDots = (start) ->
  if wrap('...')(start) then 'exclusive'
  else if wrap('..')(start) then 'inclusive'
# The CoffeeScript range literal, e.g. `[1..10]` / `[1...10]`.
# Fixes: a missing `and` before the closing-bracket match, `wrap(']')` was
# never applied to a position, and the Jison placeholders $2/$3/$4 were
# undefined — the sub-results are now captured in locals. Parenthesized
# assignments keep each capture bound to its own sub-match (a bare
# `a = b and c = d` would bind `a` to the whole chain).
cs.Range = (start) ->
  if wrap('[')(start) and (first = cs.Expression(cursor)) and
      (dots = cs.RangeDots(cursor)) and (last = cs.Expression(cursor)) and wrap(']')(cursor)
    new yy.Range first, last, dots
# cs.Array slice literals.
# NOTE(review): all four cases are sequential `if` statements, so only the
# last one's value can be returned; they likely need to be an else-if
# chain. `$1`/`$2`/`$3` are unconverted Jison placeholders (undefined).
cs.Slice = (start) ->
  # don't use recExpression here
  if cs.Expression(start) and cs.RangeDots(cursor) and cs.Expression(cursor)
    new yy.Range $1, $3, $2
  if cs.Expression(start) and cs.RangeDots(cursor)
    new yy.Range $1, null, $2
  if cs.RangeDots(start) and cs.Expression(cursor) then new yy.Range null, $2, $1
  if cs.RangeDots(start) then new yy.Range null, null, $1
# The **cs.ArgList** is both the list of objects passed into a function call,
# as well as the contents of an array literal
# (i.e. comma-separated expressions). Newlines work as well.
# NOTE(review): the self-calls `cs.ArgList(start)` are direct left
# recursion — they only terminate if this rule is wrapped by
# recursive()/memorize(); `$n` placeholders are also unconverted, and
# `wrap(',')` is never applied to a position.
cs.ArgList = (start) ->
  if cs.Arg(start) then [$1]
  else if cs.ArgList(start) and wrap(',') and cs.Arg(cursor) then $1.concat $3
  else if cs.ArgList(start) and cs.OptComma(cursor) and cs.TERMINATOR(cursor) and cs.Arg(cursor) then $1.concat $4
  else if INDENT(start) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor) then $2
  else if cs.ArgList(start) and cs.OptComma(cursor) and INDENT(cursor) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor)
    $1.concat $4
# Valid arguments are Blocks or Splats: try a full expression first,
# then fall back to a splat (`args...`).
cs.Arg = (start) ->
  exp = cs.Expression(start)
  if exp then exp else cs.Splat(start)
# Just simple, comma-separated, required arguments (no fancy syntax). We need
# this to be separate from the **cs.ArgList** for use in **cs.Switch** blocks, where
# having the newlines wouldn't make sense.
# Fix: `if wrap(',')` tested the matcher FUNCTION (always truthy) instead of
# applying it at the cursor, and the loop had no exit when no comma
# followed — an infinite loop. The comma match now drives the loop.
cs.SimpleArgs = (start) ->
  if exp = cs.Expression(start)
    result = [exp]
    while wrap(',')(cursor)
      # a comma must be followed by another expression
      if exp = cs.Expression(cursor) then result.push(exp)
      else return
    result
# The variants of *try/catch/finally* exception handling blocks.
cs.Try = (start) ->
  # `=` has lower precedence than `and`, so `test` receives the Block node
  # (the TRY keyword's own result is discarded) — the try body.
  test = TRY(start) and cs.Block(cursor)
  if test
    # cs.Catch yields a [variable, body] pair
    if cs.Catch(cursor) and catch_ = cs.Block(cursor)
      # NOTE(review): `catch_` here is the Block parsed AFTER cs.Catch, yet
      # it is indexed as a pair (catch_[0], catch_[1]) — confirm whether
      # the cs.Catch result itself was meant to be captured instead.
      if FINALLY(cursor) and final = cs.Block(cursor)
        new yy.Try test, catch_[0], catch_[1], final
      else new yy.Try test, catch_[0], catch_[1]
    else if FINALLY(cursor) and final = cs.Block(cursor)
      new yy.Try test, null, null, final
    else new yy.Try test
# A catch clause names its error and runs a block of code.
# Returns a [errVariable, bodyBlock] pair.
# Fixes: `vari = cs.Identifier(cursor) and body = ...` bound `vari` to the
# WHOLE and-chain (i.e. to body) — the assignments are now parenthesized;
# the three alternatives were also sequential `if` statements whose earlier
# results were discarded, so they are now a proper else-if chain.
cs.Catch = (start) ->
  if CATCH(start)
    # `catch err` with a body
    if (vari = cs.Identifier(cursor)) and (body = cs.Block(cursor)) then [vari, body]
    # `catch {message}` destructuring form
    else if obj = cs.Object(cursor)
      if body = cs.Block(cursor) then [LOC(2)(new yy.Value(obj)), body]
    # bare `catch` with no error variable
    else if body = cs.Block(cursor) then [null, body]
# cs.Throw an exception object.
# NOTE(review): `$2` is an unconverted Jison placeholder — the parsed
# expression should be captured into a local and thrown instead.
cs.Throw = (start) ->
  if THROW(start) and cs.Expression(cursor) then new yy.Throw $2
# cs.Parenthetical expressions. Note that the **cs.Parenthetical** is a **cs.Value**,
# not an **cs.Expression**, so if you need to use an expression in a place
# where only values are accepted, wrapping it in parentheses will always do
# the trick.
cs.Parenthetical = (start) ->
  if wrap('(')(start)
    # NOTE(review): this parses the body from `start` (before the paren was
    # consumed) rather than `cursor` — confirm which position is intended.
    if body = cs.Body(start)
      if wrap(')')(cursor) then new yy.Parens body
    # indented parenthetical body; `$3` is an unconverted Jison placeholder
    if INDENT(start) and cs.Body(cursor) and OUTDENT(cursor)
      if wrap(')')(cursor) then new yy.Parens $3
# The condition portion of a while loop.
# Handles `while cond [when guard]` and `until cond [when guard]`.
cs.WhileSource = (start) ->
  if WHILE(start)
    if test = cs.Expression(cursor)
      if WHEN(cursor) and value = cs.Expression(cursor)
        new yy.While test, guard: value
      # NOTE(review): `$2` is an unconverted placeholder; `test` holds the
      # parsed condition and should be used here and in the UNTIL branch.
      else new yy.While $2
  else if UNTIL(start)
    if test = cs.Expression(cursor)
      if WHEN(cursor) and value = cs.Expression(cursor)
        new yy.While $2, invert: true, guard: $4
      else new yy.While $2, invert: true
# The while loop can either be normal, with a block of expressions to execute,
# or postfix, with a single expression. There is no do..while.
# Fixes: `retturn` typo; the undefined Jison placeholders $1/$2 are replaced
# by named captures, parenthesized so each binds to its own sub-match.
cs.While = (start) ->
  # postfix form: `expr while cond`
  if (exp = recExpression(start)) and (source = cs.WhileSource(cursor))
    return source.addBody LOC(1) yy.Block.wrap([exp])
  if exp then return exp
  # normal form: `while cond` followed by an indented block
  else if (source = cs.WhileSource(start)) and (body = cs.Block(cursor)) then source.addBody body
  # postfix after a statement: `return x while cond`
  else if (stmt = cs.Statement(start)) and (source = cs.WhileSource(cursor))
    source.addBody LOC(1) yy.Block.wrap([stmt])
  else if body = cs.Loop(start) then body
# `loop` — an infinite while(true), with a block body or a single expression.
cs.Loop = (start) ->
  if LOOP(start)
    if body = cs.Block(cursor) then new yy.While(LOC(1) new yy.Literal 'true').addBody body
    else if body = cs.Expression(cursor)
      # NOTE(review): `cs.Block.wrap` — the grammar object has no Block rule
      # with a `wrap` method; presumably `yy.Block.wrap` was intended.
      new yy.While(LOC(1) new yy.Literal 'true').addBody LOC(2) cs.Block.wrap [body]
# cs.Array, object, and range comprehensions, at the most generic level.
# Comprehensions can either be normal, with a block of expressions to execute,
# or postfix, with a single expression.
cs.For = (start) ->
  # NOTE(review): `action = recExpression(start) and test = ...` binds
  # `action` to the whole and-chain (i.e. to test); the assignments likely
  # need parenthesizing as elsewhere. Also only the last statement's value
  # is returned, so the first case's node is discarded unless `return`ed.
  if action = recExpression(start) and test = cs.ForBody(cursor) then new yy.For action, test
  if action then return action
  if action = cs.Statement(start) and test = cs.ForBody(cursor) then new yy.For action, test
  else if test = cs.ForBody(start) and action = cs.Block(cursor) then new yy.For action, test
cs.ForBody = (start) ->
  # `=` binds looser than `and`, so `range` receives the Range node
  # (FOR's own result is discarded).
  if range = FOR(start) and cs.Range(cursor) then source: LOC(2) new yy.Value(range)
  # NOTE(review): `start = cs.ForStart(start)` shadows the parameter —
  # harmless here but confusing; a distinct local would be clearer.
  else if start = cs.ForStart(start) and src = cs.ForSource(cursor)
    src.own = start.own; src.name = start[0]; src.index = start[1];
    src
cs.ForStart = (start) ->
  if FOR(start)
    # `for own k, v of obj` — the `own` modifier
    if OWN(cursor)
      if vari = cs.ForVariables(cursor) then vari.own = yes; vari
    else if vari = cs.ForVariables(cursor) then vari
# An array of all accepted values for a variable inside the loop.
# cs.This enables support for pattern matching.
# Fix: the array case referenced `arr` without ever assigning it — the
# cs.Array result is now captured, matching the sibling branches.
cs.ForValue = (start) ->
  if id = cs.Identifier(start) then id
  else if prop = cs.ThisProperty(start) then prop
  else if arr = cs.Array(start) then new yy.Value arr
  else if obj = cs.Object(start) then new yy.Value obj
# An array or range comprehension has variables for the current element
# and (optional) reference to the current index. Or, *key, value*, in the case
# of object comprehensions.
# Fix: the pair case returned `[v1, v3]` but the first capture is named `v`
# (`v1` was undefined); the second capture is renamed `v2` for clarity.
cs.ForVariables = (start) ->
  if v = cs.ForValue(start)
    if wrap(',')(cursor) and v2 = cs.ForValue(cursor) then [v, v2]
    else [v]
# The source of a comprehension is an array or object with an optional guard
# clause. cs.If it's an array comprehension, you can also choose to step through
# in fixed-size increments.
# Returns a plain options object {source, guard?, step?, object?}.
cs.ForSource = (start) ->
  # NOTE(review): these two cases are sequential `if` statements, so the
  # FORIN case's value is discarded (only the last statement is returned);
  # they likely need `else if`. Also both branches set `object: yes`, which
  # looks copy-pasted — `for..in` iterates arrays, not objects. TODO confirm.
  if FORIN(start) and source = cs.Expression(cursor)
    if WHEN(cursor) and guard = cs.Expression(cursor)
      if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
      else source: source, guard: guard, object: yes
    else source: source
  if FOROF(start) and source = cs.Expression(cursor)
    if WHEN(cursor) and guard = cs.Expression(cursor)
      if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
      else source: source, guard: guard, object: yes
    else source: source
# A switch statement: either `switch` over an indented block of whens, or
# `switch expr` followed by the whens; each form takes an optional ELSE.
# Fixes: `if ELSE(cursor) cs.Block(cursor) new yy.Switch ...` was invalid
# syntax (missing `and`/`then`), the else-block was never captured into
# `else_`, and the subject-less branch referenced an undefined `test`
# (it passes null, per the original Jison rule). The node is captured so
# it can be returned after the OUTDENT is consumed.
cs.Switch = (start) ->
  if SWITCH(start)
    # `switch` with no subject expression
    if INDENT(cursor)
      if whens = cs.Whens(cursor)
        if ELSE(cursor) and else_ = cs.Block(cursor) then result = new yy.Switch null, whens, else_
        else result = new yy.Switch null, whens
      OUTDENT(cursor)
      result
    # `switch expr`
    else if test = cs.Expression(cursor)
      if INDENT(cursor)
        if whens = cs.Whens(cursor)
          if ELSE(cursor) and else_ = cs.Block(cursor) then result = new yy.Switch test, whens, else_
          else result = new yy.Switch test, whens
        OUTDENT(cursor)
        result
# Matches the `switch` keyword: the literal followed by a non-identifier
# character (so `switcher` does not match).
SWITCH = (start) -> switch_word(start) and not identifierLetter_()
switch_word = literal('switch')
# An individual **cs.When** clause list, with action.
# Returns an array of [args, block] pairs, or undefined when empty.
# Fixes: the original looped forever when LEADING_WHEN failed (no break
# path), re-tested the original `start` on every iteration instead of the
# advanced position, and `args = A and action = B` bound `args` to the
# whole chain — assignments are now parenthesized.
cs.Whens = (start) ->
  result = []
  pos = start
  while LEADING_WHEN(pos)
    if (args = cs.SimpleArgs(cursor)) and (action = cs.Block(cursor)) and may(cs.TERMINATOR)(cursor)
      result.push [args, action]
      pos = cursor  # next `when` starts where this clause ended
    else break
  # an empty list is a parse failure, not a match
  if result.length then result
# The most basic form of *if* is a condition and an action. The following
# if-related grammar are broken up along these lines in order to avoid
# ambiguity.
# NOTE(review): `$1`...`$5` throughout this region are unconverted Jison
# action placeholders and are undefined in this hand-written parser.
cs.IfBlock = (start) ->
  # NOTE(review): `test =` binds to the whole and-chain (i.e. the Block),
  # and the first `if`'s value is discarded by the second statement.
  if IF(start) and test = cs.Expression(cursor) and body = cs.Block(cursor)
    new yy.If test, body, type: $1
  if cs.IfBlock(start) and ELSE(cursor) and IF(cursor) and cs.Expression(cursor) and cs.Block(cursor)
    $1.addElse new yy.If $4, $5, type: $3
# The full complement of *if* expressions, including postfix one-liner
# *if* and *unless*.
cs.If = (start) ->
  if if_ = cs.IfBlock(start)
    if ELSE(cursor) and elseBody = cs.Block(cursor) then if_.addElse elseBody
    else if_
  # postfix form: `stmt if cond` (placeholders unconverted here too)
  if cs.Statement(start) and POST_IF(cursor) and cs.Expression(cursor)
    new yy.If $3, LOC(1)(cs.Block.wrap [$1]), type: $2, statement: true
# Arithmetic and logical operators, working on one or more operands.
# Here they are grouped by order of precedence. The actual precedence grammar
# are defined at the bottom of the page. It would be shorter if we could
# combine most of these grammar into a single generic *Operand OpSymbol Operand*
# -type rule, but in order to make the precedence binding possible, separate
# grammar are necessary.
cs.Operation = (start) ->
  # binary operations on a previously-memoized Expression
  if m = memo('Expression')(start)
    # NOTE(review): `_spaces` is not defined in this file chunk — confirm.
    if _spaces('?')(cursor) then new yy.Existence $1
    else if wrapadd(cursor) and cs.Expression(cursor) then return new yy.Op '+' , $1, $3
    else if wrapsub(cursor) and cs.Expression(cursor) then return new yy.Op '-' , $1, $3
    else if MATH(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    else if SHIFT(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    # NOTE(review): `cs.Expression` here is not invoked — missing `(cursor)`.
    else if COMPARE(cursor) and cs.Expression then return new yy.Op $2, $1, $3
    else if LOGIC(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    else if RELATION(cursor) and cs.Expression(cursor)
      # negated relations (`not in` etc.) invert the operation
      if $2.charAt(0) is '!' then return new yy.Op($2[1..], $1, $3).invert()
      else return new yy.Op $2, $1, $3
  # compound assignment / extends on a simple assignable
  else if simple = memo('SimpleAssignable')(start)
    # NOTE(review): this branch parses from `start`, its siblings from
    # `cursor` — confirm which is intended.
    if COMPOUND_ASSIGN(cursor) and cs.Expression(start) then return new yy.Assign $1, $3, $2
    else if COMPOUND_ASSIGN(cursor) and INDENT(cursor) and cs.Expression(cursor) and OUTDENT(cursor)
      return new yy.Assign $1, $4, $2
    else if COMPOUND_ASSIGN(cursor) and cs.TERMINATOR(cursor) and cs.Expression(cursor)
      return new yy.Assign $1, $4, $2
    else if EXTENDS(cursor) and cs.Expression(cursor) then new yy.Extends $1, $3
  # unary operators; NOTE(review): `op = UNARY(start) and exp = ...` binds
  # `op` to the whole chain (i.e. to exp) — parenthesization needed.
  if op = UNARY(start) and exp = cs.Expression(cursor) then new yy.Op op , exp
  else if wrap('-')(start) and exp = cs.Expression(cursor) then new yy.Op '-', exp, prec: 'UNARY'
  else if wrap('+')(start) and exp = cs.Expression(cursor) then new yy.Op '+', exp, prec: 'UNARY'
  else if wrap('++')(start) and cs.SimpleAssignable(cursor) then new yy.Op '++', $2
  else if wrapdec(start) and cs.SimpleAssignable(cursor) then new yy.Op '--', $2
  # postfix increment/decrement
  else if cs.SimpleAssignable(start) and wrap('--')(cursor) then new yy.Op '--', $1, null, true
  else if cs.SimpleAssignable(start) and wrap('++')(cursor) then new yy.Op '++', $1, null, true
# pre-built matchers for the operator literals (wrapinc currently unused)
wrapinc = wrap('++'); wrapdec = wrap('--'); wrapadd = wrap('+'); wrapsub = wrap('-');
# Lift a matcher: on success, wrap its result in a yy.Value node.
cs.newyyValue = (item) ->
  (start) ->
    matched = item(start)
    if matched then new yy.Value(matched)
# Statement terminator.
# skip whitespaces, line concatentation and any ';'
# should include line comment
cs.TERMINATOR = (start) ->
  cursor = start
  spaces_()
  # NOTE(review): `meetSemicolon` is only assigned when a ';' is seen, so
  # the concat-line branch can return `undefined` (falsy) — confirm this
  # "fail unless a semicolon preceded the continuation" behavior is intended.
  if text[cursor]==';'
    cursor++; meetSemicolon = true
    spaces_()
  if lineComment_()
    skipEmptyLineAndCommentLines_()
  else if concatLine_() then return meetSemicolon
  true
# Skip a run of spaces/tabs at the global cursor; always returns true.
# Fix: the original tested `text[cursor++]`, which advanced the cursor PAST
# the first non-whitespace character (and past EOF) before returning — an
# off-by-one that shifted every subsequent match by one position. The
# cursor now stops ON the first non-whitespace character.
spaces_ = () ->
  while 1
    c = text[cursor]
    break if c != ' ' and c != '\t'
    cursor++
  true
# skip whitespaces, line concatentation, but don't skip ';', line comment
# used in cases that should have left parts of a cs.Cup to be parsed.
# used after RETURN after cs.Return
# Fix: the whitespace case did `continue` without advancing the cursor,
# spinning forever on the first space/tab; the cursor is now advanced.
# (The unused `concated` local was dropped.)
spacesConcatLine_ = () ->
  while 1
    c = text[cursor]
    if c==' ' or c=='\t'
      cursor++
      continue
    # in the middle of line, is it right to eat line comment?
    if lineComment_() then return # fail and backtrack, maybe other choice which have indent/outdent
    else if concatLine_() then return true # concatLine should bring the left part for the cs.Cup
    else break
  true
# line comment, which can begin at any position, is different from block comment
# NOTE(review): `new ParseError(...)` below constructs the error but never
# throws it — confirm whether `throw new ParseError(...)` was intended
# (the same pattern recurs in several helpers in this file).
lineComment_ = () ->
  if text[cursor] is '#'
    # '###' opens a block comment, which is a statement — not valid here
    if text[cursor+1] is '#' and text[cursor+2] is '#'
      new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
    cursor++
    skipToEndOfLine_()
# skip lines which is whitespaces completely or with line comment follow some whitespaces
# until reach a line which inludes effective code.
# NOTE(review): `newline_` is not defined in this chunk — assumed to be a
# matcher for '\r\n'/'\n' defined elsewhere; TODO confirm.
skipEmptyLineAndCommentLines_ = () ->
  while 1
    spaces_()
    lineComment_()
    if not newline_() then break
  if text[cursor]=='\\'
    new ParseError(cursor, 'should not begin a line with \\')
  # else if text[cursor] ==';'
  # new ParseError(cursor, 'should not begin a line with ;')
  return true
# Advance the cursor to just past the next line ending ('\r\n' or '\n'),
# consuming every character on the way (including '#', ';', '\').
# A lone '\r' not followed by '\n' is treated as an ordinary character.
# Stops at end of input. Always returns true. Used by lineComment_.
skipToEndOfLine_ = () ->
  loop
    c = text[cursor]
    if c is undefined then return true
    if c is '\n'
      cursor++
      return true
    if c is '\r' and text[cursor+1] is '\n'
      cursor += 2
      return true
    cursor++
# concatenate line by \, skip until first nonwhitespace in next line, check error cases.
# NOTE(review): `skipHeadWhiteSpaces_` is not defined in this chunk, the
# `lineno` increment here duplicates the precomputed lineColumnList table,
# and the ParseErrors are constructed but never thrown — TODO confirm.
concatLine_ = () ->
  if text[cursor]=='\\'
    # consume the backslash plus the line ending ('\r\n' or '\n')
    if text[cursor+1]=='\r' and text[cursor+2]=='\n'
      cursor += 3; lineno++; #column = 0
    else if text[cursor+1]=='\n' then cursor += 2; lineno++; #column = 0
    else new ParseError(cursor, "meet a line concatenation symbol ('\\') which is not at the end of line.")
    skipHeadWhiteSpaces_()
    # validate what the continued line starts with
    c = text[cursor]
    if c is '\\' or c is '#' or c is ';'
      new ParseError(cursor, "The next line of line concatenation symbol \\ should not begin with #{c}")
    else if c is '\r' or c is '\n'
      new ParseError(cursor, "The next line of line concatenation symbol \\ should not be a empty line.")
# Terminal rules. cs.IDENTIFIER is complete; the rest are stubs.
cs.IDENTIFIER = (start) ->
  if id = identifier(start) then new yy.Literal id
# NOTE(review): `$1` in the stubs below is an unconverted Jison placeholder
# (undefined here) — these terminals still need their lexing logic.
cs.NUMBER = (start)-> new yy.Literal $1
cs.STRING = (start) -> new yy.Literal $1
cs.JS = (start) -> new yy.Literal $1
cs.REGEX = (start) -> new yy.Literal $1
cs.DEBUGGER = (start) -> new yy.Literal $1
cs.UNDEFINED = (start) -> new yy.Undefined
cs.NULL = (start) -> new yy.Null
cs.BOOL = (start) -> new yy.Bool $1
# Precedence
# Operators at the top of this list have higher precedence than the ones lower down.
# Following these grammar is what makes `2 + 3 * 4` parse as 2 + (3 * 4) and not (2 + 3) * 4
# NOTE(review): this table is carried over from the Jison grammar; nothing
# in this hand-written parser appears to read it yet — TODO confirm.
operators = [
  ['left', '.', '?.', '::', '?::']
  ['left', 'CALL_START', 'CALL_END']
  ['nonassoc', '++', '--']
  ['left', '?']
  ['right', 'UNARY']
  ['left', 'MATH']
  ['left', '+', '-']
  ['left', 'SHIFT']
  ['left', 'RELATION']
  ['left', 'COMPARE']
  ['left', 'LOGIC']
  ['nonassoc', 'INDENT', 'OUTDENT']
  ['right', '=', ':', 'COMPOUND_ASSIGN', 'RETURN', 'THROW', 'EXTENDS']
  ['right', 'FORIN', 'FOROF', 'BY', 'WHEN']
  ['right', 'IF', 'ELSE', 'FOR', 'WHILE', 'UNTIL', 'LOOP', 'SUPER', 'CLASS']
  ['right', 'POST_IF']
]
#['__bind', '__extends', '__hasProp', '__indexOf', '__slice', 'break', 'by', 'case',
# 'catch', 'class', 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'else',
# 'enum', 'export', 'extends', 'false', 'finally', 'for', 'function', 'if', 'implements',
# 'import', 'in', 'instanceof', 'interface', 'let', 'loop', 'native', 'new', 'null', 'of',
# 'package', 'private', 'protected', 'public', 'return', 'static', 'super', 'switch',
# 'then', 'this', 'throw', 'true', 'try', 'typeof', 'undefined', 'unless', 'until',
# 'var', 'void', 'when', 'while', 'with', 'yield'] =
# for kw in []
# do (kw_word = literal(kw)) ->
# (start) -> kw_word(start) and not identifierLetter_() | 190609 | # ### begin original comment by jashkenas
# The CoffeeScript parser is generated by [Jison](http://github.com/zaach/jison)
# from this grammar file. Jison is a bottom-up parser generator, similar in
# style to [Bison](http://www.gnu.org/software/bison), implemented in JavaScript.
# It can recognize [LALR(1), LR(0), SLR(1), and LR(1)](http://en.wikipedia.org/wiki/LR_grammar)
# type grammars. To create the Jison parser, we list the pattern to match
# on the left-hand side, and the action to take (usually the creation of syntax
# tree nodes) on the right. As the parser runs, it
# shifts tokens from our token stream, from left to right, and
# [attempts to match](http://en.wikipedia.org/wiki/Bottom-up_parsing)
# the token sequence against the grammar below. When a match can be made, it
# reduces into the [nonterminal](http://en.wikipedia.org/wiki/Terminal_and_nonterminal_symbols)
# (the enclosing name at the top), and we proceed from there.
#
# If you run the `cake build:parser` command, Jison constructs a parse table
# from our grammar and saves it into `lib/parser.js`.
# ### end original comment by jashkenas
# The only dependency is on the **Jison.Parser**.
# {Parser} = require 'jison' # the only dependency is not needed any more.
# Jison DSL
# ---------
{identifier, keyword} = require './parserutil'
yy = require '../lib/coffee-script/nodes'
# some global variable used by the parser
text = '' # the text which is being parsed, this could be any sequence, not only strings.
textLength = 0 # the length of text
cursor = 0 # the current position of parsing, use text[cursor] to get current character in parsed stream
tabWidth = 4 # the tab width, when meet a tab, column += tabWidth
# the current currentIndentWidth, initial value is 0
# when indenting, it increases and recorded and used as parameter of OUTDENT, to verify outdent
currentIndentWidth = 0
# minimal value for one indent
minIndentWidth = 2
# when blockMode is true, parse indent block, otherwise parse a single line block
blockMode = true
# true while the cursor sits at the start of a line (before any token)
atBeginOfLine = true
parseCache = {} # {tag+start: [result, cursor]}, memorized parser result
symbolToTagMap = {} # {symbol: tag}, from rule symbol map to a shorter memo tag, for memory efficiency
tags = {} # {tag: true}, record tags that has been used to avoid conflict
symbolToParentsMap = {} # {symbol:[parent...]}, the map from symbol to their all of parents for left recursive symbols
baseRules = {} # {symbol: rule's function}, unwrapped rules kept for recursive/memorize wrappers
# parse @data starting at options.start (default 0).
# @data    - the text to parse
# @root    - NOTE(review): currently unused — grammar.Root is always the
#            entry rule; confirm whether @root should be invoked instead.
# @options - optional {start, tabWidth}
# Fixes: `grammar.Root(0)` ignored options.start (the cursor had just been
# set from it) — Root now starts at the cursor; a dead guard that read
# `typeof start` (a name not in scope in this function) was removed.
exports.parse = (data, root, options) ->
  o = options or {}
  text = data
  textLength = text.length
  cursor = o.start or 0
  tabWidth = o.tabWidth or 4
  # reset per-parse memoization state
  parseCache = {}
  baseRules = {}
  symbolToParentsMap = {}
  memoNames = ['Expression', 'Body', 'Cup', 'Block', 'Invocation', 'Value', 'Assignable',
  'SimpleAssignable', 'For', 'If', 'Operation']
  for symbol in memoNames then setMemoTag(symbol)
  addLeftRecursiveParentChildrens(
    Expression: ['Invocation', 'Value', 'Operation', 'Assign', 'While', 'For'],
    Value: ['Assignable'],
    Assignable: ['SimpleAssignable'],
    SimpleAssignable: ['Value', 'Invocation'],
    Assign: ['Assignable'],
    Invocation: ['Value', 'Invocation'],
    For: ['Expression'],
    If: ['Expression'],
    Operation: ['Expression', 'SimpleAssignable']
  )
  setRecursiveRules(grammar)
  setMemorizeRules(grammar, ['Body', 'Cup', 'Block', 'Statement'])
  generateLinenoColumn()
  grammar.Root(cursor)
# lineColumnList[i] = [lineno, column] for every index i of `text`,
# precomputed so matchers never have to track line/column themselves.
lineColumnList = []
generateLinenoColumn = () ->
  i = 0
  lineno = column = 0
  while i<textLength
    c = text[i]
    if c is '\r'
      lineColumnList[i++] = [lineno, column]
      # '\r\n' counts as a single line ending
      if text[i] is '\n'
        lineColumnList[i++] = [lineno, column]
      lineno++; column = 0
    else if c is '\n'
      lineColumnList[i++] = [lineno, column]
      lineno++; column = 0
    else if c is '\t'
      lineColumnList[i++] = [lineno, column]
      column += tabWidth
    else
      lineColumnList[i++] = [lineno, column]
      column++
  # sentinel entry one past the end of text
  lineColumnList[i] = [lineno+1, 0]
# some utilities used by the parser
# on succeed any matcher should not return a value which is not null or undefined, except the root symbol.
# set a shorter start part of symbol as the tag used in parseCache
# Fix: the loop condition was `hasOwnProperty.call(tags, ...) in tags`,
# which tested a BOOLEAN for membership in `tags` (always false), so the
# loop broke immediately and every symbol got a one-character tag —
# e.g. 'Body' and 'Block' both became 'B', corrupting parseCache. The
# prefix is now grown while it is already taken, capped at the full name.
setMemoTag = (symbol) ->
  i = 1
  while i < symbol.length and hasOwnProperty.call(tags, symbol.slice(0, i))
    i++
  tag = symbol.slice(0, i)
  symbolToTagMap[symbol] = tag
  tags[tag] = true
# set the symbols in grammar which memorize their rule's result.
setMemorizeRules = (grammar, symbols) ->
  for symbol in symbols
    # keep the raw rule; memorize() reads it back out of baseRules
    baseRules[symbol] = grammar[symbol]
    grammar[symbol] = memorize(symbol)
# set all the symbols in grammar which are left recursive.
setRecursiveRules = (grammar) ->
  map = symbolToParentsMap
  for symbol of map
    baseRules[symbol] = grammar[symbol]
    grammar[symbol] = recursive(symbol)
# add direct left recursive parent->children relation for @parentChildrens to global variable symbolToParentsMap
addLeftRecursiveParentChildrens = (parentChildrens...) ->
  map = symbolToParentsMap
  for parentChildren in parentChildrens
    for parent, children of parentChildren
      for symbol in children
        list = map[symbol] ?= []
        # avoid self-loops and duplicates
        if parent isnt symbol and parent not in list then list.push parent
# add left recursive parent->children relation to @symbolToParentsMap for symbols in @recursiveCircles
addLeftRecursiveCircles = (recursiveCircles...) ->
  map = symbolToParentsMap
  for circle in recursiveCircles
    i = 0
    length = circle.length
    while i<length
      # each element's parent is the next element, wrapping around
      if i==length-1 then j = 0 else j = i+1
      symbol = circle[i]; parent = circle[j]
      list = map[symbol] ?= []
      if parent isnt symbol and parent not in list then list.push parent
      i++
# make @symbol a left recursive symbol, which means to wrap baseRules[symbol] with recursive,
# when recursive(symbol)(start) is executed, all of left recursive rules which @symbol depend on will be computed
# and memorized, until no change exists.
# Fixes: (1) the drain loop reused the OUTER symbol's memo tag for every
# symbol popped from the agenda, writing all results under the wrong cache
# keys; each symbol now uses its own tag. (2) the loop variable shadowed
# the `symbol` parameter. (3) addParent unshifted each parent twice (once
# in the caller's loop and again on recursion). (4) the agenda was drained
# permanently, so a later call at a fresh position crashed reading an
# absent cache entry; the agenda is now rebuilt per call. (5) `map[sym]`
# is guarded — agenda symbols need not have parents of their own.
recursive = (symbol) ->
  map = symbolToParentsMap
  tag = symbolToTagMap[symbol]
  # all left-recursive symbols that (transitively) depend on @symbol
  initialAgenda = []
  addParent = (parent) ->
    initialAgenda.unshift(parent) unless parent in initialAgenda
    parents = map[parent]
    if parents then for p in parents
      if p not in initialAgenda
        addParent(p)
  addParent(symbol)
  (start) ->
    hash0 = tag+start
    m = parseCache[hash0]
    if m then cursor = m[1]; return m[0]
    agenda = initialAgenda.slice()
    while agenda.length # exist any symbols which depend on the changed result?
      sym = agenda.pop()
      hash = symbolToTagMap[sym]+start
      m = parseCache[hash]
      if not m then m = parseCache[hash] = [undefined, start]
      rule = baseRules[sym]
      changed = false
      # grow the match for sym until a fixed point is reached
      while 1
        if (result = rule(start)) and (result isnt m[0] or cursor isnt m[1])
          parseCache[hash] = m = [result, cursor]
          changed = true
        else break
      # if any new result exists, recompute the symbols which may be depend on the new result.
      if changed and map[sym] then for parent in map[sym]
        if parent not in agenda then agenda.push parent
    m = parseCache[hash0]
    cursor = m[1]
    m[0]
# memorize result and cursor for @symbol which is not left recursive.
# left recursive should be wrapped by recursive(symbol)!!!
exports.memorize = memorize = (symbol) ->
  tag = symbolToTagMap[symbol]
  # the raw rule is captured at wrap time, so memorize() must run AFTER
  # baseRules[symbol] has been recorded (see setMemorizeRules)
  rule = baseRules[symbol]
  (start) ->
    hash = tag+start
    m = parseCache[hash]
    # cache hit: restore the cursor the rule reached and return its result
    if m then cursor = m[1]; m[0]
    else
      result = rule(start)
      parseCache[hash] = [result, cursor]
      result
# lookup the memorized result and reached cursor for @symbol at the position of @start
# (read-only: never runs the rule; returns undefined on a cache miss)
exports.memo = memo = (symbol) ->
  tag = symbolToTagMap[symbol]
  (start) ->
    m = parseCache[tag+start]
    if m then cursor = m[1]; m[0]
# Sequence combinator: run each matcher in order (all but the first start
# at the global cursor). Stops at the first falsy result and returns it;
# otherwise returns the final matcher's result. Short-circuit equivalent:
#   exps[0](start) and exps[1](cursor) ... and exps[n-1](cursor)
andp = (exps...) -> (start) ->
  result = undefined
  cursor = start
  for exp in exps
    result = exp(cursor)
    return result unless result
  result
# Choice combinator: try each matcher at the SAME position; return the
# first truthy result. Short-circuit equivalent:
#   exps[0](start) or exps[1](start) ... or exps[n-1](start)
orp = (exps...) -> (start) ->
  result = undefined
  for exp in exps
    result = exp(start)
    return result if result
  result
# Negation combinator — kept only for composing matchers; prefer writing
# `not exp(start)` inline over `notp(exp)(start)`.
notp = (exp) -> (start) ->
  if exp(start) then false else true
# compute the global variable lineno and column Synchronously
# It's important and noteworthy that how to keep lineno and column having the correct value at any time.
# pay attention to that!
# comment: have record in advance, so the code on lineno is removed.
# consume and return the character at the cursor (line/column bookkeeping
# now lives in the precomputed lineColumnList — see the dead code below)
next = () ->
  return text[cursor++]
# c = text[cursor++]
# if c is '\r'
# if text[cursor] is '\n' then c = text[cursor++]
# lineno++; column = 0
# else if c is '\n' then lineno++; column = 0
# return c
# change both cursor and column
step = (n=1) -> cursor += n; # column += n
# match one character
# Returns the character on success (cursor advances past it); on failure
# returns undefined and leaves the cursor at @start.
# Fix: the original called `setlinecolumen()`, a name defined nowhere in
# this file — every single-character match threw a ReferenceError. Line
# and column are precomputed in lineColumnList, so no call is needed.
char = (c) -> (start) ->
  cursor = start
  if text[cursor] == c
    cursor = start + 1
    return c
# match a literal string.
# There should not have newline character("\r\n") in literal string!
# Otherwise the lineno and column will be updated wrongly.
# now the relation is not existed any more because the lineno and column is computed in advance.
literal = (string) -> (start) ->
n = string.length
if text.slice(start, stop = start+n)==string
cursor = stop; true
# cursor = stop; column += n; true
# zero or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces.
# the whitespaces width+1 is returned as result ( +1 to avoid 0 as the result,
# which is a falsy value)!!!
# newline is not included!!!
spaces = (start) ->
  # FIX: only advance the cursor over a matched space/tab. The original
  # incremented before testing, which left the cursor one position past the
  # first non-space character (silently consuming it).
  n = 0
  cursor = start
  while 1
    c = text[cursor]
    if c==' ' then n++
    else if c=='\t' then n += tabWidth
    else break
    cursor++
  # column += n
  return n+1
# one or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces.
# newline is not included!!!
# returns the (truthy) width when at least one whitespace matched, else undefined.
spaces1 = (start) ->
  # FIX: only advance the cursor over a matched space/tab (the original
  # consumed the first non-space character, same bug as in spaces).
  n = 0
  cursor = start
  while 1
    c = text[cursor]
    if c==' ' then n++
    else if c=='\t' then n += tabWidth
    else break
    cursor++
  if n
    # column += n;
    n
# first, match @left, then match @item, at last match @right.
# left and right are set to spaces by default; @item's result is returned.
wrap = (item, left=spaces, right=spaces) -> (start) ->
  # FIX: parenthesize the assignment. CoffeeScript parses
  # "result = item(cursor) and right(cursor)" as
  # "result = (item(cursor) and right(cursor))", so the rule used to return
  # right's value instead of item's parse result.
  if left(start) and (result = item(cursor)) and right(cursor)
    return result
# is the character at the cursor a letter usable in an identifier?
# used to check the word boundary after keywords such as return, break, etc.
identifierLetter_ = () ->
  c = text[cursor]
  # FIX: 'a'<=c<'z' excluded the letter 'z' itself
  c is '$' or c is '_' or 'a'<=c<='z' or 'A'<=c<='Z' or '0'<=c<='9'
# Grammatical rules of CoffeeScript.
# `cs` (also exported as `grammar`) collects one rule function per nonterminal;
# each rule takes a start position and returns a yy.* node, or falsy on failure.
exports.grammar = grammar = cs = {}
# The **Root** is the top-level node in the syntax tree.
# An input of pure whitespace/comments yields an empty yy.Block.
cs.Root = (start) ->
  skipHeadSpaces_() # skip the spaces at the begin of program, including line comment.
  if cursor is textLength then new yy.Block
  else
    body = cs.Body(cursor)
    # and skipTailSpaces(cursor); body # is skipTailSpaces necessary? wait to see.
# skip the spaces at the begin of program, including line comments.
# Throws ParseError on a leading '\' or ';', and when effective code does not
# start at column 0.
skipHeadSpaces_ = () ->
  while c = text[cursor]
    # FIX: the original compared against 't' (the letter) instead of '\t'
    if c==' ' or c=='\t' or c== '\r' or c== '\n' then cursor++; continue
    # a line comment, but NOT a "###" block comment — block comments are
    # statements and must be parsed, so stop on them.
    # FIX: the original condition was inverted and skipped only block comments.
    else if c is '#' and not (text[cursor+1] is '#' and text[cursor+2] is '#')
      while c = text[cursor++]
        # FIX: the original compared against 'n' (the letter) instead of '\n'
        if c=='\r' or c=='\n' then break
    else if c is '\\'
      # FIX: '\\' so the message really contains a backslash
      throw new ParseError(cursor, "should not have ';' or '\\' at the begin of program.")
    else if c is ';' # maybe should allow ; at begin of line?
      throw new ParseError(cursor, "should not have ';' or '\\' at the begin of program.")
    else break
  if lineColumnList[cursor][1] isnt 0
    throw new ParseError(cursor, "Effective Code Cup of the whole program should start at column 0, the begin of a line.")
# Any list of statements and expressions, separated by line breaks or semicolons.
cs.Body = (start) ->
  cups = []
  while 1
    if cs.CupLeftEdge_() and (cup = cs.Cup(cursor)) and cs.CupRightEdge_()
      cups.push cup
    else break
  # if cups do not include effective code, should have yy.literal('undefined') added.
  yy.Block.wrap(cups)
# Consume the leading edge of a Cup: in block mode at a line start this checks
# the indentation column; otherwise it skips spaces and optional line
# concatenation.
# NOTE(review): concatenation_ is not defined in this chunk (cf. concatLine_
# below) — confirm the helper exists elsewhere in the file.
cs.CupLeftEdge_ = () ->
  if blockMode
    if atBeginOfLine
      atBeginOfLine = false
      spaces_() and (lineColumnList[cursor][1]==currentIndentWidth)
    else spaces_() and (concatenation_() or true) and spaces_()
  else
    spaces_() and (concatenation_() or true) and spaces_()
# Consume the trailing edge of a Cup: a semicolon, or an end of line (possibly
# with a line comment), which re-arms atBeginOfLine for the next Cup.
# NOTE(review): semicolon_/linecomment_ are not defined in this chunk — confirm.
cs.CupRightEdge_ = () ->
  spaces_() and (semicolon_() or (linecomment_() or newline_()) and atBeginOfLine= true)
# before INDENT, all whitespaces/newlines/line comments should have been
# skipped and blockMode should have a determined value.
# NOTE(review): callers refer to bare INDENT/OUTDENT while these are defined
# as cs.INDENT/cs.OUTDENT — confirm the aliases exist elsewhere in the file.
cs.INDENT = (start) ->
  if not blockMode then true
  else if column >= currentIndentWidth+minIndentWidth
    currentIndentWidth = column
# OUTDENT is curried: it captures the indent width to return to; the resulting
# matcher succeeds when the current column closes that block.
cs.OUTDENT = (indent) -> (start) ->
  if blockMode then column is indent
  else column is 0
# skip spaces until meeting an end of line, then skip to the begin of next line.
# A line comment at the tail of the line is skipped too.
# Returns true when a line end was consumed; undefined at end of input or when
# effective code is found before the line end.
spacesUntilEndOfLine = (start) ->
  cursor = start
  while 1
    c = text[cursor]
    if c is '\t' or c is ' ' then cursor++; continue
    else if c is '#'
      if text[cursor+1] is '#' and text[cursor+2] is '#'
        # FIX: the ParseError was constructed but never thrown
        throw new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
      cursor++
      while 1
        c = text[cursor]
        if c is '\r' and text[cursor+1] is '\n'
          cursor +=2; return true
        else if c is '\n' then cursor++; return true
        else if c is undefined then return
        else cursor++
    else break
  if c is '\r' and text[cursor+1] is '\n'
    cursor +=2; true
  else if c is '\n' then cursor++; true
# Block and statements, which make up a line in a body.
# The comment above is by jashkenas. Should Block be Expression?
# Cup is the equivalence of Line in the original grammar.coffee written by jashkenas.
# I replace Line with Cup because Line is too easily misunderstood.
# Line in grammar.coffee can be a part of a physical line or a concatenation of
# multiple physical lines, even a single compound statement of multiple physical lines.
# Cup is an intact logic unit of code piece, same as Line in grammar.coffee.
# The Cup can be big or small, and a Cup can be put in the Body of another bigger Cup.
# So let's have a cup of coffee.
cs.Cup = (start) ->
  cs.Statement(start)\
    or cs.Expression(start)\
    or cs.EmptyCup(start)
# A cs.Cup begin with semicolon, or has only spaces, line comment(which obey indent/unindent
cs.EmptyCup = (start) ->
  c = text[start]
  c is ';' or c is '\r' or c is '\n'
# Pure statements which cannot be expressions.
cs.Statement = (start) ->
  cs.Return(start)\
    or cs.BlockComment(start)\
      or cs.Break(start)\
      or cs.Continue(start)
# Break and Continue are my replacement for STATEMENT in the original grammar.coffee.
# A return statement from a function body; the returned expression is optional.
cs.Return = (start) ->
  if RETURN(start) and spacesConcatLine_()
    if exp = cs.Expression(cursor) then new yy.Return exp
    else new yy.Return
### ###
###
dfasfdfs
dsfdfsa
asdfdsfa
###
# A block comment ("###...###").
# BlockComment should obey the indent/outdent rule, but a line comment doesn't
# need to, right? --> yes.
# BlockComment returns a yy.Comment node and generates a comment in the object
# JavaScript code; returns undefined at unterminated end of input.
cs.BlockComment = (start) ->
  cursor = start
  if text[cursor] is '#' and text[cursor+1] is '#' and text[cursor+2] is '#'
    cursor += 3
    # FIX: the original loop only advanced the cursor when it saw '#', so any
    # other character inside the comment spun forever.
    while 1
      c = text[cursor]
      if not c then return
      if c is '#' and text[cursor+1] is '#' and text[cursor+2] is '#'
        cursor += 3
        return new yy.Comment text.slice(start+3, cursor-3)
      cursor++
# break / continue keywords become literal nodes.
cs.Break = (start) -> if BREAK(start) then new yy.Literal('break')
cs.Continue = (start) -> if CONTINUE(start) then new yy.Literal('continue')
# All the different types of expressions in our language. The basic unit of
# CoffeeScript is the **cs.Expression** -- everything that can be an expression
# is one. Blocks serve as the building blocks of many other grammar, making
# them somewhat circular.
cs.Expression = (start) ->
  recValue(start)\
    or recOperation(start)\
    or recInvocation(start)\
    or recAssign(start)\
    or recIf(start)\
    or recWhile(start)\
    or recFor(start)\
    or cs.Switch(start)\
    or cs.Throw(start)\
    or cs.Class(start)\
    or cs.Try(start)\
    or cs.Code(start) #(param) -> ... or -> ..
# cache-lookup wrappers for the (potentially left-recursive) alternatives above
recValue = memo('Value')
recOperation = memo('Operation')
recInvocation = memo('Invocation')
recAssign = memo('Assign')
recIf = memo('If')
recWhile = memo('While')
recFor = memo('For')
# An indented block of expressions. Note that the [Rewriter](rewriter.html)
# will convert some postfix forms into blocks for us, by adjusting the token stream.
# as described below, I should consider how to add a block of a single line
# which is consisted of a multiple-line cup.
cs.Block = (start) ->
  # two modes: single line block mode, indent block mode
  if n = INDENT(start)
    outdent = OUTDENT(n)
    if outdent(cursor) then new yy.Block
    # FIX: parenthesize (body = ...). The original assigned body the result of
    # the whole and-expression, i.e. outdent's return value, not the Body node.
    else if (body = cs.Body(cursor)) and outdent(cursor) then body
# A literal identifier, a variable name or property.
cs.Identifier = (start) -> cs.IDENTIFIER(start)
# All of our immediate values. Generally these can be passed straight
# through and printed to JavaScript.
cs.Literal = (start) ->
  cs.NUMBER(start)\
    or cs.STRING(start)\
    or cs.JS(start)\
    or cs.REGEX(start)\
    or cs.DEBUGGER(start)\
    or cs.UNDEFINED(start)\
    or cs.NULL(start)\
    or cs.BOOL(start)
recAssignable = memo('Assignable')
# Assignment of a variable, property, or index to a value.
cs.Assign = (start) ->
  # FIX: parenthesize (left = ...). The original assigned left the result of
  # the whole and-expression (assignOp_'s return value) under CoffeeScript
  # precedence.
  if (left = recAssignable(start)) and assignOp_(cursor)
    # The right-hand side may follow on the same line, after a TERMINATOR, or
    # in an indented block.
    # it's right to use TERMINATOR here? how about meeting a semicolon?
    # FIX: comment lines cannot interrupt a backslash continuation, and
    # "exp = cs.Expression" was missing its (cursor) call in the INDENT arm.
    if (exp = cs.Expression(cursor))\
    or cs.TERMINATOR(cursor) and (exp = cs.Expression(cursor))\
    or (n = INDENT(cursor)) and (exp = cs.Expression(cursor)) and (outdent = OUTDENT(n)) and outdent(cursor)
      new yy.Assign left, exp
# matcher factory for a symbolic operator token such as : + && || etc.,
# allowing surrounding spaces and line concatenation.
# NOTE(review): literal_ is not defined in this chunk — presumably an alias of
# literal; confirm.
symbolOperator_ =(op) ->
  op = literal_(op)
  () ->
    spacesConcatLine_() and op() and spacesConcatLine_()
assignOp_ = symbolOperator_('=')
# Assignment when it happens within an object literal. The difference from
# the ordinary **cs.Assign** is that these allow numbers and strings as keys.
cs.AssignObj = (start) ->
  # FIX: cs.BlockComment was referenced without being called, so the (always
  # truthy) function object itself was returned for every input.
  if x = cs.BlockComment(start) then return x
  if left = cs.ObjAssignable(start)
    # FIX: wrap(':') builds a matcher; it must be applied at the cursor.
    # NOTE(review): wrap expects a rule function as @item but receives a bare
    # string here (and in several other rules) — confirm the intended helper.
    if wrap(':')(cursor)
      # FIX: parenthesized (exp = ...) so exp holds the Expression node.
      if (exp = cs.Expression(cursor))\
      or INDENT(cursor) and (exp = cs.Expression(cursor)) and OUTDENT(cursor)
        new yy.Assign LOC(1)(new yy.Value(left)), exp, 'object'
    else
      new yy.Value left
# Object-literal keys: identifiers, number/string literals, or @-properties.
cs.ObjAssignable = (start) ->
  # FIX: Number(start)/String(start) invoked the JavaScript global
  # constructors — Number(start) is truthy for any non-zero position — instead
  # of the literal token rules used by cs.Literal.
  cs.Identifier(start)\
  or cs.NUMBER(start)\
  or cs.STRING(start)\
  or cs.ThisProperty(start)
# The **cs.Code** node is the function literal. It's defined by an indented block
# of **cs.Block** preceded by a function arrow, with an optional parameter
# list.
# NOTE(review): "params = cs.ParamList(cursor) and PARAM_END(cursor)" binds as
# params = (ParamList and PARAM_END) under CoffeeScript precedence, so params,
# funcGlyph and body likely receive the wrong values — same pattern as the bug
# fixed in wrap(); confirm and parenthesize.
cs.Code = (start) ->
  if PARAM_START(start) and params = cs.ParamList(cursor) and PARAM_END(cursor) \
  and funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
    new yy.Code params, body, funcGlyph
  else if funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
    new yy.Code [], body, funcGlyph
# CoffeeScript has two different symbols for functions. `->` is for ordinary
# functions, and `=>` is for functions bound to the current value of *this*.
cs.FuncGlyph = (start) ->
  if wrap('->')(start) then 'func'
  # FIX: wrap('=>') was never applied to a position, so (being a function and
  # therefore truthy) the branch was taken whenever '->' failed.
  else if wrap('=>')(start) then 'boundfunc'
# An optional, trailing comma.
# Returns [true] when a comma was consumed, else [false] (wrapped in an array
# so the rule's result itself is always truthy; callers read meetComma[0]).
cs.OptComma = (start) ->
  spaces(start)
  # FIX: char(',') builds a matcher and must be applied at the cursor; the
  # original also fell through and returned [false] unconditionally.
  if char(',')(cursor)
    spaces(cursor)
    [true]
  else
    [false]
# The list of parameters that a function accepts can be of any length.
# Parameters may be separated by commas, TERMINATORs, or an indented sub-list.
# NOTE(review): OUTDENT(cursor) returns a curried matcher without applying it,
# and "param = cs.Param(cursor)" after `and` binds the and-result — confirm
# both against the intended semantics.
cs.ParamList = (start) ->
  if param = cs.Param(start)
    result = [param]
    while 1
      meetComma = cs.OptComma(cursor)
      if cs.TERMINATOR(cursor) and param = cs.Param(cursor) then result.push(param)
      else if INDENT(cursor)
        params = cs.ParamList(cursor)
        for p in params then result.push(p)
        OUTDENT(cursor)
      else if meetComma[0] and param = cs.Param(cursor) then result.push(param)
      else break
    result
# A single parameter in a function definition can be ordinary, or a splat
# that hoovers up the remaining arguments.
# NOTE(review): v is not checked for failure before building yy.Param, and the
# wrap('...')/wrap('=') string-item usage mirrors the concern noted at
# cs.AssignObj — confirm.
cs.Param = (start) ->
  v = cs.ParamVar(start)
  if wrap('...')(cursor) then new yy.Param v, null, on
  else if wrap('=')(cursor) and exp = cs.Expression(cursor) then new yy.Param v, exp
  else new yy.Param v
# Function Parameters: a name, @-property, or a destructuring array/object.
cs.ParamVar = (start) ->
  cs.Identifier(start)\
    or cs.ThisProperty(start)\
    or cs.Array(start)\
    or cs.Object(start)
# A splat that occurs outside of a parameter list.
cs.Splat = (start) ->
  # FIX: parenthesized (exp = ...); the original assigned exp the and-result
  # (wrap's return value) under CoffeeScript precedence.
  if (exp = cs.Expression(start)) and wrap('...')(cursor)
    new yy.Splat exp
# Variables and properties that can be assigned to.
cs.SimpleAssignable = (start) ->
  # FIX: parenthesized the assignments; "value = recValue(start) and ..."
  # bound value/caller to the accessor instead of the base value.
  if (value = recValue(start)) and (accessor = cs.Accessor(cursor)) then value.add accessor
  else if (caller = recInvocation(start)) and (accessor = cs.Accessor(cursor))
    new yy.Value caller, [].concat accessor
  else if thisProp = cs.ThisProperty(start) then thisProp
  else if name = cs.Identifier(start) then new yy.Value name
# Everything that can be assigned to.
# NOTE(review): recSimpleAssignable is not defined in this chunk (only an
# inline memo('SimpleAssignable') appears later) — confirm it exists elsewhere.
cs.Assignable = (start) ->
  recSimpleAssignable(start)\
    or cs.newyyValue(cs.Array)(start)\
    or cs.newyyValue(cs.Object)(start)
# The types of things that can be treated as values -- assigned to, invoked
# as functions, indexed into, named as a class, etc.
cs.Value = (start) ->
  recAssignable(start)\
    or cs.newyyValue(cs.Literal)(start)\
    or cs.newyyValue(cs.Parenthetical)(start)\
    or cs.newyyValue(cs.Range)(start)\
    or cs.This(start)
# The general group of accessors into an object, by property, by prototype
# or by array index or slice.
cs.Accessor = (start) ->
  # FIX: every wrap(...) matcher was tested without being applied to a
  # position (a bare function is always truthy); parenthesized (id = ...) so
  # id holds the Identifier node; replaced the invalid "new[...]" with an
  # array literal; and corrected "new Access new cs.Literal" to the yy.* node
  # constructors.
  if wrap('.')(start) and (id = cs.Identifier(cursor)) then new yy.Access id
  else if wrap('?.')(start) and (id = cs.Identifier(cursor)) then new yy.Access id, 'soak'
  else if wrap('::')(start) and (id = cs.Identifier(cursor))
    [LOC(1)(new yy.Access new yy.Literal('prototype')), LOC(2)(new yy.Access id)]
  else if wrap('?::')(start) and (id = cs.Identifier(cursor))
    [LOC(1)(new yy.Access new yy.Literal('prototype'), 'soak'), LOC(2)(new yy.Access id)]
  else if wrap('::')(start) then new yy.Access new yy.Literal 'prototype'
  else if index = cs.Index(start) then index
# Indexing into an object or array using bracket notation.
# NOTE(review): "val = cs.IndexValue(cursor) and INDEX_END(cursor)" binds val
# to the and-result, the first branch's value is discarded (not chained with
# else), and $2 is a leftover jison placeholder — unfinished port; confirm.
cs.Index = (start) ->
  if INDEX_START(start) and val = cs.IndexValue(cursor) and INDEX_END(cursor) then val
  if INDEX_SOAK(cursor) and cs.Index(cursor) # id?[1]
    yy.extend $2, soak : yes
# The value inside brackets: a full expression, or a slice range.
cs.IndexValue = (start) ->
  if value = cs.Expression(start) then new yy.Index value
  else if slice = cs.Slice(start) then new yy.Slice slice
# In CoffeeScript, an object literal is simply a list of assignments.
# NOTE(review): char('}') builds a matcher that is never applied to a position
# (always truthy), so the empty-object branch is always taken after '{' —
# same bug family as fixed in cs.OptComma; confirm and apply at the cursor.
cs.Object = (start) ->
  if leftBrace = wrap('{')(start)
    spaces(cursor)
    if char('}') then new yy.Obj [], leftBrace.generated
    else if assigns = cs.AssignList(cursor)\
    and cs.OptComma(cursor) and wrap('}')(cursor)
      new yy.Obj assigns, leftBrace.generated
# Assignment of properties within an object literal can be separated by
# comma, as in JavaScript, or simply by newline.
cs.AssignList = (start) ->
  if assign = cs.AssignObj(start)
    result = [assign]
    while 1
      meetComma = cs.OptComma(cursor)
      if cs.TERMINATOR(cursor) and assign = cs.AssignObj(cursor) then result.push(assign)
      else if INDENT(cursor)
        assigns = cs.AssignList(cursor)
        for x in assigns then result.push(x)
        OUTDENT(cursor)
      # FIX: pushed `param` (an undefined name, copy-pasted from cs.ParamList)
      # instead of the freshly parsed `assign`.
      else if meetComma[0] and assign = cs.AssignObj(cursor) then result.push(assign)
      else break
    result
# cs.Class definitions have optional bodies of prototype property assignments,
# and optional references to the superclass. All four pieces (name, extends
# clause, body) are independently optional.
cs.Class = (start) ->
  if CLASS(start)
    if name = cs.SimpleAssignable(cursor)
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class name, sup, body
        else new yy.Class name, sup
      else if body = cs.Block(cursor) then new yy.Class name, null, body
      else new yy.Class name
    else
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class null, sup, body
        else new yy.Class null, sup
      else if body = cs.Block(cursor) then new yy.Class null, null, body
      else new yy.Class
# Ordinary function invocation, or a chained series of calls.
# NOTE(review): $1/$2/$3 are leftover jison placeholders (undefined here), and
# the plain-SUPER branch is tested before SUPER-with-arguments, making the
# latter unreachable — unfinished port; confirm before relying on this rule.
cs.Invocation = (start) ->
  # left recursive
  if m1 = recValue(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  else if m2 = recInvocation(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  if not m1 and not m2
    if SUPER(start)
      new yy.Call 'super', [new yy.Splat new yy.Literal 'arguments']
    else if SUPER(start) and cs.Arguments(cursor)
      new yy.Call 'super', $2
# An optional existence check on a function.
# NOTE(review): emptyword is not defined in this chunk, and the second `if`
# is unreachable when the first returns — confirm.
cs.OptFuncExist = (start) ->
  if emptyword(start) then no
  if FUNC_EXIST(start) then yes
# The list of arguments to a function call.
# NOTE(review): "args = cs.ArgList(cursor) and cs.OptComma(cursor)" binds args
# to the and-result (OptComma's array) under CoffeeScript precedence — confirm
# and parenthesize.
cs.Arguments = (start) ->
  if CALL_START(start)
    if args = cs.ArgList(cursor) and cs.OptComma(cursor)
      args
    else result = []
    if CALL_END(cursor) then result
# A reference to the *this* current object.
# NOTE(review): wrap('') matches the empty string — both occurrences look like
# placeholders for the '@' sigil; confirm the intended literal.
cs.This = (start) ->
  if THIS(start) then new yy.Value new yy.Literal 'this'
  if wrap('')(start) then new yy.Value new yy.Literal 'this'
# A reference to a property on *this*.
# NOTE(review): $2 is a leftover jison placeholder and the Identifier result
# is not captured — unfinished port; confirm.
cs.ThisProperty = (start) ->
  if wrap('')(start) and cs.Identifier(cursor)
    new yy.Value LOC(1)(new yy.Literal('this')), [LOC(2)(new yy.Access($2))], 'this'
# The array literal.
# NOTE(review): $2 is a leftover jison placeholder — the ArgList result is not
# captured into it; unfinished port, confirm.
cs.Array = (start) ->
  if wrap('[')(start)
    if cs.ArgList(cursor) and cs.OptComma(cursor)
      result = new yy.Arr $2
    else result = new yy.Arr []
    if wrap(']')(cursor) then result
# Inclusive and exclusive range dots.
cs.RangeDots = (start) ->
  if wrap('..')(start) then 'inclusive'
  else if wrap('...')(start) then 'exclusive'
# The CoffeeScript range literal, e.g. [a..b] / [a...b].
# NOTE(review): $2/$3/$4 are leftover jison placeholders — the sub-results are
# not captured; unfinished port, confirm.
cs.Range = (start) ->
  # FIX: the original was missing the "and" before wrap(']') (a syntax error)
  # and never applied the closing-bracket matcher to a position.
  if wrap('[')(start) and cs.Expression(cursor) and cs.RangeDots(cursor) and cs.Expression(cursor) and wrap(']')(cursor)
    new yy.Range $2, $4, $3
# cs.Array slice literals: expr..expr, expr.., ..expr, or bare dots.
# NOTE(review): $1/$2/$3 are leftover jison placeholders, and the four `if`s
# are not chained with else, so earlier results are discarded — unfinished
# port; confirm.
cs.Slice = (start) ->
  # don't use recExpression here
  if cs.Expression(start) and cs.RangeDots(cursor) and cs.Expression(cursor)
    new yy.Range $1, $3, $2
  if cs.Expression(start) and cs.RangeDots(cursor)
    new yy.Range $1, null, $2
  if cs.RangeDots(start) and cs.Expression(cursor) then new yy.Range null, $2, $1
  if cs.RangeDots(start) then new yy.Range null, null, $1
# The **cs.ArgList** is both the list of objects passed into a function call,
# as well as the contents of an array literal
# (i.e. comma-separated expressions). Newlines work as well.
# NOTE(review): the rule calls cs.ArgList(start) directly (unmemoized left
# recursion), which recurses without consuming input, and $1/$2/$3/$4 are
# leftover jison placeholders — unfinished port; confirm.
cs.ArgList = (start) ->
  if cs.Arg(start) then [$1]
  else if cs.ArgList(start) and wrap(',') and cs.Arg(cursor) then $1.concat $3
  else if cs.ArgList(start) and cs.OptComma(cursor) and cs.TERMINATOR(cursor) and cs.Arg(cursor) then $1.concat $4
  else if INDENT(start) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor) then $2
  else if cs.ArgList(start) and cs.OptComma(cursor) and INDENT(cursor) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor)
    $1.concat $4
# Valid arguments are Blocks or Splats.
cs.Arg = (start) -> cs.Expression(start) or cs.Splat(start)
# Just simple, comma-separated, required arguments (no fancy syntax). We need
# this to be separate from the **cs.ArgList** for use in **cs.Switch** blocks, where
# having the newlines wouldn't make sense.
cs.SimpleArgs = (start) ->
  if exp = cs.Expression(start)
    result = [exp]
    while 1
      # FIX: wrap(',') builds a matcher and must be applied at the cursor, and
      # the loop had no "else break", so it could never terminate normally.
      if wrap(',')(cursor)
        if exp = cs.Expression(cursor) then result.push(exp)
        else return
      else break
    result
# The variants of *try/catch/finally* exception handling blocks.
# NOTE(review): "test = TRY(start) and cs.Block(cursor)" binds test to the
# and-result, i.e. the Block node (which happens to be the try body) — confirm
# the naming/intent; catch_ is expected to be the [variable, block] pair from
# cs.Catch.
cs.Try = (start) ->
  test = TRY(start) and cs.Block(cursor)
  if test
    if cs.Catch(cursor) and catch_ = cs.Block(cursor)
      if FINALLY(cursor) and final = cs.Block(cursor)
        new yy.Try test, catch_[0], catch_[1], final
      else new yy.Try test, catch_[0], catch_[1]
    else if FINALLY(cursor) and final = cs.Block(cursor)
      new yy.Try test, null, null, final
    else new yy.Try test
# A catch clause names its error and runs a block of code.
# Returns a [errorVariable, block] pair (errorVariable may be null).
cs.Catch = (start) ->
  if CATCH(start)
    # FIX: parenthesized the assignments ("vari = Identifier and body = Block"
    # bound vari to the and-result, yielding [block, block]) and chained the
    # first branch with else so its result is not discarded.
    if (vari = cs.Identifier(cursor)) and (body = cs.Block(cursor)) then [vari, body]
    else if obj = cs.Object(cursor)
      if body = cs.Block(cursor) then [LOC(2)(new yy.Value(obj)), body]
    else if body = cs.Block(cursor) then [null, body]
# cs.Throw an exception object.
# NOTE(review): $2/$3 are leftover jison placeholders — the Expression/Body
# results are not captured; unfinished port, confirm.
cs.Throw = (start) ->
  if THROW(start) and cs.Expression(cursor) then new yy.Throw $2
# cs.Parenthetical expressions. Note that the **cs.Parenthetical** is a **cs.Value**,
# not an **cs.Expression**, so if you need to use an expression in a place
# where only values are accepted, wrapping it in parentheses will always do
# the trick.
# NOTE(review): cs.Body(start) is given the position of '(' itself, not the
# position after it — confirm whether cursor was intended.
cs.Parenthetical = (start) ->
  if wrap('(')(start)
    if body = cs.Body(start)
      if wrap(')')(cursor) then new yy.Parens body
    if INDENT(start) and cs.Body(cursor) and OUTDENT(cursor)
      if wrap(')')(cursor) then new yy.Parens $3
# The condition portion of a while loop: WHILE/UNTIL expression with an
# optional WHEN guard.
# NOTE(review): the UNTIL branch still uses $2/$4 jison placeholders and never
# sets invert from the parsed values — unfinished port; confirm.
cs.WhileSource = (start) ->
  if WHILE(start)
    if test = cs.Expression(cursor)
      if WHEN(cursor) and value = cs.Expression(cursor)
        new yy.While test, guard: value
      else new yy.While $2
  else if UNTIL(start)
    if test = cs.Expression(cursor)
      if WHEN(cursor) and value = cs.Expression(cursor)
        new yy.While $2, invert: true, guard: $4
      else new yy.While $2, invert: true
# The while loop can either be normal, with a block of expressions to execute,
# or postfix, with a single expression. There is no do..while.
# NOTE(review): $1/$2 are leftover jison placeholders (undefined at runtime)
# and still need porting.
cs.While = (start) ->
  if exp = recExpression(start) and cs.WhileSource(cursor)
    return $2.addBody LOC(1) yy.Block.wrap([$1])
  # FIX: "retturn" typo would have raised a ReferenceError at runtime
  if exp then return exp
  else if cs.WhileSource(start) and cs.Block(cursor) then $1.addBody $2
  else if cs.Statement(start) and cs.WhileSource(cursor) then $2.addBody LOC(1) yy.Block.wrap([$1])
  else if body = cs.Loop(start) then body
# The bare `loop` keyword: an infinite while-true over a block or a single
# postfix expression.
cs.Loop = (start) ->
  if LOOP(start)
    if body = cs.Block(cursor) then new yy.While(LOC(1) new yy.Literal 'true').addBody body
    else if body = cs.Expression(cursor)
      new yy.While(LOC(1) new yy.Literal 'true').addBody LOC(2) cs.Block.wrap [body]
# cs.Array, object, and range comprehensions, at the most generic level.
# Comprehensions can either be normal, with a block of expressions to execute,
# or postfix, with a single expression.
# NOTE(review): "action = recExpression(start) and test = ..." binds action to
# the and-result, and the first two `if`s are not chained with else — confirm.
cs.For = (start) ->
  if action = recExpression(start) and test = cs.ForBody(cursor) then new yy.For action, test
  if action then return action
  if action = cs.Statement(start) and test = cs.ForBody(cursor) then new yy.For action, test
  else if test = cs.ForBody(start) and action = cs.Block(cursor) then new yy.For action, test
# The FOR header: either a range, or variables plus a for-source.
# NOTE(review): "start = cs.ForStart(start)" shadows the position parameter
# with the ForVariables result — intentional but easy to misread; also
# "range = FOR(start) and cs.Range(cursor)" binds range to the and-result.
cs.ForBody = (start) ->
  if range = FOR(start) and cs.Range(cursor) then source: LOC(2) new yy.Value(range)
  else if start = cs.ForStart(start) and src = cs.ForSource(cursor)
    src.own = start.own; src.name = start[0]; src.index = start[1];
    src
# FOR keyword plus optional OWN plus the loop variables.
cs.ForStart = (start) ->
  if FOR(start)
    if OWN(cursor)
      if vari = cs.ForVariables(cursor) then vari.own = yes; vari
    else if vari = cs.ForVariables(cursor) then vari
# An array of all accepted values for a variable inside the loop.
# This enables support for pattern matching.
# NOTE(review): the cs.Array branch references `arr`, which is never assigned —
# the result of cs.Array(start) is dropped; confirm and capture it.
cs.ForValue = (start) ->
  if id = cs.Identifier(start) then id
  else if prop = cs.ThisProperty(start) then prop
  else if cs.Array(start) then new yy.Value arr
  else if obj = cs.Object(start) then new yy.Value obj
# An array or range comprehension has variables for the current element
# and (optional) reference to the current index. Or, *key, value*, in the case
# of object comprehensions.
# NOTE(review): the pair branch returns [v1, v3] but only `v` and `v3` are
# assigned — v1 is undefined; confirm ([v, v3] was probably intended).
cs.ForVariables = (start) ->
  if v = cs.ForValue(start)
    if wrap(',')(cursor) and v3 = cs.ForValue(cursor) then [v1, v3]
    else [v]
# The source of a comprehension is an array or object with an optional guard
# clause. cs.If it's an array comprehension, you can also choose to step through
# in fixed-size increments.
# NOTE(review): both the FORIN and FOROF branches set "object: yes" on the
# guarded no-step case; in grammar.coffee only the FOROF (object) form carries
# object: yes — looks copy-pasted, confirm. The two `if`s are also not chained
# with else.
cs.ForSource = (start) ->
  if FORIN(start) and source = cs.Expression(cursor)
    if WHEN(cursor) and guard = cs.Expression(cursor)
      if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
      else source: source, guard: guard, object: yes
    else source: source
  if FOROF(start) and source = cs.Expression(cursor)
    if WHEN(cursor) and guard = cs.Expression(cursor)
      if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
      else source: source, guard: guard, object: yes
    else source: source
# A switch statement: either bare (no subject expression) or over a subject.
cs.Switch = (start) ->
  if SWITCH(start)
    if INDENT(cursor)
      if whens = cs.Whens(cursor)
        # FIX: the original "if ELSE(cursor) cs.Block(cursor) new yy.Switch..."
        # was not valid CoffeeScript, else_ was never assigned, and the node
        # was discarded because OUTDENT(cursor) was the branch's last
        # expression.
        node =
          if ELSE(cursor) and (else_ = cs.Block(cursor)) then new yy.Switch null, whens, else_
          else new yy.Switch null, whens
        OUTDENT(cursor)
        node
    else if test = cs.Expression(cursor)
      if INDENT(cursor)
        if whens = cs.Whens(cursor)
          # FIX: same repairs; also keep the parsed subject in the node (the
          # original passed null in one arm and referenced `test` in the other
          # branch where it was never set).
          node =
            if ELSE(cursor) and (else_ = cs.Block(cursor)) then new yy.Switch test, whens, else_
            else new yy.Switch test, whens
          OUTDENT(cursor)
          node
# keyword matcher: 'switch' not followed by an identifier letter
SWITCH = (start) -> switch_word(start) and not identifierLetter_()
switch_word = literal('switch')
# One or more "when" arms of a switch; each arm is a [args, block] pair.
# NOTE(review): `may` is not defined in this chunk — presumably an optional-
# match combinator; confirm.
cs.Whens = (start) ->
  # FIX: the outer `if` inside "while 1" had no else, so a failing
  # LEADING_WHEN spun forever; it also re-matched at @start on every
  # iteration instead of advancing. Parenthesized the assignments so args and
  # action hold their own parse results.
  result = []
  pos = start
  while 1
    if LEADING_WHEN(pos)
      if (args = cs.SimpleArgs(cursor)) and (action = cs.Block(cursor)) and may(cs.TERMINATOR)(cursor)
        result.push([args, action])
        pos = cursor
      else return result
    else return result
# The most basic form of *if* is a condition and an action. The following
# if-related grammar are broken up along these lines in order to avoid
# ambiguity.
# NOTE(review): $1/$3/$4/$5 are leftover jison placeholders, the second `if`
# recurses into cs.IfBlock(start) without consuming input, and "test = ... and
# body = ..." binds test to the and-result — unfinished port; confirm.
cs.IfBlock = (start) ->
  if IF(start) and test = cs.Expression(cursor) and body = cs.Block(cursor)
    new yy.If test, body, type: $1
  if cs.IfBlock(start) and ELSE(cursor) and IF(cursor) and cs.Expression(cursor) and cs.Block(cursor)
    $1.addElse new yy.If $4, $5, type: $3
# The full complement of *if* expressions, including postfix one-liner
# *if* and *unless*.
# NOTE(review): same placeholder/chaining concerns as cs.IfBlock.
cs.If = (start) ->
  if if_ = cs.IfBlock(start)
    if ELSE(cursor) and elseBody = cs.Block(cursor) then if_.addElse elseBody
    else if_
  if cs.Statement(start) and POST_IF(cursor) and cs.Expression(cursor)
    new yy.If $3, LOC(1)(cs.Block.wrap [$1]), type: $2, statement: true
# Arithmetic and logical operators, working on one or more operands.
# Here they are grouped by order of precedence. The actual precedence grammar
# are defined at the bottom of the page. It would be shorter if we could
# combine most of these grammar into a single generic *Operand OpSymbol Operand*
# -type rule, but in order to make the precedence binding possible, separate
# grammar are necessary.
# NOTE(review): this rule is an unfinished port from grammar.coffee — $1/$2/$3/$4
# are jison action placeholders (undefined here); _spaces, UNARY, MATH, SHIFT,
# COMPARE, LOGIC, RELATION and COMPOUND_ASSIGN are not defined in this chunk;
# the COMPARE branch calls cs.Expression without arguments, and one
# COMPOUND_ASSIGN branch passes `start` where `cursor` is expected. Confirm
# each branch before relying on it.
cs.Operation = (start) ->
  if m = memo('Expression')(start)
    if _spaces('?')(cursor) then new yy.Existence $1
    else if wrapadd(cursor) and cs.Expression(cursor) then return new yy.Op '+' , $1, $3
    else if wrapsub(cursor) and cs.Expression(cursor) then return new yy.Op '-' , $1, $3
    else if MATH(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    else if SHIFT(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    else if COMPARE(cursor) and cs.Expression then return new yy.Op $2, $1, $3
    else if LOGIC(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
    else if RELATION(cursor) and cs.Expression(cursor)
      if $2.charAt(0) is '!' then return new yy.Op($2[1..], $1, $3).invert()
      else return new yy.Op $2, $1, $3
  else if simple = memo('SimpleAssignable')(start)
    if COMPOUND_ASSIGN(cursor) and cs.Expression(start) then return new yy.Assign $1, $3, $2
    else if COMPOUND_ASSIGN(cursor) and INDENT(cursor) and cs.Expression(cursor) and OUTDENT(cursor)
      return new yy.Assign $1, $4, $2
    else if COMPOUND_ASSIGN(cursor) and cs.TERMINATOR(cursor) and cs.Expression(cursor)
      return new yy.Assign $1, $4, $2
    else if EXTENDS(cursor) and cs.Expression(cursor) then new yy.Extends $1, $3
  if op = UNARY(start) and exp = cs.Expression(cursor) then new yy.Op op , exp
  else if wrap('-')(start) and exp = cs.Expression(cursor) then new yy.Op '-', exp, prec: 'UNARY'
  else if wrap('+')(start) and exp = cs.Expression(cursor) then new yy.Op '+', exp, prec: 'UNARY'
  else if wrap('++')(start) and cs.SimpleAssignable(cursor) then new yy.Op '++', $2
  else if wrapdec(start) and cs.SimpleAssignable(cursor) then new yy.Op '--', $2
  else if cs.SimpleAssignable(start) and wrap('--')(cursor) then new yy.Op '--', $1, null, true
  else if cs.SimpleAssignable(start) and wrap('++')(cursor) then new yy.Op '++', $1, null, true
# pre-built operator matchers shared by the branches above
wrapinc = wrap('++'); wrapdec = wrap('--'); wrapadd = wrap('+'); wrapsub = wrap('-');
# lift a rule's result into a yy.Value node (when the rule matches)
cs.newyyValue = (item) -> (start) ->
  if x = item(start) then new yy.Value(x)
# Statement terminator: skips trailing whitespace, an optional ';',
# a trailing line comment plus following blank/comment lines.
# skip whitespaces, line concatentation and any ';'
# should include line comment
cs.TERMINATOR = (start) ->
  cursor = start
  spaces_()
  if text[cursor]==';'
    cursor++; meetSemicolon = true
    spaces_()
  if lineComment_()
    skipEmptyLineAndCommentLines_()
  else if concatLine_() then return meetSemicolon
  # NOTE(review): when no ';' was seen and concatLine_ succeeds, the returned
  # `meetSemicolon` is undefined (falsy) — confirm that a bare line
  # concatenation is really meant to fail the terminator.
  true
# Skip spaces/tabs from the global cursor onward, leaving `cursor` on the
# first non-whitespace character (or at end of text). Always returns true.
# BUGFIX: the previous version incremented the cursor before testing the
# character, so it always consumed the first non-whitespace character too.
spaces_ = () ->
  while (c = text[cursor]) and (c==' ' or c=='\t')
    cursor++
  return true
# skip whitespaces, line concatenation, but don't skip ';' or line comment
# used in cases that should have left parts of a cs.Cup to be parsed.
# used after RETURN after cs.Return
spacesConcatLine_ = () ->
  while 1
    c = text[cursor]
    # BUGFIX: the whitespace branch used `continue` without advancing the
    # cursor, which looped forever on the first space or tab.
    # (Unlike TERMINATOR, this deliberately stops at ';'.)
    if c==' ' or c=='\t' then cursor++; continue
    # in the middle of line, is it right to eat line comment?
    if lineComment_() then return # fail and backtrack, maybe other choice which have indent/outdent
    else if concatLine_() then return true # concatLine should bring the left part for the cs.Cup
    else break
  true
# line comment, which can begin at any position, is different from block comment
# Consumes a '#' line comment through the end of line; returns the
# skipToEndOfLine_ result on a comment, undefined when there is no '#'.
lineComment_ = () ->
  if text[cursor] is '#'
    if text[cursor+1] is '#' and text[cursor+2] is '#'
      # NOTE(review): this ParseError is constructed but never thrown (other
      # sites use `throw new ParseError...`), so the '###' is then consumed
      # as an ordinary line comment — confirm a `throw` is missing here.
      new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
    cursor++
    skipToEndOfLine_()
# skip lines which are completely whitespace, or a line comment after some
# whitespace, until reaching a line which includes effective code.
# NOTE(review): `newline_` is not defined anywhere in this chunk — confirm
# it exists elsewhere in the file.
skipEmptyLineAndCommentLines_ = () ->
  while 1
    spaces_()
    lineComment_()
    if not newline_() then break
  if text[cursor]=='\\'
    # NOTE(review): constructed but not thrown — probably `throw` is missing.
    new ParseError(cursor, 'should not begin a line with \\')
  # else if text[cursor] ==';'
  #   new ParseError(cursor, 'should not begin a line with ;')
  return true
# Consume every character up to and including the line terminator
# ('\r\n' or '\n'); also succeeds at end of input. A lone '\r' is treated
# as an ordinary character. Used by lineComment_.
skipToEndOfLine_ = () ->
  loop
    ch = text[cursor]
    return true if ch is undefined
    if ch is '\n'
      cursor++
      return true
    if ch is '\r' and text[cursor+1] is '\n'
      cursor += 2
      return true
    cursor++
# concatenate line by \, skip until first nonwhitespace in next line, check error cases.
# NOTE(review): `skipHeadWhiteSpaces_` is not defined in this chunk (only
# `skipHeadSpaces_` is, with different semantics) — confirm it exists.
# NOTE(review): the ParseErrors below are constructed but never thrown.
# NOTE(review): `lineno` is assigned here without an enclosing-scope
# declaration, so CoffeeScript makes it local to this function — the
# increment has no external effect (positions are precomputed anyway).
concatLine_ = () ->
  if text[cursor]=='\\'
    if text[cursor+1]=='\r' and text[cursor+2]=='\n'
      cursor += 3; lineno++; #column = 0
    else if text[cursor+1]=='\n' then cursor += 2; lineno++; #column = 0
    else new ParseError(cursor, "meet a line concatenation symbol ('\\') which is not at the end of line.")
    skipHeadWhiteSpaces_()
    c = text[cursor]
    if c is '\\' or c is '#' or c is ';'
      new ParseError(cursor, "The next line of line concatenation symbol \\ should not begin with #{c}")
    else if c is '\r' or c is '\n'
      new ParseError(cursor, "The next line of line concatenation symbol \\ should not be a empty line.")
# Terminal token rules. Only IDENTIFIER is actually implemented (via the
# `identifier` helper from parserutil); the others still reference the
# Jison placeholder $1 and are unfinished.
cs.IDENTIFIER = (start) ->
  if id = identifier(start) then new yy.Literal id
# NOTE(review): $1 is undefined in all of the following rules — each needs a
# real lexer for its token class before it can return a meaningful node.
cs.NUMBER = (start)-> new yy.Literal $1
cs.STRING = (start) -> new yy.Literal $1
cs.JS = (start) -> new yy.Literal $1
cs.REGEX = (start) -> new yy.Literal $1
cs.DEBUGGER = (start) -> new yy.Literal $1
cs.UNDEFINED = (start) -> new yy.Undefined
cs.NULL = (start) -> new yy.Null
cs.BOOL = (start) -> new yy.Bool $1
# Precedence
# Operators at the top of this list have higher precedence than the ones lower down.
# Following these grammar is what makes `2 + 3 * 4` parse as 2 + (3 * 4) and not (2 + 3) * 4
# NOTE(review): this table is never read anywhere in this chunk — it appears
# to be carried over from the Jison grammar for reference only.
operators = [
  ['left', '.', '?.', '::', '?::']
  ['left', 'CALL_START', 'CALL_END']
  ['nonassoc', '++', '--']
  ['left', '?']
  ['right', 'UNARY']
  ['left', 'MATH']
  ['left', '+', '-']
  ['left', 'SHIFT']
  ['left', 'RELATION']
  ['left', 'COMPARE']
  ['left', 'LOGIC']
  ['nonassoc', 'INDENT', 'OUTDENT']
  ['right', '=', ':', 'COMPOUND_ASSIGN', 'RETURN', 'THROW', 'EXTENDS']
  ['right', 'FORIN', 'FOROF', 'BY', 'WHEN']
  ['right', 'IF', 'ELSE', 'FOR', 'WHILE', 'UNTIL', 'LOOP', 'SUPER', 'CLASS']
  ['right', 'POST_IF']
]
#['__bind', '__extends', '__hasProp', '__indexOf', '__slice', 'break', 'by', 'case',
# 'catch', 'class', 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'else',
# 'enum', 'export', 'extends', 'false', 'finally', 'for', 'function', 'if', 'implements',
# 'import', 'in', 'instanceof', 'interface', 'let', 'loop', 'native', 'new', 'null', 'of',
# 'package', 'private', 'protected', 'public', 'return', 'static', 'super', 'switch',
# 'then', 'this', 'throw', 'true', 'try', 'typeof', 'undefined', 'unless', 'until',
# 'var', 'void', 'when', 'while', 'with', 'yield'] =
# for kw in []
# do (kw_word = literal(kw)) ->
# (start) -> kw_word(start) and not identifierLetter_() | true | # ### begin original comment by jashkenas
# The CoffeeScript parser is generated by [Jison](http://github.com/zaach/jison)
# from this grammar file. Jison is a bottom-up parser generator, similar in
# style to [Bison](http://www.gnu.org/software/bison), implemented in JavaScript.
# It can recognize [LALR(1), LR(0), SLR(1), and LR(1)](http://en.wikipedia.org/wiki/LR_grammar)
# type grammars. To create the Jison parser, we list the pattern to match
# on the left-hand side, and the action to take (usually the creation of syntax
# tree nodes) on the right. As the parser runs, it
# shifts tokens from our token stream, from left to right, and
# [attempts to match](http://en.wikipedia.org/wiki/Bottom-up_parsing)
# the token sequence against the grammar below. When a match can be made, it
# reduces into the [nonterminal](http://en.wikipedia.org/wiki/Terminal_and_nonterminal_symbols)
# (the enclosing name at the top), and we proceed from there.
#
# If you run the `cake build:parser` command, Jison constructs a parse table
# from our grammar and saves it into `lib/parser.js`.
# ### end original comment by jashkenas
# The only dependency is on the **Jison.Parser**.
# {Parser} = require 'jison' # the only dependency is not needed any more.
# Jison DSL
# ---------
{identifier, keyword} = require './parserutil'
yy = require '../lib/coffee-script/nodes'
# Mutable module-level parser state, (re)initialized by exports.parse.
text = '' # the text which is being parsed; could be any sequence, not only strings
textLength = 0 # the length of text
cursor = 0 # the current position of parsing, use text[cursor] to get current character in parsed stream
tabWidth = 4 # the tab width, when meet a tab, column += tabWidth
# the current currentIndentWidth, initial value is 0
# when indenting, it increases and is recorded and used as parameter of OUTDENT, to verify outdent
currentIndentWidth = 0
# minimal value for one indent
minIndentWidth = 2
# when blockMode is true, parse indent block, otherwise parse a single line block
blockMode = true
atBeginOfLine = true # set by the Cup edge helpers when a newline was consumed
parseCache = {} # {tag+start: [result, cursor]}, memorized parser result
symbolToTagMap = {} # {symbol: tag}, from rule symbol map to a shorter memo tag, for memory efficiency
tags = {} # {tag: true}, record tags that have been used to avoid conflict
symbolToParentsMap = {} # {symbol:[parent...]}, the map from symbol to all of the parents of left recursive symbols
baseRules = {} # {symbol: rule's function}, unwrapped rules saved by the memoize/recursive installers
# parse @data from @start with the rule function @root
# Entry point: resets all module-level parser state, registers memo tags and
# the left-recursion dependency graph, wires the memoizing/recursive rule
# wrappers into `grammar`, precomputes line/column data, then parses.
# NOTE(review): the `root` parameter is never used — parsing always starts
# at grammar.Root(0) — and the `typeof start` test below reads a variable
# that is not a parameter (CoffeeScript hoists the local assigned inside the
# branch, so it is undefined and the branch is dead). Looks like an
# unfinished optional-argument shuffle — TODO confirm intended signature.
exports.parse = (data, root, options) ->
  o = options or {}
  if typeof start == 'object' then start = 0; options = start
  text = data
  textLength = text.length
  cursor = o.start or 0
  tabWidth = o.tabWidth or 4
  parseCache = {}
  baseRules = {}
  symbolToParentsMap = {}
  memoNames = ['Expression', 'Body', 'Cup', 'Block', 'Invocation', 'Value', 'Assignable',
    'SimpleAssignable', 'For', 'If', 'Operation']
  for symbol in memoNames then setMemoTag(symbol)
  addLeftRecursiveParentChildrens(
    Expression: ['Invocation', 'Value', 'Operation', 'Invocation', 'Assign', 'While', 'For'],
    Value: ['Assignable'],
    Assignable: ['SimpleAssignable'],
    SimpleAssignable: ['Value', 'Invocation'],
    Assign: ['Assignable'],
    Invocation: ['Value', 'Invocation'],
    For: ['Expression'],
    If: ['Expression'],
    Operation: ['Expression', 'SimpleAssignable']
  )
  setRecursiveRules(grammar)
  setMemorizeRules(grammar, ['Body', 'Cup', 'Block', 'Statement'])
  generateLinenoColumn()
  grammar.Root(0)
lineColumnList = [] # position -> [lineno, column]; filled by generateLinenoColumn
# Precompute the [line, column] pair for every character position so the
# matchers never have to track line/column incrementally. Handles '\r\n'
# and '\n' line endings; tabs advance the column by tabWidth.
generateLinenoColumn = () ->
  i = 0
  lineno = column = 0
  while i<textLength
    c = text[i]
    if c is '\r'
      lineColumnList[i++] = [lineno, column]
      if text[i] is '\n'
        lineColumnList[i++] = [lineno, column]
      lineno++; column = 0
    else if c is '\n'
      lineColumnList[i++] = [lineno, column]
      lineno++; column = 0
    else if c is '\t'
      lineColumnList[i++] = [lineno, column]
      column += tabWidth
    else
      lineColumnList[i++] = [lineno, column]
      column++
  # one extra entry so lookups at text.length (end of input) stay in bounds
  lineColumnList[i] = [lineno+1, 0]
# some utilities used by the parser
# on succeed any matcher should not return a value which is not null or undefined, except the root symbol.
# set a shorter start part of symbol as the tag used in parseCache
# Picks the shortest prefix of @symbol not already claimed in `tags`, so
# distinct symbols never share parseCache entries.
setMemoTag = (symbol) ->
  # BUGFIX: the old test `hasOwnProperty.call(tags, ...) in tags` compared a
  # boolean against `tags` with the array `in` operator and was always false,
  # so every symbol got a one-character tag and colliding symbols corrupted
  # each other's memo entries. Also bound i by the symbol length so the
  # search always terminates.
  i = 1
  i++ while i < symbol.length and Object::hasOwnProperty.call(tags, symbol.slice(0, i))
  tag = symbol.slice(0, i)
  symbolToTagMap[symbol] = tag
  tags[tag] = true
# Wrap each named rule in `grammar` with its memoizing version, stashing
# the unwrapped rule in `baseRules` first (memorize reads it from there).
setMemorizeRules = (grammar, symbols) ->
  symbols.map (sym) ->
    baseRules[sym] = grammar[sym]
    grammar[sym] = memorize(sym)
# Replace every left-recursive symbol's rule (every key of
# symbolToParentsMap) with its fixed-point `recursive` wrapper, keeping
# the raw rule in `baseRules`.
setRecursiveRules = (grammar) ->
  for sym of symbolToParentsMap
    baseRules[sym] = grammar[sym]
    grammar[sym] = recursive(sym)
# Record direct left-recursive parent -> child links in the global
# symbolToParentsMap: each child symbol maps to the list of its parents.
# Self-links and duplicates are skipped.
addLeftRecursiveParentChildrens = (parentChildrens...) ->
  for mapping in parentChildrens
    for parent, children of mapping
      for child in children
        parents = (symbolToParentsMap[child] ?= [])
        unless parent is child or parent in parents
          parents.push parent
# For each cycle of symbols, link every member to its successor (wrapping
# around at the end of the list) as a left-recursive parent in
# symbolToParentsMap; self-links and duplicates are skipped.
addLeftRecursiveCircles = (recursiveCircles...) ->
  for circle in recursiveCircles
    for sym, idx in circle
      parent = circle[(idx + 1) % circle.length]
      parents = (symbolToParentsMap[sym] ?= [])
      if parent isnt sym and parent not in parents
        parents.push parent
# make @symbol a left recursive symbol, which means to wrap baseRules[symbol] with recursive,
# when recursive(symbol)(start) is executed, all of the left recursive rules which @symbol
# depends on will be computed and memorized, until no change exists (a "grow the seed"
# fixed-point iteration over the dependency agenda).
recursive = (symbol) ->
  map = symbolToParentsMap
  tag = symbolToTagMap[symbol]
  agenda = [] # dynamic list of all left recursive symbols which depend on @symbol
  # transitively collect every (direct or indirect) parent into the agenda
  addParent = (parent) ->
    agenda.unshift(parent)
    parents = map[parent]
    if parents then for parent in parents
      if parent not in agenda
        agenda.unshift(parent)
        addParent(parent)
  addParent(symbol)
  (start) ->
    hash0 = tag+start
    m = parseCache[hash0]
    if m then cursor = m[1]; return m[0]
    while agenda.length # exist any symbols which depend on the changed result?
      # NOTE(review): this overwrites the closed-over `symbol` parameter, and
      # `hash` below is built from the ORIGINAL symbol's `tag`, not from the
      # agenda symbol being recomputed — so every agenda symbol shares one
      # cache slot per position. Looks like it should use
      # symbolToTagMap[symbol]; TODO confirm.
      symbol = agenda.pop()
      hash = tag+start
      m = parseCache[hash]
      if not m then m = parseCache[hash] = [undefined, start]
      rule = baseRules[symbol]
      changed = false
      # keep re-running the raw rule while it produces a new/longer result
      while 1
        if (result = rule(start)) and (result isnt m[0] or cursor isnt m[1])
          parseCache[hash] = m = [result, cursor]
          changed = true
        else break
      # if any new result exists, recompute the symbols which may depend on the new result.
      if changed then for parent in map[symbol]
        if parent not in agenda then agenda.push parent
    m = parseCache[hash0]
    cursor = m[1]
    m[0]
# Memoize the rule for @symbol (non-left-recursive symbols only; a
# left-recursive symbol must be wrapped by recursive(symbol) instead).
# The cache entry stores both the parse result and the cursor reached,
# and the cursor is restored on every cache hit.
exports.memorize = memorize = (symbol) ->
  tag = symbolToTagMap[symbol]
  rule = baseRules[symbol]
  (start) ->
    key = tag + start
    if cached = parseCache[key]
      cursor = cached[1]
      return cached[0]
    result = rule(start)
    parseCache[key] = [result, cursor]
    result
# Pure cache lookup for @symbol at @start — never computes anything.
# On a hit the recorded cursor is restored and the stored result returned;
# on a miss the result is undefined.
exports.memo = memo = (symbol) ->
  tag = symbolToTagMap[symbol]
  (start) ->
    entry = parseCache[tag + start]
    return unless entry
    cursor = entry[1]
    entry[0]
# Sequence combinator: run each matcher in order (each one advances the
# global cursor itself), fail with the first falsy result, otherwise return
# the last matcher's result — the short-circuit equivalent of
#   exps[0](start) and exps[1](cursor) ... and exps[exps.length-1](cursor)
andp = (exps...) -> (start) ->
  cursor = start
  result = undefined
  for matcher in exps
    result = matcher(cursor)
    return result unless result
  result
# Alternative combinator: try each matcher at the SAME start position and
# return the first truthy result — the short-circuit equivalent of
#   exps[0](start) or exps[1](start) ... or exps[exps.length-1](start)
orp = (exps...) -> (start) ->
  result = undefined
  for matcher in exps
    result = matcher(start)
    return result if result
  result
# Negation combinator. Rarely worth using directly — `notp(exp)(start)` is
# just a slower, noisier spelling of `not exp(start)`; it exists only so
# negation can be composed with the other combinators.
notp = (exp) ->
  (start) -> !exp(start)
# Consume and return the character at the cursor. Line/column bookkeeping
# was removed because positions are precomputed by generateLinenoColumn
# (the dead incremental version is kept below for reference).
next = () ->
  return text[cursor++]
#   c = text[cursor++]
#   if c is '\r'
#     if text[cursor] is '\n' then c = text[cursor++]
#     lineno++; column = 0
#   else if c is '\n' then lineno++; column = 0
#   return c
# advance the cursor by @n characters (column tracking removed, see above)
step = (n=1) -> cursor += n; # column += n
# match one character
# Matcher for a single literal character @c; on success the cursor is left
# just past it and the character itself is returned.
char = (c) -> (start) ->
  # BUGFIX(review): dropped a call to `setlinecolumen()`, which is not
  # defined anywhere visible and raised a ReferenceError on every use;
  # line/column data is precomputed by generateLinenoColumn, so nothing
  # needs to replace it — confirm it is not defined elsewhere in the file.
  cursor = start
  if text[cursor++]==c then cursor = start+1; return c
# Matcher for an exact string: returns true and advances the cursor past the
# match. (Line/column data is precomputed, so a newline inside the literal
# can no longer corrupt position tracking — the old caveat is obsolete.)
literal = (string) -> (start) ->
  end = start + string.length
  if string == text.slice(start, end)
    cursor = end
    true
# zero or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces.
# the whitespaces width+1 is returned as result (+1 to avoid 0 as the result, which is a falsy value)!!!
# newline is not included!!!
spaces = (start) ->
  # BUGFIX: the cursor was incremented before the whitespace test, so it
  # always ended up one character PAST the first non-whitespace character,
  # swallowing the first character of whatever token followed.
  n = 0
  cursor = start
  while 1
    c = text[cursor]
    if c==' ' then n++
    else if c=='\t' then n += tabWidth
    else break
    cursor++
  return n+1
# one or more whitespaces, ie. space or tab.
# tab '\t' is counted as tabWidth spaces.
# newline is not included!!!
# returns the accumulated width, or undefined (failure) when none matched.
spaces1 = (start) ->
  # BUGFIX: same off-by-one as `spaces` — do not advance the cursor past the
  # first non-whitespace character.
  n = 0
  cursor = start
  while 1
    c = text[cursor]
    if c==' ' then n++
    else if c=='\t' then n += tabWidth
    else break
    cursor++
  if n
    n
# first, match @left, then match @item, at last match @right
# left and right default to `spaces`. Returns the item's own match result.
wrap = (item, left=spaces, right=spaces) ->
  # Accept a plain string for @item — every call site passes one
  # (wrap('->'), wrap('..'), …) — by promoting it to a literal matcher;
  # calling a string as a function would raise a TypeError.
  item = literal(item) if typeof item is 'string'
  (start) ->
    # BUGFIX: `result = item(cursor) and right(cursor)` bound `result` to the
    # trailing-spaces width (assignment binds lower than `and`); parenthesize
    # so the item's own result is what gets returned.
    if left(start) and (result = item(cursor)) and right(cursor)
      return result
# is the character at the cursor a letter usable inside an identifier?
# used to ensure keywords like return/break are not a prefix of a longer name.
identifierLetter_ = () ->
  c = text[cursor]
  # BUGFIX: the lowercase range used 'a'<=c<'z', wrongly rejecting 'z'.
  c is '$' or c is '_' or 'a'<=c<='z' or 'A'<=c<='Z' or '0'<=c<='9'
# Grammatical rules of Coffeescript; `cs`/`grammar` hold one matcher per symbol.
exports.grammar = grammar = cs = {}
# The **Root** is the top-level node in the syntax tree.
# Returns an empty Block for an all-whitespace/comment program, otherwise
# the Block produced by cs.Body (the assignment is the last expression).
cs.Root = (start) ->
  skipHeadSpaces_() # skip the spaces at the begin of program, including line comment.
  if cursor is textLength then new yy.Block
  else
    body = cs.Body(cursor)
    # and skipTailSpaces(cursor); body # is skipTailSpaces necessary? wait to see.
# skip the spaces at the begin of program, including line comments, and
# verify that the first effective code starts at column 0.
skipHeadSpaces_ = () ->
  while c = text[cursor]
    # BUGFIX: the whitespace test compared against the letter 't' instead of '\t'.
    if c==' ' or c=='\t' or c== '\r' or c== '\n' then cursor++; continue
    # BUGFIX: the block-comment guard was inverted — a leading '###' was being
    # consumed as a line comment instead of stopping here as intended.
    else if c is '#' and (text[cursor+1] isnt '#' or text[cursor+2] isnt '#') # should stop at BlockComment
      while c = text[cursor++]
        # BUGFIX: newline test compared against the letter 'n' instead of '\n'.
        if c=='\r' or c=='\n' then break
    else if c is '\\'
      # BUGFIX: "'\'" under-escaped the backslash inside a double-quoted string.
      throw new ParseError(cursor, "should not have ';' or '\\' at the begin of program.")
    else if c is ';' # maybe should allow ; at begin of line?
      throw new ParseError(cursor, "should not have ';' or '\\' at the begin of program.")
    else break
  if lineColumnList[cursor][1] isnt 0
    throw new ParseError(cursor, "Effective Code Cup of the whole program should start at column 0, the begin of a line.")
# Any list of statements and expressions, separated by line breaks or semicolons.
# Collects Cups until an edge fails and wraps them in a yy.Block.
cs.Body = (start) ->
  cups = []
  while 1
    if cs.CupLeftEdge_() and (cup = cs.Cup(cursor)) and cs.CupRightEdge_()
      cups.push cup
    else break
  # if cups do not include effective code, should have yy.literal('undefined') added.
  yy.Block.wrap(cups)
# Left edge of a Cup: at the beginning of a line the indentation must equal
# currentIndentWidth; mid-line a continuation ('\') is allowed.
# NOTE(review): `concatenation_` is not defined in this chunk (concatLine_
# is) — confirm it exists elsewhere in the file.
cs.CupLeftEdge_ = () ->
  if blockMode
    if atBeginOfLine
      atBeginOfLine = false
      spaces_() and (lineColumnList[cursor][1]==currentIndentWidth)
    else spaces_() and (concatenation_() or true) and spaces_()
  else
    spaces_() and (concatenation_() or true) and spaces_()
# Right edge of a Cup: a ';' or an end of line (optionally after a line
# comment); reaching a newline re-arms atBeginOfLine.
# NOTE(review): `semicolon_`, `linecomment_` and `newline_` are not defined
# in this chunk — confirm they exist elsewhere in the file.
cs.CupRightEdge_ = () ->
  spaces_() and (semicolon_() or (linecomment_() or newline_()) and atBeginOfLine= true)
# before INDENT, should have skip all of whitespaces, newline, linecomment, and give blockMode a determined value.
# NOTE(review): `column` is only ever a local of generateLinenoColumn — at
# module level it is undeclared, so these two rules as written would raise;
# they presumably want lineColumnList[cursor][1]. TODO confirm.
cs.INDENT = (start) ->
  if not blockMode then true
  else if column >= currentIndentWidth+minIndentWidth
    currentIndentWidth = column
# OUTDENT is curried on the indent width recorded by the matching INDENT,
# yet several rules below call OUTDENT(cursor) directly — inconsistent usage.
cs.OUTDENT = (indent) -> (start) ->
  if blockMode then column is indent
  else column is 0
# skip spaces until meeting an end of line, then skip to the begin of next line;
# a line comment at the tail of the line is skipped too. Fails (undefined)
# when a non-comment, non-newline character is found first.
spacesUntilEndOfLine = (start) ->
  cursor = start
  while 1
    c = text[cursor]
    if c is '\t' or c is ' ' then cursor++; continue
    else if c is '#'
      if text[cursor+1] is '#' and text[cursor+2] is '#'
        # NOTE(review): constructed but never thrown — probably missing `throw`.
        new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
      cursor++
      # consume the rest of the comment line, including its terminator
      while 1
        c = text[cursor]
        if c is '\r' and text[cursor+1] is '\n'
          cursor +=2; return true
        else if c is '\n' then cursor++; return true
        else if c is undefined then return
        else cursor++
    else break
  if c is '\r' and text[cursor+1] is '\n'
    cursor +=2; true
  else if c is '\n' then cursor++; true
# Block and statements, which make up a line in a body.
# The comment above is by jashkenas. Should Block be Expression?
# Cup is the equivalence of Line in the original grammar.coffee written by jashkenas.
# I replace Line with Cup because Line is too easily misunderstood:
# Line in grammar.coffee can be a part of a physical line or a concatenation
# of multiple physical lines, even a single compound statement of multiple
# physical lines. Cup is an intact logic unit of code, same as that Line.
# The Cup can be big or small, and a Cup can be put in the Body of another
# bigger Cup. So let's have a cup of coffee.
cs.Cup = (start) ->
  cs.Statement(start)\
  or cs.Expression(start)\
  or cs.EmptyCup(start)
# A cs.Cup begin with semicolon, or has only spaces, line comment(which obey indent/unindent
# NOTE(review): this returns a bare boolean rather than a node — the Body
# loop will push `true` into the cup list; confirm intended.
cs.EmptyCup = (start) ->
  c = text[start]
  c is ';' or c is '\r' or c is '\n'
# Pure statements which cannot be expressions.
cs.Statement = (start) ->
  cs.Return(start)\
  or cs.BlockComment(start)\
  or cs.Break(start)\
  or cs.Continue(start)
# Break and Continue is my replacement to STATEMENT in original grammar.coffee
# A return statement from a function body.
# NOTE(review): RETURN is not defined in this chunk — confirm it exists.
cs.Return = (start) ->
  if RETURN(start) and spacesConcatLine_()
    if exp = cs.Expression(cursor) then new yy.Return exp
    else new yy.Return
### ###
###
dfasfdfs
dsfdfsa
asdfdsfa
###
# A block comment
# BlockComment should obey the indent/outdent rule, but line comment don't need, right? -->yes.
# BlockComment will return yy.Comment and generate comment in object javascript code.
# Fails (undefined) on an unterminated block comment.
cs.BlockComment = (start) ->
  cursor = start
  if text[cursor] is '#' and text[cursor+1] is '#' and text[cursor+2] is '#'
    cursor += 3
    while 1
      c = text[cursor]
      if not c then return
      if c is '#' and text[cursor+1] is '#' and text[cursor+2] is '#'
        cursor += 3
        return new yy.Comment text.slice(start+3, cursor-3)
      # BUGFIX: the scan loop never advanced the cursor over ordinary comment
      # characters, so any non-empty block comment hung the parser forever.
      cursor++
# `break` / `continue` as pure statements.
# NOTE(review): BREAK/CONTINUE matchers are not defined in this chunk.
cs.Break = (start) -> if BREAK(start) then new yy.Literal('break')
cs.Continue = (start) -> if CONTINUE(start) then new yy.Literal('continue')
# All the different types of expressions in our language. The basic unit of
# CoffeeScript is the **cs.Expression** -- everything that can be an expression
# is one. Blocks serve as the building blocks of many other grammar, making
# them somewhat circular.
# NOTE(review): cs.Switch is not defined anywhere in this chunk.
cs.Expression = (start) ->
  recValue(start)\
  or recOperation(start)\
  or recInvocation(start)\
  or recAssign(start)\
  or recIf(start)\
  or recWhile(start)\
  or recFor(start)\
  or cs.Switch(start)\
  or cs.Throw(start)\
  or cs.Class(start)\
  or cs.Try(start)\
  or cs.Code(start) #(param) -> ... or -> ..
# Cache-only lookups for the left-recursive symbols: `memo` never computes,
# it only reads what the `recursive` fixed-point driver has already stored.
recValue = memo('Value')
recOperation = memo('Operation')
recInvocation = memo('Invocation')
recAssign = memo('Assign')
recIf = memo('If')
recWhile = memo('While')
recFor = memo('For')
# An indented block of expressions. Note that the [Rewriter](rewriter.html)
# will convert some postfix forms into blocks for us, by adjusting the token stream.
# as described below, I should consider how to add block of single line which is consisted of mulitple line cup.
cs.Block = (start) ->
  # two mode: single line block mode, indent block mode
  if n = INDENT(start)
    outdent = OUTDENT(n)
    if outdent(cursor) then new yy.Block
    # BUGFIX: `body = cs.Body(cursor) and outdent(cursor)` bound `body` to the
    # outdent test's boolean (assignment binds lower than `and`); parenthesize
    # so the parsed Block node itself is returned.
    else if (body = cs.Body(cursor)) and outdent(cursor) then body
# A literal identifier, a variable name or property.
cs.Identifier = (start) -> cs.IDENTIFIER(start)
# All of our immediate values. Generally these can be passed straight
# through and printed to JavaScript.
# NOTE(review): all of these except cs.UNDEFINED/cs.NULL still depend on the
# unresolved $1 placeholder (see the token rules above).
cs.Literal = (start) ->
  cs.NUMBER(start)\
  or cs.STRING(start)\
  or cs.JS(start)\
  or cs.REGEX(start)\
  or cs.DEBUGGER(start)\
  or cs.UNDEFINED(start)\
  or cs.NULL(start)\
  or cs.BOOL(start)
recAssignable = memo('Assignable') # cache-only lookup for the Assignable symbol
# Assignment of a variable, property, or index to a value.
# NOTE(review): `left = recAssignable(start) and assignOp_(cursor)` binds
# `left` to the whole `and` chain (the assignOp result), not to the
# assignable node — same precedence bug as elsewhere; and the INDENT branch
# tests the bare `cs.Expression` function without calling it. TODO fix when
# the $-placeholder port is finished.
cs.Assign = (start) ->
  if left = recAssignable(start) and assignOp_(cursor)
    if exp = cs.Expression(cursor)\
    # it's right to use TERMINATOR here? how about meeting a semicolon?
    # spaces till newline, or line comment
    or cs.TERMINATOR(cursor) and exp = cs.Expression(cursor)\
    or n = INDENT(cursor) and exp = cs.Expression and (outdent = OUTDENT(n)) and outdent(cursor)
      new yy.Assign left, exp
# Build a no-argument matcher for a symbolic operator, allowing line
# concatenation on either side.
# NOTE(review): `literal_` is not defined in this chunk (only `literal`,
# which takes a start position) — confirm it exists elsewhere in the file.
symbolOperator_ =(op) ->
  # : + && || etc.
  op = literal_(op)
  () ->
    spacesConcatLine_() and op() and spacesConcatLine_()
assignOp_ = symbolOperator_('=')
# Assignment when it happens within an object literal. The difference from
# the ordinary **cs.Assign** is that these allow numbers and strings as keys.
# NOTE(review): LOC is not defined in this chunk; the INDENT branch still
# mixes unparenthesized assignments — left as in the original.
cs.AssignObj = (start) ->
  # BUGFIX: `if x = cs.BlockComment` tested the rule function itself (always
  # truthy) and returned it without parsing anything; the rule must be invoked.
  if x = cs.BlockComment(start) then return x
  if left = cs.ObjAssignable(start)
    # BUGFIX: `wrap(':')` built a matcher but never applied it to a position.
    if wrap(':')(cursor)
      if exp = cs.Expression(cursor)\
      or INDENT(cursor) and exp = cs.Expression(cursor) and OUTDENT(cursor)
        new yy.Assign LOC(1)(new yy.Value(left)), exp, 'object'
    else
      # a bare key with no ':' is shorthand for {key: key}
      new yy.Value left
# Things that may appear as object-literal keys: identifiers, numbers,
# strings, and @properties.
cs.ObjAssignable = (start) ->
  # BUGFIX: this called the JS builtins Number(start)/String(start) — which
  # merely coerce the integer position and "match" almost any input —
  # instead of the grammar's number/string literal rules.
  cs.Identifier(start)\
  or cs.NUMBER(start)\
  or cs.STRING(start)\
  or cs.ThisProperty(start)
# The **cs.Code** node is the function literal. It's defined by an indented block
# of **cs.Block** preceded by a function arrow, with an optional parameter
# list.
# NOTE(review): PARAM_START/PARAM_END are not defined in this chunk, and the
# chained `params = ... and funcGlyph = ... and body = ...` assignments have
# the same assignment-precedence hazard flagged elsewhere — each variable is
# bound to the remainder of its `and` chain. TODO confirm/parenthesize.
cs.Code = (start) ->
  if PARAM_START(start) and params = cs.ParamList(cursor) and PARAM_END(cursor) \
  and funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
    new yy.Code params, body, funcGlyph
  else if funcGlyph = cs.FuncGlyph(cursor) and body = cs.Block(cursor)
    new yy.Code [], body, funcGlyph
# CoffeeScript has two different symbols for functions. `->` is for ordinary
# functions, and `=>` is for functions bound to the current value of *this*.
cs.FuncGlyph = (start) ->
  if wrap('->')(start) then 'func'
  # BUGFIX: `wrap('=>')` was never applied to a position, so the bare matcher
  # function was always truthy and every non-'->' input became 'boundfunc'.
  else if wrap('=>')(start) then 'boundfunc'
# An optional, trailing comma.
# Returns [true] when a comma was consumed, [false] otherwise — boxed in an
# array so the result is always truthy inside `and` chains.
cs.OptComma = (start) ->
  spaces(start)
  # BUGFIX: `char(',')` was never applied to a position (a bare matcher is
  # always truthy), and the [true] branch fell through to [false] because it
  # was never returned.
  if char(',')(cursor) then spaces(cursor); return [true]
  [false]
# The list of parameters that a function accepts can be of any length.
# Parameters may be separated by commas, terminators, or an indented sublist.
# NOTE(review): INDENT/OUTDENT are called here as plain matchers, but
# cs.OUTDENT above is curried on an indent width — the two usages disagree.
cs.ParamList = (start) ->
  if param = cs.Param(start)
    result = [param]
    while 1
      meetComma = cs.OptComma(cursor)
      if cs.TERMINATOR(cursor) and param = cs.Param(cursor) then result.push(param)
      else if INDENT(cursor)
        params = cs.ParamList(cursor)
        for p in params then result.push(p)
        OUTDENT(cursor)
      else if meetComma[0] and param = cs.Param(cursor) then result.push(param)
      else break
    result
# A single parameter in a function definition can be ordinary, or a splat
# that hoovers up the remaining arguments.
# NOTE(review): no guard on `v` — when cs.ParamVar fails this still builds
# `new yy.Param undefined`; confirm intended.
cs.Param = (start) ->
  v = cs.ParamVar(start)
  if wrap('...')(cursor) then new yy.Param v, null, on
  else if wrap('=')(cursor) and exp = cs.Expression(cursor) then new yy.Param v, exp
  else new yy.Param v
# Function Parameters
# A parameter target: a plain name, a @property, or an array/object
# destructuring pattern.
cs.ParamVar = (start) ->
  cs.Identifier(start) or
  cs.ThisProperty(start) or
  cs.Array(start) or
  cs.Object(start)
# A splat that occurs outside of a parameter list.
cs.Splat = (start) ->
  # BUGFIX: without parentheses, `exp` was bound to the boolean of the whole
  # `and` chain (assignment binds lower than `and`), so the Splat wrapped
  # `true` instead of the parsed expression.
  if (exp = cs.Expression(start)) and wrap('...')(cursor)
    new yy.Splat exp
# Variables and properties that can be assigned to.
cs.SimpleAssignable = (start) ->
  # BUGFIX: in `value = recValue(start) and accessor = cs.Accessor(cursor)`
  # the first assignment grabbed the whole right-hand `and` chain, making
  # value === accessor; parenthesize each binding so the two nodes stay
  # distinct (same fix on the invocation branch).
  if (value = recValue(start)) and (accessor = cs.Accessor(cursor)) then value.add accessor
  else if (caller = recInvocation(start)) and (accessor = cs.Accessor(cursor))
    new yy.Value caller, [].concat accessor
  else if thisProp = cs.ThisProperty(start) then thisProp
  else if name = cs.Identifier(start) then new yy.Value name
# Everything that can be assigned to.
# NOTE(review): `recSimpleAssignable` is not defined anywhere in this chunk —
# the memo aliases above stop at recFor. It presumably should be
# memo('SimpleAssignable'); confirm it is defined elsewhere in the file,
# otherwise this rule raises a ReferenceError on first use.
cs.Assignable = (start) ->
  recSimpleAssignable(start)\
  or cs.newyyValue(cs.Array)(start)\
  or cs.newyyValue(cs.Object)(start)
# The types of things that can be treated as values -- assigned to, invoked
# as functions, indexed into, named as a class, etc.
cs.Value = (start) ->
  recAssignable(start)\
  or cs.newyyValue(cs.Literal)(start)\
  or cs.newyyValue(cs.Parenthetical)(start)\
  or cs.newyyValue(cs.Range)(start)\
  or cs.This(start)
# The general group of accessors into an object, by property, by prototype
# or by array index or slice.
# NOTE(review): LOC is not defined in this chunk — confirm it exists.
cs.Accessor = (start) ->
  # BUGFIX: every wrap(...) matcher was tested as a bare function (always
  # truthy) instead of being applied at a position; `new[...]` was a typo for
  # an array literal; the bare-'::' branch dropped the `yy.` prefix on both
  # constructors; and the `id = ...` bindings are parenthesized against the
  # assignment-precedence trap.
  if wrap('.')(start) and (id = cs.Identifier(cursor)) then new yy.Access id
  else if wrap('?.')(start) and (id = cs.Identifier(cursor)) then new yy.Access id, 'soak'
  else if wrap('::')(start) and (id = cs.Identifier(cursor))
    [LOC(1)(new yy.Access new yy.Literal('prototype')), LOC(2)(new yy.Access id)]
  else if wrap('?::')(start) and (id = cs.Identifier(cursor))
    [LOC(1)(new yy.Access new yy.Literal('prototype'), 'soak'), LOC(2)(new yy.Access id)]
  else if wrap('::')(start) then new yy.Access new yy.Literal 'prototype'
  else if index = cs.Index(start) then index
# Indexing into an object or array using bracket notation.
# NOTE(review): INDEX_START/INDEX_END/INDEX_SOAK are not defined in this
# chunk; the second `if` is not an `else if`, so a successful plain index is
# discarded and only the soak branch's value (with the unresolved $2
# placeholder) can be returned — looks unfinished.
cs.Index = (start) ->
  if INDEX_START(start) and val = cs.IndexValue(cursor) and INDEX_END(cursor) then val
  if INDEX_SOAK(cursor) and cs.Index(cursor) # id?[1]
    yy.extend $2, soak : yes
# The thing inside the brackets: a full expression or a slice range.
cs.IndexValue = (start) ->
  if value = cs.Expression(start) then new yy.Index value
  else if slice = cs.Slice(start) then new yy.Slice slice
# In CoffeeScript, an object literal is simply a list of assignments.
cs.Object = (start) ->
  if leftBrace = wrap('{')(start)
    spaces(cursor)
    # BUGFIX: `char('}')` was never applied to a position (always truthy),
    # so every object literal parsed as empty; and `assigns = ... and ...`
    # bound assigns to the closing-brace result — parenthesized.
    if char('}')(cursor) then new yy.Obj [], leftBrace.generated
    else if (assigns = cs.AssignList(cursor))\
    and cs.OptComma(cursor) and wrap('}')(cursor)
      new yy.Obj assigns, leftBrace.generated
# Assignment of properties within an object literal can be separated by
# comma, as in JavaScript, or simply by newline.
cs.AssignList = (start) ->
  if assign = cs.AssignObj(start)
    result = [assign]
    while 1
      meetComma = cs.OptComma(cursor)
      if cs.TERMINATOR(cursor) and assign = cs.AssignObj(cursor) then result.push(assign)
      else if INDENT(cursor)
        assigns = cs.AssignList(cursor)
        for x in assigns then result.push(x)
        OUTDENT(cursor)
      # BUGFIX: the comma branch pushed the undefined `param` (a copy/paste
      # remnant of cs.ParamList) instead of the assignment just parsed.
      else if meetComma[0] and assign = cs.AssignObj(cursor) then result.push(assign)
      else break
    result
# cs.Class definitions have optional bodies of prototype property assignments,
# and optional references to the superclass. All four combinations of
# (named / anonymous) x (with / without superclass) x (with / without body)
# are enumerated explicitly below.
# NOTE(review): CLASS/EXTENDS/SUPER are not defined in this chunk.
cs.Class = (start) ->
  if CLASS(start)
    if name = cs.SimpleAssignable(cursor)
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class name, sup, body
        else new yy.Class name, sup
      else if body = cs.Block(cursor) then new yy.Class name, null, body
      else new yy.Class name
    else
      if EXTENDS(cursor) and sup = cs.Expression(cursor)
        if body = cs.Block(cursor) then new yy.Class null, sup, body
        else new yy.Class null, sup
      else if body = cs.Block(cursor) then new yy.Class null, null, body
      else new yy.Class
# Ordinary function invocation, or a chained series of calls.
# NOTE(review): $1/$2/$3 are unresolved Jison placeholders; `m1 = ... and ...`
# has the assignment-precedence trap flagged elsewhere; and the second SUPER
# branch is unreachable because the first SUPER test already matched.
cs.Invocation = (start) ->
  # left recursive
  if m1 = recValue(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  else if m2 = recInvocation(start) and cs.OptFuncExist(cursor) and cs.Arguments(cursor)
    new yy.Call $1, $3, $2
  if not m1 and not m2
    if SUPER(start)
      new yy.Call 'super', [new yy.Splat new yy.Literal 'arguments']
    else if SUPER(start) and cs.Arguments(cursor)
      new yy.Call 'super', $2
# An optional existence check on a function (the `?(` soak-call form).
# NOTE(review): `emptyword` and FUNC_EXIST are not defined in this chunk, and
# returning the bare boolean `no` makes every `and` chain that uses this rule
# fail — it should probably be boxed (like cs.OptComma) to stay truthy.
cs.OptFuncExist = (start) ->
  if emptyword(start) then no
  if FUNC_EXIST(start) then yes
# The list of arguments to a function call.
# NOTE(review): CALL_START/CALL_END are not defined in this chunk.
cs.Arguments = (start) ->
  if CALL_START(start)
    # BUGFIX: `args = cs.ArgList(cursor) and cs.OptComma(cursor)` bound args
    # to OptComma's boxed boolean, and the original then-branch evaluated the
    # list without ever assigning `result`, so a non-empty argument list
    # always yielded undefined.
    if (args = cs.ArgList(cursor)) and cs.OptComma(cursor)
      result = args
    else result = []
    if CALL_END(cursor) then result
# A reference to the *this* current object.
# NOTE(review): `wrap('')` matches the empty string — it looks like a mangled
# `wrap('@')` (the `@` shorthand for this); TODO confirm against history.
cs.This = (start) ->
  if THIS(start) then new yy.Value new yy.Literal 'this'
  if wrap('')(start) then new yy.Value new yy.Literal 'this'
# A reference to a property on *this*.
# NOTE(review): same suspected missing '@'; $2 is an unresolved placeholder.
cs.ThisProperty = (start) ->
  if wrap('')(start) and cs.Identifier(cursor)
    new yy.Value LOC(1)(new yy.Literal('this')), [LOC(2)(new yy.Access($2))], 'this'
# The array literal.
# NOTE(review): $2 is an unresolved placeholder — the parsed ArgList is never
# captured into a variable.
cs.Array = (start) ->
  if wrap('[')(start)
    if cs.ArgList(cursor) and cs.OptComma(cursor)
      result = new yy.Arr $2
    else result = new yy.Arr []
    if wrap(']')(cursor) then result
# Inclusive and exclusive range dots.
cs.RangeDots = (start) ->
  # BUGFIX: test '...' before '..' — the shorter prefix always matched first,
  # so exclusive ranges were mis-parsed as inclusive with a dangling '.'.
  if wrap('...')(start) then 'exclusive'
  else if wrap('..')(start) then 'inclusive'
# The CoffeeScript range literal.
cs.Range = (start) ->
  # BUGFIX: the closing-bracket matcher was missing its joining `and` (a
  # syntax error in the original) and was never applied to a position.
  # NOTE(review): $2/$3/$4 are unresolved Jison placeholders — the matched
  # sub-results still need to be captured into variables.
  if wrap('[')(start) and cs.Expression(cursor) and cs.RangeDots(cursor) and cs.Expression(cursor) and wrap(']')(cursor)
    new yy.Range $2, $4, $3
# cs.Array slice literals.
# NOTE(review): the successive `if`s are not `else if`s, so an earlier
# successful match is computed and discarded and only the last branch's
# value is returned; $1/$2/$3 are unresolved placeholders. Unfinished port.
cs.Slice = (start) ->
  # don't use recExpression here
  if cs.Expression(start) and cs.RangeDots(cursor) and cs.Expression(cursor)
    new yy.Range $1, $3, $2
  if cs.Expression(start) and cs.RangeDots(cursor)
    new yy.Range $1, null, $2
  if cs.RangeDots(start) and cs.Expression(cursor) then new yy.Range null, $2, $1
  if cs.RangeDots(start) then new yy.Range null, null, $1
# The **cs.ArgList** is both the list of objects passed into a function call,
# as well as the contents of an array literal
# (i.e. comma-separated expressions). Newlines work as well.
# NOTE(review): the `cs.ArgList(start)` self-calls are unguarded left
# recursion — this symbol is NOT in the recursive set registered in
# exports.parse, so these branches recurse infinitely when reached; also
# `wrap(',')` is never applied to a position and $1..$4 are unresolved
# placeholders. Unfinished port.
cs.ArgList = (start) ->
  if cs.Arg(start) then [$1]
  else if cs.ArgList(start) and wrap(',') and cs.Arg(cursor) then $1.concat $3
  else if cs.ArgList(start) and cs.OptComma(cursor) and cs.TERMINATOR(cursor) and cs.Arg(cursor) then $1.concat $4
  else if INDENT(start) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor) then $2
  else if cs.ArgList(start) and cs.OptComma(cursor) and INDENT(cursor) and cs.ArgList(cursor) and cs.OptComma(cursor) and OUTDENT(cursor)
    $1.concat $4
# Valid arguments are Blocks or Splats.
cs.Arg = (start) -> cs.Expression(start) or cs.Splat(start)
# Just simple, comma-separated, required arguments (no fancy syntax). We need
# this to be separate from the **cs.ArgList** for use in **cs.Switch** blocks, where
# having the newlines wouldn't make sense.
cs.SimpleArgs = (start) ->
  if exp = cs.Expression(start)
    result = [exp]
    while 1
      # BUGFIX: `wrap(',')` was never applied to a position (a bare matcher is
      # always truthy), so the loop could never take its exit path and every
      # list ended by returning undefined; apply the matcher and stop cleanly
      # when no comma follows. A comma with no following expression still
      # fails the whole rule, as before.
      if wrap(',')(cursor)
        if exp = cs.Expression(cursor) then result.push(exp)
        else return
      else break
    result
# The variants of *try/catch/finally* exception handling blocks.
# NOTE(review): TRY/FINALLY matchers are not defined in this chunk.
cs.Try = (start) ->
  test = TRY(start) and cs.Block(cursor)
  if test
    # BUGFIX: `cs.Catch(cursor) and catch_ = cs.Block(cursor)` bound catch_ to
    # a SECOND block parse — but cs.Catch already consumes its own block and
    # returns the [errorVariable, body] pair this code indexes into.
    if catch_ = cs.Catch(cursor)
      if FINALLY(cursor) and final = cs.Block(cursor)
        new yy.Try test, catch_[0], catch_[1], final
      else new yy.Try test, catch_[0], catch_[1]
    else if FINALLY(cursor) and final = cs.Block(cursor)
      new yy.Try test, null, null, final
    else new yy.Try test
# A catch clause names its error and runs a block of code.
# Returns a [errorVariable, bodyBlock] pair for cs.Try to unpack.
# NOTE(review): CATCH and LOC are not defined in this chunk.
cs.Catch = (start) ->
  if CATCH(start)
    # BUGFIX: `vari = cs.Identifier(cursor) and body = ...` bound vari to the
    # body via the assignment-precedence trap, and the matched pair fell
    # through to the next `if` instead of being returned.
    if (vari = cs.Identifier(cursor)) and (body = cs.Block(cursor)) then return [vari, body]
    if obj = cs.Object(cursor)
      if body = cs.Block(cursor) then return [LOC(2)(new yy.Value(obj)), body]
    else if body = cs.Block(cursor) then [null, body]
# cs.Throw an exception object.
cs.Throw = (start) ->
if THROW(start) and cs.Expression(cursor) then new yy.Throw $2
# cs.Parenthetical expressions. Note that the **cs.Parenthetical** is a **cs.Value**,
# not an **cs.Expression**, so if you need to use an expression in a place
# where only values are accepted, wrapping it in parentheses will always do
# the trick.
cs.Parenthetical = (start) ->
if wrap('(')(start)
if body = cs.Body(start)
if wrap(')')(cursor) then new yy.Parens body
if INDENT(start) and cs.Body(cursor) and OUTDENT(cursor)
if wrap(')')(cursor) then new yy.Parens $3
# The condition portion of a while loop.
cs.WhileSource = (start) ->
if WHILE(start)
if test = cs.Expression(cursor)
if WHEN(cursor) and value = cs.Expression(cursor)
new yy.While test, guard: value
else new yy.While $2
else if UNTIL(start)
if test = cs.Expression(cursor)
if WHEN(cursor) and value = cs.Expression(cursor)
new yy.While $2, invert: true, guard: $4
else new yy.While $2, invert: true
# The while loop can either be normal, with a block of expressions to execute,
# or postfix, with a single expression. There is no do..while.
cs.While = (start) ->
if exp = recExpression(start) and cs.WhileSource(cursor)
return $2.addBody LOC(1) yy.Block.wrap([$1])
if exp then retturn exp
else if cs.WhileSource(start) and cs.Block(cursor) then $1.addBody $2
else if cs.Statement(start) and cs.WhileSource(cursor) then $2.addBody LOC(1) yy.Block.wrap([$1])
else if body = cs.Loop(start) then body
cs.Loop = (start) ->
if LOOP(start)
if body = cs.Block(cursor) then new yy.While(LOC(1) new yy.Literal 'true').addBody body
else if body = cs.Expression(cursor)
new yy.While(LOC(1) new yy.Literal 'true').addBody LOC(2) cs.Block.wrap [body]
# cs.Array, object, and range comprehensions, at the most generic level.
# Comprehensions can either be normal, with a block of expressions to execute,
# or postfix, with a single expression.
cs.For = (start) ->
if action = recExpression(start) and test = cs.ForBody(cursor) then new yy.For action, test
if action then return action
if action = cs.Statement(start) and test = cs.ForBody(cursor) then new yy.For action, test
else if test = cs.ForBody(start) and action = cs.Block(cursor) then new yy.For action, test
cs.ForBody = (start) ->
if range = FOR(start) and cs.Range(cursor) then source: LOC(2) new yy.Value(range)
else if start = cs.ForStart(start) and src = cs.ForSource(cursor)
src.own = start.own; src.name = start[0]; src.index = start[1];
src
cs.ForStart = (start) ->
if FOR(start)
if OWN(cursor)
if vari = cs.ForVariables(cursor) then vari.own = yes; vari
else if vari = cs.ForVariables(cursor) then vari
# An array of all accepted values for a variable inside the loop.
# cs.This enables support for pattern matchin
cs.ForValue = (start) ->
if id = cs.Identifier(start) then id
else if prop = cs.ThisProperty(start) then prop
else if cs.Array(start) then new yy.Value arr
else if obj = cs.Object(start) then new yy.Value obj
# An array or range comprehension has variables for the current element
# and (optional) reference to the current index. Or, *key, value*, in the case
# of object comprehensions.
cs.ForVariables = (start) ->
if v = cs.ForValue(start)
if wrap(',')(cursor) and v3 = cs.ForValue(cursor) then [v1, v3]
else [v]
# The source of a comprehension is an array or object with an optional guard
# clause. cs.If it's an array comprehension, you can also choose to step through
# in fixed-size increments.
cs.ForSource = (start) ->
if FORIN(start) and source = cs.Expression(cursor)
if WHEN(cursor) and guard = cs.Expression(cursor)
if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
else source: source, guard: guard, object: yes
else source: source
if FOROF(start) and source = cs.Expression(cursor)
if WHEN(cursor) and guard = cs.Expression(cursor)
if BY(cursor) and step = cs.Expression(cursor) then source: source, guard: guard, step: step
else source: source, guard: guard, object: yes
else source: source
cs.Switch = (start) ->
if SWITCH(start)
if INDENT(cursor)
if whens = cs.Whens(cursor)
if ELSE(cursor) cs.Block(cursor) new yy.Switch null, whens, else_
else new yy.Switch test, whens
OUTDENT(cursor)
else if test = cs.Expression(cursor)
if INDENT(cursor)
if whens = cs.Whens(cursor)
if ELSE(cursor) and cs.Block(cursor) new yy.Switch null, whens, else_
else new yy.Switch test, whens
OUTDENT(cursor)
SWITCH = (start) -> switch_word(start) and not identifierLetter_()
switch_word = literal('switch')
cs.Whens = (start) ->
result =[]
while 1
if LEADING_WHEN(start)
if args = cs.SimpleArgs(cursor) and action = cs.Block(cursor) and may(cs.TERMINATOR)(cursor)
result.push([args, action])
else return result
# The most basic form of *if* is a condition and an action. The following
# if-related grammar are broken up along these lines in order to avoid
# ambiguity.
cs.IfBlock = (start) ->
if IF(start) and test = cs.Expression(cursor) and body = cs.Block(cursor)
new yy.If test, body, type: $1
if cs.IfBlock(start) and ELSE(cursor) and IF(cursor) and cs.Expression(cursor) and cs.Block(cursor)
$1.addElse new yy.If $4, $5, type: $3
# The full complement of *if* expressions, including postfix one-liner
# *if* and *unless*.
cs.If = (start) ->
if if_ = cs.IfBlock(start)
if ELSE(cursor) and elseBody = cs.Block(cursor) then if_.addElse elseBody
else if_
if cs.Statement(start) and POST_IF(cursor) and cs.Expression(cursor)
new yy.If $3, LOC(1)(cs.Block.wrap [$1]), type: $2, statement: true
# Arithmetic and logical operators, working on one or more operands.
# Here they are grouped by order of precedence. The actual precedence grammar
# are defined at the bottom of the page. It would be shorter if we could
# combine most of these grammar into a single generic *Operand OpSymbol Operand*
# -type rule, but in order to make the precedence binding possible, separate
# grammar are necessary.
cs.Operation = (start) ->
if m = memo('Expression')(start)
if _spaces('?')(cursor) then new yy.Existence $1
else if wrapadd(cursor) and cs.Expression(cursor) then return new yy.Op '+' , $1, $3
else if wrapsub(cursor) and cs.Expression(cursor) then return new yy.Op '-' , $1, $3
else if MATH(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
else if SHIFT(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
else if COMPARE(cursor) and cs.Expression then return new yy.Op $2, $1, $3
else if LOGIC(cursor) and cs.Expression(cursor) then return new yy.Op $2, $1, $3
else if RELATION(cursor) and cs.Expression(cursor)
if $2.charAt(0) is '!' then return new yy.Op($2[1..], $1, $3).invert()
else return new yy.Op $2, $1, $3
else if simple = memo('SimpleAssignable')(start)
if COMPOUND_ASSIGN(cursor) and cs.Expression(start) then return new yy.Assign $1, $3, $2
else if COMPOUND_ASSIGN(cursor) and INDENT(cursor) and cs.Expression(cursor) and OUTDENT(cursor)
return new yy.Assign $1, $4, $2
else if COMPOUND_ASSIGN(cursor) and cs.TERMINATOR(cursor) and cs.Expression(cursor)
return new yy.Assign $1, $4, $2
else if EXTENDS(cursor) and cs.Expression(cursor) then new yy.Extends $1, $3
if op = UNARY(start) and exp = cs.Expression(cursor) then new yy.Op op , exp
else if wrap('-')(start) and exp = cs.Expression(cursor) then new yy.Op '-', exp, prec: 'UNARY'
else if wrap('+')(start) and exp = cs.Expression(cursor) then new yy.Op '+', exp, prec: 'UNARY'
else if wrap('++')(start) and cs.SimpleAssignable(cursor) then new yy.Op '++', $2
else if wrapdec(start) and cs.SimpleAssignable(cursor) then new yy.Op '--', $2
else if cs.SimpleAssignable(start) and wrap('--')(cursor) then new yy.Op '--', $1, null, true
else if cs.SimpleAssignable(start) and wrap('++')(cursor) then new yy.Op '++', $1, null, true
wrapinc = wrap('++'); wrapdec = wrap('--'); wrapadd = wrap('+'); wrapsub = wrap('-');
cs.newyyValue = (item) -> (start) ->
if x = item(start) then new yy.Value(x)
#fdasf
# skip whitespaces, line concatentation and any ';'
# should include line comment
cs.TERMINATOR = (start) ->
cursor = start
spaces_()
if text[cursor]==';'
cursor++; meetSemicolon = true
spaces_()
if lineComment_()
skipEmptyLineAndCommentLines_()
else if concatLine_() then return meetSemicolon
true
spaces_ = () ->
while c = text[cursor++]
if c!=' ' and c!='\t' then return true
return true
# skip whitespaces, line concatentation, but don't skip ';', line comment
# used in cases that should have left parts of a cs.Cup to be parsed.
# used after RETURN after cs.Return
spacesConcatLine_ = () ->
concated = false
while 1
c = text[cursor]
if c==' ' or c=='\t' then continue # c==';' or , different from TERMINATOR here with stopping at ';'
# in the middle of line, is it right to eat line comment?
if lineComment_() then return # fail and backtrack, maybe other choice which have indent/outdent
else if concatLine_() then return true # concatLine should bring the left part for the cs.Cup
else break
true
# line comment, which can begin at any position, is different from block comment
lineComment_ = () ->
if text[cursor] is '#'
if text[cursor+1] is '#' and text[cursor+2] is '#'
new ParseError(cursor, 'unexpected block comment!') # block comment is a statement!!!
cursor++
skipToEndOfLine_()
# skip lines which is whitespaces completely or with line comment follow some whitespaces
# until reach a line which inludes effective code.
skipEmptyLineAndCommentLines_ = () ->
while 1
spaces_()
lineComment_()
if not newline_() then break
if text[cursor]=='\\'
new ParseError(cursor, 'should not begin a line with \\')
# else if text[cursor] ==';'
# new ParseError(cursor, 'should not begin a line with ;')
return true
# skip any character until end of line, include # ; \ etc. used by lineComment_
skipToEndOfLine_ = () ->
while 1
c = text[cursor]
if c is '\r' and text[cursor+1] is '\n'
cursor +=2; return true
else if c is '\n' then cursor++; return true
else if c is undefined then return true
else cursor++
return true
# concatenate line by \, skip until first nonwhitespace in next line, check error cases.
concatLine_ = () ->
if text[cursor]=='\\'
if text[cursor+1]=='\r' and text[cursor+2]=='\n'
cursor += 3; lineno++; #column = 0
else if text[cursor+1]=='\n' then cursor += 2; lineno++; #column = 0
else new ParseError(cursor, "meet a line concatenation symbol ('\\') which is not at the end of line.")
skipHeadWhiteSpaces_()
c = text[cursor]
if c is '\\' or c is '#' or c is ';'
new ParseError(cursor, "The next line of line concatenation symbol \\ should not begin with #{c}")
else if c is '\r' or c is '\n'
new ParseError(cursor, "The next line of line concatenation symbol \\ should not be a empty line.")
cs.IDENTIFIER = (start) ->
if id = identifier(start) then new yy.Literal id
cs.NUMBER = (start)-> new yy.Literal $1
cs.STRING = (start) -> new yy.Literal $1
cs.JS = (start) -> new yy.Literal $1
cs.REGEX = (start) -> new yy.Literal $1
cs.DEBUGGER = (start) -> new yy.Literal $1
cs.UNDEFINED = (start) -> new yy.Undefined
cs.NULL = (start) -> new yy.Null
cs.BOOL = (start) -> new yy.Bool $1
# Precedence
# Operators at the top of this list have higher precedence than the ones lower down.
# Following these grammar is what makes `2 + 3 * 4` parse as 2 + (3 * 4) and not (2 + 3) * 4
operators = [
['left', '.', '?.', '::', '?::']
['left', 'CALL_START', 'CALL_END']
['nonassoc', '++', '--']
['left', '?']
['right', 'UNARY']
['left', 'MATH']
['left', '+', '-']
['left', 'SHIFT']
['left', 'RELATION']
['left', 'COMPARE']
['left', 'LOGIC']
['nonassoc', 'INDENT', 'OUTDENT']
['right', '=', ':', 'COMPOUND_ASSIGN', 'RETURN', 'THROW', 'EXTENDS']
['right', 'FORIN', 'FOROF', 'BY', 'WHEN']
['right', 'IF', 'ELSE', 'FOR', 'WHILE', 'UNTIL', 'LOOP', 'SUPER', 'CLASS']
['right', 'POST_IF']
]
#['__bind', '__extends', '__hasProp', '__indexOf', '__slice', 'break', 'by', 'case',
# 'catch', 'class', 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'else',
# 'enum', 'export', 'extends', 'false', 'finally', 'for', 'function', 'if', 'implements',
# 'import', 'in', 'instanceof', 'interface', 'let', 'loop', 'native', 'new', 'null', 'of',
# 'package', 'private', 'protected', 'public', 'return', 'static', 'super', 'switch',
# 'then', 'this', 'throw', 'true', 'try', 'typeof', 'undefined', 'unless', 'until',
# 'var', 'void', 'when', 'while', 'with', 'yield'] =
# for kw in []
# do (kw_word = literal(kw)) ->
# (start) -> kw_word(start) and not identifierLetter_() |
[
{
"context": "se of redundant constructors in classes.\n# @author Alberto Rodríguez\n###\n'use strict'\n\n#------------------------------",
"end": 107,
"score": 0.9998620748519897,
"start": 90,
"tag": "NAME",
"value": "Alberto Rodríguez"
}
] | src/rules/no-useless-constructor.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to flag the use of redundant constructors in classes.
# @author Alberto Rodríguez
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @param {ASTNode[]} body An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @param {ASTNode} node A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @param {ASTNode[]} superArgs An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @param {ASTNode} ctorParams The parameters of a constructor to check.
# @param {ASTNode} superArgs The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @param {Array} body constructor body content.
# @param {Array} ctorParams The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
isThisParam = (param) ->
current = param
while current
switch current.type
when 'ThisExpression'
return yes
when 'MemberExpression'
current = current.object
when 'ArrayPattern'
return (
(yes for element in current.elements when isThisParam element)
.length > 0
)
when 'ObjectPattern'
return (
(yes for property in current.properties when isThisParam property)
.length > 0
)
when 'Property'
current = current.value
when 'AssignmentPattern'
current = current.left
else
return no
no
hasThisParams = (params) ->
for param in params when isThisParam param
return yes
no
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
type: 'suggestion'
docs:
description: 'disallow unnecessary constructors'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/no-useless-constructor'
schema: []
create: (context) ->
###*
# Checks whether a node is a redundant constructor
# @param {ASTNode} node node to check
# @returns {void}
###
checkForConstructor = (node) ->
return unless node.kind is 'constructor'
{body} = node.value.body
ctorParams = node.value.params
{superClass} = node.parent.parent
if (
if superClass
isRedundantSuperCall body, ctorParams
else
body.length is 0 and not hasThisParams ctorParams
)
context.report {
node
message: 'Useless constructor.'
}
MethodDefinition: checkForConstructor
| 67968 | ###*
# @fileoverview Rule to flag the use of redundant constructors in classes.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @param {ASTNode[]} body An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @param {ASTNode} node A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @param {ASTNode[]} superArgs An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @param {ASTNode} ctorParams The parameters of a constructor to check.
# @param {ASTNode} superArgs The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @param {Array} body constructor body content.
# @param {Array} ctorParams The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
isThisParam = (param) ->
current = param
while current
switch current.type
when 'ThisExpression'
return yes
when 'MemberExpression'
current = current.object
when 'ArrayPattern'
return (
(yes for element in current.elements when isThisParam element)
.length > 0
)
when 'ObjectPattern'
return (
(yes for property in current.properties when isThisParam property)
.length > 0
)
when 'Property'
current = current.value
when 'AssignmentPattern'
current = current.left
else
return no
no
hasThisParams = (params) ->
for param in params when isThisParam param
return yes
no
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
type: 'suggestion'
docs:
description: 'disallow unnecessary constructors'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/no-useless-constructor'
schema: []
create: (context) ->
###*
# Checks whether a node is a redundant constructor
# @param {ASTNode} node node to check
# @returns {void}
###
checkForConstructor = (node) ->
return unless node.kind is 'constructor'
{body} = node.value.body
ctorParams = node.value.params
{superClass} = node.parent.parent
if (
if superClass
isRedundantSuperCall body, ctorParams
else
body.length is 0 and not hasThisParams ctorParams
)
context.report {
node
message: 'Useless constructor.'
}
MethodDefinition: checkForConstructor
| true | ###*
# @fileoverview Rule to flag the use of redundant constructors in classes.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether a given array of statements is a single call of `super`.
# @param {ASTNode[]} body An array of statements to check.
# @returns {boolean} `true` if the body is a single call of `super`.
###
isSingleSuperCall = (body) ->
body.length is 1 and
body[0].type is 'ExpressionStatement' and
body[0].expression.type is 'CallExpression' and
body[0].expression.callee.type is 'Super'
###*
# Checks whether a given node is a pattern which doesn't have any side effects.
# Default parameters and Destructuring parameters can have side effects.
# @param {ASTNode} node A pattern node.
# @returns {boolean} `true` if the node doesn't have any side effects.
###
isSimple = (node) -> node.type in ['Identifier', 'RestElement']
###*
# Checks whether a given array of expressions is `...arguments` or not.
# `super(...arguments)` passes all arguments through.
# @param {ASTNode[]} superArgs An array of expressions to check.
# @returns {boolean} `true` if the superArgs is `...arguments`.
###
isSpreadArguments = (superArgs) ->
superArgs.length is 1 and
superArgs[0].type is 'SpreadElement' and
superArgs[0].argument.type is 'Identifier' and
superArgs[0].argument.name is 'arguments'
###*
# Checks whether given 2 nodes are identifiers which have the same name or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are identifiers which have the same
# name.
###
isValidIdentifierPair = (ctorParam, superArg) ->
ctorParam.type is 'Identifier' and
superArg.type is 'Identifier' and
ctorParam.name is superArg.name
###*
# Checks whether given 2 nodes are a rest/spread pair which has the same values.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes are a rest/spread pair which has the
# same values.
###
isValidRestSpreadPair = (ctorParam, superArg) ->
ctorParam.type is 'RestElement' and
superArg.type is 'SpreadElement' and
isValidIdentifierPair ctorParam.argument, superArg.argument
###*
# Checks whether given 2 nodes have the same value or not.
# @param {ASTNode} ctorParam A node to check.
# @param {ASTNode} superArg A node to check.
# @returns {boolean} `true` if the nodes have the same value or not.
###
isValidPair = (ctorParam, superArg) ->
isValidIdentifierPair(ctorParam, superArg) or
isValidRestSpreadPair ctorParam, superArg
###*
# Checks whether the parameters of a constructor and the arguments of `super()`
# have the same values or not.
# @param {ASTNode} ctorParams The parameters of a constructor to check.
# @param {ASTNode} superArgs The arguments of `super()` to check.
# @returns {boolean} `true` if those have the same values.
###
isPassingThrough = (ctorParams, superArgs) ->
return no unless ctorParams.length is superArgs.length
i = 0
while i < ctorParams.length
return no unless isValidPair ctorParams[i], superArgs[i]
++i
yes
###*
# Checks whether the constructor body is a redundant super call.
# @param {Array} body constructor body content.
# @param {Array} ctorParams The params to check against super call.
# @returns {boolean} true if the construtor body is redundant
###
isRedundantSuperCall = (body, ctorParams) ->
isSingleSuperCall(body) and
ctorParams.every(isSimple) and
(isSpreadArguments(body[0].expression.arguments) or
isPassingThrough ctorParams, body[0].expression.arguments)
isThisParam = (param) ->
current = param
while current
switch current.type
when 'ThisExpression'
return yes
when 'MemberExpression'
current = current.object
when 'ArrayPattern'
return (
(yes for element in current.elements when isThisParam element)
.length > 0
)
when 'ObjectPattern'
return (
(yes for property in current.properties when isThisParam property)
.length > 0
)
when 'Property'
current = current.value
when 'AssignmentPattern'
current = current.left
else
return no
no
hasThisParams = (params) ->
for param in params when isThisParam param
return yes
no
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
type: 'suggestion'
docs:
description: 'disallow unnecessary constructors'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/no-useless-constructor'
schema: []
create: (context) ->
###*
# Checks whether a node is a redundant constructor
# @param {ASTNode} node node to check
# @returns {void}
###
checkForConstructor = (node) ->
return unless node.kind is 'constructor'
{body} = node.value.body
ctorParams = node.value.params
{superClass} = node.parent.parent
if (
if superClass
isRedundantSuperCall body, ctorParams
else
body.length is 0 and not hasThisParams ctorParams
)
context.report {
node
message: 'Useless constructor.'
}
MethodDefinition: checkForConstructor
|
[
{
"context": "iterals instead of string concatenation.\n# @author Toru Nagashima\n###\n\n'use strict'\n\n#-----------------------------",
"end": 120,
"score": 0.999880313873291,
"start": 106,
"tag": "NAME",
"value": "Toru Nagashima"
}
] | src/rules/prefer-template.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview A rule to suggest using template literals instead of string concatenation.
# @author Toru Nagashima
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given node is a concatenation.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node is a concatenation.
###
isConcatenation = (node) ->
node.type is 'BinaryExpression' and node.operator is '+'
###*
# Gets the top binary expression node for concatenation in parents of a given node.
# @param {ASTNode} node - A node to get.
# @returns {ASTNode} the top binary expression node in parents of a given node.
###
getTopConcatBinaryExpression = (node) ->
currentNode = node
while isConcatenation currentNode.parent then currentNode = currentNode.parent
currentNode
###*
# Determines whether a given node is a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node is an octal escape sequence
###
isOctalEscapeSequence = (node) ->
# No need to check TemplateLiterals – would throw error with octal escape
isStringLiteral = node.type is 'Literal' and typeof node.value is 'string'
return no unless isStringLiteral
match = node.raw.match /^([^\\]|\\[^0-7])*\\([0-7]{1,3})/
if match
# \0 is actually not considered an octal
return yes if match[2] isnt '0' or typeof match[3] isnt 'undefined'
no
###*
# Checks whether or not a node contains a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node contains an octal escape sequence
###
hasOctalEscapeSequence = (node) ->
return (
hasOctalEscapeSequence(node.left) or hasOctalEscapeSequence node.right
) if isConcatenation node
isOctalEscapeSequence node
###*
# Checks whether or not a given binary expression has string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has string literals.
###
hasStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasStringLiteral(node.right) or hasStringLiteral node.left
) if isConcatenation node
astUtils.isStringLiteral node
###*
# Checks whether or not a given binary expression has non string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has non string literals.
###
hasNonStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasNonStringLiteral(node.right) or hasNonStringLiteral node.left
) if isConcatenation node
not astUtils.isStringLiteral node
###*
# Determines whether a given node will start with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will start with a template curly.
###
startsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.left if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[0].range[0] is node.quasis[0].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
###*
# Determines whether a given node end with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will end with a template curly.
###
endsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.right if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[node.quasis.length - 1].range[0] is
node.quasis[node.quasis.length - 1].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require template literals instead of string concatenation'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/prefer-template'
schema: []
fixable: 'code'
create: (context) ->
sourceCode = context.getSourceCode()
done = Object.create null
###*
# Gets the non-token text between two nodes, ignoring any other tokens that appear between the two tokens.
# @param {ASTNode} node1 The first node
# @param {ASTNode} node2 The second node
# @returns {string} The text between the nodes, excluding other tokens
###
getTextBetween = (node1, node2) ->
allTokens =
[node1].concat(sourceCode.getTokensBetween node1, node2).concat node2
sourceText = sourceCode.getText()
allTokens
.slice 0, -1
.reduce(
(accumulator, token, index) ->
accumulator +
sourceText.slice token.range[1], allTokens[index + 1].range[0]
,
''
)
###*
# Returns a template literal form of the given node.
# @param {ASTNode} currentNode A node that should be converted to a template literal
# @param {string} textBeforeNode Text that should appear before the node
# @param {string} textAfterNode Text that should appear after the node
# @returns {string} A string form of this node, represented as a template literal
###
getTemplateLiteral = (currentNode, textBeforeNode, textAfterNode) ->
###
# If the current node is a string literal, escape any instances of ${ or ` to prevent them from being interpreted
# as a template placeholder. However, if the code already contains a backslash before the ${ or `
# for some reason, don't add another backslash, because that would change the meaning of the code (it would cause
# an actual backslash character to appear before the dollar sign).
###
return "\"#{
str =
currentNode.raw
.slice 1, -1
.replace /\\*(#{|")/g, (matched) ->
return "\\#{matched}" if matched.lastIndexOf('\\') % 2
matched
unless currentNode.raw[0] is '"'
# Unescape any quotes that appear in the original Literal that no longer need to be escaped.
str = str.replace(
new RegExp "\\\\#{currentNode.raw[0]}", 'g'
currentNode.raw[0]
)
str
}\"" if (
currentNode.type is 'Literal' and typeof currentNode.value is 'string'
)
return sourceCode.getText currentNode if (
currentNode.type is 'TemplateLiteral'
)
if (
isConcatenation(currentNode) and
hasStringLiteral(currentNode) and
hasNonStringLiteral currentNode
)
plusSign = sourceCode.getFirstTokenBetween(
currentNode.left
currentNode.right
(token) -> token.value is '+'
)
textBeforePlus = getTextBetween currentNode.left, plusSign
textAfterPlus = getTextBetween plusSign, currentNode.right
leftEndsWithCurly = endsWithTemplateCurly currentNode.left
rightStartsWithCurly = startsWithTemplateCurly currentNode.right
# If the left side of the expression ends with a template curly, add the extra text to the end of the curly bracket.
# `foo${bar}` /* comment */ + 'baz' --> `foo${bar /* comment */ }${baz}`
return (
getTemplateLiteral(
currentNode.left
textBeforeNode
textBeforePlus + textAfterPlus
).slice(0, -1) +
getTemplateLiteral(currentNode.right, null, textAfterNode).slice 1
) if leftEndsWithCurly
# Otherwise, if the right side of the expression starts with a template curly, add the text there.
# 'foo' /* comment */ + `${bar}baz` --> `foo${ /* comment */ bar}baz`
return (
getTemplateLiteral(currentNode.left, textBeforeNode, null).slice(
0
-1
) +
getTemplateLiteral(
currentNode.right
textBeforePlus + textAfterPlus
textAfterNode
).slice 1
) if rightStartsWithCurly
###
# Otherwise, these nodes should not be combined into a template curly, since there is nowhere to put
# the text between them.
###
return "#{getTemplateLiteral(
currentNode.left
textBeforeNode
null
)}#{textBeforePlus}+#{textAfterPlus}#{getTemplateLiteral(
currentNode.right
textAfterNode
null
)}"
"\"\#{#{textBeforeNode or ''}#{sourceCode.getText(
currentNode
)}#{textAfterNode or ''}}\""
###*
# Returns a fixer object that converts a non-string binary expression to a template literal
# @param {SourceCodeFixer} fixer The fixer object
# @param {ASTNode} node A node that should be converted to a template literal
# @returns {Object} A fix for this binary expression
###
fixNonStringBinaryExpression = (fixer, node) ->
topBinaryExpr = getTopConcatBinaryExpression node.parent
return null if hasOctalEscapeSequence topBinaryExpr
fixer.replaceText(
topBinaryExpr
getTemplateLiteral topBinaryExpr, null, null
)
###*
# Reports if a given node is string concatenation with non string literals.
#
# @param {ASTNode} node - A node to check.
# @returns {void}
###
checkForStringConcat = (node) ->
return if (
not astUtils.isStringLiteral(node) or not isConcatenation node.parent
)
topBinaryExpr = getTopConcatBinaryExpression node.parent
# Checks whether or not this node had been checked already.
return if done[topBinaryExpr.range[0]]
done[topBinaryExpr.range[0]] = yes
if hasNonStringLiteral topBinaryExpr
context.report
node: topBinaryExpr
message: 'Unexpected string concatenation.'
fix: (fixer) -> fixNonStringBinaryExpression fixer, node
Program: -> done ###:### = Object.create null
Literal: checkForStringConcat
TemplateLiteral: checkForStringConcat
| 142905 | ###*
# @fileoverview A rule to suggest using template literals instead of string concatenation.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given node is a concatenation.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node is a concatenation.
###
isConcatenation = (node) ->
node.type is 'BinaryExpression' and node.operator is '+'
###*
# Gets the top binary expression node for concatenation in parents of a given node.
# @param {ASTNode} node - A node to get.
# @returns {ASTNode} the top binary expression node in parents of a given node.
###
getTopConcatBinaryExpression = (node) ->
currentNode = node
while isConcatenation currentNode.parent then currentNode = currentNode.parent
currentNode
###*
# Determines whether a given node is a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node is an octal escape sequence
###
isOctalEscapeSequence = (node) ->
# No need to check TemplateLiterals – would throw error with octal escape
isStringLiteral = node.type is 'Literal' and typeof node.value is 'string'
return no unless isStringLiteral
match = node.raw.match /^([^\\]|\\[^0-7])*\\([0-7]{1,3})/
if match
# \0 is actually not considered an octal
return yes if match[2] isnt '0' or typeof match[3] isnt 'undefined'
no
###*
# Checks whether or not a node contains a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node contains an octal escape sequence
###
hasOctalEscapeSequence = (node) ->
return (
hasOctalEscapeSequence(node.left) or hasOctalEscapeSequence node.right
) if isConcatenation node
isOctalEscapeSequence node
###*
# Checks whether or not a given binary expression has string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has string literals.
###
hasStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasStringLiteral(node.right) or hasStringLiteral node.left
) if isConcatenation node
astUtils.isStringLiteral node
###*
# Checks whether or not a given binary expression has non string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has non string literals.
###
hasNonStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasNonStringLiteral(node.right) or hasNonStringLiteral node.left
) if isConcatenation node
not astUtils.isStringLiteral node
###*
# Determines whether a given node will start with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will start with a template curly.
###
startsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.left if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[0].range[0] is node.quasis[0].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
###*
# Determines whether a given node end with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will end with a template curly.
###
endsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.right if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[node.quasis.length - 1].range[0] is
node.quasis[node.quasis.length - 1].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require template literals instead of string concatenation'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/prefer-template'
schema: []
fixable: 'code'
create: (context) ->
sourceCode = context.getSourceCode()
done = Object.create null
###*
# Gets the non-token text between two nodes, ignoring any other tokens that appear between the two tokens.
# @param {ASTNode} node1 The first node
# @param {ASTNode} node2 The second node
# @returns {string} The text between the nodes, excluding other tokens
###
getTextBetween = (node1, node2) ->
allTokens =
[node1].concat(sourceCode.getTokensBetween node1, node2).concat node2
sourceText = sourceCode.getText()
allTokens
.slice 0, -1
.reduce(
(accumulator, token, index) ->
accumulator +
sourceText.slice token.range[1], allTokens[index + 1].range[0]
,
''
)
###*
# Returns a template literal form of the given node.
# @param {ASTNode} currentNode A node that should be converted to a template literal
# @param {string} textBeforeNode Text that should appear before the node
# @param {string} textAfterNode Text that should appear after the node
# @returns {string} A string form of this node, represented as a template literal
###
getTemplateLiteral = (currentNode, textBeforeNode, textAfterNode) ->
###
# If the current node is a string literal, escape any instances of ${ or ` to prevent them from being interpreted
# as a template placeholder. However, if the code already contains a backslash before the ${ or `
# for some reason, don't add another backslash, because that would change the meaning of the code (it would cause
# an actual backslash character to appear before the dollar sign).
###
return "\"#{
str =
currentNode.raw
.slice 1, -1
.replace /\\*(#{|")/g, (matched) ->
return "\\#{matched}" if matched.lastIndexOf('\\') % 2
matched
unless currentNode.raw[0] is '"'
# Unescape any quotes that appear in the original Literal that no longer need to be escaped.
str = str.replace(
new RegExp "\\\\#{currentNode.raw[0]}", 'g'
currentNode.raw[0]
)
str
}\"" if (
currentNode.type is 'Literal' and typeof currentNode.value is 'string'
)
return sourceCode.getText currentNode if (
currentNode.type is 'TemplateLiteral'
)
if (
isConcatenation(currentNode) and
hasStringLiteral(currentNode) and
hasNonStringLiteral currentNode
)
plusSign = sourceCode.getFirstTokenBetween(
currentNode.left
currentNode.right
(token) -> token.value is '+'
)
textBeforePlus = getTextBetween currentNode.left, plusSign
textAfterPlus = getTextBetween plusSign, currentNode.right
leftEndsWithCurly = endsWithTemplateCurly currentNode.left
rightStartsWithCurly = startsWithTemplateCurly currentNode.right
# If the left side of the expression ends with a template curly, add the extra text to the end of the curly bracket.
# `foo${bar}` /* comment */ + 'baz' --> `foo${bar /* comment */ }${baz}`
return (
getTemplateLiteral(
currentNode.left
textBeforeNode
textBeforePlus + textAfterPlus
).slice(0, -1) +
getTemplateLiteral(currentNode.right, null, textAfterNode).slice 1
) if leftEndsWithCurly
# Otherwise, if the right side of the expression starts with a template curly, add the text there.
# 'foo' /* comment */ + `${bar}baz` --> `foo${ /* comment */ bar}baz`
return (
getTemplateLiteral(currentNode.left, textBeforeNode, null).slice(
0
-1
) +
getTemplateLiteral(
currentNode.right
textBeforePlus + textAfterPlus
textAfterNode
).slice 1
) if rightStartsWithCurly
###
# Otherwise, these nodes should not be combined into a template curly, since there is nowhere to put
# the text between them.
###
return "#{getTemplateLiteral(
currentNode.left
textBeforeNode
null
)}#{textBeforePlus}+#{textAfterPlus}#{getTemplateLiteral(
currentNode.right
textAfterNode
null
)}"
"\"\#{#{textBeforeNode or ''}#{sourceCode.getText(
currentNode
)}#{textAfterNode or ''}}\""
###*
# Returns a fixer object that converts a non-string binary expression to a template literal
# @param {SourceCodeFixer} fixer The fixer object
# @param {ASTNode} node A node that should be converted to a template literal
# @returns {Object} A fix for this binary expression
###
fixNonStringBinaryExpression = (fixer, node) ->
topBinaryExpr = getTopConcatBinaryExpression node.parent
return null if hasOctalEscapeSequence topBinaryExpr
fixer.replaceText(
topBinaryExpr
getTemplateLiteral topBinaryExpr, null, null
)
###*
# Reports if a given node is string concatenation with non string literals.
#
# @param {ASTNode} node - A node to check.
# @returns {void}
###
checkForStringConcat = (node) ->
return if (
not astUtils.isStringLiteral(node) or not isConcatenation node.parent
)
topBinaryExpr = getTopConcatBinaryExpression node.parent
# Checks whether or not this node had been checked already.
return if done[topBinaryExpr.range[0]]
done[topBinaryExpr.range[0]] = yes
if hasNonStringLiteral topBinaryExpr
context.report
node: topBinaryExpr
message: 'Unexpected string concatenation.'
fix: (fixer) -> fixNonStringBinaryExpression fixer, node
Program: -> done ###:### = Object.create null
Literal: checkForStringConcat
TemplateLiteral: checkForStringConcat
| true | ###*
# @fileoverview A rule to suggest using template literals instead of string concatenation.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given node is a concatenation.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node is a concatenation.
###
isConcatenation = (node) ->
node.type is 'BinaryExpression' and node.operator is '+'
###*
# Gets the top binary expression node for concatenation in parents of a given node.
# @param {ASTNode} node - A node to get.
# @returns {ASTNode} the top binary expression node in parents of a given node.
###
getTopConcatBinaryExpression = (node) ->
currentNode = node
while isConcatenation currentNode.parent then currentNode = currentNode.parent
currentNode
###*
# Determines whether a given node is a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node is an octal escape sequence
###
isOctalEscapeSequence = (node) ->
# No need to check TemplateLiterals – would throw error with octal escape
isStringLiteral = node.type is 'Literal' and typeof node.value is 'string'
return no unless isStringLiteral
match = node.raw.match /^([^\\]|\\[^0-7])*\\([0-7]{1,3})/
if match
# \0 is actually not considered an octal
return yes if match[2] isnt '0' or typeof match[3] isnt 'undefined'
no
###*
# Checks whether or not a node contains a octal escape sequence
# @param {ASTNode} node A node to check
# @returns {boolean} `true` if the node contains an octal escape sequence
###
hasOctalEscapeSequence = (node) ->
return (
hasOctalEscapeSequence(node.left) or hasOctalEscapeSequence node.right
) if isConcatenation node
isOctalEscapeSequence node
###*
# Checks whether or not a given binary expression has string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has string literals.
###
hasStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasStringLiteral(node.right) or hasStringLiteral node.left
) if isConcatenation node
astUtils.isStringLiteral node
###*
# Checks whether or not a given binary expression has non string literals.
# @param {ASTNode} node - A node to check.
# @returns {boolean} `true` if the node has non string literals.
###
hasNonStringLiteral = (node) ->
# `left` is deeper than `right` normally.
return (
hasNonStringLiteral(node.right) or hasNonStringLiteral node.left
) if isConcatenation node
not astUtils.isStringLiteral node
###*
# Determines whether a given node will start with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will start with a template curly.
###
startsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.left if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[0].range[0] is node.quasis[0].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
###*
# Determines whether a given node end with a template curly expression (`${}`) when being converted to a template literal.
# @param {ASTNode} node The node that will be fixed to a template literal
# @returns {boolean} `true` if the node will end with a template curly.
###
endsWithTemplateCurly = (node) ->
return startsWithTemplateCurly node.right if node.type is 'BinaryExpression'
return (
node.expressions.length and
node.quasis.length and
node.quasis[node.quasis.length - 1].range[0] is
node.quasis[node.quasis.length - 1].range[1]
) if node.type is 'TemplateLiteral'
node.type isnt 'Literal' or typeof node.value isnt 'string'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require template literals instead of string concatenation'
category: 'ECMAScript 6'
recommended: no
url: 'https://eslint.org/docs/rules/prefer-template'
schema: []
fixable: 'code'
create: (context) ->
sourceCode = context.getSourceCode()
done = Object.create null
###*
# Gets the non-token text between two nodes, ignoring any other tokens that appear between the two tokens.
# @param {ASTNode} node1 The first node
# @param {ASTNode} node2 The second node
# @returns {string} The text between the nodes, excluding other tokens
###
getTextBetween = (node1, node2) ->
allTokens =
[node1].concat(sourceCode.getTokensBetween node1, node2).concat node2
sourceText = sourceCode.getText()
allTokens
.slice 0, -1
.reduce(
(accumulator, token, index) ->
accumulator +
sourceText.slice token.range[1], allTokens[index + 1].range[0]
,
''
)
###*
# Returns a template literal form of the given node.
# @param {ASTNode} currentNode A node that should be converted to a template literal
# @param {string} textBeforeNode Text that should appear before the node
# @param {string} textAfterNode Text that should appear after the node
# @returns {string} A string form of this node, represented as a template literal
###
getTemplateLiteral = (currentNode, textBeforeNode, textAfterNode) ->
###
# If the current node is a string literal, escape any instances of ${ or ` to prevent them from being interpreted
# as a template placeholder. However, if the code already contains a backslash before the ${ or `
# for some reason, don't add another backslash, because that would change the meaning of the code (it would cause
# an actual backslash character to appear before the dollar sign).
###
return "\"#{
str =
currentNode.raw
.slice 1, -1
.replace /\\*(#{|")/g, (matched) ->
return "\\#{matched}" if matched.lastIndexOf('\\') % 2
matched
unless currentNode.raw[0] is '"'
# Unescape any quotes that appear in the original Literal that no longer need to be escaped.
str = str.replace(
new RegExp "\\\\#{currentNode.raw[0]}", 'g'
currentNode.raw[0]
)
str
}\"" if (
currentNode.type is 'Literal' and typeof currentNode.value is 'string'
)
return sourceCode.getText currentNode if (
currentNode.type is 'TemplateLiteral'
)
if (
isConcatenation(currentNode) and
hasStringLiteral(currentNode) and
hasNonStringLiteral currentNode
)
plusSign = sourceCode.getFirstTokenBetween(
currentNode.left
currentNode.right
(token) -> token.value is '+'
)
textBeforePlus = getTextBetween currentNode.left, plusSign
textAfterPlus = getTextBetween plusSign, currentNode.right
leftEndsWithCurly = endsWithTemplateCurly currentNode.left
rightStartsWithCurly = startsWithTemplateCurly currentNode.right
# If the left side of the expression ends with a template curly, add the extra text to the end of the curly bracket.
# `foo${bar}` /* comment */ + 'baz' --> `foo${bar /* comment */ }${baz}`
return (
getTemplateLiteral(
currentNode.left
textBeforeNode
textBeforePlus + textAfterPlus
).slice(0, -1) +
getTemplateLiteral(currentNode.right, null, textAfterNode).slice 1
) if leftEndsWithCurly
# Otherwise, if the right side of the expression starts with a template curly, add the text there.
# 'foo' /* comment */ + `${bar}baz` --> `foo${ /* comment */ bar}baz`
return (
getTemplateLiteral(currentNode.left, textBeforeNode, null).slice(
0
-1
) +
getTemplateLiteral(
currentNode.right
textBeforePlus + textAfterPlus
textAfterNode
).slice 1
) if rightStartsWithCurly
###
# Otherwise, these nodes should not be combined into a template curly, since there is nowhere to put
# the text between them.
###
return "#{getTemplateLiteral(
currentNode.left
textBeforeNode
null
)}#{textBeforePlus}+#{textAfterPlus}#{getTemplateLiteral(
currentNode.right
textAfterNode
null
)}"
"\"\#{#{textBeforeNode or ''}#{sourceCode.getText(
currentNode
)}#{textAfterNode or ''}}\""
###*
# Returns a fixer object that converts a non-string binary expression to a template literal
# @param {SourceCodeFixer} fixer The fixer object
# @param {ASTNode} node A node that should be converted to a template literal
# @returns {Object} A fix for this binary expression
###
fixNonStringBinaryExpression = (fixer, node) ->
topBinaryExpr = getTopConcatBinaryExpression node.parent
return null if hasOctalEscapeSequence topBinaryExpr
fixer.replaceText(
topBinaryExpr
getTemplateLiteral topBinaryExpr, null, null
)
###*
# Reports if a given node is string concatenation with non string literals.
#
# @param {ASTNode} node - A node to check.
# @returns {void}
###
checkForStringConcat = (node) ->
return if (
not astUtils.isStringLiteral(node) or not isConcatenation node.parent
)
topBinaryExpr = getTopConcatBinaryExpression node.parent
# Checks whether or not this node had been checked already.
return if done[topBinaryExpr.range[0]]
done[topBinaryExpr.range[0]] = yes
if hasNonStringLiteral topBinaryExpr
context.report
node: topBinaryExpr
message: 'Unexpected string concatenation.'
fix: (fixer) -> fixNonStringBinaryExpression fixer, node
Program: -> done ###:### = Object.create null
Literal: checkForStringConcat
TemplateLiteral: checkForStringConcat
|
[
{
"context": "orker\n amqpUri: 'amqp://meshblu:judgementday@127.0.0.1'\n jobTimeoutSeconds: 1\n jobLogRedisUri:",
"end": 969,
"score": 0.9997367858886719,
"start": 960,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ient = new MeshbluAmqp uuid: 'some-uuid', token: 'some-token', hostname: 'localhost'\n @client.connect (erro",
"end": 1475,
"score": 0.8061168789863586,
"start": 1465,
"tag": "KEY",
"value": "some-token"
}
] | test/connect-firehose-spec.coffee | octoblu/meshblu-core-firehose-amqp | 0 | _ = require 'lodash'
async = require 'async'
FirehoseWorker = require '../src/firehose-worker'
MeshbluAmqp = require 'meshblu-amqp'
RedisNS = require '@octoblu/redis-ns'
redis = require 'ioredis'
describe 'connect firehose subscription', ->
beforeEach 'create redis connection', (done) ->
rawClient = redis.createClient(dropBufferSupport: true)
@redisClient = new RedisNS 'test:firehose:amqp', rawClient
@redisClient.on 'ready', =>
@redisClient.keys '*', (error, keys) =>
return done error if error?
return done() if _.isEmpty keys
rawClient.del keys..., done
return # nothing
beforeEach 'hydrant', (done) ->
@redisHydrantClient = new RedisNS 'messages', redis.createClient(dropBufferSupport: true)
@redisHydrantClient.on 'ready', done
return # nothing
beforeEach 'run', ->
@worker = new FirehoseWorker
amqpUri: 'amqp://meshblu:judgementday@127.0.0.1'
jobTimeoutSeconds: 1
jobLogRedisUri: 'redis://localhost:6379'
jobLogQueue: 'sample-rate:0.00'
jobLogSampleRate: 0
maxConnections: 10
redisUri: 'redis://localhost:6379'
namespace: 'test:firehose:amqp'
hydrantNamespace: 'messages'
@worker.run (error) =>
throw error if error?
afterEach (done) ->
@worker.stop done
return # nothing
beforeEach 'setup amqp', (done) ->
@client = new MeshbluAmqp uuid: 'some-uuid', token: 'some-token', hostname: 'localhost'
@client.connect (error) =>
return done error if error?
@client.connectFirehose done
return # nothing
beforeEach 'message', (done) ->
doneTwice = _.after 2, done
message =
metadata:
route: [{toUuid: 'a', fromUuid: 'b', type: 'message.sent'}]
rawData: '{"foo":"bar"}'
@members = []
@subscriptionExists = false
checkList = (callback) =>
@redisClient.lrange 'subscriptions', 0, -1, (error, @members) =>
return callback error if error?
callback()
@client.once 'message', (@message) =>
doneTwice()
async.until (=> _.includes @members, @client.firehoseQueueName), checkList, (error) =>
return done error if error?
async.until (=> @worker.subscriptions?[@client.firehoseQueueName]?), ((cb) => setTimeout(cb, 100)), (error) =>
return done error if error?
@redisHydrantClient.publish 'some-uuid', JSON.stringify(message), (error, published) =>
return done error if error?
return done(new Error 'failed to publish') if published == 0
doneTwice()
it 'should emit a message', ->
expectedMetadata =
route: [
fromUuid: 'b'
toUuid: 'a'
type: 'message.sent'
]
expect(@message.metadata).to.deep.equal expectedMetadata
expect(@message.data).to.deep.equal foo:"bar"
| 109443 | _ = require 'lodash'
async = require 'async'
FirehoseWorker = require '../src/firehose-worker'
MeshbluAmqp = require 'meshblu-amqp'
RedisNS = require '@octoblu/redis-ns'
redis = require 'ioredis'
describe 'connect firehose subscription', ->
beforeEach 'create redis connection', (done) ->
rawClient = redis.createClient(dropBufferSupport: true)
@redisClient = new RedisNS 'test:firehose:amqp', rawClient
@redisClient.on 'ready', =>
@redisClient.keys '*', (error, keys) =>
return done error if error?
return done() if _.isEmpty keys
rawClient.del keys..., done
return # nothing
beforeEach 'hydrant', (done) ->
@redisHydrantClient = new RedisNS 'messages', redis.createClient(dropBufferSupport: true)
@redisHydrantClient.on 'ready', done
return # nothing
beforeEach 'run', ->
@worker = new FirehoseWorker
amqpUri: 'amqp://meshblu:judgementday@127.0.0.1'
jobTimeoutSeconds: 1
jobLogRedisUri: 'redis://localhost:6379'
jobLogQueue: 'sample-rate:0.00'
jobLogSampleRate: 0
maxConnections: 10
redisUri: 'redis://localhost:6379'
namespace: 'test:firehose:amqp'
hydrantNamespace: 'messages'
@worker.run (error) =>
throw error if error?
afterEach (done) ->
@worker.stop done
return # nothing
beforeEach 'setup amqp', (done) ->
@client = new MeshbluAmqp uuid: 'some-uuid', token: '<KEY>', hostname: 'localhost'
@client.connect (error) =>
return done error if error?
@client.connectFirehose done
return # nothing
beforeEach 'message', (done) ->
doneTwice = _.after 2, done
message =
metadata:
route: [{toUuid: 'a', fromUuid: 'b', type: 'message.sent'}]
rawData: '{"foo":"bar"}'
@members = []
@subscriptionExists = false
checkList = (callback) =>
@redisClient.lrange 'subscriptions', 0, -1, (error, @members) =>
return callback error if error?
callback()
@client.once 'message', (@message) =>
doneTwice()
async.until (=> _.includes @members, @client.firehoseQueueName), checkList, (error) =>
return done error if error?
async.until (=> @worker.subscriptions?[@client.firehoseQueueName]?), ((cb) => setTimeout(cb, 100)), (error) =>
return done error if error?
@redisHydrantClient.publish 'some-uuid', JSON.stringify(message), (error, published) =>
return done error if error?
return done(new Error 'failed to publish') if published == 0
doneTwice()
it 'should emit a message', ->
expectedMetadata =
route: [
fromUuid: 'b'
toUuid: 'a'
type: 'message.sent'
]
expect(@message.metadata).to.deep.equal expectedMetadata
expect(@message.data).to.deep.equal foo:"bar"
| true | _ = require 'lodash'
async = require 'async'
FirehoseWorker = require '../src/firehose-worker'
MeshbluAmqp = require 'meshblu-amqp'
RedisNS = require '@octoblu/redis-ns'
redis = require 'ioredis'
describe 'connect firehose subscription', ->
beforeEach 'create redis connection', (done) ->
rawClient = redis.createClient(dropBufferSupport: true)
@redisClient = new RedisNS 'test:firehose:amqp', rawClient
@redisClient.on 'ready', =>
@redisClient.keys '*', (error, keys) =>
return done error if error?
return done() if _.isEmpty keys
rawClient.del keys..., done
return # nothing
beforeEach 'hydrant', (done) ->
@redisHydrantClient = new RedisNS 'messages', redis.createClient(dropBufferSupport: true)
@redisHydrantClient.on 'ready', done
return # nothing
beforeEach 'run', ->
@worker = new FirehoseWorker
amqpUri: 'amqp://meshblu:judgementday@127.0.0.1'
jobTimeoutSeconds: 1
jobLogRedisUri: 'redis://localhost:6379'
jobLogQueue: 'sample-rate:0.00'
jobLogSampleRate: 0
maxConnections: 10
redisUri: 'redis://localhost:6379'
namespace: 'test:firehose:amqp'
hydrantNamespace: 'messages'
@worker.run (error) =>
throw error if error?
afterEach (done) ->
@worker.stop done
return # nothing
beforeEach 'setup amqp', (done) ->
@client = new MeshbluAmqp uuid: 'some-uuid', token: 'PI:KEY:<KEY>END_PI', hostname: 'localhost'
@client.connect (error) =>
return done error if error?
@client.connectFirehose done
return # nothing
beforeEach 'message', (done) ->
doneTwice = _.after 2, done
message =
metadata:
route: [{toUuid: 'a', fromUuid: 'b', type: 'message.sent'}]
rawData: '{"foo":"bar"}'
@members = []
@subscriptionExists = false
checkList = (callback) =>
@redisClient.lrange 'subscriptions', 0, -1, (error, @members) =>
return callback error if error?
callback()
@client.once 'message', (@message) =>
doneTwice()
async.until (=> _.includes @members, @client.firehoseQueueName), checkList, (error) =>
return done error if error?
async.until (=> @worker.subscriptions?[@client.firehoseQueueName]?), ((cb) => setTimeout(cb, 100)), (error) =>
return done error if error?
@redisHydrantClient.publish 'some-uuid', JSON.stringify(message), (error, published) =>
return done error if error?
return done(new Error 'failed to publish') if published == 0
doneTwice()
it 'should emit a message', ->
expectedMetadata =
route: [
fromUuid: 'b'
toUuid: 'a'
type: 'message.sent'
]
expect(@message.metadata).to.deep.equal expectedMetadata
expect(@message.data).to.deep.equal foo:"bar"
|
[
{
"context": "ion Simple Pooling Module\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n\n###*\n * 3rd library im",
"end": 96,
"score": 0.9998497366905212,
"start": 89,
"tag": "NAME",
"value": "Bastian"
},
{
"context": " Pooling Module\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n\n###*\n * 3rd library imports\n###\nra",
"end": 108,
"score": 0.6135956048965454,
"start": 98,
"tag": "USERNAME",
"value": "hereandnow"
},
{
"context": "dule\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n\n###*\n * 3rd library imports\n###\nrandomInt =",
"end": 117,
"score": 0.9953022003173828,
"start": 110,
"tag": "NAME",
"value": "Behrens"
}
] | src/simplepool.coffee | valiton/node-simple-pool | 2 | ###*
* @name simple-pool
* @description Simple Pooling Module
* @author Valiton GmbH, Bastian "hereandnow" Behrens
###
###*
* 3rd library imports
###
randomInt = require('random-tools').randomInt
module.exports = class SimplePool
###*
* create a new SimplePool instance
*
* @memberOf global
*
* @constructor
* @this {SimplePool}
###
constructor: (args...) ->
@current = 0
@pool = if args.length is 1 and Array.isArray args[0] then args[0] else args
###*
* add something to this pool
*
* @param {*} obj add anything you want here
* @function global.SimplePool.prototype.add
* @returns {this} the current instance for chaining
###
add: (obj, position) ->
unless position?
@pool.push obj
else
@pool.splice position, 0, obj
this
###*
* remove the given obj from the pool
*
* @param {*} obj which should be removed
* @function global.SimplePool.prototype.remove
* @returns {this} the current instance for chaining
###
remove: (obj) ->
index = @pool.indexOf obj
if index > -1
@pool.splice index, 1
this
###*
* get the next obj from the pool (roundrobin)
*
* @function global.SimplePool.prototype.get
* @returns {*} obj the next obj in the pool
###
get: ->
obj = @pool[@current]
@current += 1
@current = 0 if @current is @pool.length
obj
###*
* get the next obj from the pool (per chance)
*
* @function global.SimplePool.prototype.random
* @returns {*} obj a random obj
###
random: ->
@pool[randomInt @pool.length - 1]
###*
* get all objects from the pool
*
* @function global.SimplePool.prototype.all
* @returns {array} pool the total pool array
###
all: ->
@pool
| 72719 | ###*
* @name simple-pool
* @description Simple Pooling Module
* @author Valiton GmbH, <NAME> "hereandnow" <NAME>
###
###*
* 3rd library imports
###
randomInt = require('random-tools').randomInt
module.exports = class SimplePool
###*
* create a new SimplePool instance
*
* @memberOf global
*
* @constructor
* @this {SimplePool}
###
constructor: (args...) ->
@current = 0
@pool = if args.length is 1 and Array.isArray args[0] then args[0] else args
###*
* add something to this pool
*
* @param {*} obj add anything you want here
* @function global.SimplePool.prototype.add
* @returns {this} the current instance for chaining
###
add: (obj, position) ->
unless position?
@pool.push obj
else
@pool.splice position, 0, obj
this
###*
* remove the given obj from the pool
*
* @param {*} obj which should be removed
* @function global.SimplePool.prototype.remove
* @returns {this} the current instance for chaining
###
remove: (obj) ->
index = @pool.indexOf obj
if index > -1
@pool.splice index, 1
this
###*
* get the next obj from the pool (roundrobin)
*
* @function global.SimplePool.prototype.get
* @returns {*} obj the next obj in the pool
###
get: ->
obj = @pool[@current]
@current += 1
@current = 0 if @current is @pool.length
obj
###*
* get the next obj from the pool (per chance)
*
* @function global.SimplePool.prototype.random
* @returns {*} obj a random obj
###
random: ->
@pool[randomInt @pool.length - 1]
###*
* get all objects from the pool
*
* @function global.SimplePool.prototype.all
* @returns {array} pool the total pool array
###
all: ->
@pool
| true | ###*
* @name simple-pool
* @description Simple Pooling Module
* @author Valiton GmbH, PI:NAME:<NAME>END_PI "hereandnow" PI:NAME:<NAME>END_PI
###
###*
* 3rd library imports
###
randomInt = require('random-tools').randomInt
module.exports = class SimplePool
###*
* create a new SimplePool instance
*
* @memberOf global
*
* @constructor
* @this {SimplePool}
###
constructor: (args...) ->
@current = 0
@pool = if args.length is 1 and Array.isArray args[0] then args[0] else args
###*
* add something to this pool
*
* @param {*} obj add anything you want here
* @function global.SimplePool.prototype.add
* @returns {this} the current instance for chaining
###
add: (obj, position) ->
unless position?
@pool.push obj
else
@pool.splice position, 0, obj
this
###*
* remove the given obj from the pool
*
* @param {*} obj which should be removed
* @function global.SimplePool.prototype.remove
* @returns {this} the current instance for chaining
###
remove: (obj) ->
index = @pool.indexOf obj
if index > -1
@pool.splice index, 1
this
###*
* get the next obj from the pool (roundrobin)
*
* @function global.SimplePool.prototype.get
* @returns {*} obj the next obj in the pool
###
get: ->
obj = @pool[@current]
@current += 1
@current = 0 if @current is @pool.length
obj
###*
* get the next obj from the pool (per chance)
*
* @function global.SimplePool.prototype.random
* @returns {*} obj a random obj
###
random: ->
@pool[randomInt @pool.length - 1]
###*
* get all objects from the pool
*
* @function global.SimplePool.prototype.all
* @returns {array} pool the total pool array
###
all: ->
@pool
|
[
{
"context": " = require(\"stripe\") process.env.STRIPE_TOKEN or \"sk_test_lRsLtNDZ9EBsX2NrFx07H5mO\"\npending_charges = {}\n\nformat_page = (data) ->\n ",
"end": 265,
"score": 0.936449408531189,
"start": 233,
"tag": "KEY",
"value": "sk_test_lRsLtNDZ9EBsX2NrFx07H5mO"
},
{
"context": "lass=\"form-group\">\n <input type=\"text\" name=\"username\" placeholder=\"Username\" class=\"form-control\" />\n ",
"end": 4340,
"score": 0.9965401887893677,
"start": 4332,
"tag": "USERNAME",
"value": "username"
},
{
"context": " <input type=\"text\" name=\"username\" placeholder=\"Username\" class=\"form-control\" />\n </div>\n <div clas",
"end": 4363,
"score": 0.9976281523704529,
"start": 4355,
"tag": "USERNAME",
"value": "Username"
},
{
"context": ">\n <div class=\"form-group\">\n <input type=\"password\" name=\"password\" placeholder=\"Password\" class=\"fo",
"end": 4456,
"score": 0.9957687854766846,
"start": 4448,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "nput type=\"password\" name=\"password\" placeholder=\"Password\" class=\"form-control\" />\n </div>\n <input ty",
"end": 4495,
"score": 0.998992383480072,
"start": 4487,
"tag": "PASSWORD",
"value": "Password"
},
{
"context": "heckout.js\" class=\"stripe-button\"\n data-key=\"pk_test_k280ghlxr7GrqGF9lxBhy1Uj\"\n data-amount=\"#{price}\"\n data-name=\"Ba",
"end": 8571,
"score": 0.9995179176330566,
"start": 8539,
"tag": "KEY",
"value": "pk_test_k280ghlxr7GrqGF9lxBhy1Uj"
},
{
"context": "with your payment! (NOACCUSER)\n Contact support@getbag.io with this token: #{customer}\n \"\"\"\n ",
"end": 11215,
"score": 0.9999101758003235,
"start": 11198,
"tag": "EMAIL",
"value": "support@getbag.io"
},
{
"context": " Hey! Something went wrong with your payment! (NOSAVEUSER)\n Contact support@getbag.io with t",
"end": 12172,
"score": 0.9988560676574707,
"start": 12162,
"tag": "USERNAME",
"value": "NOSAVEUSER"
},
{
"context": "your payment! (NOSAVEUSER)\n Contact support@getbag.io with this token: #{customer}\n \"\"\"\n",
"end": 12215,
"score": 0.999927818775177,
"start": 12198,
"tag": "EMAIL",
"value": "support@getbag.io"
},
{
"context": " If you think this is in error, contact us at support@getbag.io and provide this token: #{consumer}\n \"\"\"\n ",
"end": 13213,
"score": 0.9999258518218994,
"start": 13196,
"tag": "EMAIL",
"value": "support@getbag.io"
}
] | src/account/index.coffee | 1egoman/bag-node | 0 | User = require "../models/user_model"
Foodstuffs = require "../models/foodstuff_model"
Bag = require "../models/bag_model"
auth_ctrl = require "../controllers/auth_controller"
stripe = require("stripe") process.env.STRIPE_TOKEN or "sk_test_lRsLtNDZ9EBsX2NrFx07H5mO"
pending_charges = {}
format_page = (data) ->
"""
<html>
<head>
<title>Manage Bag Account</title>
<link rel="stylesheet" href="//getbag.io/css/index.css">
</head>
<body>
<nav class="navbar navbar-default">
<div>
<div class="container">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header"><a name="top">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</a><a class="navbar-brand" href="//getbag.io">
<img src="//getbag.io/img/bag.svg">
</a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
<li>
#{if data.indexOf("Login") is -1 then '<a href="/logout">Logout</a>' else ''}
</li>
</ul>
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</div>
</nav>
<div class="container">
#{data}
</div>
</body>
</html>
"""
# basic auth handler
# if a user is in the session, then allow them through, otherwise, redirect to
# the login page.
exports.protect = (req, res, next) ->
if req.session.user
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.redirect "/login?redirect=#{req.url}"
else
req.session.user = user
next()
else
res.redirect "/login?redirect=#{req.url}"
# the basic admin management page. THis is the "hub" that everything else comes
# off of.
exports.manage = (req, res) ->
# make each item read the correct thing.
exec = if req.session.user.plan is 2 then "Update settings for " else "Upgrade to "
pro = if req.session.user.plan is 1
"Update settings for "
else if req.session.user.plan is 2
"Downgrade to "
else
"Upgrade to "
free = if req.session.user.plan is 0 then "You have " else "Downgrade to "
res.send format_page """
<h1>Welcome, #{req.session.user.realname}</h1>
<div class="paid-plans">
<div class="plan plan-0 plan-free">
<div>
<h3>Bag Free</h3>
<p>Food is just a hobby.</p>
<ul>
<li>No Private Items
</li><li>No Additional features
</li><li>Free</li>
</ul>
</div>
<a
class="btn btn-block #{req.session.user.plan is 0 and "current"}"
#{req.session.user.plan isnt 0 and "href='/checkout/free'" or ''}
>
#{free}Bag Free
</a>
</div>
<div class="plan plan-5 plan-pro">
<div>
<h3>Bag Pro</h3>
<p>Grocery shopping has become serious.</p>
<ul>
<li>10 Private Items
</li><li>No Additional features
</li><li>$5.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 1 and "current"}" href="/checkout/pro">
#{pro}Bag Pro
</a>
</div>
<div class="plan plan-10 plan-exec">
<div>
<h3>Bag Exectutive</h3>
<p>You never joke about groceries.</p>
<ul>
<li>Unlimited Private Items
</li><li>Custom prices for items
</li><li>$10.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 2 and "current"}" href="/checkout/exec">
#{exec}Bag Executive
</a>
</div>
</div>
"""
# the login page
exports.login_get = (req, res) ->
res.send format_page """
<form method="POST">
<h1>Login to Bag</h1>
<div class="form-group">
<input type="text" name="username" placeholder="Username" class="form-control" />
</div>
<div class="form-group">
<input type="password" name="password" placeholder="Password" class="form-control" />
</div>
<input type="submit" class="btn btn-primary" value="Login" />
</form>
"""
# The login handler. This route grabs the login info and checks it.
exports.login_post = (req, res) ->
if req.body.username and req.body.password
auth_ctrl.handshake
body: req.body
type: 'ws'
, send: (payload) ->
if payload.err
res.send "Error: #{payload.err}"
else
req.session.user = payload
res.redirect req.query?.redirect or "/manage"
else
res.send "Error: No username or password provided."
# logout handler
exports.logout = (req, res) ->
req.session.user = null
res.redirect "//getbag.io"
# set up a new plan, and upgrade your account.
exports.checkout = (req, res) ->
switch req.params.plan
when "pro", "p", "professional"
price = 500
desc = "Bag Professional ($5.00)"
type = "pro"
when "exec", "e", "executive"
price = 1000
desc = "Bag Executive ($10.00)"
type = "exec"
# downgrade back to a free account.
else
if req.session.user.stripe_id
# delete the stripe customer
stripe.customers.del req.session.user.stripe_id, (err, confirm) ->
if err
res.send "Error deleting stripe user: #{err}"
else
# remove all payment info and subscription stuff.
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't get user: #{err}"
else
user.stripe_id = null
user.plan = 0
user.plan_expire = null
# remove all private foodstuffs
Foodstuffs.remove
user: req.session.user._id
private: true
, (err, foodstuffs) ->
if err
res.send "Couldn't delete private foodstuffs: #{err}"
else
# remove all custom prices
Bag.findOne user: req.session.user._id, (err, bag) ->
if err
res.send "Couldn't retreive user bag: #{err}"
else
for b in bag.contents
b.stores.custom = undefined if "custom" of b.stores
b.store = "" if b.store is "custom"
bag.save (err) ->
console.log bag, err
if err
res.send "Couldn't save bag: #{err}"
else
# save user with new plan
user.save (err) ->
if err
res.send "Couldn't save to database: #{err}"
else
res.send format_page """
Your plan has been cancelled.
You have been downgraded to our free plan.
For all changes to take effect, please restart bag on your phone.
"""
else
res.send "It seems you aren't signed up for any plan right now."
return
# checkout with the payment info specified.
res.send format_page """
<h1>Bag Payment</h1>
<p>
To continue, please click on the "Pay with Card" button below. Once you've clicked 'OK', we'll
try to charge your card for the payment specified. Once we've verified that
the charge was successful, we'll activate your account. If you refresh the page before the charge is accepted,
we will still activate your account, though you won't receive a confirmation.
</p>
<!-- stripe checkout -->
<form action="/checkout_complete" method="POST">
<input type="hidden" name="type" value="#{type}" />
<script
src="https://checkout.stripe.com/checkout.js" class="stripe-button"
data-key="pk_test_k280ghlxr7GrqGF9lxBhy1Uj"
data-amount="#{price}"
data-name="Bag"
data-description="#{desc}"
data-image="//getbag.io/img/bag.svg"
data-locale="auto">
</script>
</form>
"""
# this callback fires when the user finishes checking out with stripe
exports.checkout_complete = (req, res) ->
if req.body.stripeToken and req.body.stripeEmail
# sign user up for the subscription
if req.body.type in ["pro", "exec"]
# delete any old customers with a specified token
# we don't care about any errors, because we are injecting a fake token
# after all. This really should be made better, but for now it is probably
# fine.
stripe.customers.del req.session.user.stripe_id or "something_else", (err, confirm) ->
stripe.customers.create
source: req.body.stripeToken
plan: "bag_#{req.body.type}"
email: req.body.stripeEmail
, (err, customer) ->
if err
res.send "Error creating customer: #{err}"
else
# save customer data to database
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't access database: #{err}"
else
user.stripe_id = customer.id
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
else
res.test_get_customer_id customer.id if res.test_get_customer_id
# wait for the charge to go through...
pending_charges[customer.id] =
req: req
res: res
else
res.send "Invalid type to get - needs to be 'pro' or 'exec'."
else
res.send "No stripe info was sent in the transaction."
# after a card has been used, stripe will respond with a webhook.
exports.stripe_webhook = (req, res) ->
customer = req.body.data?.object?.customer
return res.send "Bad customer." if not customer
switch req.body.type
# a successful card charge. We'll use this to increase the end date of a
# user's subscription.
when "invoice.payment_succeeded"
if customer of pending_charges
# update the users length of payment by a month.
User.findOne stripe_id: customer, (err, user) ->
if err
res.send "Couldn't access user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOACCUSER)
Contact support@getbag.io with this token: #{customer}
"""
else
# set up the plan
user.plan = 0
user.plan = 1 if req.body.data?.object?.subtotal is 500
user.plan = 2 if req.body.data?.object?.subtotal is 1000
# add one more month, in milliseconds
user.plan_expire or= new Date().getTime() # by default, this is the current time.
user.plan_expire += do ->
date = new Date
month_days = new Date(date.getFullYear(), date.getMonth() + 1, 0).getDate()
month_days * 60 * 60 * 24 * 1000
# save the new user token stuff
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOSAVEUSER)
Contact support@getbag.io with this token: #{customer}
"""
else
res.send "Cool, thanks stripe!"
# respond to the pending request
if customer of pending_charges
pending_charges[customer].res.send format_page """
<h2>You have successfully signed up for Bag!</h2>
<p>
You should receive a receipt by email soon. In the meantime, enjoy!
For all features to work properly, you should restart the bag app on your phone.
</p>
"""
else
# uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
when "charge.failed"
if consumer of pending_charges
pending_charges[consumer].res.send """
Your card didn't charge.
If you think this is in error, contact us at support@getbag.io and provide this token: #{consumer}
"""
res.send "Cool, thanks stripe!"
else
1 # uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
else
res.send "Thanks anyway, but we didn't need this event."
| 36766 | User = require "../models/user_model"
Foodstuffs = require "../models/foodstuff_model"
Bag = require "../models/bag_model"
auth_ctrl = require "../controllers/auth_controller"
stripe = require("stripe") process.env.STRIPE_TOKEN or "<KEY>"
pending_charges = {}
format_page = (data) ->
"""
<html>
<head>
<title>Manage Bag Account</title>
<link rel="stylesheet" href="//getbag.io/css/index.css">
</head>
<body>
<nav class="navbar navbar-default">
<div>
<div class="container">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header"><a name="top">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</a><a class="navbar-brand" href="//getbag.io">
<img src="//getbag.io/img/bag.svg">
</a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
<li>
#{if data.indexOf("Login") is -1 then '<a href="/logout">Logout</a>' else ''}
</li>
</ul>
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</div>
</nav>
<div class="container">
#{data}
</div>
</body>
</html>
"""
# basic auth handler
# if a user is in the session, then allow them through, otherwise, redirect to
# the login page.
exports.protect = (req, res, next) ->
if req.session.user
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.redirect "/login?redirect=#{req.url}"
else
req.session.user = user
next()
else
res.redirect "/login?redirect=#{req.url}"
# the basic admin management page. THis is the "hub" that everything else comes
# off of.
exports.manage = (req, res) ->
# make each item read the correct thing.
exec = if req.session.user.plan is 2 then "Update settings for " else "Upgrade to "
pro = if req.session.user.plan is 1
"Update settings for "
else if req.session.user.plan is 2
"Downgrade to "
else
"Upgrade to "
free = if req.session.user.plan is 0 then "You have " else "Downgrade to "
res.send format_page """
<h1>Welcome, #{req.session.user.realname}</h1>
<div class="paid-plans">
<div class="plan plan-0 plan-free">
<div>
<h3>Bag Free</h3>
<p>Food is just a hobby.</p>
<ul>
<li>No Private Items
</li><li>No Additional features
</li><li>Free</li>
</ul>
</div>
<a
class="btn btn-block #{req.session.user.plan is 0 and "current"}"
#{req.session.user.plan isnt 0 and "href='/checkout/free'" or ''}
>
#{free}Bag Free
</a>
</div>
<div class="plan plan-5 plan-pro">
<div>
<h3>Bag Pro</h3>
<p>Grocery shopping has become serious.</p>
<ul>
<li>10 Private Items
</li><li>No Additional features
</li><li>$5.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 1 and "current"}" href="/checkout/pro">
#{pro}Bag Pro
</a>
</div>
<div class="plan plan-10 plan-exec">
<div>
<h3>Bag Exectutive</h3>
<p>You never joke about groceries.</p>
<ul>
<li>Unlimited Private Items
</li><li>Custom prices for items
</li><li>$10.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 2 and "current"}" href="/checkout/exec">
#{exec}Bag Executive
</a>
</div>
</div>
"""
# the login page
exports.login_get = (req, res) ->
res.send format_page """
<form method="POST">
<h1>Login to Bag</h1>
<div class="form-group">
<input type="text" name="username" placeholder="Username" class="form-control" />
</div>
<div class="form-group">
<input type="<PASSWORD>" name="password" placeholder="<PASSWORD>" class="form-control" />
</div>
<input type="submit" class="btn btn-primary" value="Login" />
</form>
"""
# The login handler. This route grabs the login info and checks it.
exports.login_post = (req, res) ->
if req.body.username and req.body.password
auth_ctrl.handshake
body: req.body
type: 'ws'
, send: (payload) ->
if payload.err
res.send "Error: #{payload.err}"
else
req.session.user = payload
res.redirect req.query?.redirect or "/manage"
else
res.send "Error: No username or password provided."
# logout handler
exports.logout = (req, res) ->
req.session.user = null
res.redirect "//getbag.io"
# set up a new plan, and upgrade your account.
exports.checkout = (req, res) ->
switch req.params.plan
when "pro", "p", "professional"
price = 500
desc = "Bag Professional ($5.00)"
type = "pro"
when "exec", "e", "executive"
price = 1000
desc = "Bag Executive ($10.00)"
type = "exec"
# downgrade back to a free account.
else
if req.session.user.stripe_id
# delete the stripe customer
stripe.customers.del req.session.user.stripe_id, (err, confirm) ->
if err
res.send "Error deleting stripe user: #{err}"
else
# remove all payment info and subscription stuff.
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't get user: #{err}"
else
user.stripe_id = null
user.plan = 0
user.plan_expire = null
# remove all private foodstuffs
Foodstuffs.remove
user: req.session.user._id
private: true
, (err, foodstuffs) ->
if err
res.send "Couldn't delete private foodstuffs: #{err}"
else
# remove all custom prices
Bag.findOne user: req.session.user._id, (err, bag) ->
if err
res.send "Couldn't retreive user bag: #{err}"
else
for b in bag.contents
b.stores.custom = undefined if "custom" of b.stores
b.store = "" if b.store is "custom"
bag.save (err) ->
console.log bag, err
if err
res.send "Couldn't save bag: #{err}"
else
# save user with new plan
user.save (err) ->
if err
res.send "Couldn't save to database: #{err}"
else
res.send format_page """
Your plan has been cancelled.
You have been downgraded to our free plan.
For all changes to take effect, please restart bag on your phone.
"""
else
res.send "It seems you aren't signed up for any plan right now."
return
# checkout with the payment info specified.
res.send format_page """
<h1>Bag Payment</h1>
<p>
To continue, please click on the "Pay with Card" button below. Once you've clicked 'OK', we'll
try to charge your card for the payment specified. Once we've verified that
the charge was successful, we'll activate your account. If you refresh the page before the charge is accepted,
we will still activate your account, though you won't receive a confirmation.
</p>
<!-- stripe checkout -->
<form action="/checkout_complete" method="POST">
<input type="hidden" name="type" value="#{type}" />
<script
src="https://checkout.stripe.com/checkout.js" class="stripe-button"
data-key="<KEY>"
data-amount="#{price}"
data-name="Bag"
data-description="#{desc}"
data-image="//getbag.io/img/bag.svg"
data-locale="auto">
</script>
</form>
"""
# this callback fires when the user finishes checking out with stripe
exports.checkout_complete = (req, res) ->
if req.body.stripeToken and req.body.stripeEmail
# sign user up for the subscription
if req.body.type in ["pro", "exec"]
# delete any old customers with a specified token
# we don't care about any errors, because we are injecting a fake token
# after all. This really should be made better, but for now it is probably
# fine.
stripe.customers.del req.session.user.stripe_id or "something_else", (err, confirm) ->
stripe.customers.create
source: req.body.stripeToken
plan: "bag_#{req.body.type}"
email: req.body.stripeEmail
, (err, customer) ->
if err
res.send "Error creating customer: #{err}"
else
# save customer data to database
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't access database: #{err}"
else
user.stripe_id = customer.id
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
else
res.test_get_customer_id customer.id if res.test_get_customer_id
# wait for the charge to go through...
pending_charges[customer.id] =
req: req
res: res
else
res.send "Invalid type to get - needs to be 'pro' or 'exec'."
else
res.send "No stripe info was sent in the transaction."
# after a card has been used, stripe will respond with a webhook.
exports.stripe_webhook = (req, res) ->
customer = req.body.data?.object?.customer
return res.send "Bad customer." if not customer
switch req.body.type
# a successful card charge. We'll use this to increase the end date of a
# user's subscription.
when "invoice.payment_succeeded"
if customer of pending_charges
# update the users length of payment by a month.
User.findOne stripe_id: customer, (err, user) ->
if err
res.send "Couldn't access user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOACCUSER)
Contact <EMAIL> with this token: #{customer}
"""
else
# set up the plan
user.plan = 0
user.plan = 1 if req.body.data?.object?.subtotal is 500
user.plan = 2 if req.body.data?.object?.subtotal is 1000
# add one more month, in milliseconds
user.plan_expire or= new Date().getTime() # by default, this is the current time.
user.plan_expire += do ->
date = new Date
month_days = new Date(date.getFullYear(), date.getMonth() + 1, 0).getDate()
month_days * 60 * 60 * 24 * 1000
# save the new user token stuff
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOSAVEUSER)
Contact <EMAIL> with this token: #{customer}
"""
else
res.send "Cool, thanks stripe!"
# respond to the pending request
if customer of pending_charges
pending_charges[customer].res.send format_page """
<h2>You have successfully signed up for Bag!</h2>
<p>
You should receive a receipt by email soon. In the meantime, enjoy!
For all features to work properly, you should restart the bag app on your phone.
</p>
"""
else
# uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
when "charge.failed"
if consumer of pending_charges
pending_charges[consumer].res.send """
Your card didn't charge.
If you think this is in error, contact us at <EMAIL> and provide this token: #{consumer}
"""
res.send "Cool, thanks stripe!"
else
1 # uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
else
res.send "Thanks anyway, but we didn't need this event."
| true | User = require "../models/user_model"
Foodstuffs = require "../models/foodstuff_model"
Bag = require "../models/bag_model"
auth_ctrl = require "../controllers/auth_controller"
stripe = require("stripe") process.env.STRIPE_TOKEN or "PI:KEY:<KEY>END_PI"
pending_charges = {}
format_page = (data) ->
"""
<html>
<head>
<title>Manage Bag Account</title>
<link rel="stylesheet" href="//getbag.io/css/index.css">
</head>
<body>
<nav class="navbar navbar-default">
<div>
<div class="container">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header"><a name="top">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</a><a class="navbar-brand" href="//getbag.io">
<img src="//getbag.io/img/bag.svg">
</a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
<li>
#{if data.indexOf("Login") is -1 then '<a href="/logout">Logout</a>' else ''}
</li>
</ul>
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</div>
</nav>
<div class="container">
#{data}
</div>
</body>
</html>
"""
# basic auth handler
# if a user is in the session, then allow them through, otherwise, redirect to
# the login page.
exports.protect = (req, res, next) ->
if req.session.user
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.redirect "/login?redirect=#{req.url}"
else
req.session.user = user
next()
else
res.redirect "/login?redirect=#{req.url}"
# the basic admin management page. THis is the "hub" that everything else comes
# off of.
exports.manage = (req, res) ->
# make each item read the correct thing.
exec = if req.session.user.plan is 2 then "Update settings for " else "Upgrade to "
pro = if req.session.user.plan is 1
"Update settings for "
else if req.session.user.plan is 2
"Downgrade to "
else
"Upgrade to "
free = if req.session.user.plan is 0 then "You have " else "Downgrade to "
res.send format_page """
<h1>Welcome, #{req.session.user.realname}</h1>
<div class="paid-plans">
<div class="plan plan-0 plan-free">
<div>
<h3>Bag Free</h3>
<p>Food is just a hobby.</p>
<ul>
<li>No Private Items
</li><li>No Additional features
</li><li>Free</li>
</ul>
</div>
<a
class="btn btn-block #{req.session.user.plan is 0 and "current"}"
#{req.session.user.plan isnt 0 and "href='/checkout/free'" or ''}
>
#{free}Bag Free
</a>
</div>
<div class="plan plan-5 plan-pro">
<div>
<h3>Bag Pro</h3>
<p>Grocery shopping has become serious.</p>
<ul>
<li>10 Private Items
</li><li>No Additional features
</li><li>$5.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 1 and "current"}" href="/checkout/pro">
#{pro}Bag Pro
</a>
</div>
<div class="plan plan-10 plan-exec">
<div>
<h3>Bag Exectutive</h3>
<p>You never joke about groceries.</p>
<ul>
<li>Unlimited Private Items
</li><li>Custom prices for items
</li><li>$10.00/month</li>
</ul>
</div>
<a class="btn btn-block #{req.session.user.plan is 2 and "current"}" href="/checkout/exec">
#{exec}Bag Executive
</a>
</div>
</div>
"""
# the login page
exports.login_get = (req, res) ->
res.send format_page """
<form method="POST">
<h1>Login to Bag</h1>
<div class="form-group">
<input type="text" name="username" placeholder="Username" class="form-control" />
</div>
<div class="form-group">
<input type="PI:PASSWORD:<PASSWORD>END_PI" name="password" placeholder="PI:PASSWORD:<PASSWORD>END_PI" class="form-control" />
</div>
<input type="submit" class="btn btn-primary" value="Login" />
</form>
"""
# The login handler. This route grabs the login info and checks it.
exports.login_post = (req, res) ->
if req.body.username and req.body.password
auth_ctrl.handshake
body: req.body
type: 'ws'
, send: (payload) ->
if payload.err
res.send "Error: #{payload.err}"
else
req.session.user = payload
res.redirect req.query?.redirect or "/manage"
else
res.send "Error: No username or password provided."
# logout handler
exports.logout = (req, res) ->
req.session.user = null
res.redirect "//getbag.io"
# set up a new plan, and upgrade your account.
exports.checkout = (req, res) ->
switch req.params.plan
when "pro", "p", "professional"
price = 500
desc = "Bag Professional ($5.00)"
type = "pro"
when "exec", "e", "executive"
price = 1000
desc = "Bag Executive ($10.00)"
type = "exec"
# downgrade back to a free account.
else
if req.session.user.stripe_id
# delete the stripe customer
stripe.customers.del req.session.user.stripe_id, (err, confirm) ->
if err
res.send "Error deleting stripe user: #{err}"
else
# remove all payment info and subscription stuff.
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't get user: #{err}"
else
user.stripe_id = null
user.plan = 0
user.plan_expire = null
# remove all private foodstuffs
Foodstuffs.remove
user: req.session.user._id
private: true
, (err, foodstuffs) ->
if err
res.send "Couldn't delete private foodstuffs: #{err}"
else
# remove all custom prices
Bag.findOne user: req.session.user._id, (err, bag) ->
if err
res.send "Couldn't retreive user bag: #{err}"
else
for b in bag.contents
b.stores.custom = undefined if "custom" of b.stores
b.store = "" if b.store is "custom"
bag.save (err) ->
console.log bag, err
if err
res.send "Couldn't save bag: #{err}"
else
# save user with new plan
user.save (err) ->
if err
res.send "Couldn't save to database: #{err}"
else
res.send format_page """
Your plan has been cancelled.
You have been downgraded to our free plan.
For all changes to take effect, please restart bag on your phone.
"""
else
res.send "It seems you aren't signed up for any plan right now."
return
# checkout with the payment info specified.
res.send format_page """
<h1>Bag Payment</h1>
<p>
To continue, please click on the "Pay with Card" button below. Once you've clicked 'OK', we'll
try to charge your card for the payment specified. Once we've verified that
the charge was successful, we'll activate your account. If you refresh the page before the charge is accepted,
we will still activate your account, though you won't receive a confirmation.
</p>
<!-- stripe checkout -->
<form action="/checkout_complete" method="POST">
<input type="hidden" name="type" value="#{type}" />
<script
src="https://checkout.stripe.com/checkout.js" class="stripe-button"
data-key="PI:KEY:<KEY>END_PI"
data-amount="#{price}"
data-name="Bag"
data-description="#{desc}"
data-image="//getbag.io/img/bag.svg"
data-locale="auto">
</script>
</form>
"""
# this callback fires when the user finishes checking out with stripe
exports.checkout_complete = (req, res) ->
if req.body.stripeToken and req.body.stripeEmail
# sign user up for the subscription
if req.body.type in ["pro", "exec"]
# delete any old customers with a specified token
# we don't care about any errors, because we are injecting a fake token
# after all. This really should be made better, but for now it is probably
# fine.
stripe.customers.del req.session.user.stripe_id or "something_else", (err, confirm) ->
stripe.customers.create
source: req.body.stripeToken
plan: "bag_#{req.body.type}"
email: req.body.stripeEmail
, (err, customer) ->
if err
res.send "Error creating customer: #{err}"
else
# save customer data to database
User.findOne _id: req.session.user._id, (err, user) ->
if err
res.send "Couldn't access database: #{err}"
else
user.stripe_id = customer.id
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
else
res.test_get_customer_id customer.id if res.test_get_customer_id
# wait for the charge to go through...
pending_charges[customer.id] =
req: req
res: res
else
res.send "Invalid type to get - needs to be 'pro' or 'exec'."
else
res.send "No stripe info was sent in the transaction."
# after a card has been used, stripe will respond with a webhook.
exports.stripe_webhook = (req, res) ->
customer = req.body.data?.object?.customer
return res.send "Bad customer." if not customer
switch req.body.type
# a successful card charge. We'll use this to increase the end date of a
# user's subscription.
when "invoice.payment_succeeded"
if customer of pending_charges
# update the users length of payment by a month.
User.findOne stripe_id: customer, (err, user) ->
if err
res.send "Couldn't access user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOACCUSER)
Contact PI:EMAIL:<EMAIL>END_PI with this token: #{customer}
"""
else
# set up the plan
user.plan = 0
user.plan = 1 if req.body.data?.object?.subtotal is 500
user.plan = 2 if req.body.data?.object?.subtotal is 1000
# add one more month, in milliseconds
user.plan_expire or= new Date().getTime() # by default, this is the current time.
user.plan_expire += do ->
date = new Date
month_days = new Date(date.getFullYear(), date.getMonth() + 1, 0).getDate()
month_days * 60 * 60 * 24 * 1000
# save the new user token stuff
user.save (err) ->
if err
res.send "Couldn't save user: #{err}"
# uhoh - the user should contact us for help.
pending_charges[customer].res.send format_page """
Hey! Something went wrong with your payment! (NOSAVEUSER)
Contact PI:EMAIL:<EMAIL>END_PI with this token: #{customer}
"""
else
res.send "Cool, thanks stripe!"
# respond to the pending request
if customer of pending_charges
pending_charges[customer].res.send format_page """
<h2>You have successfully signed up for Bag!</h2>
<p>
You should receive a receipt by email soon. In the meantime, enjoy!
For all features to work properly, you should restart the bag app on your phone.
</p>
"""
else
# uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
when "charge.failed"
if consumer of pending_charges
pending_charges[consumer].res.send """
Your card didn't charge.
If you think this is in error, contact us at PI:EMAIL:<EMAIL>END_PI and provide this token: #{consumer}
"""
res.send "Cool, thanks stripe!"
else
1 # uhh, what??? That card was never used????
res.send "Uh, that card was never used. What are you talking about stripe???"
else
res.send "Thanks anyway, but we didn't need this event."
|
[
{
"context": "er: 's3')\n (-> project.configure(access_key: 1234, secret_key: 1234)).should.not.throw()\n proj",
"end": 2297,
"score": 0.990706741809845,
"start": 2293,
"tag": "KEY",
"value": "1234"
},
{
"context": "> project.configure(access_key: 1234, secret_key: 1234)).should.not.throw()\n project.config.should",
"end": 2314,
"score": 0.9285407662391663,
"start": 2311,
"tag": "KEY",
"value": "123"
},
{
"context": "ject.config.should.deep.equal\n access_key: 1234\n secret_key: 1234\n ignore: ['**/shi",
"end": 2400,
"score": 0.9951822757720947,
"start": 2396,
"tag": "KEY",
"value": "1234"
},
{
"context": "qual\n access_key: 1234\n secret_key: 1234\n ignore: ['**/ship*.conf']\n\n it 'should",
"end": 2425,
"score": 0.9816582202911377,
"start": 2421,
"tag": "KEY",
"value": "1234"
},
{
"context": " (-> project.configure(wow: 1234, secret_key: 1234))\n .should.throw('you must specify these",
"end": 2644,
"score": 0.5836938619613647,
"start": 2643,
"tag": "KEY",
"value": "2"
},
{
"context": "ployer: 's3')\n project.configure(access_key: 1234, secret_key: 1234)\n project.config.ignore.sh",
"end": 2907,
"score": 0.984456479549408,
"start": 2903,
"tag": "KEY",
"value": "1234"
},
{
"context": " project.configure(access_key: 1234, secret_key: 1234)\n project.config.ignore.should.include '**/s",
"end": 2925,
"score": 0.9264705181121826,
"start": 2921,
"tag": "KEY",
"value": "1234"
},
{
"context": "project.configure(access_key: 'foo', secret_key: 'bar')\n shipfile = path.join(process.cwd(), 'ship",
"end": 3845,
"score": 0.9874736070632935,
"start": 3842,
"tag": "KEY",
"value": "bar"
},
{
"context": "('foo')\n res.s3.secret_key.should.equal('bar')\n .then(-> fs.unlinkSync(shipfile))\n ",
"end": 4135,
"score": 0.6437748074531555,
"start": 4132,
"tag": "KEY",
"value": "bar"
},
{
"context": "project.configure(access_key: 'foo', secret_key: 'bar')\n shipfile = path.join(__dirname, '../ship.",
"end": 4401,
"score": 0.9758339524269104,
"start": 4398,
"tag": "KEY",
"value": "bar"
},
{
"context": "('foo')\n res.s3.secret_key.should.equal('bar')\n .then(-> fs.unlinkSync(shipfile))\n ",
"end": 4698,
"score": 0.8902710676193237,
"start": 4695,
"tag": "KEY",
"value": "bar"
}
] | test/index.coffee | carrot/ship | 151 | path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
nodefn = require 'when/node'
describe 'api', ->
describe 'constructor', ->
it 'should construct a new ship instance', ->
(-> new Ship(root: __dirname, deployer: 's3')).should.not.throw()
it 'should error if passed an invalid deployer', ->
(-> new Ship(root: __dirname, deployer: 'wow'))
.should.throw('wow is not a valid deployer')
it 'should error if passed a nonexistant path to deploy', ->
(-> new Ship(root: 'wow', deployer: 's3')).should.throw()
it 'should correctly format the shipfile with an environment passed', ->
project = new Ship(root: __dirname, deployer: 'nowhere', env: 'staging')
path.basename(project.shipfile).should.equal('ship.staging.conf')
it 'should find the shipfile in a custom conf directory', ->
p = path.join(_path, 'api', 'custom_conf_path')
conf_path = path.join(p, 'conf')
project = new Ship(root: p, deployer: 'nowhere', conf: conf_path)
project.shipfile.should.equal(path.join(conf_path, 'ship.conf'))
it 'should look for a shipfile in cwd if not present in root', ->
cwd = process.cwd()
test_cwd = path.join(_path, 'api', 'cwd')
dir = path.join(test_cwd, 'no_ship_conf')
process.chdir(test_cwd)
project = new Ship(root: dir, deployer: 'nowhere')
project.shipfile.should.equal(path.join(test_cwd, 'ship.conf'))
process.chdir(cwd)
describe 'is_configured', ->
it 'should not be configured if no @config or shipfile', ->
project = new Ship(root: __dirname, deployer: 's3')
project.is_configured().should.be.false
it 'should be configured if @config is defined', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config = {}
project.is_configured().should.be.true
it 'should be configured if a shipfile is present at root', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 's3')
project.is_configured().should.be.true
describe 'configure', ->
it 'should correctly configure a deployer with passed in data', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(access_key: 1234, secret_key: 1234)).should.not.throw()
project.config.should.deep.equal
access_key: 1234
secret_key: 1234
ignore: ['**/ship*.conf']
it 'should error if passed in data does not match requirements', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(wow: 1234, secret_key: 1234))
.should.throw('you must specify these keys: access_key secret_key')
should.not.exist(project.config)
it 'should always ignore shipfiles', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 1234, secret_key: 1234)
project.config.ignore.should.include '**/ship*.conf'
it 'should not error if the deployer has no config requirements'
describe 'config_prompt', ->
it 'should prompt the user to enter config info via command line', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config_prompt()
.progress (prompt) ->
prompt.rl.emit("line", "1")
prompt.rl.emit("line", "2")
.tap (res) -> res.should.deep.equal(access_key: '1', secret_key: '2')
.tap -> project.config.should.deep.equal(access_key: '1', secret_key: '2')
.should.be.fulfilled
it 'should not activate the prompt if deployer has no config requirements'
describe 'write_config', ->
it 'should write a shipfile with the config info to the cwd', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: 'bar')
shipfile = path.join(process.cwd(), 'ship.conf')
project.write_config()
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('bar')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should write to an alternate path if an override is provided', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: 'bar')
shipfile = path.join(__dirname, '../ship.conf')
project.write_config(shipfile)
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('bar')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should error if instance has not been configured', ->
project = new Ship(root: __dirname, deployer: 's3')
project.write_config()
.should.be.rejectedWith('deployer has not yet been configured')
describe 'deploy', ->
it 'should load in root/shipfile.conf as config if present', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.tap ->
project.config.should.deep.equal
nothing: 'wow'
ignore: ['**/ship*.conf']
.should.be.fulfilled
it 'should just deploy if already configured', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.configure(nothing: 'foo')
project.deploy().should.be.fulfilled
it 'should error if not configured and no shipfile present', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must configure the deployer')
it "should error if shipfile keys don't match the deployer's", ->
dir = path.join(_path, 'api/incorrect_config')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must specify these keys: nothing')
it 'should use the correct shipfile given an environment', ->
dir = path.join(_path, 'api/staging_env')
project = new Ship(root: dir, deployer: 'nowhere', env: 'staging')
project.deploy().should.be.fulfilled
it 'should not error if not configured and deployer has no config requirements'
| 215243 | path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
nodefn = require 'when/node'
describe 'api', ->
describe 'constructor', ->
it 'should construct a new ship instance', ->
(-> new Ship(root: __dirname, deployer: 's3')).should.not.throw()
it 'should error if passed an invalid deployer', ->
(-> new Ship(root: __dirname, deployer: 'wow'))
.should.throw('wow is not a valid deployer')
it 'should error if passed a nonexistant path to deploy', ->
(-> new Ship(root: 'wow', deployer: 's3')).should.throw()
it 'should correctly format the shipfile with an environment passed', ->
project = new Ship(root: __dirname, deployer: 'nowhere', env: 'staging')
path.basename(project.shipfile).should.equal('ship.staging.conf')
it 'should find the shipfile in a custom conf directory', ->
p = path.join(_path, 'api', 'custom_conf_path')
conf_path = path.join(p, 'conf')
project = new Ship(root: p, deployer: 'nowhere', conf: conf_path)
project.shipfile.should.equal(path.join(conf_path, 'ship.conf'))
it 'should look for a shipfile in cwd if not present in root', ->
cwd = process.cwd()
test_cwd = path.join(_path, 'api', 'cwd')
dir = path.join(test_cwd, 'no_ship_conf')
process.chdir(test_cwd)
project = new Ship(root: dir, deployer: 'nowhere')
project.shipfile.should.equal(path.join(test_cwd, 'ship.conf'))
process.chdir(cwd)
describe 'is_configured', ->
it 'should not be configured if no @config or shipfile', ->
project = new Ship(root: __dirname, deployer: 's3')
project.is_configured().should.be.false
it 'should be configured if @config is defined', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config = {}
project.is_configured().should.be.true
it 'should be configured if a shipfile is present at root', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 's3')
project.is_configured().should.be.true
describe 'configure', ->
it 'should correctly configure a deployer with passed in data', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(access_key: <KEY>, secret_key: <KEY>4)).should.not.throw()
project.config.should.deep.equal
access_key: <KEY>
secret_key: <KEY>
ignore: ['**/ship*.conf']
it 'should error if passed in data does not match requirements', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(wow: 1234, secret_key: 1<KEY>34))
.should.throw('you must specify these keys: access_key secret_key')
should.not.exist(project.config)
it 'should always ignore shipfiles', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: <KEY>, secret_key: <KEY>)
project.config.ignore.should.include '**/ship*.conf'
it 'should not error if the deployer has no config requirements'
describe 'config_prompt', ->
it 'should prompt the user to enter config info via command line', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config_prompt()
.progress (prompt) ->
prompt.rl.emit("line", "1")
prompt.rl.emit("line", "2")
.tap (res) -> res.should.deep.equal(access_key: '1', secret_key: '2')
.tap -> project.config.should.deep.equal(access_key: '1', secret_key: '2')
.should.be.fulfilled
it 'should not activate the prompt if deployer has no config requirements'
describe 'write_config', ->
it 'should write a shipfile with the config info to the cwd', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: '<KEY>')
shipfile = path.join(process.cwd(), 'ship.conf')
project.write_config()
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('<KEY>')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should write to an alternate path if an override is provided', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: '<KEY>')
shipfile = path.join(__dirname, '../ship.conf')
project.write_config(shipfile)
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('<KEY>')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should error if instance has not been configured', ->
project = new Ship(root: __dirname, deployer: 's3')
project.write_config()
.should.be.rejectedWith('deployer has not yet been configured')
describe 'deploy', ->
it 'should load in root/shipfile.conf as config if present', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.tap ->
project.config.should.deep.equal
nothing: 'wow'
ignore: ['**/ship*.conf']
.should.be.fulfilled
it 'should just deploy if already configured', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.configure(nothing: 'foo')
project.deploy().should.be.fulfilled
it 'should error if not configured and no shipfile present', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must configure the deployer')
it "should error if shipfile keys don't match the deployer's", ->
dir = path.join(_path, 'api/incorrect_config')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must specify these keys: nothing')
it 'should use the correct shipfile given an environment', ->
dir = path.join(_path, 'api/staging_env')
project = new Ship(root: dir, deployer: 'nowhere', env: 'staging')
project.deploy().should.be.fulfilled
it 'should not error if not configured and deployer has no config requirements'
| true | path = require 'path'
fs = require 'fs'
yaml = require 'js-yaml'
nodefn = require 'when/node'
describe 'api', ->
describe 'constructor', ->
it 'should construct a new ship instance', ->
(-> new Ship(root: __dirname, deployer: 's3')).should.not.throw()
it 'should error if passed an invalid deployer', ->
(-> new Ship(root: __dirname, deployer: 'wow'))
.should.throw('wow is not a valid deployer')
it 'should error if passed a nonexistant path to deploy', ->
(-> new Ship(root: 'wow', deployer: 's3')).should.throw()
it 'should correctly format the shipfile with an environment passed', ->
project = new Ship(root: __dirname, deployer: 'nowhere', env: 'staging')
path.basename(project.shipfile).should.equal('ship.staging.conf')
it 'should find the shipfile in a custom conf directory', ->
p = path.join(_path, 'api', 'custom_conf_path')
conf_path = path.join(p, 'conf')
project = new Ship(root: p, deployer: 'nowhere', conf: conf_path)
project.shipfile.should.equal(path.join(conf_path, 'ship.conf'))
it 'should look for a shipfile in cwd if not present in root', ->
cwd = process.cwd()
test_cwd = path.join(_path, 'api', 'cwd')
dir = path.join(test_cwd, 'no_ship_conf')
process.chdir(test_cwd)
project = new Ship(root: dir, deployer: 'nowhere')
project.shipfile.should.equal(path.join(test_cwd, 'ship.conf'))
process.chdir(cwd)
describe 'is_configured', ->
it 'should not be configured if no @config or shipfile', ->
project = new Ship(root: __dirname, deployer: 's3')
project.is_configured().should.be.false
it 'should be configured if @config is defined', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config = {}
project.is_configured().should.be.true
it 'should be configured if a shipfile is present at root', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 's3')
project.is_configured().should.be.true
describe 'configure', ->
it 'should correctly configure a deployer with passed in data', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(access_key: PI:KEY:<KEY>END_PI, secret_key: PI:KEY:<KEY>END_PI4)).should.not.throw()
project.config.should.deep.equal
access_key: PI:KEY:<KEY>END_PI
secret_key: PI:KEY:<KEY>END_PI
ignore: ['**/ship*.conf']
it 'should error if passed in data does not match requirements', ->
project = new Ship(root: __dirname, deployer: 's3')
(-> project.configure(wow: 1234, secret_key: 1PI:KEY:<KEY>END_PI34))
.should.throw('you must specify these keys: access_key secret_key')
should.not.exist(project.config)
it 'should always ignore shipfiles', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: PI:KEY:<KEY>END_PI, secret_key: PI:KEY:<KEY>END_PI)
project.config.ignore.should.include '**/ship*.conf'
it 'should not error if the deployer has no config requirements'
describe 'config_prompt', ->
it 'should prompt the user to enter config info via command line', ->
project = new Ship(root: __dirname, deployer: 's3')
project.config_prompt()
.progress (prompt) ->
prompt.rl.emit("line", "1")
prompt.rl.emit("line", "2")
.tap (res) -> res.should.deep.equal(access_key: '1', secret_key: '2')
.tap -> project.config.should.deep.equal(access_key: '1', secret_key: '2')
.should.be.fulfilled
it 'should not activate the prompt if deployer has no config requirements'
describe 'write_config', ->
it 'should write a shipfile with the config info to the cwd', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: 'PI:KEY:<KEY>END_PI')
shipfile = path.join(process.cwd(), 'ship.conf')
project.write_config()
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('PI:KEY:<KEY>END_PI')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should write to an alternate path if an override is provided', ->
project = new Ship(root: __dirname, deployer: 's3')
project.configure(access_key: 'foo', secret_key: 'PI:KEY:<KEY>END_PI')
shipfile = path.join(__dirname, '../ship.conf')
project.write_config(shipfile)
.then(nodefn.lift(fs.readFile, shipfile, 'utf8'))
.then(yaml.safeLoad)
.tap (res) ->
res.s3.access_key.should.equal('foo')
res.s3.secret_key.should.equal('PI:KEY:<KEY>END_PI')
.then(-> fs.unlinkSync(shipfile))
.should.be.fulfilled
it 'should error if instance has not been configured', ->
project = new Ship(root: __dirname, deployer: 's3')
project.write_config()
.should.be.rejectedWith('deployer has not yet been configured')
describe 'deploy', ->
it 'should load in root/shipfile.conf as config if present', ->
dir = path.join(_path, 'api/one_deployer')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.tap ->
project.config.should.deep.equal
nothing: 'wow'
ignore: ['**/ship*.conf']
.should.be.fulfilled
it 'should just deploy if already configured', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.configure(nothing: 'foo')
project.deploy().should.be.fulfilled
it 'should error if not configured and no shipfile present', ->
project = new Ship(root: __dirname, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must configure the deployer')
it "should error if shipfile keys don't match the deployer's", ->
dir = path.join(_path, 'api/incorrect_config')
project = new Ship(root: dir, deployer: 'nowhere')
project.deploy()
.should.be.rejectedWith('you must specify these keys: nothing')
it 'should use the correct shipfile given an environment', ->
dir = path.join(_path, 'api/staging_env')
project = new Ship(root: dir, deployer: 'nowhere', env: 'staging')
project.deploy().should.be.fulfilled
it 'should not error if not configured and deployer has no config requirements'
|
[
{
"context": "fileoverview Tests for no-await-in-loop.\n# @author Nat Mote (nmote)\n###\n\n'use strict'\n\nrule = require '../../",
"end": 67,
"score": 0.9998597502708435,
"start": 59,
"tag": "NAME",
"value": "Nat Mote"
},
{
"context": "w Tests for no-await-in-loop.\n# @author Nat Mote (nmote)\n###\n\n'use strict'\n\nrule = require '../../rules/n",
"end": 74,
"score": 0.9996405839920044,
"start": 69,
"tag": "USERNAME",
"value": "nmote"
}
] | src/tests/rules/no-await-in-loop.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-await-in-loop.
# @author Nat Mote (nmote)
###
'use strict'
rule = require '../../rules/no-await-in-loop'
{RuleTester} = require 'eslint'
path = require 'path'
error = messageId: 'unexpectedAwait', type: 'AwaitExpression'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-await-in-loop', rule,
valid: [
'''
->
await bar
'''
'''
->
for bar of await baz
;
'''
'''
->
for bar from await baz
;
'''
'''
->
for await bar from await baz
;
'''
# While loops
'''
->
loop
foo = ->
await bar
'''
# Blocked by a function expression
'''
->
while true
y = -> await bar
'''
# Blocked by a class method
'''
->
while yes
class Foo
foo: -> await bar
'''
# Asynchronous iteration intentionally
'''
->
for await x from xs
await f x
'''
]
invalid: [
# While loops
code: '''
->
while baz
await bar
'''
errors: [error]
,
code: '''
->
while await foo()
;
'''
errors: [error]
,
code: '''
->
while baz
for await x from xs
;
'''
errors: [{...error, type: 'For'}]
,
# For of loops
code: '''
->
for bar from baz
await bar
'''
errors: [error]
,
code: '''
->
for bar of baz
await bar
'''
errors: [error]
,
# For in loops
code: '''
->
for bar in baz
await bar
'''
errors: [error]
,
# Deep in a loop body
code: '''
->
while yes
if bar
foo await bar
'''
errors: [error]
,
# Deep in a loop condition
code: '''
->
while xyz or 5 > await x
;
'''
errors: [error]
,
# In a nested loop of for-await-of
code: '''
->
for await x from xs
while 1
await f x
'''
errors: [error]
]
| 200853 | ###*
# @fileoverview Tests for no-await-in-loop.
# @author <NAME> (nmote)
###
'use strict'
rule = require '../../rules/no-await-in-loop'
{RuleTester} = require 'eslint'
path = require 'path'
error = messageId: 'unexpectedAwait', type: 'AwaitExpression'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-await-in-loop', rule,
valid: [
'''
->
await bar
'''
'''
->
for bar of await baz
;
'''
'''
->
for bar from await baz
;
'''
'''
->
for await bar from await baz
;
'''
# While loops
'''
->
loop
foo = ->
await bar
'''
# Blocked by a function expression
'''
->
while true
y = -> await bar
'''
# Blocked by a class method
'''
->
while yes
class Foo
foo: -> await bar
'''
# Asynchronous iteration intentionally
'''
->
for await x from xs
await f x
'''
]
invalid: [
# While loops
code: '''
->
while baz
await bar
'''
errors: [error]
,
code: '''
->
while await foo()
;
'''
errors: [error]
,
code: '''
->
while baz
for await x from xs
;
'''
errors: [{...error, type: 'For'}]
,
# For of loops
code: '''
->
for bar from baz
await bar
'''
errors: [error]
,
code: '''
->
for bar of baz
await bar
'''
errors: [error]
,
# For in loops
code: '''
->
for bar in baz
await bar
'''
errors: [error]
,
# Deep in a loop body
code: '''
->
while yes
if bar
foo await bar
'''
errors: [error]
,
# Deep in a loop condition
code: '''
->
while xyz or 5 > await x
;
'''
errors: [error]
,
# In a nested loop of for-await-of
code: '''
->
for await x from xs
while 1
await f x
'''
errors: [error]
]
| true | ###*
# @fileoverview Tests for no-await-in-loop.
# @author PI:NAME:<NAME>END_PI (nmote)
###
'use strict'
rule = require '../../rules/no-await-in-loop'
{RuleTester} = require 'eslint'
path = require 'path'
error = messageId: 'unexpectedAwait', type: 'AwaitExpression'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-await-in-loop', rule,
valid: [
'''
->
await bar
'''
'''
->
for bar of await baz
;
'''
'''
->
for bar from await baz
;
'''
'''
->
for await bar from await baz
;
'''
# While loops
'''
->
loop
foo = ->
await bar
'''
# Blocked by a function expression
'''
->
while true
y = -> await bar
'''
# Blocked by a class method
'''
->
while yes
class Foo
foo: -> await bar
'''
# Asynchronous iteration intentionally
'''
->
for await x from xs
await f x
'''
]
invalid: [
# While loops
code: '''
->
while baz
await bar
'''
errors: [error]
,
code: '''
->
while await foo()
;
'''
errors: [error]
,
code: '''
->
while baz
for await x from xs
;
'''
errors: [{...error, type: 'For'}]
,
# For of loops
code: '''
->
for bar from baz
await bar
'''
errors: [error]
,
code: '''
->
for bar of baz
await bar
'''
errors: [error]
,
# For in loops
code: '''
->
for bar in baz
await bar
'''
errors: [error]
,
# Deep in a loop body
code: '''
->
while yes
if bar
foo await bar
'''
errors: [error]
,
# Deep in a loop condition
code: '''
->
while xyz or 5 > await x
;
'''
errors: [error]
,
# In a nested loop of for-await-of
code: '''
->
for await x from xs
while 1
await f x
'''
errors: [error]
]
|
[
{
"context": " @pizzaSurvey.addRowAtIndex({\n name: \"firstrow\",\n label: \"first row\",\n type: \"",
"end": 1840,
"score": 0.6028540134429932,
"start": 1832,
"tag": "NAME",
"value": "firstrow"
},
{
"context": " @pizzaSurvey.addRowAtIndex({\n name: \"secondrow\",\n label: \"second row\",\n type: ",
"end": 2111,
"score": 0.7469061613082886,
"start": 2102,
"tag": "NAME",
"value": "secondrow"
}
] | source/javascripts/spec/spec_xlform.coffee | dorey/xlform-builder | 1 | describe "xlform survey model (XLF.Survey)", ->
beforeEach ->
@pizzaSurvey = XLF.createSurveyFromCsv(PIZZA_SURVEY)
it "creates xlform", ->
xlf = new XLF.Survey name: "Sample"
expect(xlf).toBeDefined()
expect(xlf instanceof XLF.Survey).toBe(true)
expect(xlf.get("name")).toBe("Sample")
it "ensures every node has access to the parent survey", ->
@pizzaSurvey.getSurvey
it "can import from csv_repr", ->
expect(@pizzaSurvey.rows.length).toBe(1)
firstRow = @pizzaSurvey.rows.at(0)
expect(firstRow.getValue("name")).toEqual("likes_pizza")
describe "with simple survey", ->
beforeEach ->
@firstRow = @pizzaSurvey.rows.at(0)
describe "lists", ->
it "iterates over every row", ->
expect(@pizzaSurvey.rows).toBeDefined()
expect(@firstRow).toBeDefined()
it "can add a list as an object", ->
expect(@pizzaSurvey.choices.length).toBe(1)
@pizzaSurvey.choices.add LISTS.gender
expect(@pizzaSurvey.choices.length).toBe(2)
x1 = @pizzaSurvey.toCsvJson()
# it should prevent duplicate lists with the same id
@pizzaSurvey.choices.add LISTS.yes_no
expect(@pizzaSurvey.choices.length).toBe(2)
x2 = @pizzaSurvey.toCsvJson()
expect(x1).toEqual(x2)
it "can add row to a specific index", ->
expect(@pizzaSurvey.addRowAtIndex).toBeDefined()
# last question
rowc = @pizzaSurvey.rows.length
expect(@pizzaSurvey.rows.length).toBe 1
@pizzaSurvey.addRowAtIndex({
name: "lastrow",
label: "last row",
type: "text"
}, rowc)
expect(@pizzaSurvey.rows.length).toBe 2
expect(@pizzaSurvey.rows.last().get("label").get("value")).toBe("last row")
@pizzaSurvey.addRowAtIndex({
name: "firstrow",
label: "first row",
type: "note"
}, 0)
expect(@pizzaSurvey.rows.length).toBe 3
expect(@pizzaSurvey.rows.first().get("label").get("value")).toBe("first row")
@pizzaSurvey.addRowAtIndex({
name: "secondrow",
label: "second row",
type: "note"
}, 1)
expect(@pizzaSurvey.rows.length).toBe 4
expect(@pizzaSurvey.rows.at(1).get("label").get("value")).toBe("second row")
labels = _.map @pizzaSurvey.rows.pluck("label"), (i)-> i.get("value")
expect(labels).toEqual([ 'first row', 'second row', 'Do you like pizza?', 'last row' ])
it "row types changing is trackable", ->
expect(@firstRow.getValue("type")).toBe("select_one yes_no")
typeDetail = @firstRow.get("type")
expect(typeDetail.get("typeId")).toBe("select_one")
expect(typeDetail.get("list").get("name")).toBe "yes_no"
list = @firstRow.getList()
expect(list).toBeDefined()
expect(list.get("name")).toBe("yes_no")
describe "with custom surveys", ->
beforeEach ->
@createSurveyCsv = (survey=[],choices=[])->
choiceSheet = if choices.length is 0 then "" else """
choices,,,
,list name,name,label
,#{choices.join("\n,")}
"""
"""
survey,,,
,type,name,label,hint
,#{survey.join("\n,")}
#{choiceSheet}
"""
@createSurvey = (survey=[],choices=[])=>
XLF.createSurveyFromCsv @createSurveyCsv survey, choices
@firstRow = (s)-> s.rows.at(0)
@compareCsvs = (x1, x2)->
x1r = x1.split("\n")
x2r = x2.split("\n")
for r in _.min(x1r.length, x2r.length)
expect(x1r[r]).toBe(x2r[r])
expect(x1).toBe(x2)
@dumpAndLoad = (scsv)=>
s1 = XLF.createSurveyFromCsv scsv
x1 = s1.toCSV()
s2 = XLF.createSurveyFromCsv x1
x2 = s2.toCSV()
@compareCsvs(x1, x2)
it "breaks with an unk qtype", ->
makeInvalidTypeSurvey = =>
@createSurvey ["telegram,a,a,a"]
expect(makeInvalidTypeSurvey).toThrow()
it "exports and imports without breaking", ->
scsv = @createSurveyCsv ["text,text,text,text"]
@dumpAndLoad scsv
# types = ["note", "text", "integer", "decimal",
# "geopoint", "image", "barcode", "date",
# "datetime", "audio", "video", "select_one",
# "select_multiple"]
it "tries a few question types", ->
srv = @createSurvey ["text,text,text,text"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("rowType").name).toBe("text")
# # a survey with 2 lists: "x" and "y"
srv = @createSurvey [""""select_multiple x",a,a,a"""],
["x,ax,ax","x,bx,bx,","y,ay,ay","y,by,by"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("x")
expect(row1.getList().get("name")).toBe("x")
# change row to to "select_multiple y".
r1type.set("value", "select_multiple y")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("y")
expect(row1.toJSON().type).toBe("select_multiple y")
expect(row1.getList().get("name")).toBe("y")
# change row to "text"
row1.get("type").set("value", "text")
expect(row1.get("type").get("value")).toBe("text")
# Right now, thinking that we should keep the list around
# and test to make sure the exported value doesn't have a list
expect(row1.get("type").get("list").get("name")).toBeDefined()
expect(row1.getList().get("name")).toBeDefined()
expect(row1.toJSON().type).toBe("text")
# # adding an invalid list will break things.
#
# I'm thinking: adding an invalid list will only break validation of
# the survey. If it's not defined, it will prompt the user to make
# the row valid.
#
# setToInvalidList = ()->
# row1.get("type").set("value", "select_one badlist")
# expect(setToInvalidList).toThrow()
``
describe "groups", ->
it "can add a group", ->
@pizzaSurvey.addRow type: "text", name: "pizza", hint: "pizza", label: "pizza"
expect(@pizzaSurvey.rows.last() instanceof XLF.Row).toBe(true)
@pizzaSurvey.addRow type: "group", name: "group"
grp = @pizzaSurvey.rows.last()
grp.addRow type: "text", name: "textquestioningroup", label: "Text question in group"
grp.addRow type: "group", name: "groupingroup"
second_group = grp.rows.last()
second_group.addRow type: "text", name: "secondgroupquestion", label: "Second group question"
describe "lists", ->
it "can change a list for a question", ->
# add a new list. "yes, no, maybe"
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
# test original state
firstRow = @pizzaSurvey.rows.first()
expect(firstRow.getList().get("name")).toBe("yes_no")
# change the list for first question to be "yes_no_maybe" instead of "yes_no"
expect(firstRow.getList().get("name")).toBe("yes_no")
firstRow.setList(ynm)
expect(firstRow.getList().get("name")).toBe("yes_no_maybe")
# change it back
firstRow.setList("yes_no")
expect(firstRow.getList().get("name")).toBe("yes_no")
# cannot change it to a nonexistant list
expect(-> firstRow.setList("nonexistant_list")).toThrow()
# changing name of list object will not unlink the list
list = firstRow.getList()
list.set("name", "no_yes")
expect(firstRow.getList()).toBeDefined()
expect(firstRow.getList()?.get("name")).toBe("no_yes")
it "can change options for a list", ->
yn = @pizzaSurvey.choices.get("yes_no")
expect(yn.options).toBeDefined()
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
expect(ynm.options.length).toBe(0)
ynm.options.add name: "maybe", label: "Maybe"
ynm.options.add [{name: "yes", label: "Yes"}, {name: "no", label: "No"}]
expect(ynm.options.length).toBe(3)
# don't allow duplicate options
ynm.options.add name: "maybe", label: "Maybe2"
expect(ynm.options.length).toBe(3)
expect(ynm.options.first().get("label")).toBe("Maybe")
describe "census xlform", ->
beforeEach ->
@census = XLF.createSurveyFromCsv(CENSUS_SURVEY)
it "looks good", ->
expect(@census).toBeDefined()
###
Misc data. (basically fixtures for the tests above)
###
LISTS =
yes_no:
name: "yes_no"
options: [
{"list name": "yes_no", name: "yes", label: "Yes"},
{"list name": "yes_no", name: "no", label: "No"}
]
gender:
name: "gender"
options: [
{"list name": "gender", name: "f", label: "Female"},
{"list name": "gender", name: "m", label: "Male"}
]
PIZZA_SURVEY = """
survey,,,
,type,name,label
,select_one yes_no,likes_pizza,Do you like pizza?
choices,,,
,list name,name,label
,yes_no,yes,Yes
,yes_no,no,No
"""
CENSUS_SURVEY = """
"survey","type","name","label"
,"integer","q1","How many people were living or staying in this house, apartment, or mobile home on April 1, 2010?"
,"select_one yes_no","q2","Were there any additional people staying here April 1, 2010 that you did not include in Question 1?"
,"select_one ownership_type or_other","q3","Is this house, apartment, or mobile home: owned with mortgage, owned without mortgage, rented, occupied without rent?"
,"text","q4","What is your telephone number?"
,"text","q5","Please provide information for each person living here. Start with a person here who owns or rents this house, apartment, or mobile home. If the owner or renter lives somewhere else, start with any adult living here. This will be Person 1. What is Person 1's name?"
,"select_one male_female","q6","What is Person 1's sex?"
,"date","q7","What is Person 1's age and Date of Birth?"
,"text","q8","Is Person 1 of Hispanic, Latino or Spanish origin?"
,"text","q9","What is Person 1's race?"
,"select_one yes_no","q10","Does Person 1 sometimes live or stay somewhere else?"
"choices","list name","name","label"
,"yes_no","yes","Yes"
,"yes_no","no","No"
,"male_female","male","Male"
,"male_female","female","Female"
,"ownership_type","owned_with_mortgage","owned with mortgage",
,"ownership_type","owned_without_mortgage","owned without mortgage"
,"ownership_type","rented","rented"
,"ownership_type","occupied_without_rent","occupied without rent"
"settings"
,"form_title","form_id"
,"Census Questions (2010)","census2010"
"""
describe "testing the view", ->
it "builds the view", ->
pizza = XLF.createSurveyFromCsv(PIZZA_SURVEY)
@xlv = new SurveyApp survey: pizza
div = $("<div>").appendTo("body")
$el = @xlv.render().$el
$el.appendTo(div)
expect(div.html()).not.toContain("empty")
expect(div.find("li.xlf-row-view").length).toBe(1)
lastRowEl = div.find("li.xlf-row-view").eq(0)
# adds row selector
clickNewRow = ()->
lastRowEl.find(".add-row-btn").click()
expect(clickNewRow).not.toThrow()
expect(lastRowEl.find(".line").eq(-1).hasClass("expanded")).toBeTruthy()
expect(pizza.rows.length).toBe(1)
lastRowEl.find(".line.expanded").find(".menu-item-geopoint").trigger("click")
expect(pizza.rows.length).toBe(2)
expect(div.find(".xlf-row-view").length).toBe(2)
expect(div.find(".xlf-row-view").eq(-1).find(".xlf-dv-label").text()).toMatch("location") | 48438 | describe "xlform survey model (XLF.Survey)", ->
beforeEach ->
@pizzaSurvey = XLF.createSurveyFromCsv(PIZZA_SURVEY)
it "creates xlform", ->
xlf = new XLF.Survey name: "Sample"
expect(xlf).toBeDefined()
expect(xlf instanceof XLF.Survey).toBe(true)
expect(xlf.get("name")).toBe("Sample")
it "ensures every node has access to the parent survey", ->
@pizzaSurvey.getSurvey
it "can import from csv_repr", ->
expect(@pizzaSurvey.rows.length).toBe(1)
firstRow = @pizzaSurvey.rows.at(0)
expect(firstRow.getValue("name")).toEqual("likes_pizza")
describe "with simple survey", ->
beforeEach ->
@firstRow = @pizzaSurvey.rows.at(0)
describe "lists", ->
it "iterates over every row", ->
expect(@pizzaSurvey.rows).toBeDefined()
expect(@firstRow).toBeDefined()
it "can add a list as an object", ->
expect(@pizzaSurvey.choices.length).toBe(1)
@pizzaSurvey.choices.add LISTS.gender
expect(@pizzaSurvey.choices.length).toBe(2)
x1 = @pizzaSurvey.toCsvJson()
# it should prevent duplicate lists with the same id
@pizzaSurvey.choices.add LISTS.yes_no
expect(@pizzaSurvey.choices.length).toBe(2)
x2 = @pizzaSurvey.toCsvJson()
expect(x1).toEqual(x2)
it "can add row to a specific index", ->
expect(@pizzaSurvey.addRowAtIndex).toBeDefined()
# last question
rowc = @pizzaSurvey.rows.length
expect(@pizzaSurvey.rows.length).toBe 1
@pizzaSurvey.addRowAtIndex({
name: "lastrow",
label: "last row",
type: "text"
}, rowc)
expect(@pizzaSurvey.rows.length).toBe 2
expect(@pizzaSurvey.rows.last().get("label").get("value")).toBe("last row")
@pizzaSurvey.addRowAtIndex({
name: "<NAME>",
label: "first row",
type: "note"
}, 0)
expect(@pizzaSurvey.rows.length).toBe 3
expect(@pizzaSurvey.rows.first().get("label").get("value")).toBe("first row")
@pizzaSurvey.addRowAtIndex({
name: "<NAME>",
label: "second row",
type: "note"
}, 1)
expect(@pizzaSurvey.rows.length).toBe 4
expect(@pizzaSurvey.rows.at(1).get("label").get("value")).toBe("second row")
labels = _.map @pizzaSurvey.rows.pluck("label"), (i)-> i.get("value")
expect(labels).toEqual([ 'first row', 'second row', 'Do you like pizza?', 'last row' ])
it "row types changing is trackable", ->
expect(@firstRow.getValue("type")).toBe("select_one yes_no")
typeDetail = @firstRow.get("type")
expect(typeDetail.get("typeId")).toBe("select_one")
expect(typeDetail.get("list").get("name")).toBe "yes_no"
list = @firstRow.getList()
expect(list).toBeDefined()
expect(list.get("name")).toBe("yes_no")
describe "with custom surveys", ->
beforeEach ->
@createSurveyCsv = (survey=[],choices=[])->
choiceSheet = if choices.length is 0 then "" else """
choices,,,
,list name,name,label
,#{choices.join("\n,")}
"""
"""
survey,,,
,type,name,label,hint
,#{survey.join("\n,")}
#{choiceSheet}
"""
@createSurvey = (survey=[],choices=[])=>
XLF.createSurveyFromCsv @createSurveyCsv survey, choices
@firstRow = (s)-> s.rows.at(0)
@compareCsvs = (x1, x2)->
x1r = x1.split("\n")
x2r = x2.split("\n")
for r in _.min(x1r.length, x2r.length)
expect(x1r[r]).toBe(x2r[r])
expect(x1).toBe(x2)
@dumpAndLoad = (scsv)=>
s1 = XLF.createSurveyFromCsv scsv
x1 = s1.toCSV()
s2 = XLF.createSurveyFromCsv x1
x2 = s2.toCSV()
@compareCsvs(x1, x2)
it "breaks with an unk qtype", ->
makeInvalidTypeSurvey = =>
@createSurvey ["telegram,a,a,a"]
expect(makeInvalidTypeSurvey).toThrow()
it "exports and imports without breaking", ->
scsv = @createSurveyCsv ["text,text,text,text"]
@dumpAndLoad scsv
# types = ["note", "text", "integer", "decimal",
# "geopoint", "image", "barcode", "date",
# "datetime", "audio", "video", "select_one",
# "select_multiple"]
it "tries a few question types", ->
srv = @createSurvey ["text,text,text,text"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("rowType").name).toBe("text")
# # a survey with 2 lists: "x" and "y"
srv = @createSurvey [""""select_multiple x",a,a,a"""],
["x,ax,ax","x,bx,bx,","y,ay,ay","y,by,by"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("x")
expect(row1.getList().get("name")).toBe("x")
# change row to to "select_multiple y".
r1type.set("value", "select_multiple y")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("y")
expect(row1.toJSON().type).toBe("select_multiple y")
expect(row1.getList().get("name")).toBe("y")
# change row to "text"
row1.get("type").set("value", "text")
expect(row1.get("type").get("value")).toBe("text")
# Right now, thinking that we should keep the list around
# and test to make sure the exported value doesn't have a list
expect(row1.get("type").get("list").get("name")).toBeDefined()
expect(row1.getList().get("name")).toBeDefined()
expect(row1.toJSON().type).toBe("text")
# # adding an invalid list will break things.
#
# I'm thinking: adding an invalid list will only break validation of
# the survey. If it's not defined, it will prompt the user to make
# the row valid.
#
# setToInvalidList = ()->
# row1.get("type").set("value", "select_one badlist")
# expect(setToInvalidList).toThrow()
``
describe "groups", ->
it "can add a group", ->
@pizzaSurvey.addRow type: "text", name: "pizza", hint: "pizza", label: "pizza"
expect(@pizzaSurvey.rows.last() instanceof XLF.Row).toBe(true)
@pizzaSurvey.addRow type: "group", name: "group"
grp = @pizzaSurvey.rows.last()
grp.addRow type: "text", name: "textquestioningroup", label: "Text question in group"
grp.addRow type: "group", name: "groupingroup"
second_group = grp.rows.last()
second_group.addRow type: "text", name: "secondgroupquestion", label: "Second group question"
describe "lists", ->
it "can change a list for a question", ->
# add a new list. "yes, no, maybe"
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
# test original state
firstRow = @pizzaSurvey.rows.first()
expect(firstRow.getList().get("name")).toBe("yes_no")
# change the list for first question to be "yes_no_maybe" instead of "yes_no"
expect(firstRow.getList().get("name")).toBe("yes_no")
firstRow.setList(ynm)
expect(firstRow.getList().get("name")).toBe("yes_no_maybe")
# change it back
firstRow.setList("yes_no")
expect(firstRow.getList().get("name")).toBe("yes_no")
# cannot change it to a nonexistant list
expect(-> firstRow.setList("nonexistant_list")).toThrow()
# changing name of list object will not unlink the list
list = firstRow.getList()
list.set("name", "no_yes")
expect(firstRow.getList()).toBeDefined()
expect(firstRow.getList()?.get("name")).toBe("no_yes")
it "can change options for a list", ->
yn = @pizzaSurvey.choices.get("yes_no")
expect(yn.options).toBeDefined()
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
expect(ynm.options.length).toBe(0)
ynm.options.add name: "maybe", label: "Maybe"
ynm.options.add [{name: "yes", label: "Yes"}, {name: "no", label: "No"}]
expect(ynm.options.length).toBe(3)
# don't allow duplicate options
ynm.options.add name: "maybe", label: "Maybe2"
expect(ynm.options.length).toBe(3)
expect(ynm.options.first().get("label")).toBe("Maybe")
describe "census xlform", ->
beforeEach ->
@census = XLF.createSurveyFromCsv(CENSUS_SURVEY)
it "looks good", ->
expect(@census).toBeDefined()
###
Misc data. (basically fixtures for the tests above)
###
LISTS =
yes_no:
name: "yes_no"
options: [
{"list name": "yes_no", name: "yes", label: "Yes"},
{"list name": "yes_no", name: "no", label: "No"}
]
gender:
name: "gender"
options: [
{"list name": "gender", name: "f", label: "Female"},
{"list name": "gender", name: "m", label: "Male"}
]
PIZZA_SURVEY = """
survey,,,
,type,name,label
,select_one yes_no,likes_pizza,Do you like pizza?
choices,,,
,list name,name,label
,yes_no,yes,Yes
,yes_no,no,No
"""
CENSUS_SURVEY = """
"survey","type","name","label"
,"integer","q1","How many people were living or staying in this house, apartment, or mobile home on April 1, 2010?"
,"select_one yes_no","q2","Were there any additional people staying here April 1, 2010 that you did not include in Question 1?"
,"select_one ownership_type or_other","q3","Is this house, apartment, or mobile home: owned with mortgage, owned without mortgage, rented, occupied without rent?"
,"text","q4","What is your telephone number?"
,"text","q5","Please provide information for each person living here. Start with a person here who owns or rents this house, apartment, or mobile home. If the owner or renter lives somewhere else, start with any adult living here. This will be Person 1. What is Person 1's name?"
,"select_one male_female","q6","What is Person 1's sex?"
,"date","q7","What is Person 1's age and Date of Birth?"
,"text","q8","Is Person 1 of Hispanic, Latino or Spanish origin?"
,"text","q9","What is Person 1's race?"
,"select_one yes_no","q10","Does Person 1 sometimes live or stay somewhere else?"
"choices","list name","name","label"
,"yes_no","yes","Yes"
,"yes_no","no","No"
,"male_female","male","Male"
,"male_female","female","Female"
,"ownership_type","owned_with_mortgage","owned with mortgage",
,"ownership_type","owned_without_mortgage","owned without mortgage"
,"ownership_type","rented","rented"
,"ownership_type","occupied_without_rent","occupied without rent"
"settings"
,"form_title","form_id"
,"Census Questions (2010)","census2010"
"""
describe "testing the view", ->
it "builds the view", ->
pizza = XLF.createSurveyFromCsv(PIZZA_SURVEY)
@xlv = new SurveyApp survey: pizza
div = $("<div>").appendTo("body")
$el = @xlv.render().$el
$el.appendTo(div)
expect(div.html()).not.toContain("empty")
expect(div.find("li.xlf-row-view").length).toBe(1)
lastRowEl = div.find("li.xlf-row-view").eq(0)
# adds row selector
clickNewRow = ()->
lastRowEl.find(".add-row-btn").click()
expect(clickNewRow).not.toThrow()
expect(lastRowEl.find(".line").eq(-1).hasClass("expanded")).toBeTruthy()
expect(pizza.rows.length).toBe(1)
lastRowEl.find(".line.expanded").find(".menu-item-geopoint").trigger("click")
expect(pizza.rows.length).toBe(2)
expect(div.find(".xlf-row-view").length).toBe(2)
expect(div.find(".xlf-row-view").eq(-1).find(".xlf-dv-label").text()).toMatch("location") | true | describe "xlform survey model (XLF.Survey)", ->
beforeEach ->
@pizzaSurvey = XLF.createSurveyFromCsv(PIZZA_SURVEY)
it "creates xlform", ->
xlf = new XLF.Survey name: "Sample"
expect(xlf).toBeDefined()
expect(xlf instanceof XLF.Survey).toBe(true)
expect(xlf.get("name")).toBe("Sample")
it "ensures every node has access to the parent survey", ->
@pizzaSurvey.getSurvey
it "can import from csv_repr", ->
expect(@pizzaSurvey.rows.length).toBe(1)
firstRow = @pizzaSurvey.rows.at(0)
expect(firstRow.getValue("name")).toEqual("likes_pizza")
describe "with simple survey", ->
beforeEach ->
@firstRow = @pizzaSurvey.rows.at(0)
describe "lists", ->
it "iterates over every row", ->
expect(@pizzaSurvey.rows).toBeDefined()
expect(@firstRow).toBeDefined()
it "can add a list as an object", ->
expect(@pizzaSurvey.choices.length).toBe(1)
@pizzaSurvey.choices.add LISTS.gender
expect(@pizzaSurvey.choices.length).toBe(2)
x1 = @pizzaSurvey.toCsvJson()
# it should prevent duplicate lists with the same id
@pizzaSurvey.choices.add LISTS.yes_no
expect(@pizzaSurvey.choices.length).toBe(2)
x2 = @pizzaSurvey.toCsvJson()
expect(x1).toEqual(x2)
it "can add row to a specific index", ->
expect(@pizzaSurvey.addRowAtIndex).toBeDefined()
# last question
rowc = @pizzaSurvey.rows.length
expect(@pizzaSurvey.rows.length).toBe 1
@pizzaSurvey.addRowAtIndex({
name: "lastrow",
label: "last row",
type: "text"
}, rowc)
expect(@pizzaSurvey.rows.length).toBe 2
expect(@pizzaSurvey.rows.last().get("label").get("value")).toBe("last row")
@pizzaSurvey.addRowAtIndex({
name: "PI:NAME:<NAME>END_PI",
label: "first row",
type: "note"
}, 0)
expect(@pizzaSurvey.rows.length).toBe 3
expect(@pizzaSurvey.rows.first().get("label").get("value")).toBe("first row")
@pizzaSurvey.addRowAtIndex({
name: "PI:NAME:<NAME>END_PI",
label: "second row",
type: "note"
}, 1)
expect(@pizzaSurvey.rows.length).toBe 4
expect(@pizzaSurvey.rows.at(1).get("label").get("value")).toBe("second row")
labels = _.map @pizzaSurvey.rows.pluck("label"), (i)-> i.get("value")
expect(labels).toEqual([ 'first row', 'second row', 'Do you like pizza?', 'last row' ])
it "row types changing is trackable", ->
expect(@firstRow.getValue("type")).toBe("select_one yes_no")
typeDetail = @firstRow.get("type")
expect(typeDetail.get("typeId")).toBe("select_one")
expect(typeDetail.get("list").get("name")).toBe "yes_no"
list = @firstRow.getList()
expect(list).toBeDefined()
expect(list.get("name")).toBe("yes_no")
describe "with custom surveys", ->
beforeEach ->
@createSurveyCsv = (survey=[],choices=[])->
choiceSheet = if choices.length is 0 then "" else """
choices,,,
,list name,name,label
,#{choices.join("\n,")}
"""
"""
survey,,,
,type,name,label,hint
,#{survey.join("\n,")}
#{choiceSheet}
"""
@createSurvey = (survey=[],choices=[])=>
XLF.createSurveyFromCsv @createSurveyCsv survey, choices
@firstRow = (s)-> s.rows.at(0)
@compareCsvs = (x1, x2)->
x1r = x1.split("\n")
x2r = x2.split("\n")
for r in _.min(x1r.length, x2r.length)
expect(x1r[r]).toBe(x2r[r])
expect(x1).toBe(x2)
@dumpAndLoad = (scsv)=>
s1 = XLF.createSurveyFromCsv scsv
x1 = s1.toCSV()
s2 = XLF.createSurveyFromCsv x1
x2 = s2.toCSV()
@compareCsvs(x1, x2)
it "breaks with an unk qtype", ->
makeInvalidTypeSurvey = =>
@createSurvey ["telegram,a,a,a"]
expect(makeInvalidTypeSurvey).toThrow()
it "exports and imports without breaking", ->
scsv = @createSurveyCsv ["text,text,text,text"]
@dumpAndLoad scsv
# types = ["note", "text", "integer", "decimal",
# "geopoint", "image", "barcode", "date",
# "datetime", "audio", "video", "select_one",
# "select_multiple"]
it "tries a few question types", ->
srv = @createSurvey ["text,text,text,text"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("rowType").name).toBe("text")
# # a survey with 2 lists: "x" and "y"
srv = @createSurvey [""""select_multiple x",a,a,a"""],
["x,ax,ax","x,bx,bx,","y,ay,ay","y,by,by"]
row1 = srv.rows.at(0)
r1type = row1.get("type")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("x")
expect(row1.getList().get("name")).toBe("x")
# change row to to "select_multiple y".
r1type.set("value", "select_multiple y")
expect(r1type.get("typeId")).toBe("select_multiple")
expect(r1type.get("list").get("name")).toBe("y")
expect(row1.toJSON().type).toBe("select_multiple y")
expect(row1.getList().get("name")).toBe("y")
# change row to "text"
row1.get("type").set("value", "text")
expect(row1.get("type").get("value")).toBe("text")
# Right now, thinking that we should keep the list around
# and test to make sure the exported value doesn't have a list
expect(row1.get("type").get("list").get("name")).toBeDefined()
expect(row1.getList().get("name")).toBeDefined()
expect(row1.toJSON().type).toBe("text")
# # adding an invalid list will break things.
#
# I'm thinking: adding an invalid list will only break validation of
# the survey. If it's not defined, it will prompt the user to make
# the row valid.
#
# setToInvalidList = ()->
# row1.get("type").set("value", "select_one badlist")
# expect(setToInvalidList).toThrow()
``
describe "groups", ->
it "can add a group", ->
@pizzaSurvey.addRow type: "text", name: "pizza", hint: "pizza", label: "pizza"
expect(@pizzaSurvey.rows.last() instanceof XLF.Row).toBe(true)
@pizzaSurvey.addRow type: "group", name: "group"
grp = @pizzaSurvey.rows.last()
grp.addRow type: "text", name: "textquestioningroup", label: "Text question in group"
grp.addRow type: "group", name: "groupingroup"
second_group = grp.rows.last()
second_group.addRow type: "text", name: "secondgroupquestion", label: "Second group question"
describe "lists", ->
it "can change a list for a question", ->
# add a new list. "yes, no, maybe"
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
# test original state
firstRow = @pizzaSurvey.rows.first()
expect(firstRow.getList().get("name")).toBe("yes_no")
# change the list for first question to be "yes_no_maybe" instead of "yes_no"
expect(firstRow.getList().get("name")).toBe("yes_no")
firstRow.setList(ynm)
expect(firstRow.getList().get("name")).toBe("yes_no_maybe")
# change it back
firstRow.setList("yes_no")
expect(firstRow.getList().get("name")).toBe("yes_no")
# cannot change it to a nonexistant list
expect(-> firstRow.setList("nonexistant_list")).toThrow()
# changing name of list object will not unlink the list
list = firstRow.getList()
list.set("name", "no_yes")
expect(firstRow.getList()).toBeDefined()
expect(firstRow.getList()?.get("name")).toBe("no_yes")
it "can change options for a list", ->
yn = @pizzaSurvey.choices.get("yes_no")
expect(yn.options).toBeDefined()
@pizzaSurvey.choices.add(name: "yes_no_maybe")
ynm = @pizzaSurvey.choices.get("yes_no_maybe")
expect(ynm).toBeDefined()
expect(ynm.options.length).toBe(0)
ynm.options.add name: "maybe", label: "Maybe"
ynm.options.add [{name: "yes", label: "Yes"}, {name: "no", label: "No"}]
expect(ynm.options.length).toBe(3)
# don't allow duplicate options
ynm.options.add name: "maybe", label: "Maybe2"
expect(ynm.options.length).toBe(3)
expect(ynm.options.first().get("label")).toBe("Maybe")
describe "census xlform", ->
beforeEach ->
@census = XLF.createSurveyFromCsv(CENSUS_SURVEY)
it "looks good", ->
expect(@census).toBeDefined()
###
Misc data. (basically fixtures for the tests above)
###
LISTS =
yes_no:
name: "yes_no"
options: [
{"list name": "yes_no", name: "yes", label: "Yes"},
{"list name": "yes_no", name: "no", label: "No"}
]
gender:
name: "gender"
options: [
{"list name": "gender", name: "f", label: "Female"},
{"list name": "gender", name: "m", label: "Male"}
]
PIZZA_SURVEY = """
survey,,,
,type,name,label
,select_one yes_no,likes_pizza,Do you like pizza?
choices,,,
,list name,name,label
,yes_no,yes,Yes
,yes_no,no,No
"""
CENSUS_SURVEY = """
"survey","type","name","label"
,"integer","q1","How many people were living or staying in this house, apartment, or mobile home on April 1, 2010?"
,"select_one yes_no","q2","Were there any additional people staying here April 1, 2010 that you did not include in Question 1?"
,"select_one ownership_type or_other","q3","Is this house, apartment, or mobile home: owned with mortgage, owned without mortgage, rented, occupied without rent?"
,"text","q4","What is your telephone number?"
,"text","q5","Please provide information for each person living here. Start with a person here who owns or rents this house, apartment, or mobile home. If the owner or renter lives somewhere else, start with any adult living here. This will be Person 1. What is Person 1's name?"
,"select_one male_female","q6","What is Person 1's sex?"
,"date","q7","What is Person 1's age and Date of Birth?"
,"text","q8","Is Person 1 of Hispanic, Latino or Spanish origin?"
,"text","q9","What is Person 1's race?"
,"select_one yes_no","q10","Does Person 1 sometimes live or stay somewhere else?"
"choices","list name","name","label"
,"yes_no","yes","Yes"
,"yes_no","no","No"
,"male_female","male","Male"
,"male_female","female","Female"
,"ownership_type","owned_with_mortgage","owned with mortgage",
,"ownership_type","owned_without_mortgage","owned without mortgage"
,"ownership_type","rented","rented"
,"ownership_type","occupied_without_rent","occupied without rent"
"settings"
,"form_title","form_id"
,"Census Questions (2010)","census2010"
"""
describe "testing the view", ->
it "builds the view", ->
pizza = XLF.createSurveyFromCsv(PIZZA_SURVEY)
@xlv = new SurveyApp survey: pizza
div = $("<div>").appendTo("body")
$el = @xlv.render().$el
$el.appendTo(div)
expect(div.html()).not.toContain("empty")
expect(div.find("li.xlf-row-view").length).toBe(1)
lastRowEl = div.find("li.xlf-row-view").eq(0)
# adds row selector
clickNewRow = ()->
lastRowEl.find(".add-row-btn").click()
expect(clickNewRow).not.toThrow()
expect(lastRowEl.find(".line").eq(-1).hasClass("expanded")).toBeTruthy()
expect(pizza.rows.length).toBe(1)
lastRowEl.find(".line.expanded").find(".menu-item-geopoint").trigger("click")
expect(pizza.rows.length).toBe(2)
expect(div.find(".xlf-row-view").length).toBe(2)
expect(div.find(".xlf-row-view").eq(-1).find(".xlf-dv-label").text()).toMatch("location") |
[
{
"context": "## Copyright (c) 2011, Chris Umbel\n##\n## Permission is hereby granted, free of charg",
"end": 34,
"score": 0.9998162984848022,
"start": 23,
"tag": "NAME",
"value": "Chris Umbel"
}
] | src/wordnet_file.coffee | khanali21/wordnet | 3 | ## Copyright (c) 2011, Chris Umbel
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
fs = require('fs')
path = require('path')
util = require('util')
module.exports = class WordNetFile
constructor: (@dataDir, @fileName) ->
@filePath = path.join(@dataDir, @fileName)
open: (callback) ->
self = @
if @fd
return callback.call self, null, @fd
filePath = @filePath
fs.open filePath, 'r', null, (err, fd) =>
return callback.call self, err, null if err?
@fd = fd
callback.call self, err, fd
close: () ->
if @fd?
fs.close(@fd)
delete @fd
appendLineChar: (fd, pos, buffPos, buff, callback) ->
self = @
length = buff.length
space = length - buffPos
fs.read fd, buff, buffPos, space, pos, (err, count, buffer) ->
return callback.call(self, err, null) if err?
for i in [0..count - 1]
if buff[i] == 10
return callback.call(self, null, buff.slice(0, i).toString('ASCII'))
## Okay, no newline; extend and tail recurse
newBuff = new Buffer(length * 2)
buff.copy(newBuff, 0, 0, length)
self.appendLineChar fd, pos + length, length, newBuff, callback
| 176214 | ## Copyright (c) 2011, <NAME>
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
fs = require('fs')
path = require('path')
util = require('util')
module.exports = class WordNetFile
constructor: (@dataDir, @fileName) ->
@filePath = path.join(@dataDir, @fileName)
open: (callback) ->
self = @
if @fd
return callback.call self, null, @fd
filePath = @filePath
fs.open filePath, 'r', null, (err, fd) =>
return callback.call self, err, null if err?
@fd = fd
callback.call self, err, fd
close: () ->
if @fd?
fs.close(@fd)
delete @fd
appendLineChar: (fd, pos, buffPos, buff, callback) ->
self = @
length = buff.length
space = length - buffPos
fs.read fd, buff, buffPos, space, pos, (err, count, buffer) ->
return callback.call(self, err, null) if err?
for i in [0..count - 1]
if buff[i] == 10
return callback.call(self, null, buff.slice(0, i).toString('ASCII'))
## Okay, no newline; extend and tail recurse
newBuff = new Buffer(length * 2)
buff.copy(newBuff, 0, 0, length)
self.appendLineChar fd, pos + length, length, newBuff, callback
| true | ## Copyright (c) 2011, PI:NAME:<NAME>END_PI
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
fs = require('fs')
path = require('path')
util = require('util')
module.exports = class WordNetFile
constructor: (@dataDir, @fileName) ->
@filePath = path.join(@dataDir, @fileName)
open: (callback) ->
self = @
if @fd
return callback.call self, null, @fd
filePath = @filePath
fs.open filePath, 'r', null, (err, fd) =>
return callback.call self, err, null if err?
@fd = fd
callback.call self, err, fd
close: () ->
if @fd?
fs.close(@fd)
delete @fd
appendLineChar: (fd, pos, buffPos, buff, callback) ->
self = @
length = buff.length
space = length - buffPos
fs.read fd, buff, buffPos, space, pos, (err, count, buffer) ->
return callback.call(self, err, null) if err?
for i in [0..count - 1]
if buff[i] == 10
return callback.call(self, null, buff.slice(0, i).toString('ASCII'))
## Okay, no newline; extend and tail recurse
newBuff = new Buffer(length * 2)
buff.copy(newBuff, 0, 0, length)
self.appendLineChar fd, pos + length, length, newBuff, callback
|
[
{
"context": ">\n *\n * Copyright (c) 2015 Ryan Gaus\n * Licensed under the MIT license.\n ",
"end": 2772,
"score": 0.9997830390930176,
"start": 2763,
"tag": "NAME",
"value": "Ryan Gaus"
}
] | Gruntfile.coffee | 1egoman/cena_app | 0 | 'use strict'
module.exports = (grunt) ->
# load all grunt tasks
require('matchdep').filterDev('grunt-*').forEach (contrib) ->
grunt.log.ok [ contrib + ' is loaded' ]
grunt.loadNpmTasks contrib
config =
dist: 'dist'
src: 'src'
distTest: 'test/dist'
srcTest: 'test/src'
pkg: grunt.file.readJSON('package.json')
# Project configuration.
grunt.initConfig
config: config
clean:
dist:
files: [
dot: true
src: [
'<%= config.dist %>/*'
'<%= config.distTest %>/*'
'!<%= config.dist %>/.git*'
]
]
css:
files: [
dot: true
src: [ 'public/sass/index.css' ]
]
coffee:
dist: files: [ {
expand: true
cwd: '<%= config.src %>'
src: '{,*/}*.coffee'
dest: '<%= config.dist %>'
ext: '.js'
} ]
test: files: [ {
expand: true
cwd: '<%= config.srcTest %>'
src: '{,*/}*.spec.coffee'
dest: '<%= config.distTest %>'
ext: '.spec.js'
} ]
frontend: files: [ {
expand: true
cwd: 'src/frontend'
src: '{,*/}*.coffee'
dest: 'public/js'
ext: '.js'
} ]
jshint:
options:
jshintrc: '.jshintrc'
gruntfile:
src: 'Gruntfile.js'
watch:
gruntfile:
files: '<%= jshint.gruntfile.src %>'
tasks: [
'jshint:gruntfile'
]
dist:
files: '<%= config.src %>/*'
tasks: [
'coffee:dist'
'simplemocha:backend'
]
test:
files: '<%= config.srcTest %>/specs/*'
tasks: [
'coffee:test'
'simplemocha:backend'
]
frontend:
files: 'src/frontend/**/*.coffee'
tasks: [
'coffee:frontend'
]
css:
files: 'public/sass/**/*.scss'
tasks: [
'clean:css'
]
simplemocha:
options:
globals: [
'sinon'
'chai'
'should'
'expect'
'assert'
'AssertionError'
]
timeout: 3000
ignoreLeaks: false
ui: 'bdd'
reporter: 'spec'
backend: src: [
'test/support/globals.js'
'test/dist/**/*.spec.js'
]
auto_install:
local: {}
subdir:
options:
stdout: true
stderr: true
failOnError: true
npm: false
usebanner:
dist:
options:
position: 'top'
banner: """/*
* cena_auth at version <%= config.pkg.version %>
* <%= config.pkg.repository.url %>
*
* Copyright (c) 2015 Ryan Gaus
* Licensed under the MIT license.
*/"""
linebreak: true
files:
src: [ 'dist/**/*.js' ]
grunt.registerTask 'coverageBackend', 'Test backend files as well as code coverage.', ->
done = @async()
path = './test/support/runner.js'
options =
cmd: 'istanbul'
grunt: false
args: [
'cover'
'--default-excludes'
'-x'
'app/**'
'--report'
'lcov'
'--dir'
'./coverage/backend'
path
]
opts: stdio: 'inherit'
grunt.util.spawn options, (error, result) ->
if result and result.stderr
process.stderr.write result.stderr
if result and result.stdout
grunt.log.writeln result.stdout
# abort tasks in queue if there's an error
done error
grunt.registerTask 'bower_install', 'install frontend dependencies', ->
exec = require('child_process').exec
cb = @async()
exec './node_modules/bower/bin/bower install', {}, (err, stdout) ->
console.log stdout
cb()
# Tasks
grunt.registerTask 'default', [
'coffee'
'jshint'
'usebanner'
]
grunt.registerTask 'test', [
'clean'
'coffee'
'simplemocha:backend'
]
grunt.registerTask 'coverage', [
'clean'
'coffee'
'coverageBackend'
]
grunt.registerTask 'heroku', [
'clean'
'coffee'
'bower_install'
]
grunt.registerTask 'banner', [ 'usebanner' ]
| 9569 | 'use strict'
module.exports = (grunt) ->
# load all grunt tasks
require('matchdep').filterDev('grunt-*').forEach (contrib) ->
grunt.log.ok [ contrib + ' is loaded' ]
grunt.loadNpmTasks contrib
config =
dist: 'dist'
src: 'src'
distTest: 'test/dist'
srcTest: 'test/src'
pkg: grunt.file.readJSON('package.json')
# Project configuration.
grunt.initConfig
config: config
clean:
dist:
files: [
dot: true
src: [
'<%= config.dist %>/*'
'<%= config.distTest %>/*'
'!<%= config.dist %>/.git*'
]
]
css:
files: [
dot: true
src: [ 'public/sass/index.css' ]
]
coffee:
dist: files: [ {
expand: true
cwd: '<%= config.src %>'
src: '{,*/}*.coffee'
dest: '<%= config.dist %>'
ext: '.js'
} ]
test: files: [ {
expand: true
cwd: '<%= config.srcTest %>'
src: '{,*/}*.spec.coffee'
dest: '<%= config.distTest %>'
ext: '.spec.js'
} ]
frontend: files: [ {
expand: true
cwd: 'src/frontend'
src: '{,*/}*.coffee'
dest: 'public/js'
ext: '.js'
} ]
jshint:
options:
jshintrc: '.jshintrc'
gruntfile:
src: 'Gruntfile.js'
watch:
gruntfile:
files: '<%= jshint.gruntfile.src %>'
tasks: [
'jshint:gruntfile'
]
dist:
files: '<%= config.src %>/*'
tasks: [
'coffee:dist'
'simplemocha:backend'
]
test:
files: '<%= config.srcTest %>/specs/*'
tasks: [
'coffee:test'
'simplemocha:backend'
]
frontend:
files: 'src/frontend/**/*.coffee'
tasks: [
'coffee:frontend'
]
css:
files: 'public/sass/**/*.scss'
tasks: [
'clean:css'
]
simplemocha:
options:
globals: [
'sinon'
'chai'
'should'
'expect'
'assert'
'AssertionError'
]
timeout: 3000
ignoreLeaks: false
ui: 'bdd'
reporter: 'spec'
backend: src: [
'test/support/globals.js'
'test/dist/**/*.spec.js'
]
auto_install:
local: {}
subdir:
options:
stdout: true
stderr: true
failOnError: true
npm: false
usebanner:
dist:
options:
position: 'top'
banner: """/*
* cena_auth at version <%= config.pkg.version %>
* <%= config.pkg.repository.url %>
*
* Copyright (c) 2015 <NAME>
* Licensed under the MIT license.
*/"""
linebreak: true
files:
src: [ 'dist/**/*.js' ]
grunt.registerTask 'coverageBackend', 'Test backend files as well as code coverage.', ->
done = @async()
path = './test/support/runner.js'
options =
cmd: 'istanbul'
grunt: false
args: [
'cover'
'--default-excludes'
'-x'
'app/**'
'--report'
'lcov'
'--dir'
'./coverage/backend'
path
]
opts: stdio: 'inherit'
grunt.util.spawn options, (error, result) ->
if result and result.stderr
process.stderr.write result.stderr
if result and result.stdout
grunt.log.writeln result.stdout
# abort tasks in queue if there's an error
done error
grunt.registerTask 'bower_install', 'install frontend dependencies', ->
exec = require('child_process').exec
cb = @async()
exec './node_modules/bower/bin/bower install', {}, (err, stdout) ->
console.log stdout
cb()
# Tasks
grunt.registerTask 'default', [
'coffee'
'jshint'
'usebanner'
]
grunt.registerTask 'test', [
'clean'
'coffee'
'simplemocha:backend'
]
grunt.registerTask 'coverage', [
'clean'
'coffee'
'coverageBackend'
]
grunt.registerTask 'heroku', [
'clean'
'coffee'
'bower_install'
]
grunt.registerTask 'banner', [ 'usebanner' ]
| true | 'use strict'
module.exports = (grunt) ->
# load all grunt tasks
require('matchdep').filterDev('grunt-*').forEach (contrib) ->
grunt.log.ok [ contrib + ' is loaded' ]
grunt.loadNpmTasks contrib
config =
dist: 'dist'
src: 'src'
distTest: 'test/dist'
srcTest: 'test/src'
pkg: grunt.file.readJSON('package.json')
# Project configuration.
grunt.initConfig
config: config
clean:
dist:
files: [
dot: true
src: [
'<%= config.dist %>/*'
'<%= config.distTest %>/*'
'!<%= config.dist %>/.git*'
]
]
css:
files: [
dot: true
src: [ 'public/sass/index.css' ]
]
coffee:
dist: files: [ {
expand: true
cwd: '<%= config.src %>'
src: '{,*/}*.coffee'
dest: '<%= config.dist %>'
ext: '.js'
} ]
test: files: [ {
expand: true
cwd: '<%= config.srcTest %>'
src: '{,*/}*.spec.coffee'
dest: '<%= config.distTest %>'
ext: '.spec.js'
} ]
frontend: files: [ {
expand: true
cwd: 'src/frontend'
src: '{,*/}*.coffee'
dest: 'public/js'
ext: '.js'
} ]
jshint:
options:
jshintrc: '.jshintrc'
gruntfile:
src: 'Gruntfile.js'
watch:
gruntfile:
files: '<%= jshint.gruntfile.src %>'
tasks: [
'jshint:gruntfile'
]
dist:
files: '<%= config.src %>/*'
tasks: [
'coffee:dist'
'simplemocha:backend'
]
test:
files: '<%= config.srcTest %>/specs/*'
tasks: [
'coffee:test'
'simplemocha:backend'
]
frontend:
files: 'src/frontend/**/*.coffee'
tasks: [
'coffee:frontend'
]
css:
files: 'public/sass/**/*.scss'
tasks: [
'clean:css'
]
simplemocha:
options:
globals: [
'sinon'
'chai'
'should'
'expect'
'assert'
'AssertionError'
]
timeout: 3000
ignoreLeaks: false
ui: 'bdd'
reporter: 'spec'
backend: src: [
'test/support/globals.js'
'test/dist/**/*.spec.js'
]
auto_install:
local: {}
subdir:
options:
stdout: true
stderr: true
failOnError: true
npm: false
usebanner:
dist:
options:
position: 'top'
banner: """/*
* cena_auth at version <%= config.pkg.version %>
* <%= config.pkg.repository.url %>
*
* Copyright (c) 2015 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
*/"""
linebreak: true
files:
src: [ 'dist/**/*.js' ]
grunt.registerTask 'coverageBackend', 'Test backend files as well as code coverage.', ->
done = @async()
path = './test/support/runner.js'
options =
cmd: 'istanbul'
grunt: false
args: [
'cover'
'--default-excludes'
'-x'
'app/**'
'--report'
'lcov'
'--dir'
'./coverage/backend'
path
]
opts: stdio: 'inherit'
grunt.util.spawn options, (error, result) ->
if result and result.stderr
process.stderr.write result.stderr
if result and result.stdout
grunt.log.writeln result.stdout
# abort tasks in queue if there's an error
done error
grunt.registerTask 'bower_install', 'install frontend dependencies', ->
exec = require('child_process').exec
cb = @async()
exec './node_modules/bower/bin/bower install', {}, (err, stdout) ->
console.log stdout
cb()
# Tasks
grunt.registerTask 'default', [
'coffee'
'jshint'
'usebanner'
]
grunt.registerTask 'test', [
'clean'
'coffee'
'simplemocha:backend'
]
grunt.registerTask 'coverage', [
'clean'
'coffee'
'coverageBackend'
]
grunt.registerTask 'heroku', [
'clean'
'coffee'
'bower_install'
]
grunt.registerTask 'banner', [ 'usebanner' ]
|
[
{
"context": "ame = \"markov\") ->\n @keyPrefix = \"#{modelName}:\"\n\n # Configure redis the same way that redi",
"end": 366,
"score": 0.5688269734382629,
"start": 366,
"tag": "KEY",
"value": ""
}
] | src/storage/redis.coffee | migstopheles/hubot-markov | 57 | Redis = require 'redis'
Url = require 'url'
# Markov storage implementation that uses redis hash keys to store the model.
class RedisStorage
# Create a storage module connected to Redis.
# Key prefix is used to isolate stored markov transitions from other keys in the database.
constructor: (@connStr, modelName = "markov") ->
@keyPrefix = "#{modelName}:"
# Configure redis the same way that redis-brain does.
info = Url.parse @connStr or
process.env.REDISTOGO_URL or
process.env.REDISCLOUD_URL or
process.env.BOXEN_REDIS_URL or
process.env.REDIS_URL or
'redis://localhost:6379'
@client = Redis.createClient(info.port, info.hostname)
if info.auth
@client.auth info.auth.split(":")[1]
# No initialization necessary for Redis.
initialize: (callback) ->
process.nextTick callback
# Uniformly and unambiguously convert an array of Strings and nulls into a valid
# Redis key. Uses a length-prefixed encoding.
#
# _encode([null, null, "a"]) = "markov:001a"
# _encode(["a", "bb", "ccc"]) = "markov:1a2b3c"
_encode: (key) ->
encoded = for part in key
if part then "#{part.length}#{part}" else "0"
@keyPrefix + encoded.join('')
# Record a set of transitions within the model. "transition.from" is an array of Strings
# and nulls marking the prior state and "transition.to" is the observed next state, which
# may be an end-of-chain sentinel.
incrementTransitions: (transitions, callback) ->
for transition in transitions
@client.hincrby(@._encode(transition.from), transition.to, 1)
process.nextTick callback
# Retrieve an object containing the possible next hops from a prior state and their
# relative frequencies. Invokes "callback" with the object.
get: (prior, callback) ->
@client.hgetall @._encode(prior), (err, hash) ->
return callback(err) if err?
converted = {}
for own state, count of hash
converted[state] = parseInt(count)
callback(null, converted)
# Remove all persistent storage related to this model.
destroy: (callback) ->
cursor = null
advance = =>
if cursor is '0'
return callback(null)
else
@client.scan [cursor or '0', 'match', "#{@keyPrefix}*"], processBatch
processBatch = (err, reply) =>
return callback(err) if err?
[cursor, batch] = reply
if batch.length is 0
advance()
else
@client.del batch, (err) =>
return callback(err) if err?
advance()
advance()
disconnect: (callback) ->
@client.quit()
process.nextTick -> callback(null)
module.exports = RedisStorage
| 200901 | Redis = require 'redis'
Url = require 'url'
# Markov storage implementation that uses redis hash keys to store the model.
class RedisStorage
# Create a storage module connected to Redis.
# Key prefix is used to isolate stored markov transitions from other keys in the database.
constructor: (@connStr, modelName = "markov") ->
@keyPrefix = "#{modelName<KEY>}:"
# Configure redis the same way that redis-brain does.
info = Url.parse @connStr or
process.env.REDISTOGO_URL or
process.env.REDISCLOUD_URL or
process.env.BOXEN_REDIS_URL or
process.env.REDIS_URL or
'redis://localhost:6379'
@client = Redis.createClient(info.port, info.hostname)
if info.auth
@client.auth info.auth.split(":")[1]
# No initialization necessary for Redis.
initialize: (callback) ->
process.nextTick callback
# Uniformly and unambiguously convert an array of Strings and nulls into a valid
# Redis key. Uses a length-prefixed encoding.
#
# _encode([null, null, "a"]) = "markov:001a"
# _encode(["a", "bb", "ccc"]) = "markov:1a2b3c"
_encode: (key) ->
encoded = for part in key
if part then "#{part.length}#{part}" else "0"
@keyPrefix + encoded.join('')
# Record a set of transitions within the model. "transition.from" is an array of Strings
# and nulls marking the prior state and "transition.to" is the observed next state, which
# may be an end-of-chain sentinel.
incrementTransitions: (transitions, callback) ->
for transition in transitions
@client.hincrby(@._encode(transition.from), transition.to, 1)
process.nextTick callback
# Retrieve an object containing the possible next hops from a prior state and their
# relative frequencies. Invokes "callback" with the object.
get: (prior, callback) ->
@client.hgetall @._encode(prior), (err, hash) ->
return callback(err) if err?
converted = {}
for own state, count of hash
converted[state] = parseInt(count)
callback(null, converted)
# Remove all persistent storage related to this model.
destroy: (callback) ->
cursor = null
advance = =>
if cursor is '0'
return callback(null)
else
@client.scan [cursor or '0', 'match', "#{@keyPrefix}*"], processBatch
processBatch = (err, reply) =>
return callback(err) if err?
[cursor, batch] = reply
if batch.length is 0
advance()
else
@client.del batch, (err) =>
return callback(err) if err?
advance()
advance()
disconnect: (callback) ->
@client.quit()
process.nextTick -> callback(null)
module.exports = RedisStorage
| true | Redis = require 'redis'
Url = require 'url'
# Markov storage implementation that uses redis hash keys to store the model.
class RedisStorage
# Create a storage module connected to Redis.
# Key prefix is used to isolate stored markov transitions from other keys in the database.
constructor: (@connStr, modelName = "markov") ->
@keyPrefix = "#{modelNamePI:KEY:<KEY>END_PI}:"
# Configure redis the same way that redis-brain does.
info = Url.parse @connStr or
process.env.REDISTOGO_URL or
process.env.REDISCLOUD_URL or
process.env.BOXEN_REDIS_URL or
process.env.REDIS_URL or
'redis://localhost:6379'
@client = Redis.createClient(info.port, info.hostname)
if info.auth
@client.auth info.auth.split(":")[1]
# No initialization necessary for Redis.
initialize: (callback) ->
process.nextTick callback
# Uniformly and unambiguously convert an array of Strings and nulls into a valid
# Redis key. Uses a length-prefixed encoding.
#
# _encode([null, null, "a"]) = "markov:001a"
# _encode(["a", "bb", "ccc"]) = "markov:1a2b3c"
_encode: (key) ->
encoded = for part in key
if part then "#{part.length}#{part}" else "0"
@keyPrefix + encoded.join('')
# Record a set of transitions within the model. "transition.from" is an array of Strings
# and nulls marking the prior state and "transition.to" is the observed next state, which
# may be an end-of-chain sentinel.
incrementTransitions: (transitions, callback) ->
for transition in transitions
@client.hincrby(@._encode(transition.from), transition.to, 1)
process.nextTick callback
# Retrieve an object containing the possible next hops from a prior state and their
# relative frequencies. Invokes "callback" with the object.
get: (prior, callback) ->
@client.hgetall @._encode(prior), (err, hash) ->
return callback(err) if err?
converted = {}
for own state, count of hash
converted[state] = parseInt(count)
callback(null, converted)
# Remove all persistent storage related to this model.
destroy: (callback) ->
cursor = null
advance = =>
if cursor is '0'
return callback(null)
else
@client.scan [cursor or '0', 'match', "#{@keyPrefix}*"], processBatch
processBatch = (err, reply) =>
return callback(err) if err?
[cursor, batch] = reply
if batch.length is 0
advance()
else
@client.del batch, (err) =>
return callback(err) if err?
advance()
advance()
disconnect: (callback) ->
@client.quit()
process.nextTick -> callback(null)
module.exports = RedisStorage
|
[
{
"context": "on\"\n null\n\n ## https://github.com/cypress-io/cypress/issues/65\n it \"provides the correct ",
"end": 17667,
"score": 0.9996018409729004,
"start": 17657,
"tag": "USERNAME",
"value": "cypress-io"
},
{
"context": " headers: {\n \"x-token\": \"123\"\n }\n })\n ",
"end": 23911,
"score": 0.8049545288085938,
"start": 23908,
"tag": "KEY",
"value": "123"
},
{
"context": "uestHeaders\").should(\"have.property\", \"x-token\", \"123\")\n\n context \"responses\", ->\n beforeEa",
"end": 24057,
"score": 0.8859685063362122,
"start": 24054,
"tag": "KEY",
"value": "123"
},
{
"context": " cy.state(\"window\").$.post(\"/users\", \"name=brian\")\n\n it \"can accept response as a function\", ->",
"end": 37478,
"score": 0.7208580374717712,
"start": 37473,
"tag": "USERNAME",
"value": "brian"
},
{
"context": "hr.aborted).to.be.true\n\n ## https://github.com/cypress-io/cypress/issues/3008\n it \"aborts xhrs even when",
"end": 57168,
"score": 0.9996204376220703,
"start": 57158,
"tag": "USERNAME",
"value": "cypress-io"
},
{
"context": "hr.aborted).to.be.true\n\n ## https://github.com/cypress-io/cypress/issues/1652\n it \"does not set aborted ",
"end": 57758,
"score": 0.9995796084403992,
"start": 57748,
"tag": "USERNAME",
"value": "cypress-io"
}
] | packages/driver/test/cypress/integration/commands/xhr_spec.coffee | ahmedbera/cypress | 3 | _ = Cypress._
$ = Cypress.$
Promise = Cypress.Promise
describe "src/cy/commands/xhr", ->
before ->
cy
.visit("/fixtures/jquery.html")
.then (win) ->
h = $(win.document.head)
h.find("script").remove()
@head = h.prop("outerHTML")
@body = win.document.body.outerHTML
beforeEach ->
doc = cy.state("document")
$(doc.head).empty().html(@head)
$(doc.body).empty().html(@body)
context "#startXhrServer", ->
it "continues to be a defined properties", ->
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
expect(xhr.onload).to.be.a("function")
expect(xhr.onerror).to.be.a("function")
expect(xhr.onreadystatechange).to.be.a("function")
it "prevents infinite recursion", ->
onloaded = false
onreadystatechanged = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
handlers = ["onload", "onerror", "onreadystatechange"]
wrap = ->
handlers.forEach (handler) ->
bak = xhr[handler]
xhr[handler] = ->
if _.isFunction(bak)
bak.apply(xhr, arguments)
xhr = new win.XMLHttpRequest
xhr.addEventListener("readystatechange", wrap, false)
xhr.onreadystatechange = ->
throw new Error("NOOO")
xhr.onreadystatechange
xhr.onreadystatechange = ->
onreadystatechanged = true
xhr.open("GET", "/foo")
xhr.onload = ->
throw new Error("NOOO")
xhr.onload
xhr.onload = ->
onloaded = true
xhr.send()
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(onreadystatechanged).to.be.true
expect(xhr.status).to.eq(404)
it "allows multiple readystatechange calls", ->
responseText = null
responseStatuses = 0
cy
.server()
.route({ url: /longtext.txt/ }).as("getLongText")
.task('create:long:file')
.window().then (win) ->
xhr = new win.XMLHttpRequest()
xhr.onreadystatechange = ->
responseText = xhr.responseText
if xhr.readyState == 3
responseStatuses++
xhr.open("GET", "/_test-output/longtext.txt?" + Cypress._.random(0, 1e6))
xhr.send()
null
.wait("@getLongText").then (xhr) ->
expect(responseStatuses).to.be.gt(1)
expect(xhr.status).to.eq(200)
it "works with jquery too", ->
failed = false
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
handlers = ["onload", "onerror", "onreadystatechange"]
wrap = ->
xhr = @
handlers.forEach (handler) ->
bak = xhr[handler]
xhr[handler] = ->
if _.isFunction(bak)
bak.apply(xhr, arguments)
open = win.XMLHttpRequest.prototype.open
win.XMLHttpRequest.prototype.open = ->
@addEventListener("readystatechange", wrap, false)
open.apply(@, arguments)
xhr = win.$.get("/foo")
.fail ->
failed = true
.always ->
onloaded = true
null
.wait("@getFoo").then (xhr) ->
expect(failed).to.be.true
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls existing onload handlers", ->
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.onload = ->
onloaded = true
xhr.open("GET", "/foo")
xhr.send()
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls onload handlers attached after xhr#send", ->
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = ->
onloaded = true
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls onload handlers attached after xhr#send asynchronously", ->
onloaded = false
cy
.server()
.route({url: /timeout/}).as("getTimeout")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
_.delay ->
xhr.onload = ->
onloaded = true
, 20
null
.wait("@getTimeout").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(200)
it "fallbacks even when onreadystatechange is overriden", ->
onloaded = false
onreadystatechanged = false
cy
.server()
.route({url: /timeout/}).as("get.timeout")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
xhr.onreadystatechange = ->
onreadystatechanged = true
xhr.onload = ->
onloaded = true
null
.wait("@get.timeout").then (xhr) ->
expect(onloaded).to.be.true
expect(onreadystatechanged).to.be.true
expect(xhr.status).to.eq(200)
describe.skip "filtering requests", ->
beforeEach ->
cy.server()
extensions = {
html: "ajax html"
js: "{foo: \"bar\"}"
css: "body {}"
}
_.each extensions, (val, ext) ->
it "filters out non ajax requests by default for extension: .#{ext}", (done) ->
cy.state("window").$.get("/fixtures/app.#{ext}").done (res) ->
expect(res).to.eq val
done()
it "can disable default filtering", (done) ->
## this should throw since it should return 404 when no
## route matches it
cy.server({ignore: false}).window().then (w) ->
Promise.resolve(w.$.get("/fixtures/app.html")).catch -> done()
describe "url rewriting", ->
it "has a FQDN absolute-relative url", ->
cy
.server()
.route({
url: /foo/
}).as("getFoo")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/foo")
expect(@open).to.be.calledWith("GET", "/foo")
it "has a relative URL", ->
cy
.server()
.route(/foo/).as("getFoo")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/fixtures/foo")
expect(@open).to.be.calledWith("GET", "foo")
it "resolves relative urls correctly when base tag is present", ->
cy
.server()
.route({
url: /foo/
}).as("getFoo")
.window().then (win) ->
win.$("<base href='/'>").appendTo(win.$("head"))
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/foo")
expect(@open).to.be.calledWith("GET", "foo")
it "resolves relative urls correctly when base tag is present on nested routes", ->
cy
.server()
.route({
url: /foo/
}).as("getFoo")
.window().then (win) ->
win.$("<base href='/nested/route/path'>").appendTo(win.$("head"))
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("../foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/nested/foo")
expect(@open).to.be.calledWith("GET", "../foo")
it "allows cross origin requests to go out as necessary", ->
cy
.server()
.route(/foo/).as("getFoo")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("http://localhost:3501/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3501/foo")
expect(@open).to.be.calledWith("GET", "http://localhost:3501/foo")
it "rewrites FQDN url's for stubs", ->
cy
.server()
.route({
url: /foo/
response: {}
}).as("getFoo")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("http://localhost:9999/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:9999/foo")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:9999/foo")
it "rewrites absolute url's for stubs", ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/foo")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")
it "rewrites 404's url's for stubs", ->
cy
.server({force404: true})
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
new Promise (resolve) ->
win.$.ajax({
method: "POST"
url: "/foo"
data: JSON.stringify({foo: "bar"})
}).fail ->
resolve()
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://localhost:3500/foo")
expect(@open).to.be.calledWith("POST", "/__cypress/xhrs/http://localhost:3500/foo")
it "rewrites urls with nested segments", ->
cy
.server()
.route({
url: /phones/
response: {}
}).as("getPhones")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("phones/phones.json")
null
.wait("@getPhones")
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://localhost:3500/fixtures/phones/phones.json")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/fixtures/phones/phones.json")
it "does not rewrite CORS", ->
cy
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
new Promise (resolve) ->
win.$.get("http://www.google.com/phones/phones.json").fail ->
resolve()
.then ->
xhr = cy.state("requests")[0].xhr
expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
expect(@open).to.be.calledWith("GET", "http://www.google.com/phones/phones.json")
it "can stub real CORS requests too", ->
cy
.server()
.route({
url: /phones/
response: {}
}).as("getPhones")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("http://www.google.com/phones/phones.json")
null
.wait("@getPhones")
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://www.google.com/phones/phones.json")
it "can stub CORS string routes", ->
cy
.server()
.route("http://localhost:3501/fixtures/app.json").as("getPhones")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("http://localhost:3501/fixtures/app.json")
null
.wait("@getPhones")
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://localhost:3501/fixtures/app.json")
expect(@open).to.be.calledWith("GET", "http://localhost:3501/fixtures/app.json")
# it "can stub root requests to CORS", ->
# cy
# .server()
# .route({
# url: "http://localhost:3501"
# stub: false
# }).as("getPhones")
# .window().then (win) ->
# @open = cy.spy(cy.state("server").options, "onOpen")
# win.$.get("http://localhost:3501")
# null
# .wait("@getPhones")
# .then ->
# xhr = cy.state("responses")[0].xhr
# expect(xhr.url).to.eq("http://localhost:3501")
# expect(@open).to.be.calledWith("GET", "/http://localhost:3501")
it "sets display correctly when there is no remoteOrigin", ->
## this is an example of having cypress act as your webserver
## when the remoteHost is <root>
cy
.server()
.route({
url: /foo/
response: {}
}).as("getFoo")
.window().then (win) ->
## trick cypress into thinking the remoteOrigin is location:9999
cy.stub(cy, "getRemoteLocation").withArgs("origin").returns("")
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.url).to.eq("http://localhost:3500/foo")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")
it "decodes proxy urls", ->
cy
.server()
.route({
url: /users/
response: {}
}).as("getUsers")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("/users?q=(id eq 123)")
null
.wait("@getUsers")
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://localhost:3500/users?q=(id eq 123)")
url = encodeURI("users?q=(id eq 123)")
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")
it "decodes proxy urls #2", ->
cy
.server()
.route(/accounts/, {}).as("getAccounts")
.window().then (win) ->
@open = cy.spy(cy.state("server").options, "onOpen")
win.$.get("/accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")
null
.wait("@getAccounts")
.then ->
xhr = cy.state("responses")[0].xhr
expect(xhr.url).to.eq("http://localhost:3500/accounts?page=1&$filter=(rowStatus+eq+1)&$orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")
url = "accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true"
expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")
describe "#onResponse", ->
it "calls onResponse callback with cy context + proxy xhr", (done) ->
cy
.server()
.route({
url: /foo/
response: {foo: "bar"}
onResponse: (xhr) ->
expect(@).to.eq(cy)
expect(xhr.responseBody).to.deep.eq {foo: "bar"}
done()
})
.window().then (win) ->
win.$.get("/foo")
null
describe "#onAbort", ->
it "calls onAbort callback with cy context + proxy xhr", (done) ->
cy
.server()
.route({
url: /foo/
response: {}
onAbort: (xhr) ->
expect(@).to.eq(cy)
expect(xhr.aborted).to.be.true
done()
})
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.abort()
null
describe "request parsing", ->
it "adds parses requestBody into JSON", (done) ->
cy
.server()
.route({
method: "POST"
url: /foo/
response: {}
onRequest: (xhr) ->
expect(@).to.eq(cy)
expect(xhr.requestBody).to.deep.eq {foo: "bar"}
done()
})
.window().then (win) ->
win.$.ajax
type: "POST"
url: "/foo"
data: JSON.stringify({foo: "bar"})
dataType: "json"
null
## https://github.com/cypress-io/cypress/issues/65
it "provides the correct requestBody on multiple requests", ->
post = (win, obj) ->
win.$.ajax({
type: "POST"
url: "/foo"
data: JSON.stringify(obj)
dataType: "json"
})
return null
cy
.server()
.route("POST", /foo/, {}).as("getFoo")
.window().then (win) ->
post(win, {foo: "bar1"})
.wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar1"})
.window().then (win) ->
post(win, {foo: "bar2"})
.wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar2"})
it "handles arraybuffer", ->
cy
.server()
.route("GET", /buffer/).as("getBuffer")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.responseType = "arraybuffer"
xhr.open("GET", "/buffer")
xhr.send()
null
.wait("@getBuffer").then (xhr) ->
expect(xhr.responseBody.toString()).to.eq("[object ArrayBuffer]")
it "handles xml", ->
cy
.server()
.route("GET", /xml/).as("getXML")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/xml")
xhr.send()
null
.wait("@getXML").its("responseBody").should("eq", "<foo>bar</foo>")
describe "issue #84", ->
it "does not incorrectly match options", ->
cy
.server()
.route({
method: "GET"
url: /answers/
status: 503
response: {}
})
.route(/forms/, []).as("getForm")
.window().then (win) ->
win.$.getJSON("/forms")
null
.wait("@getForm").its("status").should("eq", 200)
describe "#issue #85", ->
it "correctly returns the right XHR alias", ->
cy
.server()
.route({
method: "POST"
url: /foo/
response: {}
}).as("getFoo")
.route(/folders/, {foo: "bar"}).as("getFolders")
.window().then (win) ->
win.$.getJSON("/folders")
win.$.post("/foo", {})
null
.wait("@getFolders")
.wait("@getFoo")
.route(/folders/, {foo: "baz"}).as("getFoldersWithSearch")
.window().then (win) ->
win.$.getJSON("/folders/123/activities?foo=bar")
null
.wait("@getFoldersWithSearch").its("url")
.should("contain", "?foo=bar")
describe ".log", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
context "requests", ->
it "immediately logs xhr obj", ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("foo")
null
.then ->
lastLog = @lastLog
expect(lastLog.pick("name", "displayName", "event", "alias", "aliasType", "state")).to.deep.eq {
name: "xhr"
displayName: "xhr stub"
event: true
alias: "getFoo"
aliasType: "route"
state: "pending"
}
snapshots = lastLog.get("snapshots")
expect(snapshots.length).to.eq(1)
expect(snapshots[0].name).to.eq("request")
expect(snapshots[0].body).to.be.an("object")
it "does not end xhr requests when the associated command ends", ->
logs = null
cy
.server()
.route({
url: /foo/,
response: {}
delay: 50
}).as("getFoo")
.window().then (w) ->
w.$.getJSON("foo")
w.$.getJSON("foo")
w.$.getJSON("foo")
null
.then ->
cmd = cy.queue.find({name: "window"})
logs = cmd.get("next").get("logs")
expect(logs.length).to.eq(3)
_.each logs, (log) ->
expect(log.get("name")).to.eq("xhr")
expect(log.get("end")).not.to.be.true
.wait(["@getFoo", "@getFoo", "@getFoo"]).then ->
_.each logs, (log) ->
expect(log.get("name")).to.eq("xhr")
expect(log.get("ended")).to.be.true
it "updates log immediately whenever an xhr is aborted", ->
snapshot = null
xhrs = null
cy
.server()
.route({
url: /foo/,
response: {}
delay: 50
}).as("getFoo")
.window().then (win) ->
xhr1 = win.$.getJSON("foo1")
xhr2 = win.$.getJSON("foo2")
xhr1.abort()
null
.then ->
xhrs = cy.queue.logs({name: "xhr"})
expect(xhrs[0].get("state")).to.eq("failed")
expect(xhrs[0].get("error").name).to.eq("AbortError")
expect(xhrs[0].get("snapshots").length).to.eq(2)
expect(xhrs[0].get("snapshots")[0].name).to.eq("request")
expect(xhrs[0].get("snapshots")[0].body).to.be.a("object")
expect(xhrs[0].get("snapshots")[1].name).to.eq("aborted")
expect(xhrs[0].get("snapshots")[1].body).to.be.a("object")
expect(cy.state("requests").length).to.eq(2)
## the abort should have set its response
expect(cy.state("responses").length).to.eq(1)
.wait(["@getFoo", "@getFoo"]).then ->
## should not re-snapshot after the response
expect(xhrs[0].get("snapshots").length).to.eq(2)
it "can access requestHeaders", ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.ajax({
method: "GET"
url: "/foo"
headers: {
"x-token": "123"
}
})
null
.wait("@getFoo").its("requestHeaders").should("have.property", "x-token", "123")
context "responses", ->
beforeEach ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("foo_bar")
null
.wait("@getFoo")
it "logs obj", ->
obj = {
name: "xhr"
displayName: "xhr stub"
event: true
message: ""
type: "parent"
aliasType: "route"
referencesAlias: undefined
alias: "getFoo"
}
lastLog = @lastLog
_.each obj, (value, key) =>
expect(lastLog.get(key)).to.deep.eq(value, "expected key: #{key} to eq value: #{value}")
it "ends", ->
lastLog = @lastLog
expect(lastLog.get("state")).to.eq("passed")
it "snapshots again", ->
lastLog = @lastLog
expect(lastLog.get("snapshots").length).to.eq(2)
expect(lastLog.get("snapshots")[0].name).to.eq("request")
expect(lastLog.get("snapshots")[0].body).to.be.an("object")
expect(lastLog.get("snapshots")[1].name).to.eq("response")
expect(lastLog.get("snapshots")[1].body).to.be.an("object")
describe "errors", ->
beforeEach ->
Cypress.config("defaultCommandTimeout", 200)
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
it "sets err on log when caused by code errors", (done) ->
finalThenCalled = false
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
done()
cy
.window().then (win) ->
new Promise (resolve) ->
win.$.get("http://www.google.com/foo.json")
.fail ->
foo.bar()
it "causes errors caused by onreadystatechange callback function", (done) ->
e = new Error("onreadystatechange caused this error")
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
expect(err).to.eq(e)
done()
cy
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.onreadystatechange = ->
throw e
xhr.send()
context "#server", ->
it "sets serverIsStubbed", ->
cy.server().then ->
expect(cy.state("serverIsStubbed")).to.be.true
it "can disable serverIsStubbed", ->
cy.server({enable: false}).then ->
expect(cy.state("serverIsStubbed")).to.be.false
it "sends enable to server", ->
set = cy.spy cy.state("server"), "set"
cy.server().then ->
expect(set).to.be.calledWithExactly({enable: true})
it "can disable the server after enabling it", ->
set = cy.spy cy.state("server"), "set"
cy
.server()
.route(/app/, {}).as("getJSON")
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getJSON").its("responseBody").should("deep.eq", {})
.server({enable: false})
.then ->
expect(set).to.be.calledWithExactly({enable: false})
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getJSON").its("responseBody").should("not.deep.eq", {})
it "sets delay at 0 by default", ->
cy
.server()
.route("*", {})
.then ->
expect(cy.state("server").getRoutes()[0].delay).to.eq(0)
it "passes down options.delay to routes", ->
cy
.server({delay: 100})
.route("*", {})
.then ->
expect(cy.state("server").getRoutes()[0].delay).to.eq(100)
it "passes event argument to xhr.onreadystatechange", (done) ->
cy.window().then (win) ->
xhr = new win.XMLHttpRequest()
xhr.onreadystatechange = (e) ->
expect(e).to.be.an.instanceof(win.Event)
done()
xhr.open("GET", "http://localhost:3500/")
describe "errors", ->
context "argument signature", ->
_.each ["asdf", 123, null, undefined], (arg) ->
it "throws on bad argument: #{arg}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.server() accepts only an object literal as its argument"
done()
cy.server(arg)
it "after turning off server it throws attempting to route", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.eq("cy.route() cannot be invoked before starting the cy.server()")
done()
cy
.server()
.route(/app/, {})
.server({enable: false})
.route(/app/, {})
describe ".log", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
it "provides specific #onFail", (done) ->
cy.on "fail", (err) =>
obj = {
name: "xhr"
referencesAlias: undefined
alias: "getFoo"
aliasType: "route"
type: "parent"
error: err
instrument: "command"
message: ""
event: true
}
lastLog = @lastLog
_.each obj, (value, key) =>
expect(lastLog.get(key)).deep.eq(value, "expected key: #{key} to eq value: #{value}")
done()
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("/foo").done ->
throw new Error("specific ajax error")
context.skip "#server", ->
beforeEach ->
defaults = {
ignore: true
respond: true
delay: 10
beforeRequest: ->
afterResponse: ->
onAbort: ->
onError: ->
onFilter: ->
}
@options = (obj) ->
_.extend obj, defaults
@create = cy.spy @Cypress.Server, "create"
it "can accept an onRequest and onResponse callback", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server(onRequest, onResponse)
it "can accept onRequest and onRespond through options", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server({onRequest: onRequest, onResponse: onResponse})
describe "without sinon present", ->
beforeEach ->
## force us to start from blank window
cy.state("$autIframe").prop("src", "about:blank")
it "can start server with no errors", ->
cy
.server()
.visit("http://localhost:3500/fixtures/sinon.html")
it "can add routes with no errors", ->
cy
.server()
.route(/foo/, {})
.visit("http://localhost:3500/fixtures/sinon.html")
it "routes xhr requests", ->
cy
.server()
.route(/foo/, {foo: "bar"})
.visit("http://localhost:3500/fixtures/sinon.html")
.window().then (w) ->
w.$.get("/foo")
.then (resp) ->
expect(resp).to.deep.eq {foo: "bar"}
it "works with aliases", ->
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/sinon.html")
.window().then (w) ->
w.$.get("/foo")
.wait("@getFoo").then (xhr) ->
expect(xhr.responseText).to.eq JSON.stringify({foo: "bar"})
it "prevents XHR's from going out from sinon.html", ->
cy
.server()
.route(/bar/, {bar: "baz"}).as("getBar")
.visit("http://localhost:3500/fixtures/sinon.html")
.wait("@getBar").then (xhr) ->
expect(xhr.responseText).to.eq JSON.stringify({bar: "baz"})
context "#route", ->
beforeEach ->
@expectOptionsToBe = (opts) =>
options = @route.getCall(0).args[0]
_.each opts, (value, key) ->
expect(options[key]).to.deep.eq(opts[key], "failed on property: (#{key})")
cy.server().then ->
@route = cy.spy(cy.state("server"), "route")
it "accepts url, response", ->
cy.route("/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts regex url, response", ->
cy.route(/foo/, {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /foo/
response: {}
})
it "does not mutate other routes when using shorthand", ->
cy
.route("POST", /foo/, {}).as("getFoo")
.route(/bar/, {}).as("getBar")
.then ->
expect(@route.firstCall.args[0].method).to.eq("POST")
expect(@route.secondCall.args[0].method).to.eq("GET")
it "accepts url, response, onRequest", ->
onRequest = ->
cy.route({
url: "/foo",
response: {},
onRequest: onRequest
}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
onRequest: onRequest
onResponse: undefined
})
it "accepts url, response, onRequest, onResponse", ->
onRequest = ->
onResponse = ->
cy.route({
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
})
it "accepts method, url, response", ->
cy.route("GET", "/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts method, url, response, onRequest", ->
onRequest = ->
cy.route({
method: "GET"
url: "/foo"
response: {}
onRequest: onRequest
}).then ->
@expectOptionsToBe({
method: "GET"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: undefined
})
it "accepts method, url, response, onRequest, onResponse", ->
onRequest = ->
onResponse = ->
cy.route({
method: "GET"
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
}).then ->
@expectOptionsToBe({
method: "GET"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: onResponse
})
it "uppercases method", ->
cy.route("get", "/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts string or regex as the url", ->
cy.route("get", /.*/, {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /.*/
response: {}
})
it "does not require response or method when not stubbing", ->
cy
.server()
.route(/users/).as("getUsers")
.then ->
@expectOptionsToBe({
status: 200
method: "GET"
url: /users/
})
it "does not require response when not stubbing", ->
cy
.server()
.route("POST", /users/).as("createUsers")
.then ->
@expectOptionsToBe({
status: 200
method: "POST"
url: /users/
})
it "accepts an object literal as options", ->
onRequest = ->
onResponse = ->
opts = {
method: "PUT"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: onResponse
}
cy.route(opts).then ->
@expectOptionsToBe(opts)
it "can accept wildcard * as URL and converts to /.*/ regex", ->
opts = {
url: "*"
response: {}
}
cy.route(opts).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /.*/
originalUrl: "*"
response: {}
})
## FIXME
it.skip "can explicitly done() in onRequest function from options", (done) ->
cy
.server()
.route({
method: "POST"
url: "/users"
response: {}
onRequest: -> done()
})
.then ->
cy.state("window").$.post("/users", "name=brian")
it "can accept response as a function", ->
users = [{}, {}]
getUsers = -> users
cy.route(/users/, getUsers)
.then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /users/
response: users
})
it "invokes response function with runnable.ctx", ->
ctx = @
users = [{}, {}]
getUsers = ->
expect(@ is ctx).to.be.true
cy.route(/users/, getUsers)
it "passes options as argument", ->
ctx = @
users = [{}, {}]
getUsers = (opts) ->
expect(opts).to.be.an("object")
expect(opts.method).to.eq("GET")
cy.route(/users/, getUsers)
it "can accept response as a function which returns a promise", ->
users = [{}, {}]
getUsers = ->
new Promise (resolve, reject) ->
setTimeout ->
resolve(users)
, 10
cy.route(/users/, getUsers)
.then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /users/
response: users
})
it "can accept a function which returns options", ->
users = [{}, {}]
getRoute = ->
{
method: "GET"
url: /users/
status: 201
response: -> Promise.resolve(users)
}
cy.route(getRoute)
.then ->
@expectOptionsToBe({
method: "GET"
status: 201
url: /users/
response: users
})
it "invokes route function with runnable.ctx", ->
ctx = @
getUsers = ->
expect(@ is ctx).to.be.true
{
url: /foo/
}
cy.route(getUsers)
it.skip "adds multiple routes to the responses array", ->
cy
.route("foo", {})
.route("bar", {})
.then ->
expect(cy.state("sandbox").server.responses).to.have.length(2)
it "can use regular strings as response", ->
cy
.route("/foo", "foo bar baz").as("getFoo")
.window().then (win) ->
win.$.get("/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.eq "foo bar baz"
it "can stub requests with uncommon HTTP methods", ->
cy
.route("PROPFIND", "/foo", "foo bar baz").as("getFoo")
.window().then (win) ->
win.$.ajax({
url: "/foo"
method: "PROPFIND"
})
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.eq "foo bar baz"
it.skip "does not error when response is null but respond is false", ->
cy.route
url: /foo/
respond: false
describe "deprecations", ->
beforeEach ->
@warn = cy.spy(window.top.console, "warn")
it "logs on {force404: false}", ->
cy
.server({force404: false})
.then ->
expect(@warn).to.be.calledWith("Cypress Warning: Passing cy.server({force404: false}) is now the default behavior of cy.server(). You can safely remove this option.")
it "does not log on {force404: true}", ->
cy
.server({force404: true})
.then ->
expect(@warn).not.to.be.called
describe "request response alias", ->
it "matches xhrs with lowercase methods", ->
cy
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("get", "/foo")
xhr.send()
.wait("@getFoo")
it "can pass an alias reference to route", ->
cy
.noop({foo: "bar"}).as("foo")
.route(/foo/, "@foo").as("getFoo")
.window().then (win) ->
win.$.getJSON("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.deep.eq {foo: "bar"}
expect(xhr.responseBody).to.deep.eq @foo
it "can pass an alias when using a response function", ->
getFoo = ->
Promise.resolve("@foo")
cy
.noop({foo: "bar"}).as("foo")
.route(/foo/, getFoo).as("getFoo")
.window().then (win) ->
win.$.getJSON("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.deep.eq {foo: "bar"}
expect(xhr.responseBody).to.deep.eq @foo
it "can alias a route without stubbing it", ->
cy
.route(/fixtures\/app/).as("getFoo")
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getFoo").then (xhr) ->
log = cy.queue.logs({name: "xhr"})[0]
expect(log.get("displayName")).to.eq("xhr")
expect(log.get("alias")).to.eq("getFoo")
expect(xhr.responseBody).to.deep.eq({
some: "json"
foo: {
bar: "baz"
}
})
describe "errors", ->
beforeEach ->
Cypress.config("defaultCommandTimeout", 50)
@logs = []
cy.on "log:added", (attrs, log) =>
@lastLog = log
@logs.push(log)
return null
it "throws if cy.server() hasnt been invoked", (done) ->
cy.state("serverIsStubbed", false)
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot be invoked before starting the cy.server()"
done()
cy.route()
it "url must be a string or regexp", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
done()
cy.route({
url: {}
})
it "url must be a string or regexp when a function", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
done()
getUrl = ->
Promise.resolve({url: {}})
cy.route(getUrl)
it "fails when functions reject", (done) ->
error = new Error
cy.on "fail", (err) ->
expect(err).to.eq(error)
done()
getUrl = ->
Promise.reject(error)
cy.route(getUrl)
it "fails when method is invalid", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid method: 'POSTS'."
done()
cy.route("posts", "/foo", {})
it "requires a url when given a response", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() must be called with a url. It can be a string or regular expression."
done()
cy.route({})
_.each [null, undefined], (val) ->
it "throws if response options was explicitly set to #{val}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
done()
cy.route({url: /foo/, response: val})
it "throws if response argument was explicitly set to #{val}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
done()
cy.route(/foo/, val)
it "requires arguments", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was not provided any arguments. You must provide valid arguments."
done()
cy.route()
it "sets err on log when caused by the XHR response", (done) ->
@route.restore()
cy.on "fail", (err) =>
lastLog = @lastLog
## route + window + xhr log === 3
expect(@logs.length).to.eq(3)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
done()
cy
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("foo_bar").done ->
foo.bar()
it.skip "explodes if response fixture signature errors", (done) ->
@trigger = cy.stub(@Cypress, "trigger").withArgs("fixture").callsArgWithAsync(2, {__error: "some error"})
logs = []
_this = @
## we have to restore the trigger when commandErr is called
## so that something logs out!
cy.commandErr = _.wrap cy.commandErr, (orig, err) ->
_this.Cypress.trigger.restore()
orig.call(@, err)
cy.on "log:added", (attrs, @log) =>
logs.push @log
cy.on "fail", (err) =>
expect(err.message).to.eq "some error"
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, fixture:bar"
done()
cy
.route(/foo/, "fixture:bar")
## TODO: handle this uncaught exception failure
it.skip "does not retry (cancels existing promise) when xhr errors", (done) ->
cancel = cy.spy(Promise.prototype, "cancel")
cy.on "command:retry", =>
if cy.state("error")
done("should have cancelled and not retried after failing")
cy.on "fail", (err) =>
p = cy.state("promise")
_.delay =>
expect(cancel).to.be.calledOn(p)
done()
, 100
cy
.route({
url: /foo/,
response: {}
delay: 100
})
.window().then (win) ->
win.$.getJSON("/foo").done ->
throw new Error("foo failed")
null
.get("button").should("have.class", "does-not-exist")
it "explodes if response alias cannot be found", (done) ->
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(2)
expect(err.message).to.eq "cy.route() could not find a registered alias for: '@bar'.\nAvailable aliases are: 'foo'."
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, @bar"
done()
cy
.wrap({foo: "bar"}).as("foo")
.route(/foo/, "@bar")
describe ".log", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.instrument is "route"
@lastLog = log
@logs.push(log)
return null
it "has name of route", ->
cy.route("/foo", {}).then ->
lastLog = @lastLog
expect(lastLog.get("name")).to.eq "route"
it "uses the wildcard URL", ->
cy.route("*", {}).then ->
lastLog = @lastLog
expect(lastLog.get("url")).to.eq("*")
it "#consoleProps", ->
cy.route("*", {foo: "bar"}).as("foo").then ->
expect(@lastLog.invoke("consoleProps")).to.deep.eq {
Command: "route"
Method: "GET"
URL: "*"
Status: 200
Response: {foo: "bar"}
Alias: "foo"
# Responded: 1 time
# "-------": ""
# Responses: []
}
describe "numResponses", ->
it "is initially 0", ->
cy.route(/foo/, {}).then =>
lastLog = @lastLog
expect(lastLog.get("numResponses")).to.eq 0
it "is incremented to 2", ->
cy
.route(/foo/, {})
.window().then (win) ->
win.$.get("/foo")
.then ->
expect(@lastLog.get("numResponses")).to.eq 1
it "is incremented for each matching request", ->
cy
.route(/foo/, {})
.window().then (win) ->
Promise.all([
win.$.get("/foo")
win.$.get("/foo")
win.$.get("/foo")
])
.then ->
expect(@lastLog.get("numResponses")).to.eq 3
context "consoleProps logs", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
describe "when stubbed", ->
it "says Stubbed: Yes", ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
.then ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")
describe "zero configuration / zero routes", ->
beforeEach ->
cy
.server({force404: true})
.window().then (win) ->
new Promise (resolve) ->
win.$.ajax({
method: "POST"
url: "/foo"
data: JSON.stringify({foo: "bar"})
}).fail ->
resolve()
it "calculates duration", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Duration).to.be.a("number")
expect(consoleProps.Duration).to.be.gt(1)
expect(consoleProps.Duration).to.be.lt(1000)
it "sends back regular 404", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"
expect(consoleProps).to.deep.eq({
Method: "POST"
Status: "404 (Not Found)"
URL: "http://localhost:3500/foo"
XHR: xhr.xhr
})
it "says Stubbed: Yes when sent 404 back", ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")
describe "whitelisting", ->
it "does not send back 404s on whitelisted routes", ->
cy
.server()
.window().then (win) ->
win.$.get("/fixtures/app.js")
.then (resp) ->
expect(resp).to.eq "{ 'bar' }\n"
describe "route setup", ->
beforeEach ->
cy
.server({force404: true})
.route("/foo", {}).as("anyRequest")
.window().then (win) ->
win.$.get("/bar")
null
it "sends back 404 when request doesnt match route", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Note).to.eq("This request did not match any of your routes. It was automatically sent back '404'. Setting cy.server({force404: false}) will turn off this behavior.")
describe "{force404: false}", ->
beforeEach ->
cy
.server()
.window().then (win) ->
win.$.getJSON("/fixtures/app.json")
it "says Stubbed: No when request isnt forced 404", ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("No")
it "logs request + response headers", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Request.headers).to.be.an("object")
expect(consoleProps.Response.headers).to.be.an("object")
it "logs Method, Status, URL, and XHR", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"
expect(consoleProps).to.deep.eq({
Method: "GET"
URL: "http://localhost:3500/fixtures/app.json"
Status: "200 (OK)"
XHR: xhr.xhr
})
it "logs response", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Response.body).to.deep.eq({
some: "json"
foo: {
bar: "baz"
}
})
it "sets groups Initiator", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
group = consoleProps.groups()[0]
expect(group.name).to.eq("Initiator")
expect(group.label).to.be.false
expect(group.items[0]).to.be.a("string")
expect(group.items[0].split("\n").length).to.gt(1)
context "renderProps", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
describe "in any case", ->
beforeEach ->
cy
.server()
.route(/foo/, {})
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
it "sends correct message", ->
cy.then ->
expect(@lastLog.invoke("renderProps").message).to.equal("GET 200 /foo")
describe "when response is successful", ->
beforeEach ->
cy
.server()
.route(/foo/, {})
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
it "sends correct indicator", ->
cy.then ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("successful")
describe "when response is pending", ->
beforeEach ->
cy
.server()
.route({ url: "/foo", delay: 500, response: {} })
.window().then (win) ->
win.$.get("/foo")
null
## FAILING
it "sends correct message", ->
expect(@lastLog.invoke("renderProps").message).to.equal("GET --- /foo")
it "sends correct indicator", ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("pending")
describe "when response is outside 200 range", ->
beforeEach ->
cy
.server()
.route({ url: "/foo", status: 500, response: {} })
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").fail -> resolve()
it "sends correct indicator", ->
cy.then ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("bad")
context "abort", ->
xhrs = []
beforeEach ->
cy.visit("/fixtures/jquery.html")
it "does not abort xhr's between tests", ->
cy.window().then (win) ->
_.times 2, ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
xhrs.push(xhr)
it "has not aborted the xhrs", ->
_.each xhrs, (xhr) ->
expect(xhr.aborted).not.to.be.false
it "aborts xhrs that haven't been sent", ->
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=0")
xhr.abort()
expect(xhr.aborted).to.be.true
it "aborts xhrs currently in flight", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=999")
xhr.send()
xhr.abort()
cy.wrap(null).should ->
expect(log.get("state")).to.eq("failed")
expect(log.invoke("renderProps")).to.deep.eq({
message: "GET (aborted) /timeout?ms=999",
indicator: 'aborted',
})
expect(xhr.aborted).to.be.true
## https://github.com/cypress-io/cypress/issues/3008
it "aborts xhrs even when responseType not '' or 'text'", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.responseType = 'arraybuffer'
xhr.open("GET", "/timeout?ms=1000")
xhr.send()
xhr.abort()
cy.wrap(null).should ->
expect(log.get("state")).to.eq("failed")
expect(xhr.aborted).to.be.true
## https://github.com/cypress-io/cypress/issues/1652
it "does not set aborted on XHR's that have completed by have had .abort() called", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=0")
xhr.onload = ->
xhr.abort()
xhr.foo = "bar"
resolve(xhr)
xhr.send()
.then (xhr) ->
cy
.wrap(null)
.should ->
## ensure this is set to prevent accidental
## race conditions down the road if something
## goes wrong
expect(xhr.foo).to.eq("bar")
expect(xhr.aborted).not.to.be.true
expect(log.get("state")).to.eq("passed")
context "Cypress.on(window:unload)", ->
it "cancels all open XHR's", ->
xhrs = []
cy
.window()
.then (win) ->
_.times 2, ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=200")
xhr.send()
xhrs.push(xhr)
.reload()
.then ->
_.each xhrs, (xhr) ->
expect(xhr.canceled).to.be.true
context "Cypress.on(window:before:load)", ->
it "reapplies server + route automatically before window:load", ->
## this tests that the server + routes are automatically reapplied
## after the 2nd visit - which is an example of the remote iframe
## causing an onBeforeLoad event
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/jquery.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
.visit("http://localhost:3500/fixtures/generic.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
it "reapplies server + route automatically during page transitions", ->
## this tests that the server + routes are automatically reapplied
## after the 2nd visit - which is an example of the remote iframe
## causing an onBeforeLoad event
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/jquery.html")
.window().then (win) ->
url = "http://localhost:3500/fixtures/generic.html"
$a = win.$("<a href='#{url}'>jquery</a>")
.appendTo(win.document.body)
## synchronous beforeunload
$a.get(0).click()
.url().should("include", "/generic.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
  ## NOTE(review): skipped suite - uses the legacy @Cypress event API
  ## (once/trigger "abort"); confirm against current event names before re-enabling
  context.skip "#cancel", ->
    it "calls server#cancel", (done) ->
      cancel = null

      @Cypress.once "abort", ->
        expect(cancel).to.be.called
        done()

      cy.server().then ->
        cancel = cy.spy cy.state("server"), "cancel"
        @Cypress.trigger "abort"
  ## NOTE(review): skipped suite for cy.respond() - kept for reference
  context.skip "#respond", ->
    it "calls server#respond", ->
      respond = null

      cy
        .server({delay: 100}).then (server) ->
          respond = cy.spy server, "respond"
        .window().then (win) ->
          win.$.get("/users")

          null
        .respond().then ->
          expect(respond).to.be.calledOnce

    describe "errors", ->
      beforeEach ->
        @allowErrors()

      it "errors without a server", (done) ->
        cy.on "fail", (err) =>
          expect(err.message).to.eq "cy.respond() cannot be invoked before starting the cy.server()"
          done()

        cy.respond()

      it "errors with no pending requests", (done) ->
        cy.on "fail", (err) =>
          expect(err.message).to.eq "cy.respond() did not find any pending requests to respond to"
          done()

        cy
          .server()
          .route(/users/, {})
          .window().then (win) ->
            ## this is waited on to be resolved
            ## because of jquery promise thenable
            win.$.get("/users")
          .respond()

      ## currently this does not fail. we'll wait until someone cares
      # it "errors if response was null or undefined", (done) ->
      #   cy.on "fail", (err) ->
      #     cy
      #       .server()
      #       .route({
      #         url: /foo/
      #         respond: false
      #       })
      #       .window().then (win) ->
      #         win.$.get("/foo")
      #         null
      #       .respond()
# .respond() | 182202 | _ = Cypress._
$ = Cypress.$
Promise = Cypress.Promise
describe "src/cy/commands/xhr", ->
  ## snapshot the fixture page's head/body once so each test can restore
  ## a clean DOM without re-visiting
  before ->
    cy
      .visit("/fixtures/jquery.html")
      .then (win) ->
        h = $(win.document.head)
        ## drop script tags so re-injecting the head doesn't re-run them
        h.find("script").remove()

        @head = h.prop("outerHTML")
        @body = win.document.body.outerHTML
  ## restore the pristine head/body captured in the before() hook
  beforeEach ->
    doc = cy.state("document")

    $(doc.head).empty().html(@head)
    $(doc.body).empty().html(@body)
context "#startXhrServer", ->
it "continues to be a defined properties", ->
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
expect(xhr.onload).to.be.a("function")
expect(xhr.onerror).to.be.a("function")
expect(xhr.onreadystatechange).to.be.a("function")
it "prevents infinite recursion", ->
onloaded = false
onreadystatechanged = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
handlers = ["onload", "onerror", "onreadystatechange"]
wrap = ->
handlers.forEach (handler) ->
bak = xhr[handler]
xhr[handler] = ->
if _.isFunction(bak)
bak.apply(xhr, arguments)
xhr = new win.XMLHttpRequest
xhr.addEventListener("readystatechange", wrap, false)
xhr.onreadystatechange = ->
throw new Error("NOOO")
xhr.onreadystatechange
xhr.onreadystatechange = ->
onreadystatechanged = true
xhr.open("GET", "/foo")
xhr.onload = ->
throw new Error("NOOO")
xhr.onload
xhr.onload = ->
onloaded = true
xhr.send()
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(onreadystatechanged).to.be.true
expect(xhr.status).to.eq(404)
it "allows multiple readystatechange calls", ->
responseText = null
responseStatuses = 0
cy
.server()
.route({ url: /longtext.txt/ }).as("getLongText")
.task('create:long:file')
.window().then (win) ->
xhr = new win.XMLHttpRequest()
xhr.onreadystatechange = ->
responseText = xhr.responseText
if xhr.readyState == 3
responseStatuses++
xhr.open("GET", "/_test-output/longtext.txt?" + Cypress._.random(0, 1e6))
xhr.send()
null
.wait("@getLongText").then (xhr) ->
expect(responseStatuses).to.be.gt(1)
expect(xhr.status).to.eq(200)
it "works with jquery too", ->
failed = false
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
handlers = ["onload", "onerror", "onreadystatechange"]
wrap = ->
xhr = @
handlers.forEach (handler) ->
bak = xhr[handler]
xhr[handler] = ->
if _.isFunction(bak)
bak.apply(xhr, arguments)
open = win.XMLHttpRequest.prototype.open
win.XMLHttpRequest.prototype.open = ->
@addEventListener("readystatechange", wrap, false)
open.apply(@, arguments)
xhr = win.$.get("/foo")
.fail ->
failed = true
.always ->
onloaded = true
null
.wait("@getFoo").then (xhr) ->
expect(failed).to.be.true
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls existing onload handlers", ->
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.onload = ->
onloaded = true
xhr.open("GET", "/foo")
xhr.send()
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls onload handlers attached after xhr#send", ->
onloaded = false
cy
.server()
.route({url: /foo/}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = ->
onloaded = true
null
.wait("@getFoo").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(404)
it "calls onload handlers attached after xhr#send asynchronously", ->
onloaded = false
cy
.server()
.route({url: /timeout/}).as("getTimeout")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
_.delay ->
xhr.onload = ->
onloaded = true
, 20
null
.wait("@getTimeout").then (xhr) ->
expect(onloaded).to.be.true
expect(xhr.status).to.eq(200)
it "fallbacks even when onreadystatechange is overriden", ->
onloaded = false
onreadystatechanged = false
cy
.server()
.route({url: /timeout/}).as("get.timeout")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
xhr.onreadystatechange = ->
onreadystatechanged = true
xhr.onload = ->
onloaded = true
null
.wait("@get.timeout").then (xhr) ->
expect(onloaded).to.be.true
expect(onreadystatechanged).to.be.true
expect(xhr.status).to.eq(200)
  ## NOTE(review): skipped suite - documents the default non-ajax filtering
  ## (html/js/css pass through untouched) and the {ignore: false} escape hatch
  describe.skip "filtering requests", ->
    beforeEach ->
      cy.server()

    extensions = {
      html: "ajax html"
      js: "{foo: \"bar\"}"
      css: "body {}"
    }

    _.each extensions, (val, ext) ->
      it "filters out non ajax requests by default for extension: .#{ext}", (done) ->
        cy.state("window").$.get("/fixtures/app.#{ext}").done (res) ->
          expect(res).to.eq val
          done()

    it "can disable default filtering", (done) ->
      ## this should throw since it should return 404 when no
      ## route matches it
      cy.server({ignore: false}).window().then (w) ->
        Promise.resolve(w.$.get("/fixtures/app.html")).catch -> done()
  ## verifies how request URLs are resolved (relative, base-tag, FQDN, CORS)
  ## and how stubbed requests are rewritten through the /__cypress/xhrs proxy;
  ## each test spies on the server's onOpen to see the exact proxied URL
  describe "url rewriting", ->
    it "has a FQDN absolute-relative url", ->
      cy
        .server()
        .route({
          url: /foo/
        }).as("getFoo")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3500/foo")
          ## non-stubbed route: URL passes through un-rewritten
          expect(@open).to.be.calledWith("GET", "/foo")

    it "has a relative URL", ->
      cy
        .server()
        .route(/foo/).as("getFoo")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("foo")

          null
        .wait("@getFoo").then (xhr) ->
          ## relative URL resolves against the fixture page's directory
          expect(xhr.url).to.eq("http://localhost:3500/fixtures/foo")
          expect(@open).to.be.calledWith("GET", "foo")

    it "resolves relative urls correctly when base tag is present", ->
      cy
        .server()
        .route({
          url: /foo/
        }).as("getFoo")
        .window().then (win) ->
          ## base href overrides the page path for relative resolution
          win.$("<base href='/'>").appendTo(win.$("head"))

          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3500/foo")
          expect(@open).to.be.calledWith("GET", "foo")

    it "resolves relative urls correctly when base tag is present on nested routes", ->
      cy
        .server()
        .route({
          url: /foo/
        }).as("getFoo")
        .window().then (win) ->
          win.$("<base href='/nested/route/path'>").appendTo(win.$("head"))

          @open = cy.spy(cy.state("server").options, "onOpen")

          ## "../foo" resolves one level up from the base path
          win.$.get("../foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3500/nested/foo")
          expect(@open).to.be.calledWith("GET", "../foo")

    it "allows cross origin requests to go out as necessary", ->
      cy
        .server()
        .route(/foo/).as("getFoo")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("http://localhost:3501/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3501/foo")
          expect(@open).to.be.calledWith("GET", "http://localhost:3501/foo")

    it "rewrites FQDN url's for stubs", ->
      cy
        .server()
        .route({
          url: /foo/
          response: {}
        }).as("getFoo")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("http://localhost:9999/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:9999/foo")
          ## stubbed route: rewritten through the Cypress xhr proxy
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:9999/foo")

    it "rewrites absolute url's for stubs", ->
      cy
        .server()
        .route(/foo/, {}).as("getFoo")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3500/foo")
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")

    it "rewrites 404's url's for stubs", ->
      cy
        .server({force404: true})
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          ## no route matches, so force404 replies 404 and jQuery fails
          new Promise (resolve) ->
            win.$.ajax({
              method: "POST"
              url: "/foo"
              data: JSON.stringify({foo: "bar"})
            }).fail ->
              resolve()
        .then ->
          xhr = cy.state("responses")[0].xhr
          expect(xhr.url).to.eq("http://localhost:3500/foo")
          expect(@open).to.be.calledWith("POST", "/__cypress/xhrs/http://localhost:3500/foo")

    it "rewrites urls with nested segments", ->
      cy
        .server()
        .route({
          url: /phones/
          response: {}
        }).as("getPhones")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("phones/phones.json")

          null
        .wait("@getPhones")
        .then ->
          xhr = cy.state("responses")[0].xhr
          expect(xhr.url).to.eq("http://localhost:3500/fixtures/phones/phones.json")
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/fixtures/phones/phones.json")

    it "does not rewrite CORS", ->
      cy
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          ## no server/route active: CORS request goes out (and fails) as-is
          new Promise (resolve) ->
            win.$.get("http://www.google.com/phones/phones.json").fail ->
              resolve()
        .then ->
          xhr = cy.state("requests")[0].xhr
          expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
          expect(@open).to.be.calledWith("GET", "http://www.google.com/phones/phones.json")

    it "can stub real CORS requests too", ->
      cy
        .server()
        .route({
          url: /phones/
          response: {}
        }).as("getPhones")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("http://www.google.com/phones/phones.json")

          null
        .wait("@getPhones")
        .then ->
          xhr = cy.state("responses")[0].xhr
          expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://www.google.com/phones/phones.json")

    it "can stub CORS string routes", ->
      cy
        .server()
        .route("http://localhost:3501/fixtures/app.json").as("getPhones")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("http://localhost:3501/fixtures/app.json")

          null
        .wait("@getPhones")
        .then ->
          xhr = cy.state("responses")[0].xhr
          expect(xhr.url).to.eq("http://localhost:3501/fixtures/app.json")
          ## string route without a response: not proxied
          expect(@open).to.be.calledWith("GET", "http://localhost:3501/fixtures/app.json")

    # it "can stub root requests to CORS", ->
    #   cy
    #     .server()
    #     .route({
    #       url: "http://localhost:3501"
    #       stub: false
    #     }).as("getPhones")
    #     .window().then (win) ->
    #       @open = cy.spy(cy.state("server").options, "onOpen")
    #       win.$.get("http://localhost:3501")
    #       null
    #     .wait("@getPhones")
    #     .then ->
    #       xhr = cy.state("responses")[0].xhr
    #       expect(xhr.url).to.eq("http://localhost:3501")
    #       expect(@open).to.be.calledWith("GET", "/http://localhost:3501")

    it "sets display correctly when there is no remoteOrigin", ->
      ## this is an example of having cypress act as your webserver
      ## when the remoteHost is <root>
      cy
        .server()
        .route({
          url: /foo/
          response: {}
        }).as("getFoo")
        .window().then (win) ->
          ## trick cypress into thinking the remoteOrigin is location:9999
          cy.stub(cy, "getRemoteLocation").withArgs("origin").returns("")

          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.url).to.eq("http://localhost:3500/foo")
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")

    it "decodes proxy urls", ->
      cy
        .server()
        .route({
          url: /users/
          response: {}
        }).as("getUsers")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("/users?q=(id eq 123)")

          null
        .wait("@getUsers")
        .then ->
          xhr = cy.state("responses")[0].xhr
          ## reported url is decoded, proxied url stays encoded
          expect(xhr.url).to.eq("http://localhost:3500/users?q=(id eq 123)")

          url = encodeURI("users?q=(id eq 123)")
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")

    it "decodes proxy urls #2", ->
      cy
        .server()
        .route(/accounts/, {}).as("getAccounts")
        .window().then (win) ->
          @open = cy.spy(cy.state("server").options, "onOpen")

          win.$.get("/accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")

          null
        .wait("@getAccounts")
        .then ->
          xhr = cy.state("responses")[0].xhr
          expect(xhr.url).to.eq("http://localhost:3500/accounts?page=1&$filter=(rowStatus+eq+1)&$orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")

          url = "accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true"
          expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")
  describe "#onResponse", ->
    ## the route's onResponse callback runs bound to cy with the proxy xhr
    it "calls onResponse callback with cy context + proxy xhr", (done) ->
      cy
        .server()
        .route({
          url: /foo/
          response: {foo: "bar"}
          onResponse: (xhr) ->
            expect(@).to.eq(cy)
            expect(xhr.responseBody).to.deep.eq {foo: "bar"}
            done()
        })
        .window().then (win) ->
          win.$.get("/foo")

          null
  describe "#onAbort", ->
    ## the route's onAbort callback runs bound to cy when the XHR is aborted
    it "calls onAbort callback with cy context + proxy xhr", (done) ->
      cy
        .server()
        .route({
          url: /foo/
          response: {}
          onAbort: (xhr) ->
            expect(@).to.eq(cy)
            expect(xhr.aborted).to.be.true
            done()
        })
        .window().then (win) ->
          xhr = new win.XMLHttpRequest
          xhr.open("GET", "/foo")
          xhr.send()
          ## abort immediately after send to trigger the callback
          xhr.abort()

          null
describe "request parsing", ->
it "adds parses requestBody into JSON", (done) ->
cy
.server()
.route({
method: "POST"
url: /foo/
response: {}
onRequest: (xhr) ->
expect(@).to.eq(cy)
expect(xhr.requestBody).to.deep.eq {foo: "bar"}
done()
})
.window().then (win) ->
win.$.ajax
type: "POST"
url: "/foo"
data: JSON.stringify({foo: "bar"})
dataType: "json"
null
## https://github.com/cypress-io/cypress/issues/65
it "provides the correct requestBody on multiple requests", ->
post = (win, obj) ->
win.$.ajax({
type: "POST"
url: "/foo"
data: JSON.stringify(obj)
dataType: "json"
})
return null
cy
.server()
.route("POST", /foo/, {}).as("getFoo")
.window().then (win) ->
post(win, {foo: "bar1"})
.wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar1"})
.window().then (win) ->
post(win, {foo: "bar2"})
.wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar2"})
it "handles arraybuffer", ->
cy
.server()
.route("GET", /buffer/).as("getBuffer")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.responseType = "arraybuffer"
xhr.open("GET", "/buffer")
xhr.send()
null
.wait("@getBuffer").then (xhr) ->
expect(xhr.responseBody.toString()).to.eq("[object ArrayBuffer]")
it "handles xml", ->
cy
.server()
.route("GET", /xml/).as("getXML")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/xml")
xhr.send()
null
.wait("@getXML").its("responseBody").should("eq", "<foo>bar</foo>")
  describe "issue #84", ->
    ## a later route (/forms/) must win over an earlier route whose
    ## options (status 503) merely resemble the request
    it "does not incorrectly match options", ->
      cy
        .server()
        .route({
          method: "GET"
          url: /answers/
          status: 503
          response: {}
        })
        .route(/forms/, []).as("getForm")
        .window().then (win) ->
          win.$.getJSON("/forms")

          null
        .wait("@getForm").its("status").should("eq", 200)
  describe "#issue #85", ->
    ## re-routing /folders/ mid-test must produce a fresh alias that
    ## matches subsequent requests (including query strings)
    it "correctly returns the right XHR alias", ->
      cy
        .server()
        .route({
          method: "POST"
          url: /foo/
          response: {}
        }).as("getFoo")
        .route(/folders/, {foo: "bar"}).as("getFolders")
        .window().then (win) ->
          win.$.getJSON("/folders")
          win.$.post("/foo", {})

          null
        .wait("@getFolders")
        .wait("@getFoo")
        ## redefine the same url pattern under a new alias
        .route(/folders/, {foo: "baz"}).as("getFoldersWithSearch")
        .window().then (win) ->
          win.$.getJSON("/folders/123/activities?foo=bar")

          null
        .wait("@getFoldersWithSearch").its("url")
        .should("contain", "?foo=bar")
  ## verifies the command-log entries created for stubbed XHRs:
  ## immediate "pending" entry + request snapshot, abort handling,
  ## and the final "passed" entry + response snapshot
  describe ".log", ->
    beforeEach ->
      @logs = []

      ## collect only xhr log entries for assertions below
      cy.on "log:added", (attrs, log) =>
        if attrs.name is "xhr"
          @lastLog = log
          @logs.push(log)

      return null

    context "requests", ->
      it "immediately logs xhr obj", ->
        cy
          .server()
          .route(/foo/, {}).as("getFoo")
          .window().then (win) ->
            win.$.get("foo")

            null
          .then ->
            lastLog = @lastLog

            expect(lastLog.pick("name", "displayName", "event", "alias", "aliasType", "state")).to.deep.eq {
              name: "xhr"
              displayName: "xhr stub"
              event: true
              alias: "getFoo"
              aliasType: "route"
              state: "pending"
            }

            ## only the "request" snapshot exists before the response
            snapshots = lastLog.get("snapshots")
            expect(snapshots.length).to.eq(1)
            expect(snapshots[0].name).to.eq("request")
            expect(snapshots[0].body).to.be.an("object")

      it "does not end xhr requests when the associated command ends", ->
        logs = null

        cy
          .server()
          .route({
            url: /foo/,
            response: {}
            delay: 50
          }).as("getFoo")
          .window().then (w) ->
            w.$.getJSON("foo")
            w.$.getJSON("foo")
            w.$.getJSON("foo")

            null
          .then ->
            cmd = cy.queue.find({name: "window"})
            logs = cmd.get("next").get("logs")

            expect(logs.length).to.eq(3)

            ## NOTE(review): reads "end" here but "ended" after the wait -
            ## confirm both attributes exist on the log model
            _.each logs, (log) ->
              expect(log.get("name")).to.eq("xhr")
              expect(log.get("end")).not.to.be.true
          .wait(["@getFoo", "@getFoo", "@getFoo"]).then ->
            _.each logs, (log) ->
              expect(log.get("name")).to.eq("xhr")
              expect(log.get("ended")).to.be.true

      it "updates log immediately whenever an xhr is aborted", ->
        snapshot = null
        xhrs = null

        cy
          .server()
          .route({
            url: /foo/,
            response: {}
            delay: 50
          }).as("getFoo")
          .window().then (win) ->
            xhr1 = win.$.getJSON("foo1")
            xhr2 = win.$.getJSON("foo2")

            xhr1.abort()

            null
          .then ->
            xhrs = cy.queue.logs({name: "xhr"})

            ## aborted request is logged as failed with an AbortError
            ## plus a 2nd "aborted" snapshot
            expect(xhrs[0].get("state")).to.eq("failed")
            expect(xhrs[0].get("error").name).to.eq("AbortError")
            expect(xhrs[0].get("snapshots").length).to.eq(2)
            expect(xhrs[0].get("snapshots")[0].name).to.eq("request")
            expect(xhrs[0].get("snapshots")[0].body).to.be.a("object")
            expect(xhrs[0].get("snapshots")[1].name).to.eq("aborted")
            expect(xhrs[0].get("snapshots")[1].body).to.be.a("object")

            expect(cy.state("requests").length).to.eq(2)

            ## the abort should have set its response
            expect(cy.state("responses").length).to.eq(1)
          .wait(["@getFoo", "@getFoo"]).then ->
            ## should not re-snapshot after the response
            expect(xhrs[0].get("snapshots").length).to.eq(2)

      it "can access requestHeaders", ->
        cy
          .server()
          .route(/foo/, {}).as("getFoo")
          .window().then (win) ->
            win.$.ajax({
              method: "GET"
              url: "/foo"
              headers: {
                "x-token": "<KEY>"
              }
            })

            null
          .wait("@getFoo").its("requestHeaders").should("have.property", "x-token", "<KEY>")

    context "responses", ->
      beforeEach ->
        cy
          .server()
          .route(/foo/, {}).as("getFoo")
          .window().then (win) ->
            win.$.get("foo_bar")

            null
          .wait("@getFoo")

      it "logs obj", ->
        obj = {
          name: "xhr"
          displayName: "xhr stub"
          event: true
          message: ""
          type: "parent"
          aliasType: "route"
          referencesAlias: undefined
          alias: "getFoo"
        }

        lastLog = @lastLog

        _.each obj, (value, key) =>
          expect(lastLog.get(key)).to.deep.eq(value, "expected key: #{key} to eq value: #{value}")

      it "ends", ->
        lastLog = @lastLog

        expect(lastLog.get("state")).to.eq("passed")

      it "snapshots again", ->
        lastLog = @lastLog

        ## one snapshot at request time, one at response time
        expect(lastLog.get("snapshots").length).to.eq(2)
        expect(lastLog.get("snapshots")[0].name).to.eq("request")
        expect(lastLog.get("snapshots")[0].body).to.be.an("object")
        expect(lastLog.get("snapshots")[1].name).to.eq("response")
        expect(lastLog.get("snapshots")[1].body).to.be.an("object")
describe "errors", ->
beforeEach ->
Cypress.config("defaultCommandTimeout", 200)
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
it "sets err on log when caused by code errors", (done) ->
finalThenCalled = false
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
done()
cy
.window().then (win) ->
new Promise (resolve) ->
win.$.get("http://www.google.com/foo.json")
.fail ->
foo.bar()
it "causes errors caused by onreadystatechange callback function", (done) ->
e = new Error("onreadystatechange caused this error")
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
expect(err).to.eq(e)
done()
cy
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.onreadystatechange = ->
throw e
xhr.send()
  ## verifies cy.server() options: enable/disable, delay propagation,
  ## argument validation, and the failing-xhr log entry
  context "#server", ->
    it "sets serverIsStubbed", ->
      cy.server().then ->
        expect(cy.state("serverIsStubbed")).to.be.true

    it "can disable serverIsStubbed", ->
      cy.server({enable: false}).then ->
        expect(cy.state("serverIsStubbed")).to.be.false

    it "sends enable to server", ->
      set = cy.spy cy.state("server"), "set"

      cy.server().then ->
        expect(set).to.be.calledWithExactly({enable: true})

    it "can disable the server after enabling it", ->
      set = cy.spy cy.state("server"), "set"

      cy
        .server()
        .route(/app/, {}).as("getJSON")
        .window().then (win) ->
          win.$.get("/fixtures/app.json")

          null
        ## stubbed while enabled...
        .wait("@getJSON").its("responseBody").should("deep.eq", {})
        .server({enable: false})
        .then ->
          expect(set).to.be.calledWithExactly({enable: false})
        .window().then (win) ->
          win.$.get("/fixtures/app.json")

          null
        ## ...real response once disabled
        .wait("@getJSON").its("responseBody").should("not.deep.eq", {})

    it "sets delay at 0 by default", ->
      cy
        .server()
        .route("*", {})
        .then ->
          expect(cy.state("server").getRoutes()[0].delay).to.eq(0)

    it "passes down options.delay to routes", ->
      cy
        .server({delay: 100})
        .route("*", {})
        .then ->
          expect(cy.state("server").getRoutes()[0].delay).to.eq(100)

    it "passes event argument to xhr.onreadystatechange", (done) ->
      cy.window().then (win) ->
        xhr = new win.XMLHttpRequest()
        xhr.onreadystatechange = (e) ->
          ## the wrapped handler must still receive the native Event
          expect(e).to.be.an.instanceof(win.Event)
          done()
        xhr.open("GET", "http://localhost:3500/")

    describe "errors", ->
      context "argument signature", ->
        _.each ["asdf", 123, null, undefined], (arg) ->
          it "throws on bad argument: #{arg}", (done) ->
            cy.on "fail", (err) ->
              expect(err.message).to.include "cy.server() accepts only an object literal as its argument"
              done()

            cy.server(arg)

      it "after turning off server it throws attempting to route", (done) ->
        cy.on "fail", (err) ->
          expect(err.message).to.eq("cy.route() cannot be invoked before starting the cy.server()")

          done()

        cy
          .server()
          .route(/app/, {})
          .server({enable: false})
          .route(/app/, {})

    describe ".log", ->
      beforeEach ->
        @logs = []

        cy.on "log:added", (attrs, log) =>
          if attrs.name is "xhr"
            @lastLog = log
            @logs.push(log)

        return null

      it "provides specific #onFail", (done) ->
        cy.on "fail", (err) =>
          obj = {
            name: "xhr"
            referencesAlias: undefined
            alias: "getFoo"
            aliasType: "route"
            type: "parent"
            error: err
            instrument: "command"
            message: ""
            event: true
          }

          lastLog = @lastLog

          _.each obj, (value, key) =>
            expect(lastLog.get(key)).deep.eq(value, "expected key: #{key} to eq value: #{value}")

          done()

        cy
          .server()
          .route(/foo/, {}).as("getFoo")
          .window().then (win) ->
            ## throwing in the done() handler fails the test through the xhr log
            win.$.get("/foo").done ->
              throw new Error("specific ajax error")
context.skip "#server", ->
beforeEach ->
defaults = {
ignore: true
respond: true
delay: 10
beforeRequest: ->
afterResponse: ->
onAbort: ->
onError: ->
onFilter: ->
}
@options = (obj) ->
_.extend obj, defaults
@create = cy.spy @Cypress.Server, "create"
it "can accept an onRequest and onResponse callback", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server(onRequest, onResponse)
it "can accept onRequest and onRespond through options", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server({onRequest: onRequest, onResponse: onResponse})
  ## verifies cy.server()/cy.route() keep working on a page that
  ## ships its own copy of sinon (fixtures/sinon.html)
  describe "without sinon present", ->
    beforeEach ->
      ## force us to start from blank window
      cy.state("$autIframe").prop("src", "about:blank")

    it "can start server with no errors", ->
      cy
        .server()
        .visit("http://localhost:3500/fixtures/sinon.html")

    it "can add routes with no errors", ->
      cy
        .server()
        .route(/foo/, {})
        .visit("http://localhost:3500/fixtures/sinon.html")

    it "routes xhr requests", ->
      cy
        .server()
        .route(/foo/, {foo: "bar"})
        .visit("http://localhost:3500/fixtures/sinon.html")
        .window().then (w) ->
          w.$.get("/foo")
        .then (resp) ->
          expect(resp).to.deep.eq {foo: "bar"}

    it "works with aliases", ->
      cy
        .server()
        .route(/foo/, {foo: "bar"}).as("getFoo")
        .visit("http://localhost:3500/fixtures/sinon.html")
        .window().then (w) ->
          w.$.get("/foo")
        .wait("@getFoo").then (xhr) ->
          expect(xhr.responseText).to.eq JSON.stringify({foo: "bar"})

    it "prevents XHR's from going out from sinon.html", ->
      ## the fixture page fires its own /bar request on load
      cy
        .server()
        .route(/bar/, {bar: "baz"}).as("getBar")
        .visit("http://localhost:3500/fixtures/sinon.html")
        .wait("@getBar").then (xhr) ->
          expect(xhr.responseText).to.eq JSON.stringify({bar: "baz"})
context "#route", ->
    ## spy on the server's route() so each test can assert the exact
    ## normalized options cy.route() produced
    beforeEach ->
      @expectOptionsToBe = (opts) =>
        options = @route.getCall(0).args[0]
        _.each opts, (value, key) ->
          expect(options[key]).to.deep.eq(opts[key], "failed on property: (#{key})")

      cy.server().then ->
        @route = cy.spy(cy.state("server"), "route")
    ## cy.route() argument-signature normalization: url/regex + response
    it "accepts url, response", ->
      cy.route("/foo", {}).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: "/foo"
          response: {}
        })

    it "accepts regex url, response", ->
      cy.route(/foo/, {}).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: /foo/
          response: {}
        })

    it "does not mutate other routes when using shorthand", ->
      cy
        .route("POST", /foo/, {}).as("getFoo")
        .route(/bar/, {}).as("getBar")
        .then ->
          ## the POST from the first call must not leak into the second
          expect(@route.firstCall.args[0].method).to.eq("POST")
          expect(@route.secondCall.args[0].method).to.eq("GET")

    it "accepts url, response, onRequest", ->
      onRequest = ->

      cy.route({
        url: "/foo",
        response: {},
        onRequest: onRequest
      }).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: "/foo"
          response: {}
          onRequest: onRequest
          onResponse: undefined
        })

    it "accepts url, response, onRequest, onResponse", ->
      onRequest = ->
      onResponse = ->

      cy.route({
        url: "/foo"
        response: {}
        onRequest: onRequest
        onResponse: onResponse
      }).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: "/foo"
          response: {}
          onRequest: onRequest
          onResponse: onResponse
        })

    it "accepts method, url, response", ->
      cy.route("GET", "/foo", {}).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: "/foo"
          response: {}
        })
    ## cy.route() normalization with explicit method + callbacks
    it "accepts method, url, response, onRequest", ->
      onRequest = ->

      cy.route({
        method: "GET"
        url: "/foo"
        response: {}
        onRequest: onRequest
      }).then ->
        @expectOptionsToBe({
          method: "GET"
          url: "/foo"
          status: 200
          response: {}
          onRequest: onRequest
          onResponse: undefined
        })

    it "accepts method, url, response, onRequest, onResponse", ->
      onRequest = ->
      onResponse = ->

      cy.route({
        method: "GET"
        url: "/foo"
        response: {}
        onRequest: onRequest
        onResponse: onResponse
      }).then ->
        @expectOptionsToBe({
          method: "GET"
          url: "/foo"
          status: 200
          response: {}
          onRequest: onRequest
          onResponse: onResponse
        })

    it "uppercases method", ->
      cy.route("get", "/foo", {}).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: "/foo"
          response: {}
        })

    it "accepts string or regex as the url", ->
      cy.route("get", /.*/, {}).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: /.*/
          response: {}
        })
    ## non-stubbing routes (no response) and option-object / wildcard forms
    it "does not require response or method when not stubbing", ->
      cy
        .server()
        .route(/users/).as("getUsers")
        .then ->
          @expectOptionsToBe({
            status: 200
            method: "GET"
            url: /users/
          })

    it "does not require response when not stubbing", ->
      cy
        .server()
        .route("POST", /users/).as("createUsers")
        .then ->
          @expectOptionsToBe({
            status: 200
            method: "POST"
            url: /users/
          })

    it "accepts an object literal as options", ->
      onRequest = ->
      onResponse = ->

      opts = {
        method: "PUT"
        url: "/foo"
        status: 200
        response: {}
        onRequest: onRequest
        onResponse: onResponse
      }

      cy.route(opts).then ->
        @expectOptionsToBe(opts)

    it "can accept wildcard * as URL and converts to /.*/ regex", ->
      opts = {
        url: "*"
        response: {}
      }

      cy.route(opts).then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: /.*/
          ## the raw "*" is preserved for display purposes
          originalUrl: "*"
          response: {}
        })
    ## FIXME
    it.skip "can explicitly done() in onRequest function from options", (done) ->
      cy
        .server()
        .route({
          method: "POST"
          url: "/users"
          response: {}
          onRequest: -> done()
        })
        .then ->
          cy.state("window").$.post("/users", "name=brian")

    ## response may be a function returning the body (sync or promise)
    it "can accept response as a function", ->
      users = [{}, {}]

      getUsers = -> users

      cy.route(/users/, getUsers)
      .then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: /users/
          response: users
        })

    it "invokes response function with runnable.ctx", ->
      ctx = @

      users = [{}, {}]

      getUsers = ->
        expect(@ is ctx).to.be.true

      cy.route(/users/, getUsers)

    it "passes options as argument", ->
      ctx = @

      users = [{}, {}]

      getUsers = (opts) ->
        expect(opts).to.be.an("object")
        expect(opts.method).to.eq("GET")

      cy.route(/users/, getUsers)

    it "can accept response as a function which returns a promise", ->
      users = [{}, {}]

      getUsers = ->
        new Promise (resolve, reject) ->
          setTimeout ->
            resolve(users)
          , 10

      cy.route(/users/, getUsers)
      .then ->
        @expectOptionsToBe({
          method: "GET"
          status: 200
          url: /users/
          response: users
        })
    ## the whole route may be produced by a function returning options
    it "can accept a function which returns options", ->
      users = [{}, {}]

      getRoute = ->
        {
          method: "GET"
          url: /users/
          status: 201
          ## the response itself can still be a promise-returning function
          response: -> Promise.resolve(users)
        }

      cy.route(getRoute)
      .then ->
        @expectOptionsToBe({
          method: "GET"
          status: 201
          url: /users/
          response: users
        })

    it "invokes route function with runnable.ctx", ->
      ctx = @

      getUsers = ->
        expect(@ is ctx).to.be.true

        {
          url: /foo/
        }

      cy.route(getUsers)

    it.skip "adds multiple routes to the responses array", ->
      cy
        .route("foo", {})
        .route("bar", {})
        .then ->
          expect(cy.state("sandbox").server.responses).to.have.length(2)

    it "can use regular strings as response", ->
      cy
        .route("/foo", "foo bar baz").as("getFoo")
        .window().then (win) ->
          win.$.get("/foo")

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.responseBody).to.eq "foo bar baz"

    it "can stub requests with uncommon HTTP methods", ->
      cy
        .route("PROPFIND", "/foo", "foo bar baz").as("getFoo")
        .window().then (win) ->
          win.$.ajax({
            url: "/foo"
            method: "PROPFIND"
          })

          null
        .wait("@getFoo").then (xhr) ->
          expect(xhr.responseBody).to.eq "foo bar baz"

    it.skip "does not error when response is null but respond is false", ->
      cy.route
        url: /foo/
        respond: false
    ## force404:false is now the default and warns; force404:true stays silent
    describe "deprecations", ->
      beforeEach ->
        @warn = cy.spy(window.top.console, "warn")

      it "logs on {force404: false}", ->
        cy
          .server({force404: false})
          .then ->
            expect(@warn).to.be.calledWith("Cypress Warning: Passing cy.server({force404: false}) is now the default behavior of cy.server(). You can safely remove this option.")

      it "does not log on {force404: true}", ->
        cy
          .server({force404: true})
          .then ->
            expect(@warn).not.to.be.called
    ## routes may reference previously created aliases ("@foo") as their
    ## response body, and may alias a route without stubbing it at all
    describe "request response alias", ->
      it "matches xhrs with lowercase methods", ->
        cy
          .route(/foo/, {}).as("getFoo")
          .window().then (win) ->
            xhr = new win.XMLHttpRequest
            ## lowercase verb must still match the GET route
            xhr.open("get", "/foo")
            xhr.send()
          .wait("@getFoo")

      it "can pass an alias reference to route", ->
        cy
          .noop({foo: "bar"}).as("foo")
          .route(/foo/, "@foo").as("getFoo")
          .window().then (win) ->
            win.$.getJSON("foo")

            null
          .wait("@getFoo").then (xhr) ->
            expect(xhr.responseBody).to.deep.eq {foo: "bar"}
            expect(xhr.responseBody).to.deep.eq @foo

      it "can pass an alias when using a response function", ->
        getFoo = ->
          ## alias reference resolved even through a promise
          Promise.resolve("@foo")

        cy
          .noop({foo: "bar"}).as("foo")
          .route(/foo/, getFoo).as("getFoo")
          .window().then (win) ->
            win.$.getJSON("foo")

            null
          .wait("@getFoo").then (xhr) ->
            expect(xhr.responseBody).to.deep.eq {foo: "bar"}
            expect(xhr.responseBody).to.deep.eq @foo

      it "can alias a route without stubbing it", ->
        cy
          .route(/fixtures\/app/).as("getFoo")
          .window().then (win) ->
            win.$.get("/fixtures/app.json")

            null
          .wait("@getFoo").then (xhr) ->
            log = cy.queue.logs({name: "xhr"})[0]

            ## displayName is plain "xhr", not "xhr stub"
            expect(log.get("displayName")).to.eq("xhr")
            expect(log.get("alias")).to.eq("getFoo")

            expect(xhr.responseBody).to.deep.eq({
              some: "json"
              foo: {
                bar: "baz"
              }
            })
describe "errors", ->
      ## validation errors thrown by cy.route() for bad arguments
      beforeEach ->
        Cypress.config("defaultCommandTimeout", 50)

        @logs = []

        cy.on "log:added", (attrs, log) =>
          @lastLog = log
          @logs.push(log)

        return null

      it "throws if cy.server() hasnt been invoked", (done) ->
        cy.state("serverIsStubbed", false)

        cy.on "fail", (err) ->
          expect(err.message).to.include "cy.route() cannot be invoked before starting the cy.server()"
          done()

        cy.route()

      it "url must be a string or regexp", (done) ->
        cy.on "fail", (err) ->
          expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
          done()

        cy.route({
          url: {}
        })

      it "url must be a string or regexp when a function", (done) ->
        cy.on "fail", (err) ->
          expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
          done()

        getUrl = ->
          Promise.resolve({url: {}})

        cy.route(getUrl)

      it "fails when functions reject", (done) ->
        error = new Error

        cy.on "fail", (err) ->
          expect(err).to.eq(error)
          done()

        getUrl = ->
          Promise.reject(error)

        cy.route(getUrl)

      it "fails when method is invalid", (done) ->
        cy.on "fail", (err) ->
          expect(err.message).to.include "cy.route() was called with an invalid method: 'POSTS'."
          done()

        ## "posts" uppercases to POSTS, which is not a known verb
        cy.route("posts", "/foo", {})

      it "requires a url when given a response", (done) ->
        cy.on "fail", (err) ->
          expect(err.message).to.include "cy.route() must be called with a url. It can be a string or regular expression."
          done()

        cy.route({})

      _.each [null, undefined], (val) ->
        it "throws if response options was explicitly set to #{val}", (done) ->
          cy.on "fail", (err) ->
            expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
            done()

          cy.route({url: /foo/, response: val})

        it "throws if response argument was explicitly set to #{val}", (done) ->
          cy.on "fail", (err) ->
            expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
            done()

          cy.route(/foo/, val)
it "requires arguments", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was not provided any arguments. You must provide valid arguments."
done()
cy.route()
it "sets err on log when caused by the XHR response", (done) ->
@route.restore()
cy.on "fail", (err) =>
lastLog = @lastLog
## route + window + xhr log === 3
expect(@logs.length).to.eq(3)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
done()
cy
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("foo_bar").done ->
foo.bar()
it.skip "explodes if response fixture signature errors", (done) ->
@trigger = cy.stub(@Cypress, "trigger").withArgs("fixture").callsArgWithAsync(2, {__error: "some error"})
logs = []
_this = @
## we have to restore the trigger when commandErr is called
## so that something logs out!
cy.commandErr = _.wrap cy.commandErr, (orig, err) ->
_this.Cypress.trigger.restore()
orig.call(@, err)
cy.on "log:added", (attrs, @log) =>
logs.push @log
cy.on "fail", (err) =>
expect(err.message).to.eq "some error"
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, fixture:bar"
done()
cy
.route(/foo/, "fixture:bar")
## TODO: handle this uncaught exception failure
it.skip "does not retry (cancels existing promise) when xhr errors", (done) ->
cancel = cy.spy(Promise.prototype, "cancel")
cy.on "command:retry", =>
if cy.state("error")
done("should have cancelled and not retried after failing")
cy.on "fail", (err) =>
p = cy.state("promise")
_.delay =>
expect(cancel).to.be.calledOn(p)
done()
, 100
cy
.route({
url: /foo/,
response: {}
delay: 100
})
.window().then (win) ->
win.$.getJSON("/foo").done ->
throw new Error("foo failed")
null
.get("button").should("have.class", "does-not-exist")
it "explodes if response alias cannot be found", (done) ->
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(2)
expect(err.message).to.eq "cy.route() could not find a registered alias for: '@bar'.\nAvailable aliases are: 'foo'."
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, @bar"
done()
cy
.wrap({foo: "bar"}).as("foo")
.route(/foo/, "@bar")
## Command-log attributes produced by cy.route() itself (instrument "route"),
## including the numResponses counter incremented per matching request.
describe ".log", ->
  beforeEach ->
    @logs = []

    ## only collect "route" instrument logs here
    cy.on "log:added", (attrs, log) =>
      if attrs.instrument is "route"
        @lastLog = log
        @logs.push(log)

    return null

  it "has name of route", ->
    cy.route("/foo", {}).then ->
      lastLog = @lastLog

      expect(lastLog.get("name")).to.eq "route"

  it "uses the wildcard URL", ->
    cy.route("*", {}).then ->
      lastLog = @lastLog

      expect(lastLog.get("url")).to.eq("*")

  it "#consoleProps", ->
    cy.route("*", {foo: "bar"}).as("foo").then ->
      expect(@lastLog.invoke("consoleProps")).to.deep.eq {
        Command: "route"
        Method: "GET"
        URL: "*"
        Status: 200
        Response: {foo: "bar"}
        Alias: "foo"
        # Responded: 1 time
        # "-------": ""
        # Responses: []
      }

  describe "numResponses", ->
    it "is initially 0", ->
      cy.route(/foo/, {}).then =>
        lastLog = @lastLog

        expect(lastLog.get("numResponses")).to.eq 0

    ## NOTE(review): title says "incremented to 2" but only one request is
    ## made and the assertion expects 1 — the title looks stale.
    it "is incremented to 2", ->
      cy
        .route(/foo/, {})
        .window().then (win) ->
          win.$.get("/foo")
        .then ->
          expect(@lastLog.get("numResponses")).to.eq 1

    it "is incremented for each matching request", ->
      cy
        .route(/foo/, {})
        .window().then (win) ->
          Promise.all([
            win.$.get("/foo")
            win.$.get("/foo")
            win.$.get("/foo")
          ])
        .then ->
          expect(@lastLog.get("numResponses")).to.eq 3
## consoleProps contents of "xhr" logs: Stubbed flag, automatic 404s when
## force404 is on, whitelisting, and header/body/initiator details.
context "consoleProps logs", ->
  beforeEach ->
    @logs = []

    ## only collect "xhr" named logs here
    cy.on "log:added", (attrs, log) =>
      if attrs.name is "xhr"
        @lastLog = log
        @logs.push(log)

    return null

  describe "when stubbed", ->
    it "says Stubbed: Yes", ->
      cy
        .server()
        .route(/foo/, {}).as("getFoo")
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.get("/foo").done(resolve)
        .then ->
          expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")

  describe "zero configuration / zero routes", ->
    beforeEach ->
      ## no routes defined — force404 sends every request back a 404
      cy
        .server({force404: true})
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.ajax({
              method: "POST"
              url: "/foo"
              data: JSON.stringify({foo: "bar"})
            }).fail ->
              resolve()

    it "calculates duration", ->
      cy.then ->
        xhr = cy.state("responses")[0].xhr

        consoleProps = @lastLog.invoke("consoleProps")
        expect(consoleProps.Duration).to.be.a("number")
        expect(consoleProps.Duration).to.be.gt(1)
        expect(consoleProps.Duration).to.be.lt(1000)

    it "sends back regular 404", ->
      cy.then ->
        xhr = cy.state("responses")[0].xhr

        consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"

        expect(consoleProps).to.deep.eq({
          Method: "POST"
          Status: "404 (Not Found)"
          URL: "http://localhost:3500/foo"
          XHR: xhr.xhr
        })

    it "says Stubbed: Yes when sent 404 back", ->
      expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")

  describe "whitelisting", ->
    it "does not send back 404s on whitelisted routes", ->
      cy
        .server()
        .window().then (win) ->
          win.$.get("/fixtures/app.js")
        .then (resp) ->
          expect(resp).to.eq "{ 'bar' }\n"

  describe "route setup", ->
    beforeEach ->
      ## request /bar which matches no route -> automatic 404
      cy
        .server({force404: true})
        .route("/foo", {}).as("anyRequest")
        .window().then (win) ->
          win.$.get("/bar")
          null

    it "sends back 404 when request doesnt match route", ->
      cy.then ->
        consoleProps = @lastLog.invoke("consoleProps")
        expect(consoleProps.Note).to.eq("This request did not match any of your routes. It was automatically sent back '404'. Setting cy.server({force404: false}) will turn off this behavior.")

  describe "{force404: false}", ->
    beforeEach ->
      cy
        .server()
        .window().then (win) ->
          win.$.getJSON("/fixtures/app.json")

    it "says Stubbed: No when request isnt forced 404", ->
      expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("No")

    it "logs request + response headers", ->
      cy.then ->
        consoleProps = @lastLog.invoke("consoleProps")
        expect(consoleProps.Request.headers).to.be.an("object")
        expect(consoleProps.Response.headers).to.be.an("object")

    it "logs Method, Status, URL, and XHR", ->
      cy.then ->
        xhr = cy.state("responses")[0].xhr

        consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"

        expect(consoleProps).to.deep.eq({
          Method: "GET"
          URL: "http://localhost:3500/fixtures/app.json"
          Status: "200 (OK)"
          XHR: xhr.xhr
        })

    it "logs response", ->
      cy.then ->
        consoleProps = @lastLog.invoke("consoleProps")

        expect(consoleProps.Response.body).to.deep.eq({
          some: "json"
          foo: {
            bar: "baz"
          }
        })

    it "sets groups Initiator", ->
      cy.then ->
        consoleProps = @lastLog.invoke("consoleProps")

        ## Initiator group carries a multi-line stack-like string
        group = consoleProps.groups()[0]
        expect(group.name).to.eq("Initiator")
        expect(group.label).to.be.false
        expect(group.items[0]).to.be.a("string")
        expect(group.items[0].split("\n").length).to.gt(1)
## renderProps (reporter display) for xhr logs: message text plus the
## successful / pending / bad indicator.
context "renderProps", ->
  beforeEach ->
    @logs = []

    cy.on "log:added", (attrs, log) =>
      if attrs.name is "xhr"
        @lastLog = log
        @logs.push(log)

    return null

  describe "in any case", ->
    beforeEach ->
      cy
        .server()
        .route(/foo/, {})
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.get("/foo").done(resolve)

    it "sends correct message", ->
      cy.then ->
        expect(@lastLog.invoke("renderProps").message).to.equal("GET 200 /foo")

  describe "when response is successful", ->
    beforeEach ->
      cy
        .server()
        .route(/foo/, {})
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.get("/foo").done(resolve)

    it "sends correct indicator", ->
      cy.then ->
        expect(@lastLog.invoke("renderProps").indicator).to.equal("successful")

  describe "when response is pending", ->
    beforeEach ->
      cy
        .server()
        .route({ url: "/foo", delay: 500, response: {} })
        .window().then (win) ->
          win.$.get("/foo")
          null

    ## FAILING
    ## NOTE(review): unlike the passing tests above, these read @lastLog
    ## synchronously instead of inside cy.then — likely why they fail.
    it "sends correct message", ->
      expect(@lastLog.invoke("renderProps").message).to.equal("GET --- /foo")

    it "sends correct indicator", ->
      expect(@lastLog.invoke("renderProps").indicator).to.equal("pending")

  describe "when response is outside 200 range", ->
    beforeEach ->
      cy
        .server()
        .route({ url: "/foo", status: 500, response: {} })
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.get("/foo").fail -> resolve()

    it "sends correct indicator", ->
      cy.then ->
        expect(@lastLog.invoke("renderProps").indicator).to.equal("bad")
## Abort semantics. `xhrs` is shared across the first two tests on purpose:
## XHRs started in one test are inspected in the next to prove they survive
## the test boundary.
context "abort", ->
  xhrs = []

  beforeEach ->
    cy.visit("/fixtures/jquery.html")

  it "does not abort xhr's between tests", ->
    cy.window().then (win) ->
      _.times 2, ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/timeout?ms=100")
        xhr.send()

        xhrs.push(xhr)
## Companion to the previous test: the XHRs it started must survive the
## test boundary without being aborted.
it "has not aborted the xhrs", ->
  _.each xhrs, (xhr) ->
    ## `aborted` is only ever set to true (see the abort tests below which
    ## assert `to.be.true`); when unset it is not `false` either, so the old
    ## `not.to.be.false` assertion could never fail. Assert the stated
    ## intent directly: the xhr must NOT be aborted.
    expect(xhr.aborted).not.to.be.true
## Aborting before send, mid-flight, and with a non-text responseType.
it "aborts xhrs that haven't been sent", ->
  cy
    .window()
    .then (win) ->
      xhr = new win.XMLHttpRequest()
      xhr.open("GET", "/timeout?ms=0")
      xhr.abort()

      expect(xhr.aborted).to.be.true

it "aborts xhrs currently in flight", ->
  log = null

  ## capture the first "xhr" log as soon as it changes
  cy.on "log:changed", (attrs, l) =>
    if attrs.name is "xhr"
      if not log
        log = l

  cy
    .window()
    .then (win) ->
      xhr = new win.XMLHttpRequest()
      xhr.open("GET", "/timeout?ms=999")
      xhr.send()
      xhr.abort()

      cy.wrap(null).should ->
        expect(log.get("state")).to.eq("failed")
        expect(log.invoke("renderProps")).to.deep.eq({
          message: "GET (aborted) /timeout?ms=999",
          indicator: 'aborted',
        })
        expect(xhr.aborted).to.be.true

## https://github.com/cypress-io/cypress/issues/3008
it "aborts xhrs even when responseType not '' or 'text'", ->
  log = null

  cy.on "log:changed", (attrs, l) =>
    if attrs.name is "xhr"
      if not log
        log = l

  cy
    .window()
    .then (win) ->
      xhr = new win.XMLHttpRequest()
      ## non-default responseType used to break abort handling (issue 3008)
      xhr.responseType = 'arraybuffer'
      xhr.open("GET", "/timeout?ms=1000")
      xhr.send()
      xhr.abort()

      cy.wrap(null).should ->
        expect(log.get("state")).to.eq("failed")
        expect(xhr.aborted).to.be.true
## https://github.com/cypress-io/cypress/issues/1652
## Title typo fixed: "completed by have had" -> "completed but have had".
it "does not set aborted on XHR's that have completed but have had .abort() called", ->
  log = null

  ## capture the first "xhr" log as soon as it changes
  cy.on "log:changed", (attrs, l) =>
    if attrs.name is "xhr"
      if not log
        log = l

  cy
    .window()
    .then (win) ->
      new Promise (resolve) ->
        xhr = new win.XMLHttpRequest()
        xhr.open("GET", "/timeout?ms=0")
        ## abort() AFTER the request has completed — must not mark it aborted
        xhr.onload = ->
          xhr.abort()
          xhr.foo = "bar"
          resolve(xhr)
        xhr.send()
    .then (xhr) ->
      cy
        .wrap(null)
        .should ->
          ## ensure this is set to prevent accidental
          ## race conditions down the road if something
          ## goes wrong
          expect(xhr.foo).to.eq("bar")
          expect(xhr.aborted).not.to.be.true
          expect(log.get("state")).to.eq("passed")
## Open XHRs must be canceled (not left dangling) when the page unloads.
context "Cypress.on(window:unload)", ->
  it "cancels all open XHR's", ->
    xhrs = []

    cy
      .window()
      .then (win) ->
        _.times 2, ->
          xhr = new win.XMLHttpRequest
          xhr.open("GET", "/timeout?ms=200")
          xhr.send()

          xhrs.push(xhr)
      .reload()
      .then ->
        _.each xhrs, (xhr) ->
          expect(xhr.canceled).to.be.true
## The stub server + routes must survive navigation — both explicit cy.visit
## and in-page link-driven transitions.
context "Cypress.on(window:before:load)", ->
  it "reapplies server + route automatically before window:load", ->
    ## this tests that the server + routes are automatically reapplied
    ## after the 2nd visit - which is an example of the remote iframe
    ## causing an onBeforeLoad event
    cy
      .server()
      .route(/foo/, {foo: "bar"}).as("getFoo")
      .visit("http://localhost:3500/fixtures/jquery.html")
      .window().then (win) ->
        new Promise (resolve) ->
          xhr = new win.XMLHttpRequest
          xhr.open("GET", "/foo")
          xhr.send()
          xhr.onload = resolve
      .wait("@getFoo").its("url").should("include", "/foo")
      .visit("http://localhost:3500/fixtures/generic.html")
      .window().then (win) ->
        new Promise (resolve) ->
          xhr = new win.XMLHttpRequest
          xhr.open("GET", "/foo")
          xhr.send()
          xhr.onload = resolve
      .wait("@getFoo").its("url").should("include", "/foo")

  it "reapplies server + route automatically during page transitions", ->
    ## this tests that the server + routes are automatically reapplied
    ## after the 2nd visit - which is an example of the remote iframe
    ## causing an onBeforeLoad event
    cy
      .server()
      .route(/foo/, {foo: "bar"}).as("getFoo")
      .visit("http://localhost:3500/fixtures/jquery.html")
      .window().then (win) ->
        url = "http://localhost:3500/fixtures/generic.html"

        $a = win.$("<a href='#{url}'>jquery</a>")
        .appendTo(win.document.body)

        ## synchronous beforeunload
        $a.get(0).click()
      .url().should("include", "/generic.html")
      .window().then (win) ->
        new Promise (resolve) ->
          xhr = new win.XMLHttpRequest
          xhr.open("GET", "/foo")
          xhr.send()
          xhr.onload = resolve
      .wait("@getFoo").its("url").should("include", "/foo")
## NOTE(review): skipped — uses the old @Cypress trigger API; needs porting
## to the current event API before it can be re-enabled.
context.skip "#cancel", ->
  it "calls server#cancel", (done) ->
    cancel = null

    @Cypress.once "abort", ->
      expect(cancel).to.be.called
      done()

    cy.server().then ->
      cancel = cy.spy cy.state("server"), "cancel"
      @Cypress.trigger "abort"
## NOTE(review): skipped cy.respond() suite — kept for reference until the
## command is supported again.
context.skip "#respond", ->
  it "calls server#respond", ->
    respond = null

    cy
      .server({delay: 100}).then (server) ->
        respond = cy.spy server, "respond"
      .window().then (win) ->
        win.$.get("/users")
        null
      .respond().then ->
        expect(respond).to.be.calledOnce

  describe "errors", ->
    beforeEach ->
      @allowErrors()

    it "errors without a server", (done) ->
      cy.on "fail", (err) =>
        expect(err.message).to.eq "cy.respond() cannot be invoked before starting the cy.server()"
        done()

      cy.respond()

    it "errors with no pending requests", (done) ->
      cy.on "fail", (err) =>
        expect(err.message).to.eq "cy.respond() did not find any pending requests to respond to"
        done()

      cy
        .server()
        .route(/users/, {})
        .window().then (win) ->
          ## this is waited on to be resolved
          ## because of jquery promise thenable
          win.$.get("/users")
        .respond()

    ## currently this does not fail. we'll wait until someone cares
    # it "errors if response was null or undefined", (done) ->
    #   cy.on "fail", (err) ->
    #   cy
    #     .server()
    #     .route({
    #       url: /foo/
    #       respond: false
    #     })
    #     .window().then (win) ->
    #       win.$.get("/foo")
    #       null
# .respond() | true | _ = Cypress._
$ = Cypress.$
Promise = Cypress.Promise
describe "src/cy/commands/xhr", ->
## Capture a pristine copy of the fixture page's head/body once (scripts
## stripped), then restore the DOM before each test so specs stay isolated.
before ->
  cy
    .visit("/fixtures/jquery.html")
    .then (win) ->
      h = $(win.document.head)
      h.find("script").remove()

      @head = h.prop("outerHTML")
      @body = win.document.body.outerHTML

beforeEach ->
  doc = cy.state("document")

  $(doc.head).empty().html(@head)
  $(doc.body).empty().html(@body)
## The proxied XMLHttpRequest: Cypress defines onload/onerror/onreadystatechange
## wrappers and must cooperate with user handlers attached at any point,
## without recursing infinitely.
context "#startXhrServer", ->
  it "continues to be a defined properties", ->
    cy
      .server()
      .route({url: /foo/}).as("getFoo")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/foo")

        expect(xhr.onload).to.be.a("function")
        expect(xhr.onerror).to.be.a("function")
        expect(xhr.onreadystatechange).to.be.a("function")

  it "prevents infinite recursion", ->
    onloaded = false
    onreadystatechanged = false

    cy
      .server()
      .route({url: /foo/}).as("getFoo")
      .window().then (win) ->
        handlers = ["onload", "onerror", "onreadystatechange"]

        ## re-wrap each handler so the old one is invoked too — the point is
        ## that Cypress's own wrapping must not recurse when this happens
        wrap = ->
          handlers.forEach (handler) ->
            bak = xhr[handler]

            xhr[handler] = ->
              if _.isFunction(bak)
                bak.apply(xhr, arguments)

        xhr = new win.XMLHttpRequest
        xhr.addEventListener("readystatechange", wrap, false)
        xhr.onreadystatechange = ->
          throw new Error("NOOO")
        xhr.onreadystatechange
        xhr.onreadystatechange = ->
          onreadystatechanged = true
        xhr.open("GET", "/foo")
        xhr.onload = ->
          throw new Error("NOOO")
        xhr.onload
        xhr.onload = ->
          onloaded = true
        xhr.send()
        null
      .wait("@getFoo").then (xhr) ->
        expect(onloaded).to.be.true
        expect(onreadystatechanged).to.be.true
        expect(xhr.status).to.eq(404)

  it "allows multiple readystatechange calls", ->
    responseText = null
    responseStatuses = 0

    cy
      .server()
      .route({ url: /longtext.txt/ }).as("getLongText")
      .task('create:long:file')
      .window().then (win) ->
        xhr = new win.XMLHttpRequest()
        xhr.onreadystatechange = ->
          responseText = xhr.responseText
          ## readyState 3 (LOADING) should fire more than once on a long body
          if xhr.readyState == 3
            responseStatuses++
        xhr.open("GET", "/_test-output/longtext.txt?" + Cypress._.random(0, 1e6))
        xhr.send()
        null
      .wait("@getLongText").then (xhr) ->
        expect(responseStatuses).to.be.gt(1)
        expect(xhr.status).to.eq(200)

  it "works with jquery too", ->
    failed = false
    onloaded = false

    cy
      .server()
      .route({url: /foo/}).as("getFoo")
      .window().then (win) ->
        handlers = ["onload", "onerror", "onreadystatechange"]

        wrap = ->
          xhr = @

          handlers.forEach (handler) ->
            bak = xhr[handler]

            xhr[handler] = ->
              if _.isFunction(bak)
                bak.apply(xhr, arguments)

        open = win.XMLHttpRequest.prototype.open

        ## patch open() so every jquery-created xhr is wrapped too
        win.XMLHttpRequest.prototype.open = ->
          @addEventListener("readystatechange", wrap, false)

          open.apply(@, arguments)

        xhr = win.$.get("/foo")
        .fail ->
          failed = true
        .always ->
          onloaded = true

        null
      .wait("@getFoo").then (xhr) ->
        expect(failed).to.be.true
        expect(onloaded).to.be.true
        expect(xhr.status).to.eq(404)

  it "calls existing onload handlers", ->
    onloaded = false

    cy
      .server()
      .route({url: /foo/}).as("getFoo")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.onload = ->
          onloaded = true
        xhr.open("GET", "/foo")
        xhr.send()
        null
      .wait("@getFoo").then (xhr) ->
        expect(onloaded).to.be.true
        expect(xhr.status).to.eq(404)

  it "calls onload handlers attached after xhr#send", ->
    onloaded = false

    cy
      .server()
      .route({url: /foo/}).as("getFoo")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/foo")
        xhr.send()
        xhr.onload = ->
          onloaded = true
        null
      .wait("@getFoo").then (xhr) ->
        expect(onloaded).to.be.true
        expect(xhr.status).to.eq(404)

  it "calls onload handlers attached after xhr#send asynchronously", ->
    onloaded = false

    cy
      .server()
      .route({url: /timeout/}).as("getTimeout")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/timeout?ms=100")
        xhr.send()
        ## attach onload 20ms later, while the 100ms request is in flight
        _.delay ->
          xhr.onload = ->
            onloaded = true
        , 20
        null
      .wait("@getTimeout").then (xhr) ->
        expect(onloaded).to.be.true
        expect(xhr.status).to.eq(200)

  it "fallbacks even when onreadystatechange is overriden", ->
    onloaded = false
    onreadystatechanged = false

    cy
      .server()
      .route({url: /timeout/}).as("get.timeout")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/timeout?ms=100")
        xhr.send()
        ## both user handlers replace Cypress's after send()
        xhr.onreadystatechange = ->
          onreadystatechanged = true
        xhr.onload = ->
          onloaded = true
        null
      .wait("@get.timeout").then (xhr) ->
        expect(onloaded).to.be.true
        expect(onreadystatechanged).to.be.true
        expect(xhr.status).to.eq(200)
## NOTE(review): skipped — static assets (.html/.js/.css) are filtered from
## xhr handling by default; {ignore: false} disables that filtering.
describe.skip "filtering requests", ->
  beforeEach ->
    cy.server()

  extensions = {
    html: "ajax html"
    js: "{foo: \"bar\"}"
    css: "body {}"
  }

  _.each extensions, (val, ext) ->
    it "filters out non ajax requests by default for extension: .#{ext}", (done) ->
      cy.state("window").$.get("/fixtures/app.#{ext}").done (res) ->
        expect(res).to.eq val
        done()

  it "can disable default filtering", (done) ->
    ## this should throw since it should return 404 when no
    ## route matches it
    cy.server({ignore: false}).window().then (w) ->
      Promise.resolve(w.$.get("/fixtures/app.html")).catch -> done()
## URL rewriting: relative/absolute resolution (including <base> handling),
## CORS passthrough vs. stubbing, and the /__cypress/xhrs/ proxy prefix that
## stubbed requests are rewritten onto (asserted via the server's onOpen spy).
describe "url rewriting", ->
  it "has a FQDN absolute-relative url", ->
    cy
      .server()
      .route({
        url: /foo/
      }).as("getFoo")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")

        win.$.get("/foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3500/foo")
        expect(@open).to.be.calledWith("GET", "/foo")

  it "has a relative URL", ->
    cy
      .server()
      .route(/foo/).as("getFoo")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")

        win.$.get("foo")
        null
      .wait("@getFoo").then (xhr) ->
        ## relative "foo" resolves against the current /fixtures/ page
        expect(xhr.url).to.eq("http://localhost:3500/fixtures/foo")
        expect(@open).to.be.calledWith("GET", "foo")

  it "resolves relative urls correctly when base tag is present", ->
    cy
      .server()
      .route({
        url: /foo/
      }).as("getFoo")
      .window().then (win) ->
        win.$("<base href='/'>").appendTo(win.$("head"))

        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3500/foo")
        expect(@open).to.be.calledWith("GET", "foo")

  it "resolves relative urls correctly when base tag is present on nested routes", ->
    cy
      .server()
      .route({
        url: /foo/
      }).as("getFoo")
      .window().then (win) ->
        win.$("<base href='/nested/route/path'>").appendTo(win.$("head"))

        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("../foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3500/nested/foo")
        expect(@open).to.be.calledWith("GET", "../foo")

  it "allows cross origin requests to go out as necessary", ->
    cy
      .server()
      .route(/foo/).as("getFoo")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("http://localhost:3501/foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3501/foo")
        expect(@open).to.be.calledWith("GET", "http://localhost:3501/foo")

  it "rewrites FQDN url's for stubs", ->
    cy
      .server()
      .route({
        url: /foo/
        response: {}
      }).as("getFoo")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("http://localhost:9999/foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:9999/foo")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:9999/foo")

  it "rewrites absolute url's for stubs", ->
    cy
      .server()
      .route(/foo/, {}).as("getFoo")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("/foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3500/foo")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")

  it "rewrites 404's url's for stubs", ->
    cy
      .server({force404: true})
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")

        new Promise (resolve) ->
          win.$.ajax({
            method: "POST"
            url: "/foo"
            data: JSON.stringify({foo: "bar"})
          }).fail ->
            resolve()
      .then ->
        xhr = cy.state("responses")[0].xhr
        expect(xhr.url).to.eq("http://localhost:3500/foo")
        expect(@open).to.be.calledWith("POST", "/__cypress/xhrs/http://localhost:3500/foo")

  it "rewrites urls with nested segments", ->
    cy
      .server()
      .route({
        url: /phones/
        response: {}
      }).as("getPhones")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("phones/phones.json")
        null
      .wait("@getPhones")
      .then ->
        xhr = cy.state("responses")[0].xhr
        expect(xhr.url).to.eq("http://localhost:3500/fixtures/phones/phones.json")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/fixtures/phones/phones.json")

  it "does not rewrite CORS", ->
    cy
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")

        new Promise (resolve) ->
          win.$.get("http://www.google.com/phones/phones.json").fail ->
            resolve()
      .then ->
        xhr = cy.state("requests")[0].xhr
        expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
        expect(@open).to.be.calledWith("GET", "http://www.google.com/phones/phones.json")

  it "can stub real CORS requests too", ->
    cy
      .server()
      .route({
        url: /phones/
        response: {}
      }).as("getPhones")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("http://www.google.com/phones/phones.json")
        null
      .wait("@getPhones")
      .then ->
        xhr = cy.state("responses")[0].xhr
        expect(xhr.url).to.eq("http://www.google.com/phones/phones.json")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://www.google.com/phones/phones.json")

  it "can stub CORS string routes", ->
    cy
      .server()
      .route("http://localhost:3501/fixtures/app.json").as("getPhones")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("http://localhost:3501/fixtures/app.json")
        null
      .wait("@getPhones")
      .then ->
        xhr = cy.state("responses")[0].xhr
        expect(xhr.url).to.eq("http://localhost:3501/fixtures/app.json")
        expect(@open).to.be.calledWith("GET", "http://localhost:3501/fixtures/app.json")

  # it "can stub root requests to CORS", ->
  #   cy
  #     .server()
  #     .route({
  #       url: "http://localhost:3501"
  #       stub: false
  #     }).as("getPhones")
  #     .window().then (win) ->
  #       @open = cy.spy(cy.state("server").options, "onOpen")
  #       win.$.get("http://localhost:3501")
  #       null
  #     .wait("@getPhones")
  #     .then ->
  #       xhr = cy.state("responses")[0].xhr
  #       expect(xhr.url).to.eq("http://localhost:3501")
  #       expect(@open).to.be.calledWith("GET", "/http://localhost:3501")

  it "sets display correctly when there is no remoteOrigin", ->
    ## this is an example of having cypress act as your webserver
    ## when the remoteHost is <root>
    cy
      .server()
      .route({
        url: /foo/
        response: {}
      }).as("getFoo")
      .window().then (win) ->
        ## trick cypress into thinking the remoteOrigin is location:9999
        cy.stub(cy, "getRemoteLocation").withArgs("origin").returns("")

        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("/foo")
        null
      .wait("@getFoo").then (xhr) ->
        expect(xhr.url).to.eq("http://localhost:3500/foo")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/foo")

  it "decodes proxy urls", ->
    cy
      .server()
      .route({
        url: /users/
        response: {}
      }).as("getUsers")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("/users?q=(id eq 123)")
        null
      .wait("@getUsers")
      .then ->
        xhr = cy.state("responses")[0].xhr
        ## the public xhr.url is decoded; the proxied open() URL stays encoded
        expect(xhr.url).to.eq("http://localhost:3500/users?q=(id eq 123)")

        url = encodeURI("users?q=(id eq 123)")
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")

  it "decodes proxy urls #2", ->
    cy
      .server()
      .route(/accounts/, {}).as("getAccounts")
      .window().then (win) ->
        @open = cy.spy(cy.state("server").options, "onOpen")
        win.$.get("/accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")
        null
      .wait("@getAccounts")
      .then ->
        xhr = cy.state("responses")[0].xhr
        expect(xhr.url).to.eq("http://localhost:3500/accounts?page=1&$filter=(rowStatus+eq+1)&$orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true")

        url = "accounts?page=1&%24filter=(rowStatus+eq+1)&%24orderby=name+asc&includeOpenFoldersCount=true&includeStatusCount=true"
        expect(@open).to.be.calledWith("GET", "/__cypress/xhrs/http://localhost:3500/#{url}")
## The route's onResponse callback fires with cy as context and the proxy xhr.
describe "#onResponse", ->
  it "calls onResponse callback with cy context + proxy xhr", (done) ->
    cy
      .server()
      .route({
        url: /foo/
        response: {foo: "bar"}
        onResponse: (xhr) ->
          expect(@).to.eq(cy)
          expect(xhr.responseBody).to.deep.eq {foo: "bar"}
          done()
      })
      .window().then (win) ->
        win.$.get("/foo")
        null
## The route's onAbort callback fires with cy as context and the aborted xhr.
describe "#onAbort", ->
  it "calls onAbort callback with cy context + proxy xhr", (done) ->
    cy
      .server()
      .route({
        url: /foo/
        response: {}
        onAbort: (xhr) ->
          expect(@).to.eq(cy)
          expect(xhr.aborted).to.be.true
          done()
      })
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/foo")
        xhr.send()
        xhr.abort()
        null
## Parsing of request/response bodies on the proxy xhr: JSON, arraybuffer, xml.
describe "request parsing", ->
  ## title fixed: was the garbled "adds parses requestBody into JSON"
  it "parses requestBody into JSON", (done) ->
    cy
      .server()
      .route({
        method: "POST"
        url: /foo/
        response: {}
        onRequest: (xhr) ->
          expect(@).to.eq(cy)
          expect(xhr.requestBody).to.deep.eq {foo: "bar"}
          done()
      })
      .window().then (win) ->
        win.$.ajax
          type: "POST"
          url: "/foo"
          data: JSON.stringify({foo: "bar"})
          dataType: "json"
        null

  ## https://github.com/cypress-io/cypress/issues/65
  it "provides the correct requestBody on multiple requests", ->
    ## helper: fire a JSON POST from the AUT window
    post = (win, obj) ->
      win.$.ajax({
        type: "POST"
        url: "/foo"
        data: JSON.stringify(obj)
        dataType: "json"
      })

      return null

    cy
      .server()
      .route("POST", /foo/, {}).as("getFoo")
      .window().then (win) ->
        post(win, {foo: "bar1"})
      .wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar1"})
      .window().then (win) ->
        post(win, {foo: "bar2"})
      .wait("@getFoo").its("requestBody").should("deep.eq", {foo: "bar2"})

  it "handles arraybuffer", ->
    cy
      .server()
      .route("GET", /buffer/).as("getBuffer")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.responseType = "arraybuffer"
        xhr.open("GET", "/buffer")
        xhr.send()
        null
      .wait("@getBuffer").then (xhr) ->
        expect(xhr.responseBody.toString()).to.eq("[object ArrayBuffer]")

  it "handles xml", ->
    cy
      .server()
      .route("GET", /xml/).as("getXML")
      .window().then (win) ->
        xhr = new win.XMLHttpRequest
        xhr.open("GET", "/xml")
        xhr.send()
        null
      .wait("@getXML").its("responseBody").should("eq", "<foo>bar</foo>")
## Regression: a non-matching stubbed route (503 /answers/) must not swallow
## a request that matches a later route (/forms/).
describe "issue #84", ->
  it "does not incorrectly match options", ->
    cy
      .server()
      .route({
        method: "GET"
        url: /answers/
        status: 503
        response: {}
      })
      .route(/forms/, []).as("getForm")
      .window().then (win) ->
        win.$.getJSON("/forms")
        null
      .wait("@getForm").its("status").should("eq", 200)
## Regression: waiting on multiple aliases must yield each alias's own XHR,
## and re-defining a route later must create a distinct alias.
describe "#issue #85", ->
  it "correctly returns the right XHR alias", ->
    cy
      .server()
      .route({
        method: "POST"
        url: /foo/
        response: {}
      }).as("getFoo")
      .route(/folders/, {foo: "bar"}).as("getFolders")
      .window().then (win) ->
        win.$.getJSON("/folders")
        win.$.post("/foo", {})
        null
      .wait("@getFolders")
      .wait("@getFoo")
      .route(/folders/, {foo: "baz"}).as("getFoldersWithSearch")
      .window().then (win) ->
        win.$.getJSON("/folders/123/activities?foo=bar")
        null
      .wait("@getFoldersWithSearch").its("url")
      .should("contain", "?foo=bar")
## Command-log behavior of "xhr" entries: immediate logging with a request
## snapshot, lifecycle across aborts/responses, and error propagation.
describe ".log", ->
  beforeEach ->
    @logs = []

    cy.on "log:added", (attrs, log) =>
      if attrs.name is "xhr"
        @lastLog = log
        @logs.push(log)

    return null

  context "requests", ->
    it "immediately logs xhr obj", ->
      cy
        .server()
        .route(/foo/, {}).as("getFoo")
        .window().then (win) ->
          win.$.get("foo")
          null
        .then ->
          lastLog = @lastLog

          expect(lastLog.pick("name", "displayName", "event", "alias", "aliasType", "state")).to.deep.eq {
            name: "xhr"
            displayName: "xhr stub"
            event: true
            alias: "getFoo"
            aliasType: "route"
            state: "pending"
          }

          ## only the "request" snapshot exists while the xhr is pending
          snapshots = lastLog.get("snapshots")
          expect(snapshots.length).to.eq(1)
          expect(snapshots[0].name).to.eq("request")
          expect(snapshots[0].body).to.be.an("object")

    it "does not end xhr requests when the associated command ends", ->
      logs = null

      cy
        .server()
        .route({
          url: /foo/,
          response: {}
          delay: 50
        }).as("getFoo")
        .window().then (w) ->
          w.$.getJSON("foo")
          w.$.getJSON("foo")
          w.$.getJSON("foo")
          null
        .then ->
          cmd = cy.queue.find({name: "window"})
          logs = cmd.get("next").get("logs")

          expect(logs.length).to.eq(3)

          ## still in flight — none of the xhr logs may be ended yet
          _.each logs, (log) ->
            expect(log.get("name")).to.eq("xhr")
            expect(log.get("end")).not.to.be.true
        .wait(["@getFoo", "@getFoo", "@getFoo"]).then ->
          _.each logs, (log) ->
            expect(log.get("name")).to.eq("xhr")
            expect(log.get("ended")).to.be.true

    it "updates log immediately whenever an xhr is aborted", ->
      snapshot = null
      xhrs = null

      cy
        .server()
        .route({
          url: /foo/,
          response: {}
          delay: 50
        }).as("getFoo")
        .window().then (win) ->
          xhr1 = win.$.getJSON("foo1")
          xhr2 = win.$.getJSON("foo2")

          xhr1.abort()

          null
        .then ->
          xhrs = cy.queue.logs({name: "xhr"})

          expect(xhrs[0].get("state")).to.eq("failed")
          expect(xhrs[0].get("error").name).to.eq("AbortError")
          expect(xhrs[0].get("snapshots").length).to.eq(2)
          expect(xhrs[0].get("snapshots")[0].name).to.eq("request")
          expect(xhrs[0].get("snapshots")[0].body).to.be.a("object")
          expect(xhrs[0].get("snapshots")[1].name).to.eq("aborted")
          expect(xhrs[0].get("snapshots")[1].body).to.be.a("object")

          expect(cy.state("requests").length).to.eq(2)

          ## the abort should have set its response
          expect(cy.state("responses").length).to.eq(1)
        .wait(["@getFoo", "@getFoo"]).then ->
          ## should not re-snapshot after the response
          expect(xhrs[0].get("snapshots").length).to.eq(2)

    it "can access requestHeaders", ->
      cy
        .server()
        .route(/foo/, {}).as("getFoo")
        .window().then (win) ->
          win.$.ajax({
            method: "GET"
            url: "/foo"
            headers: {
              "x-token": "PI:KEY:<KEY>END_PI"
            }
          })
          null
        .wait("@getFoo").its("requestHeaders").should("have.property", "x-token", "PI:KEY:<KEY>END_PI")

  context "responses", ->
    beforeEach ->
      cy
        .server()
        .route(/foo/, {}).as("getFoo")
        .window().then (win) ->
          win.$.get("foo_bar")
          null
        .wait("@getFoo")

    it "logs obj", ->
      obj = {
        name: "xhr"
        displayName: "xhr stub"
        event: true
        message: ""
        type: "parent"
        aliasType: "route"
        referencesAlias: undefined
        alias: "getFoo"
      }

      lastLog = @lastLog

      _.each obj, (value, key) =>
        expect(lastLog.get(key)).to.deep.eq(value, "expected key: #{key} to eq value: #{value}")

    it "ends", ->
      lastLog = @lastLog

      expect(lastLog.get("state")).to.eq("passed")

    it "snapshots again", ->
      lastLog = @lastLog

      ## a second "response" snapshot is appended once the xhr completes
      expect(lastLog.get("snapshots").length).to.eq(2)
      expect(lastLog.get("snapshots")[0].name).to.eq("request")
      expect(lastLog.get("snapshots")[0].body).to.be.an("object")
      expect(lastLog.get("snapshots")[1].name).to.eq("response")
      expect(lastLog.get("snapshots")[1].body).to.be.an("object")

  describe "errors", ->
    beforeEach ->
      Cypress.config("defaultCommandTimeout", 200)

      @logs = []

      cy.on "log:added", (attrs, log) =>
        if attrs.name is "xhr"
          @lastLog = log
          @logs.push(log)

      return null

    it "sets err on log when caused by code errors", (done) ->
      finalThenCalled = false

      cy.on "fail", (err) =>
        lastLog = @lastLog

        expect(@logs.length).to.eq(1)
        expect(lastLog.get("name")).to.eq("xhr")
        expect(lastLog.get("error")).to.eq err
        done()

      cy
        .window().then (win) ->
          new Promise (resolve) ->
            win.$.get("http://www.google.com/foo.json")
            .fail ->
              ## deliberately throws (foo is undefined)
              foo.bar()

    it "causes errors caused by onreadystatechange callback function", (done) ->
      e = new Error("onreadystatechange caused this error")

      cy.on "fail", (err) =>
        lastLog = @lastLog

        expect(@logs.length).to.eq(1)
        expect(lastLog.get("name")).to.eq("xhr")
        expect(lastLog.get("error")).to.eq err
        expect(err).to.eq(e)
        done()

      cy
        .window().then (win) ->
          new Promise (resolve) ->
            xhr = new win.XMLHttpRequest
            xhr.open("GET", "/foo")
            xhr.onreadystatechange = ->
              throw e
            xhr.send()
context "#server", ->
it "sets serverIsStubbed", ->
cy.server().then ->
expect(cy.state("serverIsStubbed")).to.be.true
it "can disable serverIsStubbed", ->
cy.server({enable: false}).then ->
expect(cy.state("serverIsStubbed")).to.be.false
it "sends enable to server", ->
set = cy.spy cy.state("server"), "set"
cy.server().then ->
expect(set).to.be.calledWithExactly({enable: true})
it "can disable the server after enabling it", ->
set = cy.spy cy.state("server"), "set"
cy
.server()
.route(/app/, {}).as("getJSON")
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getJSON").its("responseBody").should("deep.eq", {})
.server({enable: false})
.then ->
expect(set).to.be.calledWithExactly({enable: false})
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getJSON").its("responseBody").should("not.deep.eq", {})
it "sets delay at 0 by default", ->
cy
.server()
.route("*", {})
.then ->
expect(cy.state("server").getRoutes()[0].delay).to.eq(0)
it "passes down options.delay to routes", ->
cy
.server({delay: 100})
.route("*", {})
.then ->
expect(cy.state("server").getRoutes()[0].delay).to.eq(100)
it "passes event argument to xhr.onreadystatechange", (done) ->
cy.window().then (win) ->
xhr = new win.XMLHttpRequest()
xhr.onreadystatechange = (e) ->
expect(e).to.be.an.instanceof(win.Event)
done()
xhr.open("GET", "http://localhost:3500/")
describe "errors", ->
context "argument signature", ->
_.each ["asdf", 123, null, undefined], (arg) ->
it "throws on bad argument: #{arg}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.server() accepts only an object literal as its argument"
done()
cy.server(arg)
it "after turning off server it throws attempting to route", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.eq("cy.route() cannot be invoked before starting the cy.server()")
done()
cy
.server()
.route(/app/, {})
.server({enable: false})
.route(/app/, {})
describe ".log", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
it "provides specific #onFail", (done) ->
cy.on "fail", (err) =>
obj = {
name: "xhr"
referencesAlias: undefined
alias: "getFoo"
aliasType: "route"
type: "parent"
error: err
instrument: "command"
message: ""
event: true
}
lastLog = @lastLog
_.each obj, (value, key) =>
expect(lastLog.get(key)).deep.eq(value, "expected key: #{key} to eq value: #{value}")
done()
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("/foo").done ->
throw new Error("specific ajax error")
context.skip "#server", ->
beforeEach ->
defaults = {
ignore: true
respond: true
delay: 10
beforeRequest: ->
afterResponse: ->
onAbort: ->
onError: ->
onFilter: ->
}
@options = (obj) ->
_.extend obj, defaults
@create = cy.spy @Cypress.Server, "create"
it "can accept an onRequest and onResponse callback", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server(onRequest, onResponse)
it "can accept onRequest and onRespond through options", (done) ->
onRequest = ->
onResponse = ->
cy.on "end", =>
expect(@create.getCall(0).args[1]).to.have.keys _.keys(@options({onRequest: onRequest, onResponse, onResponse}))
done()
cy.server({onRequest: onRequest, onResponse: onResponse})
describe "without sinon present", ->
beforeEach ->
## force us to start from blank window
cy.state("$autIframe").prop("src", "about:blank")
it "can start server with no errors", ->
cy
.server()
.visit("http://localhost:3500/fixtures/sinon.html")
it "can add routes with no errors", ->
cy
.server()
.route(/foo/, {})
.visit("http://localhost:3500/fixtures/sinon.html")
it "routes xhr requests", ->
cy
.server()
.route(/foo/, {foo: "bar"})
.visit("http://localhost:3500/fixtures/sinon.html")
.window().then (w) ->
w.$.get("/foo")
.then (resp) ->
expect(resp).to.deep.eq {foo: "bar"}
it "works with aliases", ->
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/sinon.html")
.window().then (w) ->
w.$.get("/foo")
.wait("@getFoo").then (xhr) ->
expect(xhr.responseText).to.eq JSON.stringify({foo: "bar"})
it "prevents XHR's from going out from sinon.html", ->
cy
.server()
.route(/bar/, {bar: "baz"}).as("getBar")
.visit("http://localhost:3500/fixtures/sinon.html")
.wait("@getBar").then (xhr) ->
expect(xhr.responseText).to.eq JSON.stringify({bar: "baz"})
context "#route", ->
beforeEach ->
@expectOptionsToBe = (opts) =>
options = @route.getCall(0).args[0]
_.each opts, (value, key) ->
expect(options[key]).to.deep.eq(opts[key], "failed on property: (#{key})")
cy.server().then ->
@route = cy.spy(cy.state("server"), "route")
it "accepts url, response", ->
cy.route("/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts regex url, response", ->
cy.route(/foo/, {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /foo/
response: {}
})
it "does not mutate other routes when using shorthand", ->
cy
.route("POST", /foo/, {}).as("getFoo")
.route(/bar/, {}).as("getBar")
.then ->
expect(@route.firstCall.args[0].method).to.eq("POST")
expect(@route.secondCall.args[0].method).to.eq("GET")
it "accepts url, response, onRequest", ->
onRequest = ->
cy.route({
url: "/foo",
response: {},
onRequest: onRequest
}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
onRequest: onRequest
onResponse: undefined
})
it "accepts url, response, onRequest, onResponse", ->
onRequest = ->
onResponse = ->
cy.route({
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
})
it "accepts method, url, response", ->
cy.route("GET", "/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts method, url, response, onRequest", ->
onRequest = ->
cy.route({
method: "GET"
url: "/foo"
response: {}
onRequest: onRequest
}).then ->
@expectOptionsToBe({
method: "GET"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: undefined
})
it "accepts method, url, response, onRequest, onResponse", ->
onRequest = ->
onResponse = ->
cy.route({
method: "GET"
url: "/foo"
response: {}
onRequest: onRequest
onResponse: onResponse
}).then ->
@expectOptionsToBe({
method: "GET"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: onResponse
})
it "uppercases method", ->
cy.route("get", "/foo", {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: "/foo"
response: {}
})
it "accepts string or regex as the url", ->
cy.route("get", /.*/, {}).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /.*/
response: {}
})
it "does not require response or method when not stubbing", ->
cy
.server()
.route(/users/).as("getUsers")
.then ->
@expectOptionsToBe({
status: 200
method: "GET"
url: /users/
})
it "does not require response when not stubbing", ->
cy
.server()
.route("POST", /users/).as("createUsers")
.then ->
@expectOptionsToBe({
status: 200
method: "POST"
url: /users/
})
it "accepts an object literal as options", ->
onRequest = ->
onResponse = ->
opts = {
method: "PUT"
url: "/foo"
status: 200
response: {}
onRequest: onRequest
onResponse: onResponse
}
cy.route(opts).then ->
@expectOptionsToBe(opts)
it "can accept wildcard * as URL and converts to /.*/ regex", ->
opts = {
url: "*"
response: {}
}
cy.route(opts).then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /.*/
originalUrl: "*"
response: {}
})
## FIXME
it.skip "can explicitly done() in onRequest function from options", (done) ->
cy
.server()
.route({
method: "POST"
url: "/users"
response: {}
onRequest: -> done()
})
.then ->
cy.state("window").$.post("/users", "name=brian")
it "can accept response as a function", ->
users = [{}, {}]
getUsers = -> users
cy.route(/users/, getUsers)
.then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /users/
response: users
})
it "invokes response function with runnable.ctx", ->
ctx = @
users = [{}, {}]
getUsers = ->
expect(@ is ctx).to.be.true
cy.route(/users/, getUsers)
it "passes options as argument", ->
ctx = @
users = [{}, {}]
getUsers = (opts) ->
expect(opts).to.be.an("object")
expect(opts.method).to.eq("GET")
cy.route(/users/, getUsers)
it "can accept response as a function which returns a promise", ->
users = [{}, {}]
getUsers = ->
new Promise (resolve, reject) ->
setTimeout ->
resolve(users)
, 10
cy.route(/users/, getUsers)
.then ->
@expectOptionsToBe({
method: "GET"
status: 200
url: /users/
response: users
})
it "can accept a function which returns options", ->
users = [{}, {}]
getRoute = ->
{
method: "GET"
url: /users/
status: 201
response: -> Promise.resolve(users)
}
cy.route(getRoute)
.then ->
@expectOptionsToBe({
method: "GET"
status: 201
url: /users/
response: users
})
it "invokes route function with runnable.ctx", ->
ctx = @
getUsers = ->
expect(@ is ctx).to.be.true
{
url: /foo/
}
cy.route(getUsers)
it.skip "adds multiple routes to the responses array", ->
cy
.route("foo", {})
.route("bar", {})
.then ->
expect(cy.state("sandbox").server.responses).to.have.length(2)
it "can use regular strings as response", ->
cy
.route("/foo", "foo bar baz").as("getFoo")
.window().then (win) ->
win.$.get("/foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.eq "foo bar baz"
it "can stub requests with uncommon HTTP methods", ->
cy
.route("PROPFIND", "/foo", "foo bar baz").as("getFoo")
.window().then (win) ->
win.$.ajax({
url: "/foo"
method: "PROPFIND"
})
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.eq "foo bar baz"
it.skip "does not error when response is null but respond is false", ->
cy.route
url: /foo/
respond: false
describe "deprecations", ->
beforeEach ->
@warn = cy.spy(window.top.console, "warn")
it "logs on {force404: false}", ->
cy
.server({force404: false})
.then ->
expect(@warn).to.be.calledWith("Cypress Warning: Passing cy.server({force404: false}) is now the default behavior of cy.server(). You can safely remove this option.")
it "does not log on {force404: true}", ->
cy
.server({force404: true})
.then ->
expect(@warn).not.to.be.called
describe "request response alias", ->
it "matches xhrs with lowercase methods", ->
cy
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
xhr = new win.XMLHttpRequest
xhr.open("get", "/foo")
xhr.send()
.wait("@getFoo")
it "can pass an alias reference to route", ->
cy
.noop({foo: "bar"}).as("foo")
.route(/foo/, "@foo").as("getFoo")
.window().then (win) ->
win.$.getJSON("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.deep.eq {foo: "bar"}
expect(xhr.responseBody).to.deep.eq @foo
it "can pass an alias when using a response function", ->
getFoo = ->
Promise.resolve("@foo")
cy
.noop({foo: "bar"}).as("foo")
.route(/foo/, getFoo).as("getFoo")
.window().then (win) ->
win.$.getJSON("foo")
null
.wait("@getFoo").then (xhr) ->
expect(xhr.responseBody).to.deep.eq {foo: "bar"}
expect(xhr.responseBody).to.deep.eq @foo
it "can alias a route without stubbing it", ->
cy
.route(/fixtures\/app/).as("getFoo")
.window().then (win) ->
win.$.get("/fixtures/app.json")
null
.wait("@getFoo").then (xhr) ->
log = cy.queue.logs({name: "xhr"})[0]
expect(log.get("displayName")).to.eq("xhr")
expect(log.get("alias")).to.eq("getFoo")
expect(xhr.responseBody).to.deep.eq({
some: "json"
foo: {
bar: "baz"
}
})
describe "errors", ->
beforeEach ->
Cypress.config("defaultCommandTimeout", 50)
@logs = []
cy.on "log:added", (attrs, log) =>
@lastLog = log
@logs.push(log)
return null
it "throws if cy.server() hasnt been invoked", (done) ->
cy.state("serverIsStubbed", false)
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot be invoked before starting the cy.server()"
done()
cy.route()
it "url must be a string or regexp", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
done()
cy.route({
url: {}
})
it "url must be a string or regexp when a function", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid url. Url must be either a string or regular expression."
done()
getUrl = ->
Promise.resolve({url: {}})
cy.route(getUrl)
it "fails when functions reject", (done) ->
error = new Error
cy.on "fail", (err) ->
expect(err).to.eq(error)
done()
getUrl = ->
Promise.reject(error)
cy.route(getUrl)
it "fails when method is invalid", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was called with an invalid method: 'POSTS'."
done()
cy.route("posts", "/foo", {})
it "requires a url when given a response", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() must be called with a url. It can be a string or regular expression."
done()
cy.route({})
_.each [null, undefined], (val) ->
it "throws if response options was explicitly set to #{val}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
done()
cy.route({url: /foo/, response: val})
it "throws if response argument was explicitly set to #{val}", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() cannot accept an undefined or null response. It must be set to something, even an empty string will work."
done()
cy.route(/foo/, val)
it "requires arguments", (done) ->
cy.on "fail", (err) ->
expect(err.message).to.include "cy.route() was not provided any arguments. You must provide valid arguments."
done()
cy.route()
it "sets err on log when caused by the XHR response", (done) ->
@route.restore()
cy.on "fail", (err) =>
lastLog = @lastLog
## route + window + xhr log === 3
expect(@logs.length).to.eq(3)
expect(lastLog.get("name")).to.eq("xhr")
expect(lastLog.get("error")).to.eq err
done()
cy
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
win.$.get("foo_bar").done ->
foo.bar()
it.skip "explodes if response fixture signature errors", (done) ->
@trigger = cy.stub(@Cypress, "trigger").withArgs("fixture").callsArgWithAsync(2, {__error: "some error"})
logs = []
_this = @
## we have to restore the trigger when commandErr is called
## so that something logs out!
cy.commandErr = _.wrap cy.commandErr, (orig, err) ->
_this.Cypress.trigger.restore()
orig.call(@, err)
cy.on "log:added", (attrs, @log) =>
logs.push @log
cy.on "fail", (err) =>
expect(err.message).to.eq "some error"
expect(@logs.length).to.eq(1)
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, fixture:bar"
done()
cy
.route(/foo/, "fixture:bar")
## TODO: handle this uncaught exception failure
it.skip "does not retry (cancels existing promise) when xhr errors", (done) ->
cancel = cy.spy(Promise.prototype, "cancel")
cy.on "command:retry", =>
if cy.state("error")
done("should have cancelled and not retried after failing")
cy.on "fail", (err) =>
p = cy.state("promise")
_.delay =>
expect(cancel).to.be.calledOn(p)
done()
, 100
cy
.route({
url: /foo/,
response: {}
delay: 100
})
.window().then (win) ->
win.$.getJSON("/foo").done ->
throw new Error("foo failed")
null
.get("button").should("have.class", "does-not-exist")
it "explodes if response alias cannot be found", (done) ->
cy.on "fail", (err) =>
lastLog = @lastLog
expect(@logs.length).to.eq(2)
expect(err.message).to.eq "cy.route() could not find a registered alias for: '@bar'.\nAvailable aliases are: 'foo'."
expect(lastLog.get("name")).to.eq "route"
expect(lastLog.get("error")).to.eq err
expect(lastLog.get("message")).to.eq "/foo/, @bar"
done()
cy
.wrap({foo: "bar"}).as("foo")
.route(/foo/, "@bar")
describe ".log", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.instrument is "route"
@lastLog = log
@logs.push(log)
return null
it "has name of route", ->
cy.route("/foo", {}).then ->
lastLog = @lastLog
expect(lastLog.get("name")).to.eq "route"
it "uses the wildcard URL", ->
cy.route("*", {}).then ->
lastLog = @lastLog
expect(lastLog.get("url")).to.eq("*")
it "#consoleProps", ->
cy.route("*", {foo: "bar"}).as("foo").then ->
expect(@lastLog.invoke("consoleProps")).to.deep.eq {
Command: "route"
Method: "GET"
URL: "*"
Status: 200
Response: {foo: "bar"}
Alias: "foo"
# Responded: 1 time
# "-------": ""
# Responses: []
}
describe "numResponses", ->
it "is initially 0", ->
cy.route(/foo/, {}).then =>
lastLog = @lastLog
expect(lastLog.get("numResponses")).to.eq 0
it "is incremented to 2", ->
cy
.route(/foo/, {})
.window().then (win) ->
win.$.get("/foo")
.then ->
expect(@lastLog.get("numResponses")).to.eq 1
it "is incremented for each matching request", ->
cy
.route(/foo/, {})
.window().then (win) ->
Promise.all([
win.$.get("/foo")
win.$.get("/foo")
win.$.get("/foo")
])
.then ->
expect(@lastLog.get("numResponses")).to.eq 3
context "consoleProps logs", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
describe "when stubbed", ->
it "says Stubbed: Yes", ->
cy
.server()
.route(/foo/, {}).as("getFoo")
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
.then ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")
describe "zero configuration / zero routes", ->
beforeEach ->
cy
.server({force404: true})
.window().then (win) ->
new Promise (resolve) ->
win.$.ajax({
method: "POST"
url: "/foo"
data: JSON.stringify({foo: "bar"})
}).fail ->
resolve()
it "calculates duration", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Duration).to.be.a("number")
expect(consoleProps.Duration).to.be.gt(1)
expect(consoleProps.Duration).to.be.lt(1000)
it "sends back regular 404", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"
expect(consoleProps).to.deep.eq({
Method: "POST"
Status: "404 (Not Found)"
URL: "http://localhost:3500/foo"
XHR: xhr.xhr
})
it "says Stubbed: Yes when sent 404 back", ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("Yes")
describe "whitelisting", ->
it "does not send back 404s on whitelisted routes", ->
cy
.server()
.window().then (win) ->
win.$.get("/fixtures/app.js")
.then (resp) ->
expect(resp).to.eq "{ 'bar' }\n"
describe "route setup", ->
beforeEach ->
cy
.server({force404: true})
.route("/foo", {}).as("anyRequest")
.window().then (win) ->
win.$.get("/bar")
null
it "sends back 404 when request doesnt match route", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Note).to.eq("This request did not match any of your routes. It was automatically sent back '404'. Setting cy.server({force404: false}) will turn off this behavior.")
describe "{force404: false}", ->
beforeEach ->
cy
.server()
.window().then (win) ->
win.$.getJSON("/fixtures/app.json")
it "says Stubbed: No when request isnt forced 404", ->
expect(@lastLog.invoke("consoleProps").Stubbed).to.eq("No")
it "logs request + response headers", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Request.headers).to.be.an("object")
expect(consoleProps.Response.headers).to.be.an("object")
it "logs Method, Status, URL, and XHR", ->
cy.then ->
xhr = cy.state("responses")[0].xhr
consoleProps = _.pick @lastLog.invoke("consoleProps"), "Method", "Status", "URL", "XHR"
expect(consoleProps).to.deep.eq({
Method: "GET"
URL: "http://localhost:3500/fixtures/app.json"
Status: "200 (OK)"
XHR: xhr.xhr
})
it "logs response", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
expect(consoleProps.Response.body).to.deep.eq({
some: "json"
foo: {
bar: "baz"
}
})
it "sets groups Initiator", ->
cy.then ->
consoleProps = @lastLog.invoke("consoleProps")
group = consoleProps.groups()[0]
expect(group.name).to.eq("Initiator")
expect(group.label).to.be.false
expect(group.items[0]).to.be.a("string")
expect(group.items[0].split("\n").length).to.gt(1)
context "renderProps", ->
beforeEach ->
@logs = []
cy.on "log:added", (attrs, log) =>
if attrs.name is "xhr"
@lastLog = log
@logs.push(log)
return null
describe "in any case", ->
beforeEach ->
cy
.server()
.route(/foo/, {})
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
it "sends correct message", ->
cy.then ->
expect(@lastLog.invoke("renderProps").message).to.equal("GET 200 /foo")
describe "when response is successful", ->
beforeEach ->
cy
.server()
.route(/foo/, {})
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").done(resolve)
it "sends correct indicator", ->
cy.then ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("successful")
describe "when response is pending", ->
beforeEach ->
cy
.server()
.route({ url: "/foo", delay: 500, response: {} })
.window().then (win) ->
win.$.get("/foo")
null
## FAILING
it "sends correct message", ->
expect(@lastLog.invoke("renderProps").message).to.equal("GET --- /foo")
it "sends correct indicator", ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("pending")
describe "when response is outside 200 range", ->
beforeEach ->
cy
.server()
.route({ url: "/foo", status: 500, response: {} })
.window().then (win) ->
new Promise (resolve) ->
win.$.get("/foo").fail -> resolve()
it "sends correct indicator", ->
cy.then ->
expect(@lastLog.invoke("renderProps").indicator).to.equal("bad")
context "abort", ->
xhrs = []
beforeEach ->
cy.visit("/fixtures/jquery.html")
it "does not abort xhr's between tests", ->
cy.window().then (win) ->
_.times 2, ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=100")
xhr.send()
xhrs.push(xhr)
it "has not aborted the xhrs", ->
_.each xhrs, (xhr) ->
expect(xhr.aborted).not.to.be.false
it "aborts xhrs that haven't been sent", ->
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=0")
xhr.abort()
expect(xhr.aborted).to.be.true
it "aborts xhrs currently in flight", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=999")
xhr.send()
xhr.abort()
cy.wrap(null).should ->
expect(log.get("state")).to.eq("failed")
expect(log.invoke("renderProps")).to.deep.eq({
message: "GET (aborted) /timeout?ms=999",
indicator: 'aborted',
})
expect(xhr.aborted).to.be.true
## https://github.com/cypress-io/cypress/issues/3008
it "aborts xhrs even when responseType not '' or 'text'", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
xhr = new win.XMLHttpRequest()
xhr.responseType = 'arraybuffer'
xhr.open("GET", "/timeout?ms=1000")
xhr.send()
xhr.abort()
cy.wrap(null).should ->
expect(log.get("state")).to.eq("failed")
expect(xhr.aborted).to.be.true
## https://github.com/cypress-io/cypress/issues/1652
it "does not set aborted on XHR's that have completed by have had .abort() called", ->
log = null
cy.on "log:changed", (attrs, l) =>
if attrs.name is "xhr"
if not log
log = l
cy
.window()
.then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest()
xhr.open("GET", "/timeout?ms=0")
xhr.onload = ->
xhr.abort()
xhr.foo = "bar"
resolve(xhr)
xhr.send()
.then (xhr) ->
cy
.wrap(null)
.should ->
## ensure this is set to prevent accidental
## race conditions down the road if something
## goes wrong
expect(xhr.foo).to.eq("bar")
expect(xhr.aborted).not.to.be.true
expect(log.get("state")).to.eq("passed")
context "Cypress.on(window:unload)", ->
it "cancels all open XHR's", ->
xhrs = []
cy
.window()
.then (win) ->
_.times 2, ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/timeout?ms=200")
xhr.send()
xhrs.push(xhr)
.reload()
.then ->
_.each xhrs, (xhr) ->
expect(xhr.canceled).to.be.true
context "Cypress.on(window:before:load)", ->
it "reapplies server + route automatically before window:load", ->
## this tests that the server + routes are automatically reapplied
## after the 2nd visit - which is an example of the remote iframe
## causing an onBeforeLoad event
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/jquery.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
.visit("http://localhost:3500/fixtures/generic.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
it "reapplies server + route automatically during page transitions", ->
## this tests that the server + routes are automatically reapplied
## after the 2nd visit - which is an example of the remote iframe
## causing an onBeforeLoad event
cy
.server()
.route(/foo/, {foo: "bar"}).as("getFoo")
.visit("http://localhost:3500/fixtures/jquery.html")
.window().then (win) ->
url = "http://localhost:3500/fixtures/generic.html"
$a = win.$("<a href='#{url}'>jquery</a>")
.appendTo(win.document.body)
## synchronous beforeunload
$a.get(0).click()
.url().should("include", "/generic.html")
.window().then (win) ->
new Promise (resolve) ->
xhr = new win.XMLHttpRequest
xhr.open("GET", "/foo")
xhr.send()
xhr.onload = resolve
.wait("@getFoo").its("url").should("include", "/foo")
context.skip "#cancel", ->
it "calls server#cancel", (done) ->
cancel = null
@Cypress.once "abort", ->
expect(cancel).to.be.called
done()
cy.server().then ->
cancel = cy.spy cy.state("server"), "cancel"
@Cypress.trigger "abort"
context.skip "#respond", ->
it "calls server#respond", ->
respond = null
cy
.server({delay: 100}).then (server) ->
respond = cy.spy server, "respond"
.window().then (win) ->
win.$.get("/users")
null
.respond().then ->
expect(respond).to.be.calledOnce
describe "errors", ->
beforeEach ->
@allowErrors()
it "errors without a server", (done) ->
cy.on "fail", (err) =>
expect(err.message).to.eq "cy.respond() cannot be invoked before starting the cy.server()"
done()
cy.respond()
it "errors with no pending requests", (done) ->
cy.on "fail", (err) =>
expect(err.message).to.eq "cy.respond() did not find any pending requests to respond to"
done()
cy
.server()
.route(/users/, {})
.window().then (win) ->
## this is waited on to be resolved
## because of jquery promise thenable
win.$.get("/users")
.respond()
## currently this does not fail. we'll wait until someone cares
# it "errors if response was null or undefined", (done) ->
# cy.on "fail", (err) ->
# cy
# .server()
# .route({
# url: /foo/
# respond: false
# })
# .window().then (win) ->
# win.$.get("/foo")
# null
# .respond() |
[
{
"context": "nsteonOpenCloseSensorNode extends IsyNode\n key: 'insteonOpenCloseSensor'\n types: [[16, 2], [16, 17]]\n\n aspects:\n ope",
"end": 131,
"score": 0.8739693760871887,
"start": 109,
"tag": "KEY",
"value": "insteonOpenCloseSensor"
}
] | lib/adapters/isy/IsyInsteonOpenCloseSensorNode.coffee | monitron/jarvis-ha | 1 | IsyNode = require('./IsyNode')
module.exports = class IsyInsteonOpenCloseSensorNode extends IsyNode
key: 'insteonOpenCloseSensor'
types: [[16, 2], [16, 17]]
aspects:
openCloseSensor: {}
processData: (data) ->
if data.ST?
value = if data.ST == ' '
undefined
else if data.ST == '0'
false
else
true
if @configuration('invert') and value? then value = !value
@getAspect('openCloseSensor').setData state: value
| 179203 | IsyNode = require('./IsyNode')
module.exports = class IsyInsteonOpenCloseSensorNode extends IsyNode
key: '<KEY>'
types: [[16, 2], [16, 17]]
aspects:
openCloseSensor: {}
processData: (data) ->
if data.ST?
value = if data.ST == ' '
undefined
else if data.ST == '0'
false
else
true
if @configuration('invert') and value? then value = !value
@getAspect('openCloseSensor').setData state: value
| true | IsyNode = require('./IsyNode')
module.exports = class IsyInsteonOpenCloseSensorNode extends IsyNode
key: 'PI:KEY:<KEY>END_PI'
types: [[16, 2], [16, 17]]
aspects:
openCloseSensor: {}
processData: (data) ->
if data.ST?
value = if data.ST == ' '
undefined
else if data.ST == '0'
false
else
true
if @configuration('invert') and value? then value = !value
@getAspect('openCloseSensor').setData state: value
|
[
{
"context": " player.item = undefined\n\n key = \"#{player.playerType}_bomb-#{bombId++}\"\n\n ",
"end": 676,
"score": 0.9559305906295776,
"start": 673,
"tag": "KEY",
"value": "\"#{"
},
{
"context": "em = undefined\n\n key = \"#{player.playerType}_bomb-#{bombId++}\"\n\n sound.play('placeBomb')\n\n ",
"end": 712,
"score": 0.961863100528717,
"start": 689,
"tag": "KEY",
"value": "Type}_bomb-#{bombId++}\""
}
] | js/coffee/item_bomb.coffee | wjagodfrey/spaceJoust | 0 | bombId = 0
class item.Bomb extends Item
constructor: (@container, @key, @spawner, @xBounce, @yBounce) ->
color: colors.bomb.background
onHit: (col, ent) ->
if ent.type is 'Player' and !ent.item?
super(col, ent)
applyItem: (player) ->
if !player.item? #if player doesn't have an item
player.item =
use: =>
if not hasBoxHit(
player.x + player.width/2 - 3
player.y + player.height/2 - 3
6
6
player.spawn.x
player.spawn.y
player.spawn.width
player.spawn.height
)
player.item = undefined
key = "#{player.playerType}_bomb-#{bombId++}"
sound.play('placeBomb')
level.midground[key] = new entity.Bomb player, key, @xBounce, @yBounce
draw: (ctx) =>
width = 5
height = 5
ctx
.save()
.fillStyle(@color)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2)
width
height
)
.restore()
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2+2)
width
height-4
)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2+2)
Math.round(player.y+(player.height-height)/2)
width-4
height
)
.restore()
draw: (ctx) ->
super ctx
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x), Math.round(@y+2), @width, @height-4)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x+2), Math.round(@y), @width-4, @height)
.restore()
| 153874 | bombId = 0
class item.Bomb extends Item
constructor: (@container, @key, @spawner, @xBounce, @yBounce) ->
color: colors.bomb.background
onHit: (col, ent) ->
if ent.type is 'Player' and !ent.item?
super(col, ent)
applyItem: (player) ->
if !player.item? #if player doesn't have an item
player.item =
use: =>
if not hasBoxHit(
player.x + player.width/2 - 3
player.y + player.height/2 - 3
6
6
player.spawn.x
player.spawn.y
player.spawn.width
player.spawn.height
)
player.item = undefined
key = <KEY>player.player<KEY>
sound.play('placeBomb')
level.midground[key] = new entity.Bomb player, key, @xBounce, @yBounce
draw: (ctx) =>
width = 5
height = 5
ctx
.save()
.fillStyle(@color)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2)
width
height
)
.restore()
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2+2)
width
height-4
)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2+2)
Math.round(player.y+(player.height-height)/2)
width-4
height
)
.restore()
draw: (ctx) ->
super ctx
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x), Math.round(@y+2), @width, @height-4)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x+2), Math.round(@y), @width-4, @height)
.restore()
| true | bombId = 0
class item.Bomb extends Item
constructor: (@container, @key, @spawner, @xBounce, @yBounce) ->
color: colors.bomb.background
onHit: (col, ent) ->
if ent.type is 'Player' and !ent.item?
super(col, ent)
applyItem: (player) ->
if !player.item? #if player doesn't have an item
player.item =
use: =>
if not hasBoxHit(
player.x + player.width/2 - 3
player.y + player.height/2 - 3
6
6
player.spawn.x
player.spawn.y
player.spawn.width
player.spawn.height
)
player.item = undefined
key = PI:KEY:<KEY>END_PIplayer.playerPI:KEY:<KEY>END_PI
sound.play('placeBomb')
level.midground[key] = new entity.Bomb player, key, @xBounce, @yBounce
draw: (ctx) =>
width = 5
height = 5
ctx
.save()
.fillStyle(@color)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2)
width
height
)
.restore()
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2)
Math.round(player.y+(player.height-height)/2+2)
width
height-4
)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(
Math.round(player.x+(player.width-width)/2+2)
Math.round(player.y+(player.height-height)/2)
width-4
height
)
.restore()
draw: (ctx) ->
super ctx
if @xBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x), Math.round(@y+2), @width, @height-4)
.restore()
if @yBounce
ctx
.save()
.fillStyle(colors.bomb.off)
.fillRect(Math.round(@x+2), Math.round(@y), @width-4, @height)
.restore()
|
[
{
"context": "c-4fb9-b9db-5583351f606e'\n DEFAULT_APP_SECRET = 'rtRPuoJZeGT5yiKQH6mFt9LZ1zMWFPb4z9olkJspfnPDygWukK_vjWuP'\n\n constructor: (options = {}) ->\n @_id = opt",
"end": 150,
"score": 0.9997045993804932,
"start": 94,
"tag": "KEY",
"value": "rtRPuoJZeGT5yiKQH6mFt9LZ1zMWFPb4z9olkJspfnPDygWukK_vjWuP"
}
] | cmu.coffee | ryhan/cmujs | 1 | class @API
DEFAULT_APP_ID = '9c55f614-c85c-4fb9-b9db-5583351f606e'
DEFAULT_APP_SECRET = 'rtRPuoJZeGT5yiKQH6mFt9LZ1zMWFPb4z9olkJspfnPDygWukK_vjWuP'
constructor: (options = {}) ->
@_id = options.id || DEFAULT_APP_ID
@_secret = options.secret || DEFAULT_APP_SECRET
@_api_endpoint = 'https://apis.scottylabs.org/v1/'
date = new Date()
@_current_semester = if date.getMonth() < 5 then 'S' else 'F'
@_current_semester += date.getFullYear() - 2000
_get: (request) ->
url = "#{ @_api_endpoint + request }?app_id=#{ @_id }&app_secret_key=#{ @_secret }"
xmlHttp = null
xmlHttp = new XMLHttpRequest
xmlHttp.open 'GET', url, false
xmlHttp.send null
JSON.parse xmlHttp.responseText
class @ScheduleAPI extends API
findCourse: (course_number, semester) ->
semester = @_current_semester unless semester?
response = @_get "schedule/#{ encodeURIComponent semester }/courses/#{ encodeURIComponent course_number }"
response.course if response?
class @DirectoryAPI extends API
findAndrewId: (andrewID) ->
response = @_get "directory/andrewid/#{ encodeURIComponent andrewID }"
response.person if response?
class @CMUApi
constructor: (options) ->
@schedule = new ScheduleAPI options
@directory = new DirectoryAPI options | 89708 | class @API
DEFAULT_APP_ID = '9c55f614-c85c-4fb9-b9db-5583351f606e'
DEFAULT_APP_SECRET = '<KEY>'
constructor: (options = {}) ->
@_id = options.id || DEFAULT_APP_ID
@_secret = options.secret || DEFAULT_APP_SECRET
@_api_endpoint = 'https://apis.scottylabs.org/v1/'
date = new Date()
@_current_semester = if date.getMonth() < 5 then 'S' else 'F'
@_current_semester += date.getFullYear() - 2000
_get: (request) ->
url = "#{ @_api_endpoint + request }?app_id=#{ @_id }&app_secret_key=#{ @_secret }"
xmlHttp = null
xmlHttp = new XMLHttpRequest
xmlHttp.open 'GET', url, false
xmlHttp.send null
JSON.parse xmlHttp.responseText
class @ScheduleAPI extends API
findCourse: (course_number, semester) ->
semester = @_current_semester unless semester?
response = @_get "schedule/#{ encodeURIComponent semester }/courses/#{ encodeURIComponent course_number }"
response.course if response?
class @DirectoryAPI extends API
findAndrewId: (andrewID) ->
response = @_get "directory/andrewid/#{ encodeURIComponent andrewID }"
response.person if response?
class @CMUApi
constructor: (options) ->
@schedule = new ScheduleAPI options
@directory = new DirectoryAPI options | true | class @API
DEFAULT_APP_ID = '9c55f614-c85c-4fb9-b9db-5583351f606e'
DEFAULT_APP_SECRET = 'PI:KEY:<KEY>END_PI'
constructor: (options = {}) ->
@_id = options.id || DEFAULT_APP_ID
@_secret = options.secret || DEFAULT_APP_SECRET
@_api_endpoint = 'https://apis.scottylabs.org/v1/'
date = new Date()
@_current_semester = if date.getMonth() < 5 then 'S' else 'F'
@_current_semester += date.getFullYear() - 2000
_get: (request) ->
url = "#{ @_api_endpoint + request }?app_id=#{ @_id }&app_secret_key=#{ @_secret }"
xmlHttp = null
xmlHttp = new XMLHttpRequest
xmlHttp.open 'GET', url, false
xmlHttp.send null
JSON.parse xmlHttp.responseText
class @ScheduleAPI extends API
findCourse: (course_number, semester) ->
semester = @_current_semester unless semester?
response = @_get "schedule/#{ encodeURIComponent semester }/courses/#{ encodeURIComponent course_number }"
response.course if response?
class @DirectoryAPI extends API
findAndrewId: (andrewID) ->
response = @_get "directory/andrewid/#{ encodeURIComponent andrewID }"
response.person if response?
class @CMUApi
constructor: (options) ->
@schedule = new ScheduleAPI options
@directory = new DirectoryAPI options |
[
{
"context": "em_id = data.item_id\n\t\tfield = data.field\n\n\t\tkey = collection_name + item_id + field\n\t\tdownload_object = Session.get key\n\n\t\tif",
"end": 467,
"score": 0.7562378644943237,
"start": 442,
"tag": "KEY",
"value": "collection_name + item_id"
}
] | client/components/script/avatar.coffee | MooqitaSFH/worklearn | 0 | ###############################################################################
# Avatar
###############################################################################
###############################################################################
Template.avatar.helpers
is_downloading: () ->
inst = Template.instance()
data = inst.data
collection_name = data.collection_name
item_id = data.item_id
field = data.field
key = collection_name + item_id + field
download_object = Session.get key
if download_object
return true
return false
| 191422 | ###############################################################################
# Avatar
###############################################################################
###############################################################################
Template.avatar.helpers
is_downloading: () ->
inst = Template.instance()
data = inst.data
collection_name = data.collection_name
item_id = data.item_id
field = data.field
key = <KEY> + field
download_object = Session.get key
if download_object
return true
return false
| true | ###############################################################################
# Avatar
###############################################################################
###############################################################################
Template.avatar.helpers
is_downloading: () ->
inst = Template.instance()
data = inst.data
collection_name = data.collection_name
item_id = data.item_id
field = data.field
key = PI:KEY:<KEY>END_PI + field
download_object = Session.get key
if download_object
return true
return false
|
[
{
"context": "= \"https://www.googleapis.com/customsearch/v1?key=AIzaSyD79lKLhaHzunEXAj_bYMkwVI3lx-dYdxM&cx=007414649059824118455:2qstsvfxe1o&searchType=i",
"end": 201,
"score": 0.9997024536132812,
"start": 162,
"tag": "KEY",
"value": "AIzaSyD79lKLhaHzunEXAj_bYMkwVI3lx-dYdxM"
}
] | app/controllers/requestHandlers.server.coffee | JanMP/image-search-abstraction-layer | 0 | request = require "request"
path = process.cwd()
SearchEntry = require path + "/app/models/searches.js"
urlStr = "https://www.googleapis.com/customsearch/v1?key=AIzaSyD79lKLhaHzunEXAj_bYMkwVI3lx-dYdxM&cx=007414649059824118455:2qstsvfxe1o&searchType=image&q="
module.exports = ->
search : (req, res) ->
query= req.params.query
offset = req.query.offset ? 0
request
url : urlStr + query + "?startIndex=" + offset
json : true
(err, msg, data) ->
if err then throw err
res.send data.items?.map (d) ->
imageURL : d.link
thumpnailURL : d.image.thumbnailLink
altText : d.snippet
pageURL : d.image.contextLink
searchEntry = new SearchEntry
query : query
offset : Number offset
date : new Date()
searchEntry.save (err, entry) ->
if err then throw err
console.log "#{entry.date} : #{entry.query} : #{offset}"
list : (req, res) ->
SearchEntry.find {}
.sort date : -1
.limit 10
.exec (err, list) ->
if err then throw err
res.send list.map (d) ->
query : d.query
offset : d.offset
date : d.date
| 169677 | request = require "request"
path = process.cwd()
SearchEntry = require path + "/app/models/searches.js"
urlStr = "https://www.googleapis.com/customsearch/v1?key=<KEY>&cx=007414649059824118455:2qstsvfxe1o&searchType=image&q="
module.exports = ->
search : (req, res) ->
query= req.params.query
offset = req.query.offset ? 0
request
url : urlStr + query + "?startIndex=" + offset
json : true
(err, msg, data) ->
if err then throw err
res.send data.items?.map (d) ->
imageURL : d.link
thumpnailURL : d.image.thumbnailLink
altText : d.snippet
pageURL : d.image.contextLink
searchEntry = new SearchEntry
query : query
offset : Number offset
date : new Date()
searchEntry.save (err, entry) ->
if err then throw err
console.log "#{entry.date} : #{entry.query} : #{offset}"
list : (req, res) ->
SearchEntry.find {}
.sort date : -1
.limit 10
.exec (err, list) ->
if err then throw err
res.send list.map (d) ->
query : d.query
offset : d.offset
date : d.date
| true | request = require "request"
path = process.cwd()
SearchEntry = require path + "/app/models/searches.js"
urlStr = "https://www.googleapis.com/customsearch/v1?key=PI:KEY:<KEY>END_PI&cx=007414649059824118455:2qstsvfxe1o&searchType=image&q="
module.exports = ->
search : (req, res) ->
query= req.params.query
offset = req.query.offset ? 0
request
url : urlStr + query + "?startIndex=" + offset
json : true
(err, msg, data) ->
if err then throw err
res.send data.items?.map (d) ->
imageURL : d.link
thumpnailURL : d.image.thumbnailLink
altText : d.snippet
pageURL : d.image.contextLink
searchEntry = new SearchEntry
query : query
offset : Number offset
date : new Date()
searchEntry.save (err, entry) ->
if err then throw err
console.log "#{entry.date} : #{entry.query} : #{offset}"
list : (req, res) ->
SearchEntry.find {}
.sort date : -1
.limit 10
.exec (err, list) ->
if err then throw err
res.send list.map (d) ->
query : d.query
offset : d.offset
date : d.date
|
[
{
"context": "###\n# @author Will Steinmetz\n# jQuery notification plug-in inspired by the not",
"end": 28,
"score": 0.9998548626899719,
"start": 14,
"tag": "NAME",
"value": "Will Steinmetz"
},
{
"context": "ation style of Windows 8\n# Copyright (c)2013-2015, Will Steinmetz\n# Licensed under the BSD license.\n# http://openso",
"end": 147,
"score": 0.9998247027397156,
"start": 133,
"tag": "NAME",
"value": "Will Steinmetz"
}
] | public/third_party/notific8/Gruntfile.coffee | pvndn/spa | 130 | ###
# @author Will Steinmetz
# jQuery notification plug-in inspired by the notification style of Windows 8
# Copyright (c)2013-2015, Will Steinmetz
# Licensed under the BSD license.
# http://opensource.org/licenses/BSD-3-Clause
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
)
grunt.loadTasks 'grunt'
grunt.registerTask 'default', [
'clean:all',
'sass',
'cssmin',
'coffee',
'uglify',
'copy:font'
'copy:code'
]
| 18136 | ###
# @author <NAME>
# jQuery notification plug-in inspired by the notification style of Windows 8
# Copyright (c)2013-2015, <NAME>
# Licensed under the BSD license.
# http://opensource.org/licenses/BSD-3-Clause
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
)
grunt.loadTasks 'grunt'
grunt.registerTask 'default', [
'clean:all',
'sass',
'cssmin',
'coffee',
'uglify',
'copy:font'
'copy:code'
]
| true | ###
# @author PI:NAME:<NAME>END_PI
# jQuery notification plug-in inspired by the notification style of Windows 8
# Copyright (c)2013-2015, PI:NAME:<NAME>END_PI
# Licensed under the BSD license.
# http://opensource.org/licenses/BSD-3-Clause
###
module.exports = (grunt) ->
grunt.initConfig(
pkg: require('./package.json')
)
grunt.loadTasks 'grunt'
grunt.registerTask 'default', [
'clean:all',
'sass',
'cssmin',
'coffee',
'uglify',
'copy:font'
'copy:code'
]
|
[
{
"context": ": lod curves + phe x gen (as mean +/- 2 SE) plot\n# Karl W Broman\n\niplotScanone_ci = (lod_data, pxg_data, chartOpts",
"end": 81,
"score": 0.9998499155044556,
"start": 68,
"tag": "NAME",
"value": "Karl W Broman"
}
] | inst/charts/iplotScanone_ci.coffee | FourchettesDeInterActive/qtlcharts | 0 | # iplotScanone_ci: lod curves + phe x gen (as mean +/- 2 SE) plot
# Karl W Broman
iplotScanone_ci = (lod_data, pxg_data, chartOpts) ->
markers = (x for x of pxg_data.chrByMarkers)
# chartOpts start
height = chartOpts?.height ? 450 # height of image in pixels
wleft = chartOpts?.wleft ? 700 # width of left panel in pixels
wright = chartOpts?.wright ? 300 # width of right panel in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:40, bottom: 40, inner:5} # margins in pixels (left, top, right, bottom, inner)
lod_axispos = chartOpts?.lod_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in LOD curve panel
lod_titlepos = chartOpts?.lod_titlepos ? chartOpts?.titlepos ? 20 # position of title for LOD curve panel, in pixels
chrGap = chartOpts?.chrGap ? 8 # gap between chromosomes
darkrect = chartOpts?.darkrect ? "#C8C8C8" # color of darker background rectangle
lightrect = chartOpts?.lightrect ? "#E6E6E6" # color of lighter background rectangle
lod_ylim = chartOpts?.lod_ylim ? null # y-axis limits in LOD curve panel
lod_nyticks = chartOpts?.lod_nyticks ? 5 # number of ticks in y-axis in LOD curve panel
lod_yticks = chartOpts?.lod_yticks ? null # vector of tick positions for y-axis in LOD curve panel
lod_linecolor = chartOpts?.lod_linecolor ? "darkslateblue" # line color for LOD curves
lod_linewidth = chartOpts?.lod_linewidth ? 2 # line width for LOD curves
lod_pointcolor = chartOpts?.lod_pointcolor ? "#E9CFEC" # color for points at markers in LOD curve panel
lod_pointsize = chartOpts?.lod_pointsize ? 0 # size of points at markers (default = 0 corresponding to no visible points at markers)
lod_pointstroke = chartOpts?.lod_pointstroke ? "black" # color of outer circle for points at markers in LOD curve panel
lod_title = chartOpts?.lod_title ? "" # title of LOD curve panel
lod_xlab = chartOpts?.lod_xlab ? "Chromosome" # x-axis label for LOD curve panel
lod_ylab = chartOpts?.lod_ylab ? "LOD score" # y-axis label for LOD curve panel
lod_rotate_ylab = chartOpts?.lod_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in LOD curve panel
eff_ylim = chartOpts?.eff_ylim ? null # y-axis limits in effect plot panel
eff_nyticks = chartOpts?.eff_nyticks ? 5 # number of ticks in y-axis in effect plot panel
eff_yticks = chartOpts?.eff_yticks ? null # vector of tick positions for y-axis in effect plot panel
eff_linecolor = chartOpts?.eff_linecolor ? "slateblue" # line color in effect plot panel
eff_linewidth = chartOpts?.eff_linewidth ? "3" # line width in effect plot panel
eff_xlab = chartOpts?.eff_xlab ? "Genotype" # x-axis label in effect plot panel
eff_ylab = chartOpts?.eff_ylab ? "Phenotype" # y-axis label in effect plot panel
eff_rotate_ylab = chartOpts?.eff_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in effect plot panel
eff_segwidth = chartOpts?.eff_segwidth ? null # width of line segments in effect plot panel, in pixels
eff_axispos = chartOpts?.eff_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in effect plot panel
eff_titlepos = chartOpts?.eff_titlepos ? chartOpts?.titlepos ? 20 # position of title for effect plot panel, in pixels
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
totalh = height + margin.top + margin.bottom
totalw = wleft + wright + (margin.left + margin.right)*2
mylodchart = lodchart().lodvarname("lod")
.height(height)
.width(wleft)
.margin(margin)
.axispos(lod_axispos)
.titlepos(lod_titlepos)
.chrGap(chrGap)
.darkrect(darkrect)
.lightrect(lightrect)
.ylim(lod_ylim)
.nyticks(lod_nyticks)
.yticks(lod_yticks)
.linecolor(lod_linecolor)
.linewidth(lod_linewidth)
.pointcolor(lod_pointcolor)
.pointsize(lod_pointsize)
.pointstroke(lod_pointstroke)
.title(lod_title)
.xlab(lod_xlab)
.ylab(lod_ylab)
.rotate_ylab(lod_rotate_ylab)
svg = d3.select("div##{chartdivid}")
.append("svg")
.attr("height", totalh)
.attr("width", totalw)
g_lod = svg.append("g")
.attr("id", "lodchart")
.datum(lod_data)
.call(mylodchart)
plotCI = (markername, markerindex) ->
svg.select("g#cichart").remove()
g = pxg_data.geno[markerindex]
gabs = (Math.abs(x) for x in g)
chr = pxg_data.chrByMarkers[markername]
chrtype = pxg_data.chrtype[chr]
genonames = pxg_data.genonames[chrtype]
means = []
se = []
for j in [1..genonames.length]
phesub = (p for p,i in pxg_data.pheno when gabs[i] == j)
if phesub.length>0
ave = (phesub.reduce (a,b) -> a+b)/phesub.length
means.push(ave)
else means.push(null)
if phesub.length>1
variance = (phesub.reduce (a,b) -> a+Math.pow(b-ave, 2))/(phesub.length-1)
se.push((Math.sqrt(variance/phesub.length)))
else
se.push(null)
low = (means[i]-2*se[i] for i of means)
high = (means[i]+2*se[i] for i of means)
range = [d3.min(low), d3.max(high)]
if eff_ylim?
eff_ylim = [d3.min([range[0],eff_ylim[0]]), d3.max([range[1],eff_ylim[1]])]
else
eff_ylim = range
mycichart = cichart().height(height)
.width(wright)
.margin(margin)
.axispos(eff_axispos)
.titlepos(eff_titlepos)
.title(markername)
.xlab(eff_xlab)
.ylab(eff_ylab)
.rotate_ylab(eff_rotate_ylab)
.ylim(eff_ylim)
.nyticks(eff_nyticks)
.yticks(eff_yticks)
.segcolor(eff_linecolor)
.vertsegcolor(eff_linecolor)
.segstrokewidth(eff_linewidth)
.segwidth(eff_segwidth)
.rectcolor(lightrect)
svg.append("g")
.attr("id", "cichart")
.attr("transform", "translate(#{wleft+margin.left+margin.right},0)")
.datum({'means':means, 'low':low, 'high':high, 'categories':genonames})
.call(mycichart)
# animate points at markers on click
mylodchart.markerSelect()
.on "click", (d,i) ->
plotCI(markers[i], i)
| 8159 | # iplotScanone_ci: lod curves + phe x gen (as mean +/- 2 SE) plot
# <NAME>
iplotScanone_ci = (lod_data, pxg_data, chartOpts) ->
markers = (x for x of pxg_data.chrByMarkers)
# chartOpts start
height = chartOpts?.height ? 450 # height of image in pixels
wleft = chartOpts?.wleft ? 700 # width of left panel in pixels
wright = chartOpts?.wright ? 300 # width of right panel in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:40, bottom: 40, inner:5} # margins in pixels (left, top, right, bottom, inner)
lod_axispos = chartOpts?.lod_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in LOD curve panel
lod_titlepos = chartOpts?.lod_titlepos ? chartOpts?.titlepos ? 20 # position of title for LOD curve panel, in pixels
chrGap = chartOpts?.chrGap ? 8 # gap between chromosomes
darkrect = chartOpts?.darkrect ? "#C8C8C8" # color of darker background rectangle
lightrect = chartOpts?.lightrect ? "#E6E6E6" # color of lighter background rectangle
lod_ylim = chartOpts?.lod_ylim ? null # y-axis limits in LOD curve panel
lod_nyticks = chartOpts?.lod_nyticks ? 5 # number of ticks in y-axis in LOD curve panel
lod_yticks = chartOpts?.lod_yticks ? null # vector of tick positions for y-axis in LOD curve panel
lod_linecolor = chartOpts?.lod_linecolor ? "darkslateblue" # line color for LOD curves
lod_linewidth = chartOpts?.lod_linewidth ? 2 # line width for LOD curves
lod_pointcolor = chartOpts?.lod_pointcolor ? "#E9CFEC" # color for points at markers in LOD curve panel
lod_pointsize = chartOpts?.lod_pointsize ? 0 # size of points at markers (default = 0 corresponding to no visible points at markers)
lod_pointstroke = chartOpts?.lod_pointstroke ? "black" # color of outer circle for points at markers in LOD curve panel
lod_title = chartOpts?.lod_title ? "" # title of LOD curve panel
lod_xlab = chartOpts?.lod_xlab ? "Chromosome" # x-axis label for LOD curve panel
lod_ylab = chartOpts?.lod_ylab ? "LOD score" # y-axis label for LOD curve panel
lod_rotate_ylab = chartOpts?.lod_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in LOD curve panel
eff_ylim = chartOpts?.eff_ylim ? null # y-axis limits in effect plot panel
eff_nyticks = chartOpts?.eff_nyticks ? 5 # number of ticks in y-axis in effect plot panel
eff_yticks = chartOpts?.eff_yticks ? null # vector of tick positions for y-axis in effect plot panel
eff_linecolor = chartOpts?.eff_linecolor ? "slateblue" # line color in effect plot panel
eff_linewidth = chartOpts?.eff_linewidth ? "3" # line width in effect plot panel
eff_xlab = chartOpts?.eff_xlab ? "Genotype" # x-axis label in effect plot panel
eff_ylab = chartOpts?.eff_ylab ? "Phenotype" # y-axis label in effect plot panel
eff_rotate_ylab = chartOpts?.eff_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in effect plot panel
eff_segwidth = chartOpts?.eff_segwidth ? null # width of line segments in effect plot panel, in pixels
eff_axispos = chartOpts?.eff_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in effect plot panel
eff_titlepos = chartOpts?.eff_titlepos ? chartOpts?.titlepos ? 20 # position of title for effect plot panel, in pixels
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
totalh = height + margin.top + margin.bottom
totalw = wleft + wright + (margin.left + margin.right)*2
mylodchart = lodchart().lodvarname("lod")
.height(height)
.width(wleft)
.margin(margin)
.axispos(lod_axispos)
.titlepos(lod_titlepos)
.chrGap(chrGap)
.darkrect(darkrect)
.lightrect(lightrect)
.ylim(lod_ylim)
.nyticks(lod_nyticks)
.yticks(lod_yticks)
.linecolor(lod_linecolor)
.linewidth(lod_linewidth)
.pointcolor(lod_pointcolor)
.pointsize(lod_pointsize)
.pointstroke(lod_pointstroke)
.title(lod_title)
.xlab(lod_xlab)
.ylab(lod_ylab)
.rotate_ylab(lod_rotate_ylab)
svg = d3.select("div##{chartdivid}")
.append("svg")
.attr("height", totalh)
.attr("width", totalw)
g_lod = svg.append("g")
.attr("id", "lodchart")
.datum(lod_data)
.call(mylodchart)
plotCI = (markername, markerindex) ->
svg.select("g#cichart").remove()
g = pxg_data.geno[markerindex]
gabs = (Math.abs(x) for x in g)
chr = pxg_data.chrByMarkers[markername]
chrtype = pxg_data.chrtype[chr]
genonames = pxg_data.genonames[chrtype]
means = []
se = []
for j in [1..genonames.length]
phesub = (p for p,i in pxg_data.pheno when gabs[i] == j)
if phesub.length>0
ave = (phesub.reduce (a,b) -> a+b)/phesub.length
means.push(ave)
else means.push(null)
if phesub.length>1
variance = (phesub.reduce (a,b) -> a+Math.pow(b-ave, 2))/(phesub.length-1)
se.push((Math.sqrt(variance/phesub.length)))
else
se.push(null)
low = (means[i]-2*se[i] for i of means)
high = (means[i]+2*se[i] for i of means)
range = [d3.min(low), d3.max(high)]
if eff_ylim?
eff_ylim = [d3.min([range[0],eff_ylim[0]]), d3.max([range[1],eff_ylim[1]])]
else
eff_ylim = range
mycichart = cichart().height(height)
.width(wright)
.margin(margin)
.axispos(eff_axispos)
.titlepos(eff_titlepos)
.title(markername)
.xlab(eff_xlab)
.ylab(eff_ylab)
.rotate_ylab(eff_rotate_ylab)
.ylim(eff_ylim)
.nyticks(eff_nyticks)
.yticks(eff_yticks)
.segcolor(eff_linecolor)
.vertsegcolor(eff_linecolor)
.segstrokewidth(eff_linewidth)
.segwidth(eff_segwidth)
.rectcolor(lightrect)
svg.append("g")
.attr("id", "cichart")
.attr("transform", "translate(#{wleft+margin.left+margin.right},0)")
.datum({'means':means, 'low':low, 'high':high, 'categories':genonames})
.call(mycichart)
# animate points at markers on click
mylodchart.markerSelect()
.on "click", (d,i) ->
plotCI(markers[i], i)
| true | # iplotScanone_ci: lod curves + phe x gen (as mean +/- 2 SE) plot
# PI:NAME:<NAME>END_PI
iplotScanone_ci = (lod_data, pxg_data, chartOpts) ->
markers = (x for x of pxg_data.chrByMarkers)
# chartOpts start
height = chartOpts?.height ? 450 # height of image in pixels
wleft = chartOpts?.wleft ? 700 # width of left panel in pixels
wright = chartOpts?.wright ? 300 # width of right panel in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:40, bottom: 40, inner:5} # margins in pixels (left, top, right, bottom, inner)
lod_axispos = chartOpts?.lod_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in LOD curve panel
lod_titlepos = chartOpts?.lod_titlepos ? chartOpts?.titlepos ? 20 # position of title for LOD curve panel, in pixels
chrGap = chartOpts?.chrGap ? 8 # gap between chromosomes
darkrect = chartOpts?.darkrect ? "#C8C8C8" # color of darker background rectangle
lightrect = chartOpts?.lightrect ? "#E6E6E6" # color of lighter background rectangle
lod_ylim = chartOpts?.lod_ylim ? null # y-axis limits in LOD curve panel
lod_nyticks = chartOpts?.lod_nyticks ? 5 # number of ticks in y-axis in LOD curve panel
lod_yticks = chartOpts?.lod_yticks ? null # vector of tick positions for y-axis in LOD curve panel
lod_linecolor = chartOpts?.lod_linecolor ? "darkslateblue" # line color for LOD curves
lod_linewidth = chartOpts?.lod_linewidth ? 2 # line width for LOD curves
lod_pointcolor = chartOpts?.lod_pointcolor ? "#E9CFEC" # color for points at markers in LOD curve panel
lod_pointsize = chartOpts?.lod_pointsize ? 0 # size of points at markers (default = 0 corresponding to no visible points at markers)
lod_pointstroke = chartOpts?.lod_pointstroke ? "black" # color of outer circle for points at markers in LOD curve panel
lod_title = chartOpts?.lod_title ? "" # title of LOD curve panel
lod_xlab = chartOpts?.lod_xlab ? "Chromosome" # x-axis label for LOD curve panel
lod_ylab = chartOpts?.lod_ylab ? "LOD score" # y-axis label for LOD curve panel
lod_rotate_ylab = chartOpts?.lod_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in LOD curve panel
eff_ylim = chartOpts?.eff_ylim ? null # y-axis limits in effect plot panel
eff_nyticks = chartOpts?.eff_nyticks ? 5 # number of ticks in y-axis in effect plot panel
eff_yticks = chartOpts?.eff_yticks ? null # vector of tick positions for y-axis in effect plot panel
eff_linecolor = chartOpts?.eff_linecolor ? "slateblue" # line color in effect plot panel
eff_linewidth = chartOpts?.eff_linewidth ? "3" # line width in effect plot panel
eff_xlab = chartOpts?.eff_xlab ? "Genotype" # x-axis label in effect plot panel
eff_ylab = chartOpts?.eff_ylab ? "Phenotype" # y-axis label in effect plot panel
eff_rotate_ylab = chartOpts?.eff_rotate_ylab ? null # indicates whether to rotate the y-axis label 90 degrees, in effect plot panel
eff_segwidth = chartOpts?.eff_segwidth ? null # width of line segments in effect plot panel, in pixels
eff_axispos = chartOpts?.eff_axispos ? chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel) in effect plot panel
eff_titlepos = chartOpts?.eff_titlepos ? chartOpts?.titlepos ? 20 # position of title for effect plot panel, in pixels
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
totalh = height + margin.top + margin.bottom
totalw = wleft + wright + (margin.left + margin.right)*2
mylodchart = lodchart().lodvarname("lod")
.height(height)
.width(wleft)
.margin(margin)
.axispos(lod_axispos)
.titlepos(lod_titlepos)
.chrGap(chrGap)
.darkrect(darkrect)
.lightrect(lightrect)
.ylim(lod_ylim)
.nyticks(lod_nyticks)
.yticks(lod_yticks)
.linecolor(lod_linecolor)
.linewidth(lod_linewidth)
.pointcolor(lod_pointcolor)
.pointsize(lod_pointsize)
.pointstroke(lod_pointstroke)
.title(lod_title)
.xlab(lod_xlab)
.ylab(lod_ylab)
.rotate_ylab(lod_rotate_ylab)
svg = d3.select("div##{chartdivid}")
.append("svg")
.attr("height", totalh)
.attr("width", totalw)
g_lod = svg.append("g")
.attr("id", "lodchart")
.datum(lod_data)
.call(mylodchart)
plotCI = (markername, markerindex) ->
svg.select("g#cichart").remove()
g = pxg_data.geno[markerindex]
gabs = (Math.abs(x) for x in g)
chr = pxg_data.chrByMarkers[markername]
chrtype = pxg_data.chrtype[chr]
genonames = pxg_data.genonames[chrtype]
means = []
se = []
for j in [1..genonames.length]
phesub = (p for p,i in pxg_data.pheno when gabs[i] == j)
if phesub.length>0
ave = (phesub.reduce (a,b) -> a+b)/phesub.length
means.push(ave)
else means.push(null)
if phesub.length>1
variance = (phesub.reduce (a,b) -> a+Math.pow(b-ave, 2))/(phesub.length-1)
se.push((Math.sqrt(variance/phesub.length)))
else
se.push(null)
low = (means[i]-2*se[i] for i of means)
high = (means[i]+2*se[i] for i of means)
range = [d3.min(low), d3.max(high)]
if eff_ylim?
eff_ylim = [d3.min([range[0],eff_ylim[0]]), d3.max([range[1],eff_ylim[1]])]
else
eff_ylim = range
mycichart = cichart().height(height)
.width(wright)
.margin(margin)
.axispos(eff_axispos)
.titlepos(eff_titlepos)
.title(markername)
.xlab(eff_xlab)
.ylab(eff_ylab)
.rotate_ylab(eff_rotate_ylab)
.ylim(eff_ylim)
.nyticks(eff_nyticks)
.yticks(eff_yticks)
.segcolor(eff_linecolor)
.vertsegcolor(eff_linecolor)
.segstrokewidth(eff_linewidth)
.segwidth(eff_segwidth)
.rectcolor(lightrect)
svg.append("g")
.attr("id", "cichart")
.attr("transform", "translate(#{wleft+margin.left+margin.right},0)")
.datum({'means':means, 'low':low, 'high':high, 'categories':genonames})
.call(mycichart)
# animate points at markers on click
mylodchart.markerSelect()
.on "click", (d,i) ->
plotCI(markers[i], i)
|
[
{
"context": "window\n domain = tld document.domain\n key = '_hza'\n\n state = ->\n value = cookies.get key\n if",
"end": 324,
"score": 0.999322235584259,
"start": 319,
"tag": "KEY",
"value": "'_hza"
}
] | src/session.coffee | hanzo-io/track.js | 16 | {safariPrivateBrowsing, tld} = require './utils'
{document, window} = require './browser'
# Default session storage
localStorage = -> require 'store'
# Fallback to cookie storage to handle safari private browsing mode
cookies = ->
cookies = (require 'cookies-js') window
domain = tld document.domain
key = '_hza'
state = ->
value = cookies.get key
if value?
JSON.parse value
else
{}
get: (k) ->
state()[k]
set: (k, v) ->
s = state()
s[k] = v
cookies.set key, JSON.stringify s,
domain: domain
secure: true
expires: Infinity
remove: (k) ->
@set k, undefined
clear: ->
cookies.expire key
module.exports = if safariPrivateBrowsing() then cookies() else localStorage()
| 119363 | {safariPrivateBrowsing, tld} = require './utils'
{document, window} = require './browser'
# Default session storage
localStorage = -> require 'store'
# Fallback to cookie storage to handle safari private browsing mode
cookies = ->
cookies = (require 'cookies-js') window
domain = tld document.domain
key = <KEY>'
state = ->
value = cookies.get key
if value?
JSON.parse value
else
{}
get: (k) ->
state()[k]
set: (k, v) ->
s = state()
s[k] = v
cookies.set key, JSON.stringify s,
domain: domain
secure: true
expires: Infinity
remove: (k) ->
@set k, undefined
clear: ->
cookies.expire key
module.exports = if safariPrivateBrowsing() then cookies() else localStorage()
| true | {safariPrivateBrowsing, tld} = require './utils'
{document, window} = require './browser'
# Default session storage
localStorage = -> require 'store'
# Fallback to cookie storage to handle safari private browsing mode
cookies = ->
cookies = (require 'cookies-js') window
domain = tld document.domain
key = PI:KEY:<KEY>END_PI'
state = ->
value = cookies.get key
if value?
JSON.parse value
else
{}
get: (k) ->
state()[k]
set: (k, v) ->
s = state()
s[k] = v
cookies.set key, JSON.stringify s,
domain: domain
secure: true
expires: Infinity
remove: (k) ->
@set k, undefined
clear: ->
cookies.expire key
module.exports = if safariPrivateBrowsing() then cookies() else localStorage()
|
[
{
"context": "foreEach ->\n @heroku = new Heroku\n key : \"deadbeef\"\n\n spyOn @heroku, \"request\"\n\n it \"should be a",
"end": 122,
"score": 0.8956020474433899,
"start": 114,
"tag": "KEY",
"value": "deadbeef"
}
] | node_modules/heroku/spec/apps.spec.coffee | equirk/CheckMate | 5 | describe "Apps", ->
{Heroku} = require "../src/heroku"
beforeEach ->
@heroku = new Heroku
key : "deadbeef"
spyOn @heroku, "request"
it "should be able to create a properly structured JSON file from params", ->
expected =
app :
name : "the-test-case"
actual = @heroku.app_params
name: "the-test-case"
expect(actual).toEqual expected
it "should be able to send a properly constructed JSON POST request to create", ->
fn = ->
@heroku.post_app { name: "the-test-case" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
expects : 202
method : "POST"
path : "/apps"
query :
app :
name : "the-test-case"
}, fn
it "should be to send an update request to change the name of an app", ->
fn = ->
@heroku.put_app "the-test-case", { name: "the-test-case-2" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
method : "PUT"
path : "/apps/the-test-case"
query :
app :
name : "the-test-case-2"
}, fn
| 6467 | describe "Apps", ->
{Heroku} = require "../src/heroku"
beforeEach ->
@heroku = new Heroku
key : "<KEY>"
spyOn @heroku, "request"
it "should be able to create a properly structured JSON file from params", ->
expected =
app :
name : "the-test-case"
actual = @heroku.app_params
name: "the-test-case"
expect(actual).toEqual expected
it "should be able to send a properly constructed JSON POST request to create", ->
fn = ->
@heroku.post_app { name: "the-test-case" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
expects : 202
method : "POST"
path : "/apps"
query :
app :
name : "the-test-case"
}, fn
it "should be to send an update request to change the name of an app", ->
fn = ->
@heroku.put_app "the-test-case", { name: "the-test-case-2" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
method : "PUT"
path : "/apps/the-test-case"
query :
app :
name : "the-test-case-2"
}, fn
| true | describe "Apps", ->
{Heroku} = require "../src/heroku"
beforeEach ->
@heroku = new Heroku
key : "PI:KEY:<KEY>END_PI"
spyOn @heroku, "request"
it "should be able to create a properly structured JSON file from params", ->
expected =
app :
name : "the-test-case"
actual = @heroku.app_params
name: "the-test-case"
expect(actual).toEqual expected
it "should be able to send a properly constructed JSON POST request to create", ->
fn = ->
@heroku.post_app { name: "the-test-case" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
expects : 202
method : "POST"
path : "/apps"
query :
app :
name : "the-test-case"
}, fn
it "should be to send an update request to change the name of an app", ->
fn = ->
@heroku.put_app "the-test-case", { name: "the-test-case-2" }, fn
expect(@heroku.request).toHaveBeenCalledWith {
method : "PUT"
path : "/apps/the-test-case"
query :
app :
name : "the-test-case-2"
}, fn
|
[
{
"context": "\tresult.code\n\nheader = '''/*\n\ttoolkit.js\n\n\tAuthor: Robin Saxifrage\n\tLicense: Apache 2.0\n*/\n\n'''\n\nes5 = compileWithIn",
"end": 644,
"score": 0.9998216032981873,
"start": 629,
"tag": "NAME",
"value": "Robin Saxifrage"
}
] | build.coffee | robinsax/toolkit.js | 0 | # Build script.
#
# Will place toolkit.js and toolkit.min.js in the repository root.
#
fs = require 'fs'
ugly = require 'uglify-js'
coffee = require 'coffeescript'
babel = require 'babel-core'
compileWithIncludes = (filename) ->
contents = fs.readFileSync 'src/' + filename + '.coffee', 'utf-8'
contents = contents.replace /#\s+::include\s+(.+)\s*?\n/g, (match, includeFilename) ->
fs.readFileSync 'src/' + includeFilename + '.coffee', 'utf-8'
transpileOpts =
presets: ['es2015']
compiled = coffee.compile contents
result = babel.transform compiled, transpileOpts
result.code
header = '''/*
toolkit.js
Author: Robin Saxifrage
License: Apache 2.0
*/
'''
es5 = compileWithIncludes 'toolkit'
fs.writeFileSync 'toolkit.js', header + es5
fs.writeFileSync 'toolkit.min.js', header + (ugly.minify es5).code
console.log 'Done' | 175183 | # Build script.
#
# Will place toolkit.js and toolkit.min.js in the repository root.
#
fs = require 'fs'
ugly = require 'uglify-js'
coffee = require 'coffeescript'
babel = require 'babel-core'
compileWithIncludes = (filename) ->
contents = fs.readFileSync 'src/' + filename + '.coffee', 'utf-8'
contents = contents.replace /#\s+::include\s+(.+)\s*?\n/g, (match, includeFilename) ->
fs.readFileSync 'src/' + includeFilename + '.coffee', 'utf-8'
transpileOpts =
presets: ['es2015']
compiled = coffee.compile contents
result = babel.transform compiled, transpileOpts
result.code
header = '''/*
toolkit.js
Author: <NAME>
License: Apache 2.0
*/
'''
es5 = compileWithIncludes 'toolkit'
fs.writeFileSync 'toolkit.js', header + es5
fs.writeFileSync 'toolkit.min.js', header + (ugly.minify es5).code
console.log 'Done' | true | # Build script.
#
# Will place toolkit.js and toolkit.min.js in the repository root.
#
fs = require 'fs'
ugly = require 'uglify-js'
coffee = require 'coffeescript'
babel = require 'babel-core'
compileWithIncludes = (filename) ->
contents = fs.readFileSync 'src/' + filename + '.coffee', 'utf-8'
contents = contents.replace /#\s+::include\s+(.+)\s*?\n/g, (match, includeFilename) ->
fs.readFileSync 'src/' + includeFilename + '.coffee', 'utf-8'
transpileOpts =
presets: ['es2015']
compiled = coffee.compile contents
result = babel.transform compiled, transpileOpts
result.code
header = '''/*
toolkit.js
Author: PI:NAME:<NAME>END_PI
License: Apache 2.0
*/
'''
es5 = compileWithIncludes 'toolkit'
fs.writeFileSync 'toolkit.js', header + es5
fs.writeFileSync 'toolkit.min.js', header + (ugly.minify es5).code
console.log 'Done' |
[
{
"context": "tagelabs.com; mailto:' + (API.settings.log?.to ? 'mark@cottagelabs.com') + ')'\n 'User-Agent': 'OAB; mailto: joe@openacc",
"end": 492,
"score": 0.9999243021011353,
"start": 472,
"tag": "EMAIL",
"value": "mark@cottagelabs.com"
},
{
"context": "tagelabs.com') + ')'\n 'User-Agent': 'OAB; mailto: joe@openaccessbutton.org'\n}\n\nAPI.use ?= {}\nAPI.use.crossref = {works:{},jo",
"end": 555,
"score": 0.9999138712882996,
"start": 531,
"tag": "EMAIL",
"value": "joe@openaccessbutton.org"
},
{
"context": "l.send\n service: 'openaccessbutton'\n from: 'natalia.norori@openaccessbutton.org'\n to: if broken then 'alert@cottagelabs.com' e",
"end": 24462,
"score": 0.9999279975891113,
"start": 24427,
"tag": "EMAIL",
"value": "natalia.norori@openaccessbutton.org"
},
{
"context": "ori@openaccessbutton.org'\n to: if broken then 'alert@cottagelabs.com' else 'mark@cottagelabs.com'\n subject: 'Crossr",
"end": 24509,
"score": 0.9999284148216248,
"start": 24488,
"tag": "EMAIL",
"value": "alert@cottagelabs.com"
},
{
"context": " to: if broken then 'alert@cottagelabs.com' else 'mark@cottagelabs.com'\n subject: 'Crossref index check ' + (if broke",
"end": 24537,
"score": 0.9999293088912964,
"start": 24517,
"tag": "EMAIL",
"value": "mark@cottagelabs.com"
}
] | server/use/crossref.coffee | leviathanindustries/noddy | 2 |
import moment from 'moment'
import Future from 'fibers/future'
# a crossref API client
# https://github.com/CrossRef/rest-api-doc/blob/master/rest_api.md
# http://api.crossref.org/works/10.1016/j.paid.2009.02.013
# crossref now prefers some identifying headers
header = {
#'User-Agent': (API.settings.name ? 'noddy') + ' v' + (API.settings.version ? '0.0.1') + (if API.settings.dev then 'd' else '') + ' (https://cottagelabs.com; mailto:' + (API.settings.log?.to ? 'mark@cottagelabs.com') + ')'
'User-Agent': 'OAB; mailto: joe@openaccessbutton.org'
}
API.use ?= {}
API.use.crossref = {works:{},journals:{}, publishers: {}, funders: {}}
@crossref_journal = new API.collection {index:"crossref", type:"journal"}
@crossref_works = new API.collection {index:"crossref", type:"works", devislive:true}
@crossref_extra = new API.collection {index:"crossref", type:"extra", devislive:true}
API.add 'use/crossref/works',
get: () ->
if this.queryParams.title and ct = API.use.crossref.works.title this.queryParams.title, this.queryParams.format
return ct
else if this.queryParams.doi and dt = API.use.crossref.works.doi this.queryParams.doi, this.queryParams.format
return dt
else
return crossref_works.search this.queryParams
API.add 'use/crossref/works/:doipre/:doipost',
get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost, this.queryParams.format
# there are DOIs that can have slashes within their second part and are valid. Possibly could have more than one slash
# and there does not seem to be a way to pass wildcards to the urlparams to match multiple / routes
# so this will do for now...
API.add 'use/crossref/works/:doipre/:doipost/:doimore',
get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore, this.queryParams.format
API.add 'use/crossref/works/extra', () -> return crossref_extra.search this.queryParams
API.add 'use/crossref/works/search',
get: () -> return API.use.crossref.works.search (this.queryParams.q ? this.queryParams.query), undefined, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby', # can be published, indexed, deposited, created
get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), undefined, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate',
get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate/:enddate',
get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, this.urlParams.enddate, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/index',
get: () ->
Meteor.setTimeout (() => API.use.crossref.works.index(this.queryParams.lts, this.queryParams.by)), 1
return true
API.add 'use/crossref/works/lastindex', get: () -> return API.use.crossref.works.lastindex()
API.add 'use/crossref/works/lastindex/count', get: () -> return API.use.crossref.works.lastindex true
#API.add 'use/crossref/works/import', post: () -> return API.use.crossref.works.import this.request.body
API.add 'use/crossref/types',
get: () -> return API.use.crossref.types()
API.add 'use/crossref/journals', () -> crossref_journal.search this
API.add 'use/crossref/journals/import',
get:
roleRequired: if API.settings.dev then undefined else 'crossref.admin'
action:() ->
Meteor.setTimeout (() => API.use.crossref.journals.import()), 1
return true
API.add 'use/crossref/journals/:issn',
get: () -> return API.use.crossref.journals.issn this.urlParams.issn
API.add 'use/crossref/journals/:issn/works',
get: () -> return API.use.crossref.works.issn this.urlParams.issn, this.queryParams
API.add 'use/crossref/journals/:issn/doi',
get: () -> return API.use.crossref.journals.doi this.urlParams.issn
API.add 'use/crossref/journals/:issn/dois',
get: () -> return API.use.crossref.journals.dois this.urlParams.issn, this.queryParams.from
API.add 'use/crossref/publishers',
get: () -> return API.use.crossref.publishers.search (this.queryParams.q ? this.queryParams.query), (this.queryParams.from ? this.queryParams.offset), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter
API.add 'use/crossref/reverse',
get: () -> return API.use.crossref.reverse [this.queryParams.q ? this.queryParams.query ? this.queryParams.title], this.queryParams.score, this.queryParams.format
post: () -> return API.use.crossref.reverse this.request.body
API.add 'use/crossref/resolve', get: () -> return API.use.crossref.resolve this.queryParams.doi
API.add 'use/crossref/resolve/:doipre/:doipost', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost
API.add 'use/crossref/resolve/:doipre/:doipost/:doimore', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore
API.use.crossref.types = () ->
url = 'https://api.crossref.org/types'
API.log 'Using crossref for ' + url
try
res = HTTP.call 'GET', url, {headers: header}
if res.statusCode is 200
return res.data.message.items
else
return undefined
API.use.crossref.reverse = (citations, score=85, format=false) ->
citations = [citations] if typeof citations is 'string'
url = 'https://api.crossref.org/reverse'
API.log 'Using crossref for ' + url + ' with citation ' + JSON.stringify citations
try
res = HTTP.call 'POST', url, {data:citations, headers: header}
if res.statusCode is 200
if res?.data?.message?.DOI and res.data.message.score and res.data.message.type is 'journal-article'
sc = res.data.message.score
if sc < score
ignore = ["a","an","and","are","as","at","but","be","by","do","for","if","in","is","it","or","so","the","to"]
titleparts = res.data.message.title[0].toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').split(' ')
titles = []
for f in titleparts
titles.push(f) if ignore.indexOf(f.split("'")[0]) is -1 and f.length > 0
citeparts = citations.join(' ').toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').replace(/ /g,' ').split(' ')
cites = []
for c in citeparts
cites.push(c) if ignore.indexOf(c.split("'")[0]) is -1 and c.length > 1
bonus = (score - sc)/titles.length + 1
found = []
for w in titles
found.push(w) if w in cites
sc += bonus * found.length if titles.length is found.length and found.join() is titles.join()
if sc >= score
if format
return API.use.crossref.works.format res.data.message
else
return { data: {doi:res.data.message.DOI, title:res.data.message.title[0], received:res.data.message.score, adjusted: sc}, original:res.data}
else
return { data: {info: 'below score', received:res.data.message.score, adjusted: sc}, original:res.data}
else
return {}
else
return { status: 'error', data: res }
catch err
return { status: 'error', error: err.toString() }
API.use.crossref.resolve = (doi) ->
doi = doi.replace('http://','').replace('https://','').replace('dx.doi.org/','').replace('doi.org/','')
cached = API.http.cache doi, 'crossref_resolve'
if cached
return cached
else
url = false
try
# TODO NOTE that the URL given by crossref doi resolver may NOT be the final resolved URL. The publisher may still redirect to a different one
resp = HTTP.call 'GET', 'https://doi.org/api/handles/' + doi, {headers: header}
for r in resp.data?.values
if r.type.toLowerCase() is 'url'
url = r.data.value
# like these weird chinese ones, which end up throwing 404 anyway, but, just in case - https://doi.org/api/handles/10.7688/j.issn.1000-1646.2014.05.20
url = new Buffer(url,'base64').toString('utf-8') if r.data.format is 'base64'
API.http.cache doi, 'crossref_resolve', url
return url
API.use.crossref.journals.issn = (issn) ->
issn = issn.split(',') if typeof issn is 'string'
issn = [issn] if typeof issn is 'string'
return crossref_journal.find 'ISSN.exact:"' + issn.join('" OR ISSN.exact:"') + '"'
API.use.crossref.journals.doi = (issn) ->
issn = issn.split(',') if typeof issn is 'string'
issn = [issn] if typeof issn is 'string'
try
return crossref_works.find('ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {include: 'DOI', sort: {publishedAt:{order:'asc'}}}).DOI
catch
return undefined
API.use.crossref.journals.dois = (issn, from=0) ->
issn = issn.split(',') if typeof issn is 'string'
issn = [issn] if typeof issn is 'string'
dois = []
crossref_works.each 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {from: from, include: 'DOI', sort: {publishedAt:{order:'desc'}}}, (rec) ->
dois.push rec.DOI
if dois.length >= 10000
return 'break'
return dois
API.use.crossref.journals.search = (qrystr, from, size, filter) ->
url = 'https://api.crossref.org/journals?'
if qrystr and qrystr isnt 'all'
qry = qrystr.replace(/\w+?\:/g,'').replace(/ AND /g,'+').replace(/ OR /g,' ').replace(/ NOT /g,'-').replace(/ /g,'+')
url += 'query=' + qry
url += '&offset=' + from if from?
url += '&rows=' + size if size?
url += '&filter=' + filter if filter?
url = url.replace('?&','?') # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
API.log 'Using crossref for ' + url
try res = HTTP.call 'GET', url, {headers: header}
return if res?.statusCode is 200 then { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets} else { status: 'error', data: res}
API.use.crossref.journals.import = () ->
started = Date.now()
size = 1000
total = 0
counter = 0
journals = 0
batch = []
while total is 0 or counter < total
if batch.length >= 10000
crossref_journal.insert batch
batch = []
try
crls = API.use.crossref.journals.search undefined, counter, size
total = crls.total if total is 0
for crl in crls?.data ? []
journals += 1
batch.push crl
counter += size
catch err
console.log 'crossref journals import process error'
try
console.log err.toString()
catch
try console.log err
future = new Future()
Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
future.wait()
crossref_journal.insert(batch) if batch.length
crossref_journal.remove 'createdAt:<' + started
API.log 'Retrieved and imported ' + journals + ' crossref journals'
return journals
API.use.crossref.publishers.search = (qrystr, from, size, filter) ->
url = 'https://api.crossref.org/members?'
if qrystr and qrystr isnt 'all'
url += 'query=' + encodeURIComponent qrystr
url += '&offset=' + from if from?
url += '&rows=' + size if size?
url += '&filter=' + filter if filter?
url = url.replace('?&','?')
API.log 'Using crossref for ' + url
res = HTTP.call 'GET', url, {headers: header}
return if res.statusCode is 200 then { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets} else { status: 'error', data: res}
API.use.crossref.funders.search = (qrystr, from, size, filter) ->
url = 'https://api.crossref.org/funders?'
if qrystr and qrystr isnt 'all'
qry = qrystr.replace(/ /g,'+')
url += 'query=' + qry
url += '&offset=' + from if from?
url += '&rows=' + size if size?
url += '&filter=' + filter if filter?
url = url.replace('?&','?')
API.log 'Using crossref for ' + url
res = HTTP.call 'GET', url, {headers: header}
return if res.statusCode is 200 then { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets} else { status: 'error', data: res}
API.use.crossref.works.issn = (issn, q={}) ->
q = {format: true} if q is true
format = if q.format then true else false
delete q.format
issn = [issn] if typeof issn is 'string'
return crossref_works.search q, restrict: [{query_string: {query: 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"'}}]
API.use.crossref.works.doi = (doi, format) ->
ret = crossref_works.get doi.toLowerCase().replace /\//g, '_'
if not ret?
url = 'https://api.crossref.org/works/' + doi
API.log 'Using crossref for ' + url
try res = HTTP.call 'GET', url, {headers: header}
if res?.statusCode is 200 and res.data?.message?.DOI?
rec = res.data.message
if rec.relation? or rec.reference? or rec.abstract?
rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
if ext = crossref_extra.get rt._id
upd = {}
upd.relation = rec.relation if rec.relation? and not ext.relation?
upd.reference = rec.reference if rec.reference? and not ext.reference?
upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
if typeof upd.abstract is 'string'
upd.abstract = API.convert.html2txt upd.abstract
if JSON.stringify(upd) isnt '{}'
crossref_extra.update rt._id, upd
else
crossref_extra.insert rt
ret = API.use.crossref.works.clean rec
API.log 'Saved crossref work ' + ret.DOI
crossref_works.insert ret
return if not ret? then undefined else if format then API.use.crossref.works.format(ret) else ret
API.use.crossref.works.title = (title, format) ->
metadata = if typeof title is 'object' then title else {}
title = metadata.title if typeof title is 'object'
return undefined if typeof title isnt 'string'
qr = 'title.exact:"' + title + '"'
if title.indexOf(' ') isnt -1
qr += ' OR ('
f = true
for t in title.split ' '
if t.length > 2
if f is true
f = false
else
qr += ' AND '
qr += '(title:"' + t + '" OR subtitle:"' + t + '")'
qr += ')'
res = crossref_works.search qr, 20
possible = false
if res?.hits?.total
ltitle = title.toLowerCase().replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
for r in res.hits.hits
rec = r._source
rt = (if typeof rec.title is 'string' then rec.title else rec.title[0]).toLowerCase()
if rec.subtitle?
st = (if typeof rec.subtitle is 'string' then rec.subtitle else rec.subtitle[0]).toLowerCase()
rt += ' ' + st if typeof st is 'string' and st.length and st not in rt
rt = rt.replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
if (ltitle.indexOf(rt) isnt -1 or rt.indexOf(ltitle) isnt -1) and ltitle.length/rt.length > 0.7 and ltitle.length/rt.length < 1.3
matches = true
fr = API.use.crossref.works.format rec
for k of metadata
if k not in ['citation','title'] and typeof metadata[k] in ['string','number']
matches = not fr[k]? or typeof fr[k] not in ['string','number'] or fr[k].toLowerCase() is metadata[k].toLowerCase()
if matches
if rec.type is 'journal-article'
return if format then API.use.crossref.works.format(rec) else rec
else if possible is false or possible.type isnt 'journal-article' and rec.type is 'journal-article'
possible = rec
return if possible is false then undefined else if format then API.use.crossref.works.format(possible) else match
# from could also be a cursor value, use * to start a cursor then return the next-cursor given in the response object
# largest size is 1000 and deepest from is 10000, so anything more than that needs cursor
API.use.crossref.works.search = (qrystr, from, size, filter, sort, order='desc', format, funder, publisher, journal) ->
# max size is 1000
url = 'https://api.crossref.org'
url += '/funders/' + funder if funder # crossref funder ID
url += '/members/' + publisher if publisher # crossref publisher ID
url += '/journals/' + journal if journal # journal issn
url += '/works?'
url += 'sort=' + sort + '&order=' + order + '&' if sort?
# more specific queries can be made using:
#query.container-title Query container-title aka. publication name
#query.author Query author given and family names
#query.editor Query editor given and family names
#query.chair Query chair given and family names
#query.translator Query translator given and family names
#query.contributor Query author, editor, chair and translator given and family names
#query.bibliographic Query bibliographic information, useful for citation look up. Includes titles, authors, ISSNs and publication years
#query.affiliation Query contributor affiliations
# note there is not a "title" one - just use bibliographic. bibliographic is titles, authors, ISSNs, and publication years
# ALSO NOTE: crossref LOOKS like it uses logica + and - operators, but it doesn't. their examples use + instaed of space, but either seems to make no difference
# + or - or space, all just result in OR queries, with increasing large result sets
if typeof qrystr is 'object'
for k of qrystr
if k not in ['from','size','filter','sort','order','format','funder','publisher','journal','issn'] or (k is 'funder' and not funder?) or (k is 'publisher' and not publisher?) or (k in ['issn','journal'] and not journal?)
ky = if k in ['title','citation','issn'] then 'query.bibliographic' else if k is 'journal' then 'query.container-title' else if k in ['author','editor','chair','translator','contributor','affiliation','bibliographic'] then 'query.' + k else k
url += ky + '=' + encodeURIComponent(qrystr[k]) + '&'
else if qrystr and qrystr isnt 'all'
qry = qrystr.replace(/\w+?\:/g,'') #.replace(/ AND /g,'+').replace(/ NOT /g,'-')
#qry = if qry.indexOf(' OR ') isnt -1 then qry.replace(/ OR /g,' ') else qry.replace(/ /g,'+')
qry = qry.replace(/ /g,'+')
url += 'query=' + encodeURIComponent(qry) + '&'
if from?
if from isnt '*' and typeof from is 'string' and not from.replace(/[0-9]/g,'').length
try
fp = parseInt from
from = fp if not isNaN fp
if typeof from isnt 'number'
url += 'cursor=' + encodeURIComponent(from) + '&'
else
url += 'offset=' + from + '&'
url += 'rows=' + size + '&' if size?
url += 'filter=' + encodeURIComponent(filter) + '&'if filter? and filter isnt ''
url = url.replace('?&','?').replace(/&$/,'') # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
API.log 'Using crossref for ' + url
try res = HTTP.call 'GET', url, {headers: header}
if res?.statusCode is 200
ri = res.data.message.items
if format
for r of ri
ri[r] = API.use.crossref.works.format ri[r]
return { total: res.data.message['total-results'], cursor: res.data.message['next-cursor'], data: ri, facets: res.data.message.facets}
else
return { status: 'error', data: res}
API.use.crossref.works.searchby = (searchby='published', qrystr, startdate, enddate, from, size, filter, order, format) ->
  # Search crossref works restricted to a date window on one crossref date field.
  # searchby may be published, indexed, deposited or created. Dates are inclusive and
  # are expressed as crossref filters, e.g. ?filter=from-pub-date:2004-04-04,until-pub-date:2004-04-04
  part = switch searchby
    when 'published' then 'pub'
    when 'created' then 'created'
    else searchby.replace 'ed', ''
  filter = if filter? then filter + ',' else ''
  # coerce anything that is not already a YYYY-MM-DD style string into one
  fmt = (d) -> if typeof d isnt 'string' or d.indexOf('-') is -1 or d.length > 4 then moment(d).format('YYYY-MM-DD') else d
  filter += 'from-' + part + '-date:' + fmt(startdate) if startdate
  filter += ',until-' + part + '-date:' + fmt(enddate) if enddate
  return API.use.crossref.works.search qrystr, from, size, filter, searchby, order, format
# Incrementally import crossref works, one day at a time, from the last imported
# day (cached) up to now. Uses crossref deep-paging cursors within each day.
# Records go to crossref_works; relation/reference/abstract go to crossref_extra.
# Sends a summary mail at the end and returns the number of records loaded.
# NOTE(review): when lts IS passed, the else branch below overwrites it with the
# hard-coded dump timestamp, so the lts argument is effectively ignored - confirm intent.
API.use.crossref.works.index = (lts, searchby='indexed') ->
  if not lts and last = API.http.cache 'last', 'crossref_works_imported'
    # just in case it is an old reading from before I had to switch to using cursor, I was storing the last from number too
    lts = if typeof last is 'string' then parseInt(last.split('_')[0]) else last
    console.log 'Set crossref works index import from cached last date'
    console.log lts, moment(lts).startOf('day').format('YYYY-MM-DD')
  else
    lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
  startday = moment(lts).startOf('day').valueOf()
  dn = Date.now()
  loaded = 0
  updated = 0
  days = 0
  broken = false
  # a rough total of records expected, for progress logging only
  try
    target = API.use.crossref.works.searchby(searchby, undefined, startday, undefined, undefined, 10).total
    console.log target
  catch
    target = 0
  while not broken and startday < dn
    cursor = '*' # set a new cursor on each index day query
    console.log startday
    days += 1
    totalthisday = false
    fromthisday = 0
    while not broken and (totalthisday is false or fromthisday < totalthisday)
      console.log loaded, fromthisday, target, searchby
      console.log cursor
      try
        thisdays = API.use.crossref.works.searchby searchby, undefined, startday, startday, cursor, 1000, undefined, 'asc' # using same day for crossref API gets that whole day
        console.log thisdays.data.length
        batch = []
        xtb = []
        for rec in thisdays.data
          if not rec.DOI
            console.log rec
          # the bulky fields are stored in crossref_extra, only filling gaps on existing records
          if rec.relation? or rec.reference? or rec.abstract?
            rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
            if ext = crossref_extra.get rt._id
              upd = {}
              upd.relation = rec.relation if rec.relation? and not ext.relation?
              upd.reference = rec.reference if rec.reference? and not ext.reference?
              upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
              if typeof upd.abstract is 'string'
                upd.abstract = API.convert.html2txt upd.abstract
              if JSON.stringify(upd) isnt '{}'
                crossref_extra.update rt._id, upd
            else
              xtb.push rt
          cr = API.use.crossref.works.clean rec
          updated += 1 if crossref_works.get cr._id
          batch.push cr
        if batch.length
          l = crossref_works.insert batch
          # only advance the cached checkpoint if the whole batch was inserted
          if l?.records is batch.length
            loaded += l.records
            API.http.cache 'last', 'crossref_works_imported', startday #+ '_' + fromthisday
          else
            broken = true
        if xtb.length
          try crossref_extra.insert xtb
        if totalthisday is false
          totalthisday = thisdays?.total ? 0
        fromthisday += 1000
        cursor = thisdays.cursor if thisdays?.cursor?
      catch err
        console.log 'crossref index process error'
        try
          console.log err.toString()
        catch
          try console.log err
        future = new Future()
        Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
        future.wait()
    startday += 86400000
  API.mail.send
    service: 'openaccessbutton'
    from: 'natalia.norori@openaccessbutton.org'
    to: if broken then 'alert@cottagelabs.com' else 'mark@cottagelabs.com'
    subject: 'Crossref index check ' + (if broken then 'broken' else 'complete')
    text: 'Processed ' + days + ' days up to ' + startday + ' and loaded ' + loaded + ' records of which ' + updated + ' were updates. Target was ' + target
  return loaded
API.use.crossref.works.lastindex = (count) ->
  # Report the day of the most recent crossref works import.
  # Without count, returns just the YYYY-MM-DD string; with count, also returns
  # the timestamp and a works total per crossref date field from that day onward.
  try
    cached = API.http.cache 'last', 'crossref_works_imported'
    lts = if typeof cached is 'string' then parseInt(cached.split('_')[0]) else cached
  catch
    lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
  day = moment(lts).startOf 'day'
  return day.format('YYYY-MM-DD') if not count
  res = date: day.format('YYYY-MM-DD'), timestamp: day.valueOf()
  for p in ['published', 'indexed', 'deposited', 'created']
    res[p] = API.use.crossref.works.searchby(p, undefined, res.timestamp).total
  return res
# Prepare a raw crossref work record for local storage: set an _id derived from
# the DOI, drop the bulky fields that are held separately in crossref_extra,
# derive published / year / publishedAt from the crossref date fields, and
# normalise licence information. Returns the mutated record.
API.use.crossref.works.clean = (rec) ->
  rec._id = rec.DOI.replace /\//g, '_'
  # these are stored in the crossref_extra collection instead
  delete rec.reference
  delete rec.relation
  delete rec.abstract
  for p in ['published-print','published-online','issued','deposited','indexed']
    if rec[p]
      if rec[p]['date-time'] and rec[p]['date-time'].split('T')[0].split('-').length is 3
        rec.published ?= rec[p]['date-time'].split('T')[0]
        rec.year ?= rec.published.split('-')[0] if rec.published?
      pbl = ''
      if rec[p]['date-parts'] and rec[p]['date-parts'].length and rec[p]['date-parts'][0] and (not rec.published or not rec[p].timestamp)
        rp = rec[p]['date-parts'][0] #crossref uses year month day in a list
        pbl = rp[0]
        if rp.length is 1
          pbl += '-01-01'
        else
          # zero-pad single-digit month/day parts, defaulting missing parts to 01
          pbl += if rp.length > 1 then '-' + (if rp[1].toString().length is 1 then '0' else '') + rp[1] else '-01'
          pbl += if rp.length > 2 then '-' + (if rp[2].toString().length is 1 then '0' else '') + rp[2] else '-01'
        if not rec.published
          rec.published = pbl
          rec.year = pbl.split('-')[0]
        if not rec[p].timestamp and pbl
          rec[p].timestamp = moment(pbl,'YYYY-MM-DD').valueOf()
      rec.publishedAt ?= rec[p].timestamp
  # an OPEN ACCESS assertion with a creativecommons URL counts as a licence
  for a in rec.assertion ? []
    if a.label is 'OPEN ACCESS'
      if a.URL and a.URL.indexOf('creativecommons') isnt -1
        rec.license ?= []
        rec.license.push {'URL': a.URL}
      rec.is_oa = true
  for l in rec.license ? []
    if l.URL and l.URL.indexOf('creativecommons') isnt -1 and (not rec.licence or rec.licence.indexOf('creativecommons') is -1)
      rec.licence = l.URL
      # fixed: the trailing-slash strip previously used /$\//, which can never match
      # ($ asserts end of string, so nothing can follow it), leaving values like
      # "cc-by-4.0-"; /\/$/ correctly removes a trailing slash first
      rec.licence = 'cc-' + rec.licence.split('/licenses/')[1].replace(/\/$/,'').replace(/\//g, '-') if rec.licence.indexOf('/licenses/') isnt -1
      rec.is_oa = true
  return rec
# Map a raw crossref work record into the local metadata shape. Each field is
# wrapped in try so a missing or oddly-shaped value just leaves that field
# unset instead of aborting the whole mapping. Returns the metadata object
# (which may be passed in and extended).
API.use.crossref.works.format = (rec, metadata={}) ->
  try metadata.title = rec.title[0]
  try
    if rec.subtitle? and rec.subtitle.length and rec.subtitle[0].length
      metadata.title += ': ' + rec.subtitle[0]
  try metadata.doi = rec.DOI if rec.DOI?
  try metadata.doi = rec.doi if rec.doi? # just in case
  try metadata.crossref_type = rec.type
  try metadata.author = rec.author if rec.author?
  if metadata.author
    for a in metadata.author
      a.name = a.family + ' ' + a.given if not a.name? and a.family and a.given
      if a.affiliation?
        a.affiliation = a.affiliation[0] if _.isArray a.affiliation
        a.affiliation = {name: a.affiliation} if typeof a.affiliation is 'string'
        try a.affiliation.name = a.affiliation.name.replace(/\s\s+/g,' ').trim()
  try metadata.journal = rec['container-title'][0]
  try metadata.journal_short = rec['short-container-title'][0]
  try metadata.issue = rec.issue if rec.issue?
  try metadata.volume = rec.volume if rec.volume?
  try metadata.page = rec.page.toString() if rec.page?
  try metadata.issn = _.uniq rec.ISSN
  try metadata.keyword = rec.subject if rec.subject? # is a list of strings - goes in keywords because subject was already previously used as an object
  try metadata.publisher = rec.publisher if rec.publisher?
  # take the first usable date from these fields, in order of preference
  for p in ['published-print','journal-issue.published-print','issued','published-online','created','deposited']
    try
      if rt = rec[p] ? rec['journal-issue']?[p.replace('journal-issue.','')]
        if typeof rt['date-time'] is 'string' and rt['date-time'].indexOf('T') isnt -1 and rt['date-time'].split('T')[0].split('-').length is 3
          metadata.published = rt['date-time'].split('T')[0]
          metadata.year = metadata.published.split('-')[0]
          break
        else if rt['date-parts']? and rt['date-parts'].length and _.isArray(rt['date-parts'][0]) and rt['date-parts'][0].length
          rp = rt['date-parts'][0]
          pbl = rp[0].toString()
          if pbl.length > 2 # needs to be a year
            metadata.year ?= pbl
            if rp.length is 1
              pbl += '-01-01'
            else
              m = false
              d = false
              # a second part greater than 12 cannot be a month, so treat it as a day
              if not isNaN(parseInt(rp[1])) and parseInt(rp[1]) > 12
                d = rp[1].toString()
              else
                m = rp[1].toString()
              # fixed: this previously tested rp.length is 2 and then read rp[2],
              # which threw inside the try and silently dropped every two-part
              # [year, month] date; the third part only exists when length is 3
              if rp.length is 3
                if d isnt false
                  m = rp[2].toString()
                else
                  d = rp[2].toString()
              m = if m is false then '01' else if m.length is 1 then '0' + m else m
              d = if d is false then '01' else if d.length is 1 then '0' + d else d
              pbl += '-' + m + '-' + d
            metadata.published = pbl
            break
  try metadata.abstract = API.convert.html2txt(rec.abstract).replace(/\n/g,' ') if rec.abstract?
  try
    if rec.reference? and rec.reference.length
      metadata.reference ?= []
      for r in rec.reference
        rf = {}
        rf.doi = r.DOI if r.DOI?
        # fixed: these hyphenated keys must use bracket access - r.article-title
        # parses as subtraction, so titles/journals were never captured before
        rf.title = r['article-title'] if r['article-title']?
        rf.journal = r['journal-title'] if r['journal-title']?
        metadata.reference.push(rf) if not _.isEmpty rf
  try
    if rec.license?
      for l in rec.license
        # prefer a creativecommons licence URL over any other
        if typeof l.URL is 'string' and (typeof metadata.licence isnt 'string' or (metadata.licence.indexOf('creativecommons') is -1 and l.URL.indexOf('creativecommons') isnt -1))
          metadata.licence = l.URL
          if l.URL.indexOf('creativecommons') isnt -1
            md = 'https://doi.org/' + metadata.doi
            metadata.url ?= md
            metadata.url.push(md) if _.isArray(metadata.url) and md not in metadata.url
            try metadata.redirect = API.service.oab.redirect md
            break
  return metadata
API.use.crossref.works.import = (recs) ->
  # Bulk-insert a non-empty list of crossref work records; anything else is a no-op.
  return undefined if not _.isArray(recs) or not recs.length
  return crossref_works.insert recs
# Schedule the recurring crossref imports: journals weekly, works daily.
# Deferred 19s after startup so the rest of the app can finish initialising.
# NOTE(review): the gate requires API.settings.dev to be truthy as well as this
# machine's IP being outside the cluster list - confirm whether dev-only
# scheduling is intended here.
_xref_import = () ->
  if API.settings.cluster?.ip? and API.status.ip() not in API.settings.cluster.ip and API.settings.dev
    API.log 'Setting up a crossref journal import to run every week on ' + API.status.ip()
    Meteor.setInterval API.use.crossref.journals.import, 604800000
    API.log 'Setting up a crossref works import to run every day on ' + API.status.ip()
    Meteor.setInterval (() -> API.use.crossref.works.index(undefined, 'indexed')), 86400000
Meteor.setTimeout _xref_import, 19000
API.use.crossref.status = () ->
  # Health check: fetch a known DOI record from the crossref API.
  # Returns true when healthy, otherwise the response data or the error string.
  timeout = API.settings.use?.crossref?.timeout ? API.settings.use?._timeout ? 4000
  try
    res = HTTP.call 'GET', 'https://api.crossref.org/works/10.1186/1758-2946-3-47', {headers: header, timeout: timeout}
    if res.statusCode is 200 and res.data.status is 'ok'
      return true
    else
      return res.data
  catch err
    return err.toString()
| 45871 |
import moment from 'moment'
import Future from 'fibers/future'
# a crossref API client
# https://github.com/CrossRef/rest-api-doc/blob/master/rest_api.md
# http://api.crossref.org/works/10.1016/j.paid.2009.02.013
# crossref now prefers some identifying headers
header = {
  #'User-Agent': (API.settings.name ? 'noddy') + ' v' + (API.settings.version ? '0.0.1') + (if API.settings.dev then 'd' else '') + ' (https://cottagelabs.com; mailto:' + (API.settings.log?.to ? '<EMAIL>') + ')'
  'User-Agent': 'OAB; mailto: <EMAIL>'
}
# namespaces for the crossref client functions defined below
API.use ?= {}
API.use.crossref = {works:{},journals:{}, publishers: {}, funders: {}}
# local collections caching crossref journals and works; the bulky
# relation/reference/abstract fields of works are held in the extra collection
@crossref_journal = new API.collection {index:"crossref", type:"journal"}
@crossref_works = new API.collection {index:"crossref", type:"works", devislive:true}
@crossref_extra = new API.collection {index:"crossref", type:"extra", devislive:true}
# --- HTTP routes exposing the crossref client ---
# works: lookup by title, DOI or arbitrary stored-index query
API.add 'use/crossref/works',
  get: () ->
    if this.queryParams.title and ct = API.use.crossref.works.title this.queryParams.title, this.queryParams.format
      return ct
    else if this.queryParams.doi and dt = API.use.crossref.works.doi this.queryParams.doi, this.queryParams.format
      return dt
    else
      return crossref_works.search this.queryParams
API.add 'use/crossref/works/:doipre/:doipost',
  get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost, this.queryParams.format
# there are DOIs that can have slashes within their second part and are valid. Possibly could have more than one slash
# and there does not seem to be a way to pass wildcards to the urlparams to match multiple / routes
# so this will do for now...
API.add 'use/crossref/works/:doipre/:doipost/:doimore',
  get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore, this.queryParams.format
API.add 'use/crossref/works/extra', () -> return crossref_extra.search this.queryParams
# NOTE(review): works.search is called here with two leading undefineds before
# from/size - confirm this matches the works.search parameter order in use
API.add 'use/crossref/works/search',
  get: () -> return API.use.crossref.works.search (this.queryParams.q ? this.queryParams.query), undefined, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby', # can be published, indexed, deposited, created
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), undefined, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate',
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate/:enddate',
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, this.urlParams.enddate, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
# kick off the long-running works import in the background and return immediately
API.add 'use/crossref/works/index',
  get: () ->
    Meteor.setTimeout (() => API.use.crossref.works.index(this.queryParams.lts, this.queryParams.by)), 1
    return true
API.add 'use/crossref/works/lastindex', get: () -> return API.use.crossref.works.lastindex()
API.add 'use/crossref/works/lastindex/count', get: () -> return API.use.crossref.works.lastindex true
#API.add 'use/crossref/works/import', post: () -> return API.use.crossref.works.import this.request.body
API.add 'use/crossref/types',
  get: () -> return API.use.crossref.types()
# journals: stored-index search plus ISSN-based lookups
API.add 'use/crossref/journals', () -> crossref_journal.search this
API.add 'use/crossref/journals/import',
  get:
    roleRequired: if API.settings.dev then undefined else 'crossref.admin'
    action:() ->
      Meteor.setTimeout (() => API.use.crossref.journals.import()), 1
      return true
API.add 'use/crossref/journals/:issn',
  get: () -> return API.use.crossref.journals.issn this.urlParams.issn
API.add 'use/crossref/journals/:issn/works',
  get: () -> return API.use.crossref.works.issn this.urlParams.issn, this.queryParams
API.add 'use/crossref/journals/:issn/doi',
  get: () -> return API.use.crossref.journals.doi this.urlParams.issn
API.add 'use/crossref/journals/:issn/dois',
  get: () -> return API.use.crossref.journals.dois this.urlParams.issn, this.queryParams.from
API.add 'use/crossref/publishers',
  get: () -> return API.use.crossref.publishers.search (this.queryParams.q ? this.queryParams.query), (this.queryParams.from ? this.queryParams.offset), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter
# reverse lookup: find a DOI from a citation string (GET single, POST list)
API.add 'use/crossref/reverse',
  get: () -> return API.use.crossref.reverse [this.queryParams.q ? this.queryParams.query ? this.queryParams.title], this.queryParams.score, this.queryParams.format
  post: () -> return API.use.crossref.reverse this.request.body
API.add 'use/crossref/resolve', get: () -> return API.use.crossref.resolve this.queryParams.doi
API.add 'use/crossref/resolve/:doipre/:doipost', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost
API.add 'use/crossref/resolve/:doipre/:doipost/:doimore', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore
API.use.crossref.types = () ->
  # List the work types known to crossref, or undefined on any failure.
  url = 'https://api.crossref.org/types'
  API.log 'Using crossref for ' + url
  try
    res = HTTP.call 'GET', url, {headers: header}
    return if res.statusCode is 200 then res.data.message.items else undefined
# Reverse-lookup a DOI from one or more citation strings via the crossref
# /reverse endpoint. Only journal-article results are considered. When the
# crossref relevance score falls below the threshold, a word-overlap bonus is
# applied: if every significant title word appears in the citation, the score
# is boosted, possibly over the threshold. Returns formatted metadata (when
# format), a {data, original} summary, {} when nothing usable came back, or an
# error object.
API.use.crossref.reverse = (citations, score=85, format=false) ->
  citations = [citations] if typeof citations is 'string'
  url = 'https://api.crossref.org/reverse'
  API.log 'Using crossref for ' + url + ' with citation ' + JSON.stringify citations
  try
    res = HTTP.call 'POST', url, {data:citations, headers: header}
    if res.statusCode is 200
      if res?.data?.message?.DOI and res.data.message.score and res.data.message.type is 'journal-article'
        sc = res.data.message.score
        if sc < score
          # stopwords excluded from the word-overlap comparison
          ignore = ["a","an","and","are","as","at","but","be","by","do","for","if","in","is","it","or","so","the","to"]
          titleparts = res.data.message.title[0].toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').split(' ')
          titles = []
          for f in titleparts
            titles.push(f) if ignore.indexOf(f.split("'")[0]) is -1 and f.length > 0
          citeparts = citations.join(' ').toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').replace(/ /g,' ').split(' ')
          cites = []
          for c in citeparts
            cites.push(c) if ignore.indexOf(c.split("'")[0]) is -1 and c.length > 1
          # bonus per matched word, sized so a full match can reach the threshold
          bonus = (score - sc)/titles.length + 1
          found = []
          for w in titles
            found.push(w) if w in cites
          sc += bonus * found.length if titles.length is found.length and found.join() is titles.join()
        if sc >= score
          if format
            return API.use.crossref.works.format res.data.message
          else
            return { data: {doi:res.data.message.DOI, title:res.data.message.title[0], received:res.data.message.score, adjusted: sc}, original:res.data}
        else
          return { data: {info: 'below score', received:res.data.message.score, adjusted: sc}, original:res.data}
      else
        return {}
    else
      return { status: 'error', data: res }
  catch err
    return { status: 'error', error: err.toString() }
# Resolve a DOI to its registered URL via the doi.org handle API, caching the
# result. Accepts bare DOIs or full doi.org / dx.doi.org URLs. Returns the URL,
# or false when resolution failed (false is also cached).
API.use.crossref.resolve = (doi) ->
  doi = doi.replace('http://','').replace('https://','').replace('dx.doi.org/','').replace('doi.org/','')
  cached = API.http.cache doi, 'crossref_resolve'
  if cached
    return cached
  else
    url = false
    try
      # TODO NOTE that the URL given by crossref doi resolver may NOT be the final resolved URL. The publisher may still redirect to a different one
      resp = HTTP.call 'GET', 'https://doi.org/api/handles/' + doi, {headers: header}
      for r in resp.data?.values
        if r.type.toLowerCase() is 'url'
          url = r.data.value
          # like these weird chinese ones, which end up throwing 404 anyway, but, just in case - https://doi.org/api/handles/10.7688/j.issn.1000-1646.2014.05.20
          # fixed: use Buffer.from instead of the deprecated new Buffer() constructor
          url = Buffer.from(url,'base64').toString('utf-8') if r.data.format is 'base64'
    API.http.cache doi, 'crossref_resolve', url
    return url
# Look up stored crossref journal records by ISSN. Accepts a single ISSN
# string, a comma-separated string, or a list of ISSNs.
API.use.crossref.journals.issn = (issn) ->
  issn = issn.split(',') if typeof issn is 'string'
  # (a string has already been split to an array above, so the old
  # "wrap a string in a list" line was unreachable and has been removed)
  return crossref_journal.find 'ISSN.exact:"' + issn.join('" OR ISSN.exact:"') + '"'
# Return the DOI of the earliest-published stored work in the given journal(s),
# or undefined when nothing matches. issn may be a string (optionally
# comma-separated) or a list.
API.use.crossref.journals.doi = (issn) ->
  issn = issn.split(',') if typeof issn is 'string'
  # (the old redundant string-to-list wrap after the split was dead code and has been removed)
  try
    # NOTE(review): the query mixes ISSN.exact and issn.exact field names - confirm which the index uses
    return crossref_works.find('ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {include: 'DOI', sort: {publishedAt:{order:'asc'}}}).DOI
  catch
    return undefined
# Collect up to 10000 DOIs of stored works for the given journal(s), newest
# first, starting at offset from. issn may be a string (optionally
# comma-separated) or a list.
API.use.crossref.journals.dois = (issn, from=0) ->
  issn = issn.split(',') if typeof issn is 'string'
  # (the old redundant string-to-list wrap after the split was dead code and has been removed)
  dois = []
  crossref_works.each 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {from: from, include: 'DOI', sort: {publishedAt:{order:'desc'}}}, (rec) ->
    dois.push rec.DOI
    if dois.length >= 10000
      return 'break'
  return dois
API.use.crossref.journals.search = (qrystr, from, size, filter) ->
  # Query the crossref journals endpoint. Returns {total, data, facets} on
  # success or {status: 'error', data} otherwise.
  url = 'https://api.crossref.org/journals?'
  if qrystr and qrystr isnt 'all'
    # strip fielded terms and translate boolean-ish syntax into crossref's query form
    cleaned = qrystr.replace(/\w+?\:/g,'').replace(/ AND /g,'+').replace(/ OR /g,' ').replace(/ NOT /g,'-').replace(/ /g,'+')
    url += 'query=' + cleaned
  url += '&offset=' + from if from?
  url += '&rows=' + size if size?
  url += '&filter=' + filter if filter?
  # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
  url = url.replace('?&','?')
  API.log 'Using crossref for ' + url
  try res = HTTP.call 'GET', url, {headers: header}
  if res?.statusCode is 200
    msg = res.data.message
    return { total: msg['total-results'], data: msg.items, facets: msg.facets}
  else
    return { status: 'error', data: res}
# Re-import the full crossref journals list in pages of 1000, batching inserts
# in chunks of 10000, then remove any records that predate this run. On a page
# failure, waits 2s and retries the same offset. Returns the number imported.
API.use.crossref.journals.import = () ->
  started = Date.now()
  size = 1000
  total = 0
  counter = 0
  journals = 0
  batch = []
  while total is 0 or counter < total
    if batch.length >= 10000
      crossref_journal.insert batch
      batch = []
    try
      crls = API.use.crossref.journals.search undefined, counter, size
      total = crls.total if total is 0
      for crl in crls?.data ? []
        journals += 1
        batch.push crl
      counter += size
    catch err
      console.log 'crossref journals import process error'
      try
        console.log err.toString()
      catch
        try console.log err
      future = new Future()
      Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
      future.wait()
  crossref_journal.insert(batch) if batch.length
  # anything not re-created by this run is stale, so drop it
  crossref_journal.remove 'createdAt:<' + started
  API.log 'Retrieved and imported ' + journals + ' crossref journals'
  return journals
# Query the crossref members (publishers) endpoint. Returns {total, data,
# facets} on success or {status: 'error', data} otherwise.
API.use.crossref.publishers.search = (qrystr, from, size, filter) ->
  url = 'https://api.crossref.org/members?'
  if qrystr and qrystr isnt 'all'
    url += 'query=' + encodeURIComponent qrystr
  url += '&offset=' + from if from?
  url += '&rows=' + size if size?
  url += '&filter=' + filter if filter?
  url = url.replace('?&','?')
  API.log 'Using crossref for ' + url
  # fixed: wrap the call so a crossref outage or non-2xx response yields the
  # error object instead of an uncaught exception, matching journals.search
  # and works.search
  try res = HTTP.call 'GET', url, {headers: header}
  return if res?.statusCode is 200 then { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets} else { status: 'error', data: res}
# Query the crossref funders endpoint. Returns {total, data, facets} on
# success or {status: 'error', data} otherwise.
API.use.crossref.funders.search = (qrystr, from, size, filter) ->
  url = 'https://api.crossref.org/funders?'
  if qrystr and qrystr isnt 'all'
    qry = qrystr.replace(/ /g,'+')
    url += 'query=' + qry
  url += '&offset=' + from if from?
  url += '&rows=' + size if size?
  url += '&filter=' + filter if filter?
  url = url.replace('?&','?')
  API.log 'Using crossref for ' + url
  # fixed: wrap the call so a crossref outage or non-2xx response yields the
  # error object instead of an uncaught exception, matching journals.search
  # and works.search
  try res = HTTP.call 'GET', url, {headers: header}
  return if res?.statusCode is 200 then { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets} else { status: 'error', data: res}
# Search stored crossref works restricted to one or more journal ISSNs.
# q may be a query object or true (treated as {format: true}); the format flag
# is stripped before searching since it is not a search parameter.
API.use.crossref.works.issn = (issn, q={}) ->
  q = {format: true} if q is true
  # (the format flag was previously read into an unused local; it is still
  # removed from q but otherwise ignored here)
  delete q.format
  issn = [issn] if typeof issn is 'string'
  return crossref_works.search q, restrict: [{query_string: {query: 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"'}}]
# Fetch a crossref work by DOI, preferring the local cache. On a cache miss the
# record is fetched from the crossref API, its bulky relation/reference/abstract
# fields are stored (or gap-filled) in crossref_extra, and the cleaned record is
# saved to crossref_works. Returns the record (formatted when format is truthy)
# or undefined.
API.use.crossref.works.doi = (doi, format) ->
  ret = crossref_works.get doi.toLowerCase().replace /\//g, '_'
  if not ret?
    url = 'https://api.crossref.org/works/' + doi
    API.log 'Using crossref for ' + url
    try res = HTTP.call 'GET', url, {headers: header}
    if res?.statusCode is 200 and res.data?.message?.DOI?
      rec = res.data.message
      if rec.relation? or rec.reference? or rec.abstract?
        rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
        if ext = crossref_extra.get rt._id
          # only fill fields the stored extra record does not already have
          upd = {}
          upd.relation = rec.relation if rec.relation? and not ext.relation?
          upd.reference = rec.reference if rec.reference? and not ext.reference?
          upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
          if typeof upd.abstract is 'string'
            upd.abstract = API.convert.html2txt upd.abstract
          if JSON.stringify(upd) isnt '{}'
            crossref_extra.update rt._id, upd
        else
          crossref_extra.insert rt
      ret = API.use.crossref.works.clean rec
      API.log 'Saved crossref work ' + ret.DOI
      crossref_works.insert ret
  return if not ret? then undefined else if format then API.use.crossref.works.format(ret) else ret
# Find a stored crossref work by fuzzy title match. title may be a string or a
# metadata object with a title key, whose other string/number fields must also
# agree with the candidate record. Prefers journal-article records. Returns the
# matching record (formatted when format is truthy) or undefined.
API.use.crossref.works.title = (title, format) ->
  metadata = if typeof title is 'object' then title else {}
  title = metadata.title if typeof title is 'object'
  return undefined if typeof title isnt 'string'
  # exact title match OR all significant words present in title/subtitle
  qr = 'title.exact:"' + title + '"'
  if title.indexOf(' ') isnt -1
    qr += ' OR ('
    f = true
    for t in title.split ' '
      if t.length > 2
        if f is true
          f = false
        else
          qr += ' AND '
        qr += '(title:"' + t + '" OR subtitle:"' + t + '")'
    qr += ')'
  res = crossref_works.search qr, 20
  possible = false
  if res?.hits?.total
    ltitle = title.toLowerCase().replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
    for r in res.hits.hits
      rec = r._source
      rt = (if typeof rec.title is 'string' then rec.title else rec.title[0]).toLowerCase()
      if rec.subtitle?
        st = (if typeof rec.subtitle is 'string' then rec.subtitle else rec.subtitle[0]).toLowerCase()
        rt += ' ' + st if typeof st is 'string' and st.length and st not in rt
      rt = rt.replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
      # candidate and query titles must contain each other and be within 30% in length
      if (ltitle.indexOf(rt) isnt -1 or rt.indexOf(ltitle) isnt -1) and ltitle.length/rt.length > 0.7 and ltitle.length/rt.length < 1.3
        matches = true
        fr = API.use.crossref.works.format rec
        for k of metadata
          if k not in ['citation','title'] and typeof metadata[k] in ['string','number']
            # NOTE(review): matches is overwritten on each key, so only the last
            # compared metadata field decides - confirm whether all fields were
            # meant to be required
            matches = not fr[k]? or typeof fr[k] not in ['string','number'] or fr[k].toLowerCase() is metadata[k].toLowerCase()
        if matches
          if rec.type is 'journal-article'
            return if format then API.use.crossref.works.format(rec) else rec
          else if possible is false or possible.type isnt 'journal-article' and rec.type is 'journal-article'
            possible = rec
  # fixed: the final branch previously returned an undefined variable (match),
  # throwing a ReferenceError whenever a non-journal-article match was found
  # and format was falsy; it now returns the retained candidate
  return if possible is false then undefined else if format then API.use.crossref.works.format(possible) else possible
# from could also be a cursor value, use * to start a cursor then return the next-cursor given in the response object
# largest size is 1000 and deepest from is 10000, so anything more than that needs cursor
# Search the live crossref works API. qrystr may be a string query or an object
# of field queries (title/citation/issn map to query.bibliographic, journal to
# query.container-title, contributor-ish keys to query.<key>). from may be a
# numeric offset or a cursor string ('*' starts a cursor; see next-cursor in
# the response). Optional funder/publisher/journal restrict the endpoint path.
# Returns {total, cursor, data, facets} on success or {status: 'error', data}.
API.use.crossref.works.search = (qrystr, from, size, filter, sort, order='desc', format, funder, publisher, journal) ->
  # max size is 1000
  url = 'https://api.crossref.org'
  url += '/funders/' + funder if funder # crossref funder ID
  url += '/members/' + publisher if publisher # crossref publisher ID
  url += '/journals/' + journal if journal # journal issn
  url += '/works?'
  url += 'sort=' + sort + '&order=' + order + '&' if sort?
  # more specific queries can be made using:
  #query.container-title	Query container-title aka. publication name
  #query.author	Query author given and family names
  #query.editor	Query editor given and family names
  #query.chair	Query chair given and family names
  #query.translator	Query translator given and family names
  #query.contributor	Query author, editor, chair and translator given and family names
  #query.bibliographic	Query bibliographic information, useful for citation look up. Includes titles, authors, ISSNs and publication years
  #query.affiliation	Query contributor affiliations
  # note there is not a "title" one - just use bibliographic. bibliographic is titles, authors, ISSNs, and publication years
  # ALSO NOTE: crossref LOOKS like it uses logica + and - operators, but it doesn't. their examples use + instaed of space, but either seems to make no difference
  # + or - or space, all just result in OR queries, with increasing large result sets
  if typeof qrystr is 'object'
    for k of qrystr
      if k not in ['from','size','filter','sort','order','format','funder','publisher','journal','issn'] or (k is 'funder' and not funder?) or (k is 'publisher' and not publisher?) or (k in ['issn','journal'] and not journal?)
        ky = if k in ['title','citation','issn'] then 'query.bibliographic' else if k is 'journal' then 'query.container-title' else if k in ['author','editor','chair','translator','contributor','affiliation','bibliographic'] then 'query.' + k else k
        url += ky + '=' + encodeURIComponent(qrystr[k]) + '&'
  else if qrystr and qrystr isnt 'all'
    qry = qrystr.replace(/\w+?\:/g,'') #.replace(/ AND /g,'+').replace(/ NOT /g,'-')
    #qry = if qry.indexOf(' OR ') isnt -1 then qry.replace(/ OR /g,' ') else qry.replace(/ /g,'+')
    qry = qry.replace(/ /g,'+')
    url += 'query=' + encodeURIComponent(qry) + '&'
  if from?
    # an all-digit string is treated as a numeric offset; anything else is a cursor
    if from isnt '*' and typeof from is 'string' and not from.replace(/[0-9]/g,'').length
      try
        fp = parseInt from
        from = fp if not isNaN fp
    if typeof from isnt 'number'
      url += 'cursor=' + encodeURIComponent(from) + '&'
    else
      url += 'offset=' + from + '&'
  url += 'rows=' + size + '&' if size?
  url += 'filter=' + encodeURIComponent(filter) + '&'if filter? and filter isnt ''
  url = url.replace('?&','?').replace(/&$/,'') # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
  API.log 'Using crossref for ' + url
  try res = HTTP.call 'GET', url, {headers: header}
  if res?.statusCode is 200
    ri = res.data.message.items
    if format
      for r of ri
        ri[r] = API.use.crossref.works.format ri[r]
    return { total: res.data.message['total-results'], cursor: res.data.message['next-cursor'], data: ri, facets: res.data.message.facets}
  else
    return { status: 'error', data: res}
API.use.crossref.works.searchby = (searchby='published', qrystr, startdate, enddate, from, size, filter, order, format) ->
  # searchby may be published, indexed, deposited, or created
  # crossref date filters look like ?filter=from-pub-date:2004-04-04,until-pub-date:2004-04-04 (dates are inclusive)
  part = switch
    when searchby is 'published' then 'pub'
    when searchby is 'created' then 'created'
    else searchby.replace 'ed', ''
  filter = if filter? then filter + ',' else ''
  if startdate
    if typeof startdate isnt 'string' or startdate.indexOf('-') is -1 or startdate.length > 4
      startdate = moment(startdate).format 'YYYY-MM-DD'
    filter += 'from-' + part + '-date:' + startdate
  if enddate
    if typeof enddate isnt 'string' or enddate.indexOf('-') is -1 or enddate.length > 4
      enddate = moment(enddate).format 'YYYY-MM-DD'
    filter += ',until-' + part + '-date:' + enddate
  return API.use.crossref.works.search qrystr, from, size, filter, searchby, order, format
API.use.crossref.works.index = (lts, searchby='indexed') ->
  # import crossref works day by day from timestamp lts (default: the last cached import
  # date, falling back to the April 2020 data dump timestamp) up to now, paging each day
  # with the crossref cursor. Returns the count of records loaded.
  if not lts and last = API.http.cache 'last', 'crossref_works_imported'
    # just in case it is an old reading from before I had to switch to using cursor, I was storing the last from number too
    lts = if typeof last is 'string' then parseInt(last.split('_')[0]) else last
    console.log 'Set crossref works index import from cached last date'
    console.log lts, moment(lts).startOf('day').format('YYYY-MM-DD')
  else
    lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
  startday = moment(lts).startOf('day').valueOf()
  dn = Date.now()
  loaded = 0
  updated = 0
  days = 0
  broken = false
  try
    # expected total from startday to now - only used for progress logging and the report email
    target = API.use.crossref.works.searchby(searchby, undefined, startday, undefined, undefined, 10).total
    console.log target
  catch
    target = 0
  while not broken and startday < dn
    cursor = '*' # set a new cursor on each index day query
    console.log startday
    days += 1
    totalthisday = false
    fromthisday = 0
    while not broken and (totalthisday is false or fromthisday < totalthisday)
      console.log loaded, fromthisday, target, searchby
      console.log cursor
      try
        thisdays = API.use.crossref.works.searchby searchby, undefined, startday, startday, cursor, 1000, undefined, 'asc' # using same day for crossref API gets that whole day
        console.log thisdays.data.length
        batch = []
        xtb = []
        for rec in thisdays.data
          if not rec.DOI
            console.log rec
          # bulky fields (relation/reference/abstract) are stored separately in crossref_extra
          if rec.relation? or rec.reference? or rec.abstract?
            rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
            if ext = crossref_extra.get rt._id
              # only fill in fields the existing extra record does not already have
              upd = {}
              upd.relation = rec.relation if rec.relation? and not ext.relation?
              upd.reference = rec.reference if rec.reference? and not ext.reference?
              upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
              if typeof upd.abstract is 'string'
                upd.abstract = API.convert.html2txt upd.abstract
              if JSON.stringify(upd) isnt '{}'
                crossref_extra.update rt._id, upd
            else
              xtb.push rt
          cr = API.use.crossref.works.clean rec
          updated += 1 if crossref_works.get cr._id
          batch.push cr
        if batch.length
          l = crossref_works.insert batch
          if l?.records is batch.length
            loaded += l.records
            API.http.cache 'last', 'crossref_works_imported', startday #+ '_' + fromthisday
          else
            # a partial insert means something went wrong - stop rather than record a bad progress marker
            broken = true
        if xtb.length
          try crossref_extra.insert xtb
        if totalthisday is false
          totalthisday = thisdays?.total ? 0
        fromthisday += 1000
        cursor = thisdays.cursor if thisdays?.cursor?
      catch err
        console.log 'crossref index process error'
        try
          console.log err.toString()
        catch
          try console.log err
        future = new Future()
        Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
        future.wait()
    startday += 86400000
  API.mail.send
    service: 'openaccessbutton'
    from: '<EMAIL>'
    to: if broken then '<EMAIL>' else '<EMAIL>'
    subject: 'Crossref index check ' + (if broken then 'broken' else 'complete')
    text: 'Processed ' + days + ' days up to ' + startday + ' and loaded ' + loaded + ' records of which ' + updated + ' were updates. Target was ' + target
  return loaded
API.use.crossref.works.lastindex = (count) ->
  # report the day the works index has imported up to; with count, also include
  # per-searchby totals of records available since then
  try
    cached = API.http.cache 'last', 'crossref_works_imported'
    # older cache values were stored as 'timestamp_from' strings, so strip any suffix
    lts = if typeof cached is 'string' then parseInt(cached.split('_')[0]) else cached
  catch
    lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
  day = moment(lts).startOf 'day'
  return day.format('YYYY-MM-DD') if not count
  res = date: day.format('YYYY-MM-DD')
  res.timestamp = day.valueOf()
  for p in ['published', 'indexed', 'deposited', 'created']
    res[p] = API.use.crossref.works.searchby(p, undefined, res.timestamp).total
  return res
API.use.crossref.works.clean = (rec) ->
  # normalise a raw crossref work record before indexing: set our _id from the DOI,
  # drop bulky fields (stored separately in crossref_extra), and derive
  # published/year/publishedAt plus a short licence string where possible
  rec._id = rec.DOI.replace /\//g, '_'
  delete rec.reference
  delete rec.relation
  delete rec.abstract
  for p in ['published-print','published-online','issued','deposited','indexed']
    if rec[p]
      if rec[p]['date-time'] and rec[p]['date-time'].split('T')[0].split('-').length is 3
        rec.published ?= rec[p]['date-time'].split('T')[0]
        rec.year ?= rec.published.split('-')[0] if rec.published?
      pbl = ''
      if rec[p]['date-parts'] and rec[p]['date-parts'].length and rec[p]['date-parts'][0] and (not rec.published or not rec[p].timestamp)
        rp = rec[p]['date-parts'][0] #crossref uses year month day in a list
        pbl = rp[0]
        if rp.length is 1
          pbl += '-01-01'
        else
          # zero-pad single-digit month/day, defaulting missing parts to 01
          pbl += if rp.length > 1 then '-' + (if rp[1].toString().length is 1 then '0' else '') + rp[1] else '-01'
          pbl += if rp.length > 2 then '-' + (if rp[2].toString().length is 1 then '0' else '') + rp[2] else '-01'
        if not rec.published
          rec.published = pbl
          rec.year = pbl.split('-')[0]
      if not rec[p].timestamp and pbl
        rec[p].timestamp = moment(pbl,'YYYY-MM-DD').valueOf()
      rec.publishedAt ?= rec[p].timestamp
  for a in rec.assertion ? []
    if a.label is 'OPEN ACCESS'
      if a.URL and a.URL.indexOf('creativecommons') isnt -1
        rec.license ?= []
        rec.license.push {'URL': a.URL}
      rec.is_oa = true
  for l in rec.license ? []
    if l.URL and l.URL.indexOf('creativecommons') isnt -1 and (not rec.licence or rec.licence.indexOf('creativecommons') is -1)
      rec.licence = l.URL
      # strip any trailing slash BEFORE converting remaining slashes to dashes, so
      # .../licenses/by/4.0/ becomes cc-by-4.0 (the previous regex /$\// anchored the
      # end of string before the slash and so could never match)
      rec.licence = 'cc-' + rec.licence.split('/licenses/')[1].replace(/\/$/,'').replace(/\//g, '-') if rec.licence.indexOf('/licenses/') isnt -1
      rec.is_oa = true
  return rec
API.use.crossref.works.format = (rec, metadata={}) ->
  # map a raw crossref work record into our flat metadata shape. Each extraction is
  # individually wrapped in try because crossref records are very inconsistent.
  try metadata.title = rec.title[0]
  try
    if rec.subtitle? and rec.subtitle.length and rec.subtitle[0].length
      metadata.title += ': ' + rec.subtitle[0]
  try metadata.doi = rec.DOI if rec.DOI?
  try metadata.doi = rec.doi if rec.doi? # just in case
  try metadata.crossref_type = rec.type
  try metadata.author = rec.author if rec.author?
  if metadata.author
    for a in metadata.author
      a.name = a.family + ' ' + a.given if not a.name? and a.family and a.given
      if a.affiliation?
        a.affiliation = a.affiliation[0] if _.isArray a.affiliation
        a.affiliation = {name: a.affiliation} if typeof a.affiliation is 'string'
        try a.affiliation.name = a.affiliation.name.replace(/\s\s+/g,' ').trim()
  try metadata.journal = rec['container-title'][0]
  try metadata.journal_short = rec['short-container-title'][0]
  try metadata.issue = rec.issue if rec.issue?
  try metadata.volume = rec.volume if rec.volume?
  try metadata.page = rec.page.toString() if rec.page?
  try metadata.issn = _.uniq rec.ISSN
  try metadata.keyword = rec.subject if rec.subject? # is a list of strings - goes in keywords because subject was already previously used as an object
  try metadata.publisher = rec.publisher if rec.publisher?
  # take the first usable date, preferring print publication over online/created/deposited
  for p in ['published-print','journal-issue.published-print','issued','published-online','created','deposited']
    try
      if rt = rec[p] ? rec['journal-issue']?[p.replace('journal-issue.','')]
        if typeof rt['date-time'] is 'string' and rt['date-time'].indexOf('T') isnt -1 and rt['date-time'].split('T')[0].split('-').length is 3
          metadata.published = rt['date-time'].split('T')[0]
          metadata.year = metadata.published.split('-')[0]
          break
        else if rt['date-parts']? and rt['date-parts'].length and _.isArray(rt['date-parts'][0]) and rt['date-parts'][0].length
          rp = rt['date-parts'][0] # crossref date-parts are [year, month, day] with month/day optional
          pbl = rp[0].toString()
          if pbl.length > 2 # needs to be a year
            metadata.year ?= pbl
            if rp.length is 1
              pbl += '-01-01'
            else
              m = false
              d = false
              # a second part greater than 12 cannot be a month, so treat it as the day
              if not isNaN(parseInt(rp[1])) and parseInt(rp[1]) > 12
                d = rp[1].toString()
              else
                m = rp[1].toString()
              # fill the remaining slot from the third part when one exists
              # (was `rp.length is 2`, which dereferenced the missing rp[2] and threw)
              if rp.length is 3
                if d isnt false
                  m = rp[2].toString()
                else
                  d = rp[2].toString()
              m = if m is false then '01' else if m.length is 1 then '0' + m else m
              d = if d is false then '01' else if d.length is 1 then '0' + d else d
              pbl += '-' + m + '-' + d
            metadata.published = pbl
            break
  try metadata.abstract = API.convert.html2txt(rec.abstract).replace(/\n/g,' ') if rec.abstract?
  try
    if rec.reference? and rec.reference.length
      metadata.reference ?= []
      for r in rec.reference
        rf = {}
        rf.doi = r.DOI if r.DOI?
        # these crossref fields are hyphenated so need bracket access - dot access
        # previously parsed as subtraction and never set the fields
        rf.title = r['article-title'] if r['article-title']?
        rf.journal = r['journal-title'] if r['journal-title']?
        metadata.reference.push(rf) if not _.isEmpty rf
  try
    if rec.license?
      for l in rec.license
        # prefer a creativecommons licence URL over any other licence already found
        if typeof l.URL is 'string' and (typeof metadata.licence isnt 'string' or (metadata.licence.indexOf('creativecommons') is -1 and l.URL.indexOf('creativecommons') isnt -1))
          metadata.licence = l.URL
          if l.URL.indexOf('creativecommons') isnt -1
            md = 'https://doi.org/' + metadata.doi
            metadata.url ?= md
            metadata.url.push(md) if _.isArray(metadata.url) and md not in metadata.url
            try metadata.redirect = API.service.oab.redirect md
            break
  return metadata
API.use.crossref.works.import = (recs) ->
  # bulk insert an array of work records; anything else is a no-op
  return undefined if not _.isArray(recs) or not recs.length
  return crossref_works.insert recs
_xref_import = () ->
  # schedule the recurring crossref imports shortly after server startup
  # NOTE(review): the guard requires dev mode AND an IP outside the configured
  # cluster - confirm this is the intended environment for running imports
  if API.settings.cluster?.ip? and API.status.ip() not in API.settings.cluster.ip and API.settings.dev
    API.log 'Setting up a crossref journal import to run every week on ' + API.status.ip()
    Meteor.setInterval API.use.crossref.journals.import, 604800000
    API.log 'Setting up a crossref works import to run every day on ' + API.status.ip()
    Meteor.setInterval (() -> API.use.crossref.works.index(undefined, 'indexed')), 86400000
# delay scheduling by 19s so startup has settled before any import begins
Meteor.setTimeout _xref_import, 19000
API.use.crossref.status = () ->
  # ping the crossref API with a known DOI; true when healthy, otherwise the
  # response body or the error string
  timeout = API.settings.use?.crossref?.timeout ? API.settings.use?._timeout ? 4000
  try
    res = HTTP.call 'GET', 'https://api.crossref.org/works/10.1186/1758-2946-3-47', {headers: header, timeout: timeout}
    if res.statusCode is 200 and res.data.status is 'ok' then return true else return res.data
  catch err
    return err.toString()
| true |
import moment from 'moment'
import Future from 'fibers/future'
# a crossref API client
# https://github.com/CrossRef/rest-api-doc/blob/master/rest_api.md
# http://api.crossref.org/works/10.1016/j.paid.2009.02.013
# crossref now prefers some identifying headers
# crossref asks API users to send identifying headers so they can make contact about usage
header = {
  #'User-Agent': (API.settings.name ? 'noddy') + ' v' + (API.settings.version ? '0.0.1') + (if API.settings.dev then 'd' else '') + ' (https://cottagelabs.com; mailto:' + (API.settings.log?.to ? 'PI:EMAIL:<EMAIL>END_PI') + ')'
  'User-Agent': 'OAB; mailto: PI:EMAIL:<EMAIL>END_PI'
}
API.use ?= {}
API.use.crossref = {works:{},journals:{}, publishers: {}, funders: {}}
# local collections caching crossref data
# NOTE(review): devislive presumably makes dev writes go to the live index - confirm
@crossref_journal = new API.collection {index:"crossref", type:"journal"}
@crossref_works = new API.collection {index:"crossref", type:"works", devislive:true}
@crossref_extra = new API.collection {index:"crossref", type:"extra", devislive:true}
# HTTP route registrations exposing the crossref client
API.add 'use/crossref/works',
  get: () ->
    if this.queryParams.title and ct = API.use.crossref.works.title this.queryParams.title, this.queryParams.format
      return ct
    else if this.queryParams.doi and dt = API.use.crossref.works.doi this.queryParams.doi, this.queryParams.format
      return dt
    else
      return crossref_works.search this.queryParams
API.add 'use/crossref/works/:doipre/:doipost',
  get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost, this.queryParams.format
# there are DOIs that can have slashes within their second part and are valid. Possibly could have more than one slash
# and there does not seem to be a way to pass wildcards to the urlparams to match multiple / routes
# so this will do for now...
API.add 'use/crossref/works/:doipre/:doipost/:doimore',
  get: () -> return API.use.crossref.works.doi this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore, this.queryParams.format
API.add 'use/crossref/works/extra', () -> return crossref_extra.search this.queryParams
API.add 'use/crossref/works/search',
  # args aligned to works.search(qrystr, from, size, filter, sort, order, format) -
  # previously two leading undefineds shifted from into the filter slot and size into sort
  get: () -> return API.use.crossref.works.search (this.queryParams.q ? this.queryParams.query), (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.sort, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby', # can be published, indexed, deposited, created
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), undefined, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate',
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, undefined, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/searchby/:searchby/:startdate/:enddate',
  get: () -> return API.use.crossref.works.searchby this.urlParams.searchby, (this.queryParams.q ? this.queryParams.query), this.urlParams.startdate, this.urlParams.enddate, (this.queryParams.from ? this.queryParams.offset ? this.queryParams.cursor), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter, this.queryParams.order, this.queryParams.format
API.add 'use/crossref/works/index',
  get: () ->
    # kick off the (long-running) index in the background and return immediately
    Meteor.setTimeout (() => API.use.crossref.works.index(this.queryParams.lts, this.queryParams.by)), 1
    return true
API.add 'use/crossref/works/lastindex', get: () -> return API.use.crossref.works.lastindex()
API.add 'use/crossref/works/lastindex/count', get: () -> return API.use.crossref.works.lastindex true
#API.add 'use/crossref/works/import', post: () -> return API.use.crossref.works.import this.request.body
API.add 'use/crossref/types',
  get: () -> return API.use.crossref.types()
API.add 'use/crossref/journals', () -> crossref_journal.search this
API.add 'use/crossref/journals/import',
  get:
    roleRequired: if API.settings.dev then undefined else 'crossref.admin'
    action:() ->
      Meteor.setTimeout (() => API.use.crossref.journals.import()), 1
      return true
API.add 'use/crossref/journals/:issn',
  get: () -> return API.use.crossref.journals.issn this.urlParams.issn
API.add 'use/crossref/journals/:issn/works',
  get: () -> return API.use.crossref.works.issn this.urlParams.issn, this.queryParams
API.add 'use/crossref/journals/:issn/doi',
  get: () -> return API.use.crossref.journals.doi this.urlParams.issn
API.add 'use/crossref/journals/:issn/dois',
  get: () -> return API.use.crossref.journals.dois this.urlParams.issn, this.queryParams.from
API.add 'use/crossref/publishers',
  get: () -> return API.use.crossref.publishers.search (this.queryParams.q ? this.queryParams.query), (this.queryParams.from ? this.queryParams.offset), (this.queryParams.size ? this.queryParams.rows), this.queryParams.filter
API.add 'use/crossref/reverse',
  get: () -> return API.use.crossref.reverse [this.queryParams.q ? this.queryParams.query ? this.queryParams.title], this.queryParams.score, this.queryParams.format
  post: () -> return API.use.crossref.reverse this.request.body
API.add 'use/crossref/resolve', get: () -> return API.use.crossref.resolve this.queryParams.doi
API.add 'use/crossref/resolve/:doipre/:doipost', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost
API.add 'use/crossref/resolve/:doipre/:doipost/:doimore', get: () -> return API.use.crossref.resolve this.urlParams.doipre + '/' + this.urlParams.doipost + '/' + this.urlParams.doimore
API.use.crossref.types = () ->
  # list the work types crossref knows about, or undefined on any failure
  url = 'https://api.crossref.org/types'
  API.log 'Using crossref for ' + url
  try
    res = HTTP.call 'GET', url, {headers: header}
    return res.data.message.items if res.statusCode is 200
  return undefined
API.use.crossref.reverse = (citations, score=85, format=false) ->
  # reverse-lookup: given one or more citation strings, ask crossref /reverse for the
  # best matching journal article. Matches scoring below `score` get a second chance:
  # if every significant title word appears in the citation, a bonus lifts the score.
  citations = [citations] if typeof citations is 'string'
  url = 'https://api.crossref.org/reverse'
  API.log 'Using crossref for ' + url + ' with citation ' + JSON.stringify citations
  try
    res = HTTP.call 'POST', url, {data:citations, headers: header}
    if res.statusCode is 200
      if res?.data?.message?.DOI and res.data.message.score and res.data.message.type is 'journal-article'
        sc = res.data.message.score
        if sc < score
          # stopwords ignored when comparing title words against citation words
          ignore = ["a","an","and","are","as","at","but","be","by","do","for","if","in","is","it","or","so","the","to"]
          titleparts = res.data.message.title[0].toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').split(' ')
          titles = []
          for f in titleparts
            titles.push(f) if ignore.indexOf(f.split("'")[0]) is -1 and f.length > 0
          citeparts = citations.join(' ').toLowerCase().replace(/(<([^>]+)>)/g,'').replace(/[^a-z0-9]/g,' ').replace(/ /g,' ').split(' ')
          cites = []
          for c in citeparts
            cites.push(c) if ignore.indexOf(c.split("'")[0]) is -1 and c.length > 1
          # per-word bonus scaled so a full title match can reach the threshold
          bonus = (score - sc)/titles.length + 1
          found = []
          for w in titles
            found.push(w) if w in cites
          # only apply the bonus when every title word was found, in order
          sc += bonus * found.length if titles.length is found.length and found.join() is titles.join()
        if sc >= score
          if format
            return API.use.crossref.works.format res.data.message
          else
            return { data: {doi:res.data.message.DOI, title:res.data.message.title[0], received:res.data.message.score, adjusted: sc}, original:res.data}
        else
          return { data: {info: 'below score', received:res.data.message.score, adjusted: sc}, original:res.data}
      else
        return {}
    else
      return { status: 'error', data: res }
  catch err
    return { status: 'error', error: err.toString() }
API.use.crossref.resolve = (doi) ->
  # resolve a DOI to its publisher URL via the doi.org handle API, with caching.
  # Returns false (and caches false) when no URL record is found.
  doi = doi.replace('http://','').replace('https://','').replace('dx.doi.org/','').replace('doi.org/','')
  cached = API.http.cache doi, 'crossref_resolve'
  if cached
    return cached
  else
    url = false
    try
      # TODO NOTE that the URL given by crossref doi resolver may NOT be the final resolved URL. The publisher may still redirect to a different one
      resp = HTTP.call 'GET', 'https://doi.org/api/handles/' + doi, {headers: header}
      # NOTE(review): if resp.data is undefined this for loop throws; the bare try swallows it
      for r in resp.data?.values
        if r.type.toLowerCase() is 'url'
          url = r.data.value
          # like these weird chinese ones, which end up throwing 404 anyway, but, just in case - https://doi.org/api/handles/10.7688/j.issn.1000-1646.2014.05.20
          # NOTE(review): new Buffer() is deprecated in current Node - Buffer.from is preferred
          url = new Buffer(url,'base64').toString('utf-8') if r.data.format is 'base64'
    API.http.cache doi, 'crossref_resolve', url
    return url
API.use.crossref.journals.issn = (issn) ->
  # look up journal records by one or more ISSNs (comma-separated string or array);
  # after the split, issn is always an array (a redundant re-wrap line was removed)
  issn = issn.split(',') if typeof issn is 'string'
  return crossref_journal.find 'ISSN.exact:"' + issn.join('" OR ISSN.exact:"') + '"'
API.use.crossref.journals.doi = (issn) ->
  # the earliest-published DOI we hold for the given ISSN(s) (comma-separated string
  # or array); after the split, issn is always an array (redundant re-wrap removed)
  issn = issn.split(',') if typeof issn is 'string'
  try
    return crossref_works.find('ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {include: 'DOI', sort: {publishedAt:{order:'asc'}}}).DOI
  catch
    return undefined
API.use.crossref.journals.dois = (issn, from=0) ->
  # collect up to 10000 DOIs for the given ISSN(s), most recently published first;
  # after the split, issn is always an array (redundant re-wrap removed)
  issn = issn.split(',') if typeof issn is 'string'
  dois = []
  crossref_works.each 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"', {from: from, include: 'DOI', sort: {publishedAt:{order:'desc'}}}, (rec) ->
    dois.push rec.DOI
    if dois.length >= 10000
      return 'break'
  return dois
API.use.crossref.journals.search = (qrystr, from, size, filter) ->
  # search the crossref journals route; returns {total, data, facets} or an error object
  url = 'https://api.crossref.org/journals?'
  if qrystr and qrystr isnt 'all'
    qry = qrystr.replace(/\w+?\:/g,'').replace(/ AND /g,'+').replace(/ OR /g,' ').replace(/ NOT /g,'-').replace(/ /g,'+')
    url += 'query=' + qry
  for [param, val] in [['offset', from], ['rows', size], ['filter', filter]]
    url += '&' + param + '=' + val if val?
  url = url.replace '?&', '?' # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
  API.log 'Using crossref for ' + url
  try res = HTTP.call 'GET', url, {headers: header}
  if res?.statusCode is 200
    return { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets}
  else
    return { status: 'error', data: res}
API.use.crossref.journals.import = () ->
  # re-import the full crossref journals list in pages of 1000, inserting in batches
  # of up to 10000, then remove any journal records not refreshed by this run
  started = Date.now()
  size = 1000
  total = 0
  counter = 0
  journals = 0
  batch = []
  while total is 0 or counter < total
    if batch.length >= 10000
      crossref_journal.insert batch
      batch = []
    try
      crls = API.use.crossref.journals.search undefined, counter, size
      total = crls.total if total is 0
      for crl in crls?.data ? []
        journals += 1
        batch.push crl
      counter += size
    catch err
      console.log 'crossref journals import process error'
      try
        console.log err.toString()
      catch
        try console.log err
      future = new Future()
      Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
      future.wait()
  crossref_journal.insert(batch) if batch.length
  # anything created before this run started was not refreshed, so drop it
  crossref_journal.remove 'createdAt:<' + started
  API.log 'Retrieved and imported ' + journals + ' crossref journals'
  return journals
API.use.crossref.publishers.search = (qrystr, from, size, filter) ->
  # search crossref members (publishers); returns {total, data, facets} or an error object
  url = 'https://api.crossref.org/members?'
  url += 'query=' + encodeURIComponent(qrystr) if qrystr and qrystr isnt 'all'
  for [param, val] in [['offset', from], ['rows', size], ['filter', filter]]
    url += '&' + param + '=' + val if val?
  url = url.replace '?&', '?' # crossref errors on an & directly after the ?
  API.log 'Using crossref for ' + url
  res = HTTP.call 'GET', url, {headers: header}
  if res.statusCode is 200
    return { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets}
  else
    return { status: 'error', data: res}
API.use.crossref.funders.search = (qrystr, from, size, filter) ->
  # search crossref funders; returns {total, data, facets} or an error object
  url = 'https://api.crossref.org/funders?'
  url += 'query=' + qrystr.replace(/ /g,'+') if qrystr and qrystr isnt 'all'
  for [param, val] in [['offset', from], ['rows', size], ['filter', filter]]
    url += '&' + param + '=' + val if val?
  url = url.replace '?&', '?' # crossref errors on an & directly after the ?
  API.log 'Using crossref for ' + url
  res = HTTP.call 'GET', url, {headers: header}
  if res.statusCode is 200
    return { total: res.data.message['total-results'], data: res.data.message.items, facets: res.data.message.facets}
  else
    return { status: 'error', data: res}
API.use.crossref.works.issn = (issn, q={}) ->
  # search our local works index restricted to the given ISSN(s);
  # q may be true as shorthand for {format: true}
  q = {format: true} if q is true
  delete q.format # format is not a search param, so strip it before querying (the old unused `format` local was removed)
  issn = [issn] if typeof issn is 'string'
  return crossref_works.search q, restrict: [{query_string: {query: 'ISSN.exact:"' + issn.join('" OR issn.exact:"') + '"'}}]
API.use.crossref.works.doi = (doi, format) ->
  # get a work by DOI from the local index, or fetch it from the crossref API and cache
  # it locally. format=true returns the formatted metadata shape instead of the record.
  ret = crossref_works.get doi.toLowerCase().replace /\//g, '_'
  if not ret?
    url = 'https://api.crossref.org/works/' + doi
    API.log 'Using crossref for ' + url
    try res = HTTP.call 'GET', url, {headers: header}
    if res?.statusCode is 200 and res.data?.message?.DOI?
      rec = res.data.message
      # bulky fields are stored in crossref_extra, only filling gaps in an existing record
      # (same extra-record logic as in works.index)
      if rec.relation? or rec.reference? or rec.abstract?
        rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
        if ext = crossref_extra.get rt._id
          upd = {}
          upd.relation = rec.relation if rec.relation? and not ext.relation?
          upd.reference = rec.reference if rec.reference? and not ext.reference?
          upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
          if typeof upd.abstract is 'string'
            upd.abstract = API.convert.html2txt upd.abstract
          if JSON.stringify(upd) isnt '{}'
            crossref_extra.update rt._id, upd
        else
          crossref_extra.insert rt
      ret = API.use.crossref.works.clean rec
      API.log 'Saved crossref work ' + ret.DOI
      crossref_works.insert ret
  return if not ret? then undefined else if format then API.use.crossref.works.format(ret) else ret
API.use.crossref.works.title = (title, format) ->
  # find a work in our local index by title. title may be an object holding title plus
  # extra metadata fields that must also match. Journal articles are returned as soon
  # as found; other types are kept as a fallback "possible" answer.
  metadata = if typeof title is 'object' then title else {}
  title = metadata.title if typeof title is 'object'
  return undefined if typeof title isnt 'string'
  qr = 'title.exact:"' + title + '"'
  if title.indexOf(' ') isnt -1
    # also match all significant words across title and subtitle
    qr += ' OR ('
    f = true
    for t in title.split ' '
      if t.length > 2
        if f is true
          f = false
        else
          qr += ' AND '
        qr += '(title:"' + t + '" OR subtitle:"' + t + '")'
    qr += ')'
  res = crossref_works.search qr, 20
  possible = false
  if res?.hits?.total
    ltitle = title.toLowerCase().replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
    for r in res.hits.hits
      rec = r._source
      rt = (if typeof rec.title is 'string' then rec.title else rec.title[0]).toLowerCase()
      if rec.subtitle?
        st = (if typeof rec.subtitle is 'string' then rec.subtitle else rec.subtitle[0]).toLowerCase()
        rt += ' ' + st if typeof st is 'string' and st.length and st not in rt
      rt = rt.replace(/['".,\/\^&\*;:!\?#\$%{}=\-\+_`~()]/g,' ').replace(/\s{2,}/g,' ').trim()
      # one title must contain the other, with lengths within 30% of each other
      if (ltitle.indexOf(rt) isnt -1 or rt.indexOf(ltitle) isnt -1) and ltitle.length/rt.length > 0.7 and ltitle.length/rt.length < 1.3
        matches = true
        fr = API.use.crossref.works.format rec
        for k of metadata
          if k not in ['citation','title'] and typeof metadata[k] in ['string','number']
            # require EVERY provided metadata field to agree (previously only the last
            # field checked decided), and compare via toString so numbers don't throw
            matches = matches and (not fr[k]? or typeof fr[k] not in ['string','number'] or fr[k].toString().toLowerCase() is metadata[k].toString().toLowerCase())
        if matches
          if rec.type is 'journal-article'
            return if format then API.use.crossref.works.format(rec) else rec
          else if possible is false or possible.type isnt 'journal-article' and rec.type is 'journal-article'
            possible = rec
  # (was `else match`, an undefined variable that threw whenever a non-journal-article
  # match was found and format was falsy)
  return if possible is false then undefined else if format then API.use.crossref.works.format(possible) else possible
# from could also be a cursor value, use * to start a cursor then return the next-cursor given in the response object
# largest size is 1000 and deepest from is 10000, so anything more than that needs cursor
API.use.crossref.works.search = (qrystr, from, size, filter, sort, order='desc', format, funder, publisher, journal) ->
  # build a crossref /works URL and return {total, cursor, data, facets} on success,
  # or {status: 'error', data} on failure. qrystr may be a string or an object of
  # field:value pairs mapped onto crossref's query.* parameters.
  # max size is 1000
  url = 'https://api.crossref.org'
  url += '/funders/' + funder if funder # crossref funder ID
  url += '/members/' + publisher if publisher # crossref publisher ID
  url += '/journals/' + journal if journal # journal issn
  url += '/works?'
  url += 'sort=' + sort + '&order=' + order + '&' if sort?
  # more specific queries can be made using:
  #query.container-title Query container-title aka. publication name
  #query.author Query author given and family names
  #query.editor Query editor given and family names
  #query.chair Query chair given and family names
  #query.translator Query translator given and family names
  #query.contributor Query author, editor, chair and translator given and family names
  #query.bibliographic Query bibliographic information, useful for citation look up. Includes titles, authors, ISSNs and publication years
  #query.affiliation Query contributor affiliations
  # note there is not a "title" one - just use bibliographic. bibliographic is titles, authors, ISSNs, and publication years
  # ALSO NOTE: crossref LOOKS like it uses logical + and - operators, but it doesn't. their examples use + instead of space, but either seems to make no difference
  # + or - or space, all just result in OR queries, with increasingly large result sets
  if typeof qrystr is 'object'
    for k of qrystr
      if k not in ['from','size','filter','sort','order','format','funder','publisher','journal','issn'] or (k is 'funder' and not funder?) or (k is 'publisher' and not publisher?) or (k in ['issn','journal'] and not journal?)
        ky = if k in ['title','citation','issn'] then 'query.bibliographic' else if k is 'journal' then 'query.container-title' else if k in ['author','editor','chair','translator','contributor','affiliation','bibliographic'] then 'query.' + k else k
        url += ky + '=' + encodeURIComponent(qrystr[k]) + '&'
  else if qrystr and qrystr isnt 'all'
    qry = qrystr.replace(/\w+?\:/g,'') #.replace(/ AND /g,'+').replace(/ NOT /g,'-')
    #qry = if qry.indexOf(' OR ') isnt -1 then qry.replace(/ OR /g,' ') else qry.replace(/ /g,'+')
    qry = qry.replace(/ /g,'+')
    url += 'query=' + encodeURIComponent(qry) + '&'
  if from?
    # a purely numeric string becomes an offset; anything else is treated as a cursor
    if from isnt '*' and typeof from is 'string' and not from.replace(/[0-9]/g,'').length
      try
        fp = parseInt from
        from = fp if not isNaN fp
    if typeof from isnt 'number'
      url += 'cursor=' + encodeURIComponent(from) + '&'
    else
      url += 'offset=' + from + '&'
  url += 'rows=' + size + '&' if size?
  url += 'filter=' + encodeURIComponent(filter) + '&'if filter? and filter isnt ''
  url = url.replace('?&','?').replace(/&$/,'') # tidy any params coming immediately after the start of search query param signifier, as it makes crossref error out
  API.log 'Using crossref for ' + url
  try res = HTTP.call 'GET', url, {headers: header}
  if res?.statusCode is 200
    ri = res.data.message.items
    if format
      for r of ri
        ri[r] = API.use.crossref.works.format ri[r]
    return { total: res.data.message['total-results'], cursor: res.data.message['next-cursor'], data: ri, facets: res.data.message.facets}
  else
    return { status: 'error', data: res}
API.use.crossref.works.searchby = (searchby='published', qrystr, startdate, enddate, from, size, filter, order, format) ->
# can be published, indexed, deposited, created
# using ?filter=from-pub-date:2004-04-04,until-pub-date:2004-04-04 (the dates are inclusive)
part = if searchby is 'published' then 'pub' else if searchby is 'created' then 'created' else searchby.replace('ed','')
if filter? then filter += ',' else filter = ''
if startdate
startdate = moment(startdate).format('YYYY-MM-DD') if typeof startdate isnt 'string' or startdate.indexOf('-') is -1 or startdate.length > 4
filter += 'from-' + part + '-date:' + startdate
if enddate
enddate = moment(enddate).format('YYYY-MM-DD') if typeof enddate isnt 'string' or enddate.indexOf('-') is -1 or enddate.length > 4
filter += ',until-' + part + '-date:' + enddate
return API.use.crossref.works.search qrystr, from, size, filter, searchby, order, format
API.use.crossref.works.index = (lts, searchby='indexed') ->
if not lts and last = API.http.cache 'last', 'crossref_works_imported'
# just in case it is an old reading from before I had to switch to using cursor, I was storing the last from number too
lts = if typeof last is 'string' then parseInt(last.split('_')[0]) else last
console.log 'Set crossref works index import from cached last date'
console.log lts, moment(lts).startOf('day').format('YYYY-MM-DD')
else
lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
startday = moment(lts).startOf('day').valueOf()
dn = Date.now()
loaded = 0
updated = 0
days = 0
broken = false
try
target = API.use.crossref.works.searchby(searchby, undefined, startday, undefined, undefined, 10).total
console.log target
catch
target = 0
while not broken and startday < dn
cursor = '*' # set a new cursor on each index day query
console.log startday
days += 1
totalthisday = false
fromthisday = 0
while not broken and (totalthisday is false or fromthisday < totalthisday)
console.log loaded, fromthisday, target, searchby
console.log cursor
try
thisdays = API.use.crossref.works.searchby searchby, undefined, startday, startday, cursor, 1000, undefined, 'asc' # using same day for crossref API gets that whole day
console.log thisdays.data.length
batch = []
xtb = []
for rec in thisdays.data
if not rec.DOI
console.log rec
if rec.relation? or rec.reference? or rec.abstract?
rt = _id: rec.DOI.replace(/\//g, '_'), relation: rec.relation, reference: rec.reference, abstract: rec.abstract
if ext = crossref_extra.get rt._id
upd = {}
upd.relation = rec.relation if rec.relation? and not ext.relation?
upd.reference = rec.reference if rec.reference? and not ext.reference?
upd.abstract = rec.abstract if rec.abstract? and not ext.abstract?
if typeof upd.abstract is 'string'
upd.abstract = API.convert.html2txt upd.abstract
if JSON.stringify(upd) isnt '{}'
crossref_extra.update rt._id, upd
else
xtb.push rt
cr = API.use.crossref.works.clean rec
updated += 1 if crossref_works.get cr._id
batch.push cr
if batch.length
l = crossref_works.insert batch
if l?.records is batch.length
loaded += l.records
API.http.cache 'last', 'crossref_works_imported', startday #+ '_' + fromthisday
else
broken = true
if xtb.length
try crossref_extra.insert xtb
if totalthisday is false
totalthisday = thisdays?.total ? 0
fromthisday += 1000
cursor = thisdays.cursor if thisdays?.cursor?
catch err
console.log 'crossref index process error'
try
console.log err.toString()
catch
try console.log err
future = new Future()
Meteor.setTimeout (() -> future.return()), 2000 # wait 2s on crossref downtime
future.wait()
startday += 86400000
API.mail.send
service: 'openaccessbutton'
from: 'PI:EMAIL:<EMAIL>END_PI'
to: if broken then 'PI:EMAIL:<EMAIL>END_PI' else 'PI:EMAIL:<EMAIL>END_PI'
subject: 'Crossref index check ' + (if broken then 'broken' else 'complete')
text: 'Processed ' + days + ' days up to ' + startday + ' and loaded ' + loaded + ' records of which ' + updated + ' were updates. Target was ' + target
return loaded
API.use.crossref.works.lastindex = (count) ->
try
last = API.http.cache 'last', 'crossref_works_imported'
lts = if typeof last is 'string' then parseInt(last.split('_')[0]) else last
catch
lts = 1585971669199 # the timestamp of the last article from the data dump (around 4th April 2020)
if count
res = date: moment(lts).startOf('day').format('YYYY-MM-DD')
res.timestamp = moment(lts).startOf('day').valueOf()
res[p] = API.use.crossref.works.searchby(p, undefined, res.timestamp).total for p in ['published', 'indexed', 'deposited', 'created']
return res
else
return moment(lts).startOf('day').format('YYYY-MM-DD')
API.use.crossref.works.clean = (rec) ->
rec._id = rec.DOI.replace /\//g, '_'
delete rec.reference
delete rec.relation
delete rec.abstract
for p in ['published-print','published-online','issued','deposited','indexed']
if rec[p]
if rec[p]['date-time'] and rec[p]['date-time'].split('T')[0].split('-').length is 3
rec.published ?= rec[p]['date-time'].split('T')[0]
rec.year ?= rec.published.split('-')[0] if rec.published?
pbl = ''
if rec[p]['date-parts'] and rec[p]['date-parts'].length and rec[p]['date-parts'][0] and (not rec.published or not rec[p].timestamp)
rp = rec[p]['date-parts'][0] #crossref uses year month day in a list
pbl = rp[0]
if rp.length is 1
pbl += '-01-01'
else
pbl += if rp.length > 1 then '-' + (if rp[1].toString().length is 1 then '0' else '') + rp[1] else '-01'
pbl += if rp.length > 2 then '-' + (if rp[2].toString().length is 1 then '0' else '') + rp[2] else '-01'
if not rec.published
rec.published = pbl
rec.year = pbl.split('-')[0]
if not rec[p].timestamp and pbl
rec[p].timestamp = moment(pbl,'YYYY-MM-DD').valueOf()
rec.publishedAt ?= rec[p].timestamp
for a in rec.assertion ? []
if a.label is 'OPEN ACCESS'
if a.URL and a.URL.indexOf('creativecommons') isnt -1
rec.license ?= []
rec.license.push {'URL': a.URL}
rec.is_oa = true
for l in rec.license ? []
if l.URL and l.URL.indexOf('creativecommons') isnt -1 and (not rec.licence or rec.licence.indexOf('creativecommons') is -1)
rec.licence = l.URL
rec.licence = 'cc-' + rec.licence.split('/licenses/')[1].replace(/$\//,'').replace(/\//g, '-') if rec.licence.indexOf('/licenses/') isnt -1
rec.is_oa = true
return rec
API.use.crossref.works.format = (rec, metadata={}) ->
try metadata.title = rec.title[0]
try
if rec.subtitle? and rec.subtitle.length and rec.subtitle[0].length
metadata.title += ': ' + rec.subtitle[0]
try metadata.doi = rec.DOI if rec.DOI?
try metadata.doi = rec.doi if rec.doi? # just in case
try metadata.crossref_type = rec.type
try metadata.author = rec.author if rec.author?
if metadata.author
for a in metadata.author
a.name = a.family + ' ' + a.given if not a.name? and a.family and a.given
if a.affiliation?
a.affiliation = a.affiliation[0] if _.isArray a.affiliation
a.affiliation = {name: a.affiliation} if typeof a.affiliation is 'string'
try a.affiliation.name = a.affiliation.name.replace(/\s\s+/g,' ').trim()
try metadata.journal = rec['container-title'][0]
try metadata.journal_short = rec['short-container-title'][0]
try metadata.issue = rec.issue if rec.issue?
try metadata.volume = rec.volume if rec.volume?
try metadata.page = rec.page.toString() if rec.page?
try metadata.issn = _.uniq rec.ISSN
try metadata.keyword = rec.subject if rec.subject? # is a list of strings - goes in keywords because subject was already previously used as an object
try metadata.publisher = rec.publisher if rec.publisher?
for p in ['published-print','journal-issue.published-print','issued','published-online','created','deposited']
try
if rt = rec[p] ? rec['journal-issue']?[p.replace('journal-issue.','')]
if typeof rt['date-time'] is 'string' and rt['date-time'].indexOf('T') isnt -1 and rt['date-time'].split('T')[0].split('-').length is 3
metadata.published = rt['date-time'].split('T')[0]
metadata.year = metadata.published.split('-')[0]
break
else if rt['date-parts']? and rt['date-parts'].length and _.isArray(rt['date-parts'][0]) and rt['date-parts'][0].length
rp = rt['date-parts'][0]
pbl = rp[0].toString()
if pbl.length > 2 # needs to be a year
metadata.year ?= pbl
if rp.length is 1
pbl += '-01-01'
else
m = false
d = false
if not isNaN(parseInt(rp[1])) and parseInt(rp[1]) > 12
d = rp[1].toString()
else
m = rp[1].toString()
if rp.length is 2
if d isnt false
m = rp[2].toString()
else
d = rp[2].toString()
m = if m is false then '01' else if m.length is 1 then '0' + m else m
d = if d is false then '01' else if d.length is 1 then '0' + d else d
pbl += '-' + m + '-' + d
metadata.published = pbl
break
try metadata.abstract = API.convert.html2txt(rec.abstract).replace(/\n/g,' ') if rec.abstract?
try
if rec.reference? and rec.reference.length
metadata.reference ?= []
for r in rec.reference
rf = {}
rf.doi = r.DOI if r.DOI?
rf.title = r.article-title if r.article-title?
rf.journal = r.journal-title if r.journal-title?
metadata.reference.push(rf) if not _.isEmpty rf
try
if rec.license?
for l in rec.license
if typeof l.URL is 'string' and (typeof metadata.licence isnt 'string' or (metadata.licence.indexOf('creativecommons') is -1 and l.URL.indexOf('creativecommons') isnt -1))
metadata.licence = l.URL
if l.URL.indexOf('creativecommons') isnt -1
md = 'https://doi.org/' + metadata.doi
metadata.url ?= md
metadata.url.push(md) if _.isArray(metadata.url) and md not in metadata.url
try metadata.redirect = API.service.oab.redirect md
break
return metadata
API.use.crossref.works.import = (recs) ->
if _.isArray(recs) and recs.length
return crossref_works.insert recs
else
return undefined
_xref_import = () ->
if API.settings.cluster?.ip? and API.status.ip() not in API.settings.cluster.ip and API.settings.dev
API.log 'Setting up a crossref journal import to run every week on ' + API.status.ip()
Meteor.setInterval API.use.crossref.journals.import, 604800000
API.log 'Setting up a crossref works import to run every day on ' + API.status.ip()
Meteor.setInterval (() -> API.use.crossref.works.index(undefined, 'indexed')), 86400000
Meteor.setTimeout _xref_import, 19000
API.use.crossref.status = () ->
try
res = HTTP.call 'GET', 'https://api.crossref.org/works/10.1186/1758-2946-3-47', {headers: header, timeout: API.settings.use?.crossref?.timeout ? API.settings.use?._timeout ? 4000}
return if res.statusCode is 200 and res.data.status is 'ok' then true else res.data
catch err
return err.toString()
|
[
{
"context": " \"user\": conf.user,\n \"password\": conf.password,\n \"port\": port\n })\n co",
"end": 4787,
"score": 0.9993323683738708,
"start": 4774,
"tag": "PASSWORD",
"value": "conf.password"
}
] | scripts-available/mysql.coffee | coredump/hoardd | 13 | Mysql = require 'mysql'
Fs = require 'fs'
Path = require 'path'
# To configure this plugin use the mysql.json file and put it on the scripts/ directory
# Also remember that the user must be able to use SHOW SLAVE STATUS, it needs the
# SUPER or REPLICATION CLIENT privileges, something like this:
# grant replication client on *.* to <user>;
# Metrics to get from the statuses
generalMetrics =
'rxBytes': 'Bytes_received',
'txBytes': 'Bytes_sent',
'keyRead_requests': 'Key_read_requests',
'keyReads': 'Key_reads',
'keyWrite_requests': 'Key_write_requests',
'keyWrites': 'Key_writes',
'binlogCacheUse': 'Binlog_cache_use',
'binlogCacheDiskUse': 'Binlog_cache_disk_use',
'maxUsedConnections': 'Max_used_connections',
'abortedClients': 'Aborted_clients',
'abortedConnects': 'Aborted_connects',
'threadsConnected': 'Threads_connected',
'openFiles': 'Open_files',
'openTables': 'Open_tables',
'openedTables': 'Opened_tables',
'slaveLag': 'Seconds_Behind_Master',
'fullJoins': 'Select_full_join',
'fullRangeJoins': 'Select_full_range_join',
'selectRange': 'Select_range',
'selectRange_check': 'Select_range_check',
'selectScan': 'Select_scan'
queryCache =
'queriesInCache': 'Qcache_queries_in_cache',
'cacheHits': 'Qcache_hits',
'inserts': 'Qcache_inserts',
'notCached': 'Qcache_not_cached',
'lowMemPrunes': 'Qcache_lowmem_prunes'
counters =
'questions': 'Questions'
'select': 'Com_select',
'delete': 'Com_delete',
'insert': 'Com_insert',
'update': 'Com_update',
'replace': 'Com_replace',
'deleteMulti': 'Com_delete_multi',
'insertSelect': 'Com_insert_select',
'updateMulti': 'Com_update_multi',
'replaceSelect': 'Com_replace_select'
'handlerWrite': 'Handler_write',
'handlerUpdate': 'Handler_update',
'handlerDelete': 'Handler_delete',
'handlerRead_first': 'Handler_read_first',
'handlerRead_key': 'Handler_read_key',
'handlerRead_next': 'Handler_read_next',
'handlerRead_prev': 'Handler_read_prev',
'handlerRead_rnd': 'Handler_read_rnd',
'handlerRead_rnd_next': 'Handler_read_rnd_next'
'handlerCommit': 'Handler_commit',
'handlerRollback': 'Handler_rollback',
'handlerSavepoint': 'Handler_savepoint',
'handlerSavepointRollback': 'Handler_savepoint_rollback'
innodbMetrics =
'bufferTotal_pages': 'Innodb_buffer_pool_pages_total',
'bufferFree_pages': 'Innodb_buffer_pool_pages_free',
'bufferDirty_pages': 'Innodb_buffer_pool_pages_dirty',
'bufferUsed_pages': 'Innodb_buffer_pool_pages_data',
'pageSize': 'Innodb_page_size',
'pagesCreated': 'Innodb_pages_created',
'pagesRead': 'Innodb_pages_read',
'pagesWritten': 'Innodb_pages_written',
'currentLockWaits': 'Innodb_row_lock_current_waits',
'lockWaitTimes': 'Innodb_row_lock_waits',
'rowLockTime': 'Innodb_row_lock_time',
'fileReads': 'Innodb_data_reads',
'fileWrites': 'Innodb_data_writes',
'fileFsyncs': 'Innodb_data_fsyncs',
'logWrites': 'Innodb_log_writes'
'rowsUpdated': 'Innodb_rows_updated',
'rowsRead': 'Innodb_rows_read',
'rowsDeleted': 'Innodb_rows_deleted',
'rowsInserted': 'Innodb_rows_inserted',
metricGroups =
'general': generalMetrics,
'query_cache': queryCache,
'counters': counters,
'innodb_metrics': innodbMetrics
module.exports = (server) ->
run = () ->
server.cli.debug "Running the mysql plugin"
metricPrefix = "#{server.fqdn}.mysql"
port = 3306
data = {}
greppedLines = []
# This script needs configuration
confPath = Path.join server.sPath, 'mysql.json'
configFile = Fs.readFileSync confPath, 'utf-8'
conf = JSON.parse configFile
getMetrics = (greppedLines) ->
i = 0
while i < greppedLines.length
unless greppedLines[i] is ""
port = Math.round(/:([0-9][0-9][0-9][0-9])/.exec(greppedLines[i])[1])
conn = Mysql.createClient({
"host": conf.localhost,
"user": conf.user,
"password": conf.password,
"port": port
})
conn.query 'SHOW GLOBAL STATUS', (err, res, fields) ->
if err
server.cli.error "Error on STATUS query: #{err}"
for row in res
data[row.Variable_name] = row.Value
conn.query 'SHOW SLAVE STATUS', (err, res, fields) ->
if err
server.cli.error "Error on SLAVE STATUS query: #{err}"
data[key] = value for key, value of res[0]
# Replication lag being null is bad, very bad, so negativate it here
data['Seconds_Behind_Master'] = -1 if data['Seconds_Behind_Master'] == null
conn.end()
for name, group of metricGroups
server.push_metric("#{metricPrefix}.#{port}.#{name}.#{key}",
data[stat]) for key, stat of group
i++
switch conf.multiserver
when 0
greppedLines[0] = ":" + port
getMetrics(greppedLines)
when 1
{spawn} = require 'child_process'
netstat = spawn 'netstat', ['-ntpl']
grep = spawn 'grep', ['mysqld']
netstat.stdout.on 'data', (data) ->
grep.stdin.write(data)
netstat.on 'close', (code) ->
server.cli.error "netstat process exited with code " + code if code isnt 0
grep.stdin.end()
grep.stdout.on 'data', (data) ->
greppedLines = ("" + data).split( "\n" )
getMetrics(greppedLines) | 78576 | Mysql = require 'mysql'
Fs = require 'fs'
Path = require 'path'
# To configure this plugin use the mysql.json file and put it on the scripts/ directory
# Also remember that the user must be able to use SHOW SLAVE STATUS, it needs the
# SUPER or REPLICATION CLIENT privileges, something like this:
# grant replication client on *.* to <user>;
# Metrics to get from the statuses
generalMetrics =
'rxBytes': 'Bytes_received',
'txBytes': 'Bytes_sent',
'keyRead_requests': 'Key_read_requests',
'keyReads': 'Key_reads',
'keyWrite_requests': 'Key_write_requests',
'keyWrites': 'Key_writes',
'binlogCacheUse': 'Binlog_cache_use',
'binlogCacheDiskUse': 'Binlog_cache_disk_use',
'maxUsedConnections': 'Max_used_connections',
'abortedClients': 'Aborted_clients',
'abortedConnects': 'Aborted_connects',
'threadsConnected': 'Threads_connected',
'openFiles': 'Open_files',
'openTables': 'Open_tables',
'openedTables': 'Opened_tables',
'slaveLag': 'Seconds_Behind_Master',
'fullJoins': 'Select_full_join',
'fullRangeJoins': 'Select_full_range_join',
'selectRange': 'Select_range',
'selectRange_check': 'Select_range_check',
'selectScan': 'Select_scan'
queryCache =
'queriesInCache': 'Qcache_queries_in_cache',
'cacheHits': 'Qcache_hits',
'inserts': 'Qcache_inserts',
'notCached': 'Qcache_not_cached',
'lowMemPrunes': 'Qcache_lowmem_prunes'
counters =
'questions': 'Questions'
'select': 'Com_select',
'delete': 'Com_delete',
'insert': 'Com_insert',
'update': 'Com_update',
'replace': 'Com_replace',
'deleteMulti': 'Com_delete_multi',
'insertSelect': 'Com_insert_select',
'updateMulti': 'Com_update_multi',
'replaceSelect': 'Com_replace_select'
'handlerWrite': 'Handler_write',
'handlerUpdate': 'Handler_update',
'handlerDelete': 'Handler_delete',
'handlerRead_first': 'Handler_read_first',
'handlerRead_key': 'Handler_read_key',
'handlerRead_next': 'Handler_read_next',
'handlerRead_prev': 'Handler_read_prev',
'handlerRead_rnd': 'Handler_read_rnd',
'handlerRead_rnd_next': 'Handler_read_rnd_next'
'handlerCommit': 'Handler_commit',
'handlerRollback': 'Handler_rollback',
'handlerSavepoint': 'Handler_savepoint',
'handlerSavepointRollback': 'Handler_savepoint_rollback'
innodbMetrics =
'bufferTotal_pages': 'Innodb_buffer_pool_pages_total',
'bufferFree_pages': 'Innodb_buffer_pool_pages_free',
'bufferDirty_pages': 'Innodb_buffer_pool_pages_dirty',
'bufferUsed_pages': 'Innodb_buffer_pool_pages_data',
'pageSize': 'Innodb_page_size',
'pagesCreated': 'Innodb_pages_created',
'pagesRead': 'Innodb_pages_read',
'pagesWritten': 'Innodb_pages_written',
'currentLockWaits': 'Innodb_row_lock_current_waits',
'lockWaitTimes': 'Innodb_row_lock_waits',
'rowLockTime': 'Innodb_row_lock_time',
'fileReads': 'Innodb_data_reads',
'fileWrites': 'Innodb_data_writes',
'fileFsyncs': 'Innodb_data_fsyncs',
'logWrites': 'Innodb_log_writes'
'rowsUpdated': 'Innodb_rows_updated',
'rowsRead': 'Innodb_rows_read',
'rowsDeleted': 'Innodb_rows_deleted',
'rowsInserted': 'Innodb_rows_inserted',
metricGroups =
'general': generalMetrics,
'query_cache': queryCache,
'counters': counters,
'innodb_metrics': innodbMetrics
module.exports = (server) ->
run = () ->
server.cli.debug "Running the mysql plugin"
metricPrefix = "#{server.fqdn}.mysql"
port = 3306
data = {}
greppedLines = []
# This script needs configuration
confPath = Path.join server.sPath, 'mysql.json'
configFile = Fs.readFileSync confPath, 'utf-8'
conf = JSON.parse configFile
getMetrics = (greppedLines) ->
i = 0
while i < greppedLines.length
unless greppedLines[i] is ""
port = Math.round(/:([0-9][0-9][0-9][0-9])/.exec(greppedLines[i])[1])
conn = Mysql.createClient({
"host": conf.localhost,
"user": conf.user,
"password": <PASSWORD>,
"port": port
})
conn.query 'SHOW GLOBAL STATUS', (err, res, fields) ->
if err
server.cli.error "Error on STATUS query: #{err}"
for row in res
data[row.Variable_name] = row.Value
conn.query 'SHOW SLAVE STATUS', (err, res, fields) ->
if err
server.cli.error "Error on SLAVE STATUS query: #{err}"
data[key] = value for key, value of res[0]
# Replication lag being null is bad, very bad, so negativate it here
data['Seconds_Behind_Master'] = -1 if data['Seconds_Behind_Master'] == null
conn.end()
for name, group of metricGroups
server.push_metric("#{metricPrefix}.#{port}.#{name}.#{key}",
data[stat]) for key, stat of group
i++
switch conf.multiserver
when 0
greppedLines[0] = ":" + port
getMetrics(greppedLines)
when 1
{spawn} = require 'child_process'
netstat = spawn 'netstat', ['-ntpl']
grep = spawn 'grep', ['mysqld']
netstat.stdout.on 'data', (data) ->
grep.stdin.write(data)
netstat.on 'close', (code) ->
server.cli.error "netstat process exited with code " + code if code isnt 0
grep.stdin.end()
grep.stdout.on 'data', (data) ->
greppedLines = ("" + data).split( "\n" )
getMetrics(greppedLines) | true | Mysql = require 'mysql'
Fs = require 'fs'
Path = require 'path'
# To configure this plugin use the mysql.json file and put it on the scripts/ directory
# Also remember that the user must be able to use SHOW SLAVE STATUS, it needs the
# SUPER or REPLICATION CLIENT privileges, something like this:
# grant replication client on *.* to <user>;
# Metrics to get from the statuses
generalMetrics =
'rxBytes': 'Bytes_received',
'txBytes': 'Bytes_sent',
'keyRead_requests': 'Key_read_requests',
'keyReads': 'Key_reads',
'keyWrite_requests': 'Key_write_requests',
'keyWrites': 'Key_writes',
'binlogCacheUse': 'Binlog_cache_use',
'binlogCacheDiskUse': 'Binlog_cache_disk_use',
'maxUsedConnections': 'Max_used_connections',
'abortedClients': 'Aborted_clients',
'abortedConnects': 'Aborted_connects',
'threadsConnected': 'Threads_connected',
'openFiles': 'Open_files',
'openTables': 'Open_tables',
'openedTables': 'Opened_tables',
'slaveLag': 'Seconds_Behind_Master',
'fullJoins': 'Select_full_join',
'fullRangeJoins': 'Select_full_range_join',
'selectRange': 'Select_range',
'selectRange_check': 'Select_range_check',
'selectScan': 'Select_scan'
queryCache =
'queriesInCache': 'Qcache_queries_in_cache',
'cacheHits': 'Qcache_hits',
'inserts': 'Qcache_inserts',
'notCached': 'Qcache_not_cached',
'lowMemPrunes': 'Qcache_lowmem_prunes'
counters =
'questions': 'Questions'
'select': 'Com_select',
'delete': 'Com_delete',
'insert': 'Com_insert',
'update': 'Com_update',
'replace': 'Com_replace',
'deleteMulti': 'Com_delete_multi',
'insertSelect': 'Com_insert_select',
'updateMulti': 'Com_update_multi',
'replaceSelect': 'Com_replace_select'
'handlerWrite': 'Handler_write',
'handlerUpdate': 'Handler_update',
'handlerDelete': 'Handler_delete',
'handlerRead_first': 'Handler_read_first',
'handlerRead_key': 'Handler_read_key',
'handlerRead_next': 'Handler_read_next',
'handlerRead_prev': 'Handler_read_prev',
'handlerRead_rnd': 'Handler_read_rnd',
'handlerRead_rnd_next': 'Handler_read_rnd_next'
'handlerCommit': 'Handler_commit',
'handlerRollback': 'Handler_rollback',
'handlerSavepoint': 'Handler_savepoint',
'handlerSavepointRollback': 'Handler_savepoint_rollback'
innodbMetrics =
'bufferTotal_pages': 'Innodb_buffer_pool_pages_total',
'bufferFree_pages': 'Innodb_buffer_pool_pages_free',
'bufferDirty_pages': 'Innodb_buffer_pool_pages_dirty',
'bufferUsed_pages': 'Innodb_buffer_pool_pages_data',
'pageSize': 'Innodb_page_size',
'pagesCreated': 'Innodb_pages_created',
'pagesRead': 'Innodb_pages_read',
'pagesWritten': 'Innodb_pages_written',
'currentLockWaits': 'Innodb_row_lock_current_waits',
'lockWaitTimes': 'Innodb_row_lock_waits',
'rowLockTime': 'Innodb_row_lock_time',
'fileReads': 'Innodb_data_reads',
'fileWrites': 'Innodb_data_writes',
'fileFsyncs': 'Innodb_data_fsyncs',
'logWrites': 'Innodb_log_writes'
'rowsUpdated': 'Innodb_rows_updated',
'rowsRead': 'Innodb_rows_read',
'rowsDeleted': 'Innodb_rows_deleted',
'rowsInserted': 'Innodb_rows_inserted',
metricGroups =
'general': generalMetrics,
'query_cache': queryCache,
'counters': counters,
'innodb_metrics': innodbMetrics
module.exports = (server) ->
run = () ->
server.cli.debug "Running the mysql plugin"
metricPrefix = "#{server.fqdn}.mysql"
port = 3306
data = {}
greppedLines = []
# This script needs configuration
confPath = Path.join server.sPath, 'mysql.json'
configFile = Fs.readFileSync confPath, 'utf-8'
conf = JSON.parse configFile
getMetrics = (greppedLines) ->
i = 0
while i < greppedLines.length
unless greppedLines[i] is ""
port = Math.round(/:([0-9][0-9][0-9][0-9])/.exec(greppedLines[i])[1])
conn = Mysql.createClient({
"host": conf.localhost,
"user": conf.user,
"password": PI:PASSWORD:<PASSWORD>END_PI,
"port": port
})
conn.query 'SHOW GLOBAL STATUS', (err, res, fields) ->
if err
server.cli.error "Error on STATUS query: #{err}"
for row in res
data[row.Variable_name] = row.Value
conn.query 'SHOW SLAVE STATUS', (err, res, fields) ->
if err
server.cli.error "Error on SLAVE STATUS query: #{err}"
data[key] = value for key, value of res[0]
# Replication lag being null is bad, very bad, so negativate it here
data['Seconds_Behind_Master'] = -1 if data['Seconds_Behind_Master'] == null
conn.end()
for name, group of metricGroups
server.push_metric("#{metricPrefix}.#{port}.#{name}.#{key}",
data[stat]) for key, stat of group
i++
switch conf.multiserver
when 0
greppedLines[0] = ":" + port
getMetrics(greppedLines)
when 1
{spawn} = require 'child_process'
netstat = spawn 'netstat', ['-ntpl']
grep = spawn 'grep', ['mysqld']
netstat.stdout.on 'data', (data) ->
grep.stdin.write(data)
netstat.on 'close', (code) ->
server.cli.error "netstat process exited with code " + code if code isnt 0
grep.stdin.end()
grep.stdout.on 'data', (data) ->
greppedLines = ("" + data).split( "\n" )
getMetrics(greppedLines) |
[
{
"context": "m is 'counters')\n #testKey = \"#{item.name}:#{rec.id}\"\n #\n #if (countDictTe",
"end": 858,
"score": 0.710387647151947,
"start": 858,
"tag": "KEY",
"value": ""
}
] | src/provider/foundationdb/activerecord/functions/add.coffee | frisb/formalize | 2 | fdb = require('fdb').apiVersion(200)
deepak = require('deepak')(fdb)
countDictTest = {}
module.exports = (mechanism) ->
resolveKey = (rec, key) ->
resolvedKey = []
for subkey in key
if (typeof(subkey) is 'function')
# generate value from function
data = subkey(rec)
else
# get value from record
data = rec.data(subkey)
resolvedKey.push(data)
resolvedKey
add = (tr, rec, value, callback) ->
for item in rec[mechanism].items
if (!item.filter || item.filter(rec))
# no filter or successfully filtered
directory = rec.provider.dir[mechanism][item.name]
resolvedKey = resolveKey(rec, item.key)
packedKey = directory.pack(deepak.packArrayValues(resolvedKey))
if (mechanism is 'counters')
#testKey = "#{item.name}:#{rec.id}"
#
#if (countDictTest[testKey])
#console.log(testKey)
#else
#countDictTest[testKey] = 1
#console.log(value)
tr.add(packedKey, value)
else
tr.set(packedKey, value)
return
#transactionalAdd = fdb.transactional(add)
(tr, value) ->
complete = (err) ->
throw new Error(err) if (err)
return
#transactionalAdd(tr || @provider.db, @, value, complete)
add(tr, @, value)
| 223536 | fdb = require('fdb').apiVersion(200)
deepak = require('deepak')(fdb)
countDictTest = {}
module.exports = (mechanism) ->
resolveKey = (rec, key) ->
resolvedKey = []
for subkey in key
if (typeof(subkey) is 'function')
# generate value from function
data = subkey(rec)
else
# get value from record
data = rec.data(subkey)
resolvedKey.push(data)
resolvedKey
add = (tr, rec, value, callback) ->
for item in rec[mechanism].items
if (!item.filter || item.filter(rec))
# no filter or successfully filtered
directory = rec.provider.dir[mechanism][item.name]
resolvedKey = resolveKey(rec, item.key)
packedKey = directory.pack(deepak.packArrayValues(resolvedKey))
if (mechanism is 'counters')
#testKey = "#{item.name<KEY>}:#{rec.id}"
#
#if (countDictTest[testKey])
#console.log(testKey)
#else
#countDictTest[testKey] = 1
#console.log(value)
tr.add(packedKey, value)
else
tr.set(packedKey, value)
return
#transactionalAdd = fdb.transactional(add)
(tr, value) ->
complete = (err) ->
throw new Error(err) if (err)
return
#transactionalAdd(tr || @provider.db, @, value, complete)
add(tr, @, value)
| true | fdb = require('fdb').apiVersion(200)
deepak = require('deepak')(fdb)
countDictTest = {}
module.exports = (mechanism) ->
resolveKey = (rec, key) ->
resolvedKey = []
for subkey in key
if (typeof(subkey) is 'function')
# generate value from function
data = subkey(rec)
else
# get value from record
data = rec.data(subkey)
resolvedKey.push(data)
resolvedKey
add = (tr, rec, value, callback) ->
for item in rec[mechanism].items
if (!item.filter || item.filter(rec))
# no filter or successfully filtered
directory = rec.provider.dir[mechanism][item.name]
resolvedKey = resolveKey(rec, item.key)
packedKey = directory.pack(deepak.packArrayValues(resolvedKey))
if (mechanism is 'counters')
#testKey = "#{item.namePI:KEY:<KEY>END_PI}:#{rec.id}"
#
#if (countDictTest[testKey])
#console.log(testKey)
#else
#countDictTest[testKey] = 1
#console.log(value)
tr.add(packedKey, value)
else
tr.set(packedKey, value)
return
#transactionalAdd = fdb.transactional(add)
(tr, value) ->
complete = (err) ->
throw new Error(err) if (err)
return
#transactionalAdd(tr || @provider.db, @, value, complete)
add(tr, @, value)
|
[
{
"context": "tringA, stringB, insert, remove, update);\n#\n# @see Levenshtein, Vladimir I. \"Binary codes capable of correcting d",
"end": 358,
"score": 0.944029688835144,
"start": 347,
"tag": "NAME",
"value": "Levenshtein"
},
{
"context": "gB, insert, remove, update);\n#\n# @see Levenshtein, Vladimir I. \"Binary codes capable of correcting deletions,\n#",
"end": 370,
"score": 0.9991205930709839,
"start": 360,
"tag": "NAME",
"value": "Vladimir I"
},
{
"context": "ls.\" Soviet physics doklady. Vol. 10. 1966.\n# @see Wagner, Robert A., and Michael J. Fischer. \"The string-to",
"end": 500,
"score": 0.9978234171867371,
"start": 494,
"tag": "NAME",
"value": "Wagner"
},
{
"context": "iet physics doklady. Vol. 10. 1966.\n# @see Wagner, Robert A., and Michael J. Fischer. \"The string-to-string\n# c",
"end": 510,
"score": 0.9678599238395691,
"start": 502,
"tag": "NAME",
"value": "Robert A"
},
{
"context": "lady. Vol. 10. 1966.\n# @see Wagner, Robert A., and Michael J. Fischer. \"The string-to-string\n# correction problem.\" Jou",
"end": 535,
"score": 0.9998419880867004,
"start": 517,
"tag": "NAME",
"value": "Michael J. Fischer"
}
] | src/levenshtein.coffee | schulzch/edit-distance-js | 22 | {Mapping, zero, trackedMin} = require './util'
#
# Computes the Levenshtein distance.
#
# @example
# var stringA = "abcdef";
# var stringB = "abdfgh";
# var insert = remove = function(char) { return 1; };
# var update = function(charA, charB) { return charA !== charB ? 1 : 0; };
# levenshtein(stringA, stringB, insert, remove, update);
#
# @see Levenshtein, Vladimir I. "Binary codes capable of correcting deletions,
# insertions and reversals." Soviet physics doklady. Vol. 10. 1966.
# @see Wagner, Robert A., and Michael J. Fischer. "The string-to-string
# correction problem." Journal of the ACM (JACM) 21.1 (1974): 168-173.
#
levenshtein = (stringA, stringB, insertCb, removeCb, updateCb) ->
a = stringA
b = stringB
track = zero a.length + 1, b.length + 1
dist = zero a.length + 1, b.length + 1
for i in [1..a.length] by 1
dist[i][0] = i
for j in [1..b.length] by 1
dist[0][j] = j
for i in [1..a.length] by 1
for j in [1..b.length] by 1
aC = a[i - 1]
bC = b[j - 1]
min = trackedMin(
dist[i - 1][j] + removeCb(aC),
dist[i][j - 1] + insertCb(bC),
dist[i - 1][j - 1] + updateCb(aC, bC))
track[i][j] = min.index
dist[i][j] = min.value
distance = dist[a.length][b.length]
return new Mapping a, b, distance, track, levenshteinBt
#
# Backtracks the string-to-string mapping from lower right to upper left.
#
levenshteinBt = (a, b, track) ->
i = a.length
j = b.length
mapping = []
while i > 0 and j > 0
switch track[i][j]
when 0
# Remove
mapping.push [a[i - 1], null]
--i
when 1
# Insert
mapping.push [null, b[j - 1]]
--j
when 2
# Update
mapping.push [a[i - 1], b[j - 1]]
--i
--j
else
throw new Error "Invalid operation #{track[i][j]} at (#{i}, #{j})"
# Handle epsilon letters.
if i is 0 and j isnt 0
while j > 0
mapping.push [null, b[j - 1]]
--j
if i isnt 0 and j is 0
while i > 0
mapping.push [a[i - 1], null]
--i
return mapping
module.exports = levenshtein
| 4143 | {Mapping, zero, trackedMin} = require './util'
#
# Computes the Levenshtein distance.
#
# @example
# var stringA = "abcdef";
# var stringB = "abdfgh";
# var insert = remove = function(char) { return 1; };
# var update = function(charA, charB) { return charA !== charB ? 1 : 0; };
# levenshtein(stringA, stringB, insert, remove, update);
#
# @see <NAME>, <NAME>. "Binary codes capable of correcting deletions,
# insertions and reversals." Soviet physics doklady. Vol. 10. 1966.
# @see <NAME>, <NAME>., and <NAME>. "The string-to-string
# correction problem." Journal of the ACM (JACM) 21.1 (1974): 168-173.
#
levenshtein = (stringA, stringB, insertCb, removeCb, updateCb) ->
a = stringA
b = stringB
track = zero a.length + 1, b.length + 1
dist = zero a.length + 1, b.length + 1
for i in [1..a.length] by 1
dist[i][0] = i
for j in [1..b.length] by 1
dist[0][j] = j
for i in [1..a.length] by 1
for j in [1..b.length] by 1
aC = a[i - 1]
bC = b[j - 1]
min = trackedMin(
dist[i - 1][j] + removeCb(aC),
dist[i][j - 1] + insertCb(bC),
dist[i - 1][j - 1] + updateCb(aC, bC))
track[i][j] = min.index
dist[i][j] = min.value
distance = dist[a.length][b.length]
return new Mapping a, b, distance, track, levenshteinBt
#
# Backtracks the string-to-string mapping from lower right to upper left.
#
levenshteinBt = (a, b, track) ->
i = a.length
j = b.length
mapping = []
while i > 0 and j > 0
switch track[i][j]
when 0
# Remove
mapping.push [a[i - 1], null]
--i
when 1
# Insert
mapping.push [null, b[j - 1]]
--j
when 2
# Update
mapping.push [a[i - 1], b[j - 1]]
--i
--j
else
throw new Error "Invalid operation #{track[i][j]} at (#{i}, #{j})"
# Handle epsilon letters.
if i is 0 and j isnt 0
while j > 0
mapping.push [null, b[j - 1]]
--j
if i isnt 0 and j is 0
while i > 0
mapping.push [a[i - 1], null]
--i
return mapping
module.exports = levenshtein
| true | {Mapping, zero, trackedMin} = require './util'
#
# Computes the Levenshtein distance.
#
# @example
# var stringA = "abcdef";
# var stringB = "abdfgh";
# var insert = remove = function(char) { return 1; };
# var update = function(charA, charB) { return charA !== charB ? 1 : 0; };
# levenshtein(stringA, stringB, insert, remove, update);
#
# @see PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI. "Binary codes capable of correcting deletions,
# insertions and reversals." Soviet physics doklady. Vol. 10. 1966.
# @see PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI., and PI:NAME:<NAME>END_PI. "The string-to-string
# correction problem." Journal of the ACM (JACM) 21.1 (1974): 168-173.
#
levenshtein = (stringA, stringB, insertCb, removeCb, updateCb) ->
a = stringA
b = stringB
track = zero a.length + 1, b.length + 1
dist = zero a.length + 1, b.length + 1
for i in [1..a.length] by 1
dist[i][0] = i
for j in [1..b.length] by 1
dist[0][j] = j
for i in [1..a.length] by 1
for j in [1..b.length] by 1
aC = a[i - 1]
bC = b[j - 1]
min = trackedMin(
dist[i - 1][j] + removeCb(aC),
dist[i][j - 1] + insertCb(bC),
dist[i - 1][j - 1] + updateCb(aC, bC))
track[i][j] = min.index
dist[i][j] = min.value
distance = dist[a.length][b.length]
return new Mapping a, b, distance, track, levenshteinBt
#
# Backtracks the string-to-string mapping from lower right to upper left.
#
levenshteinBt = (a, b, track) ->
i = a.length
j = b.length
mapping = []
while i > 0 and j > 0
switch track[i][j]
when 0
# Remove
mapping.push [a[i - 1], null]
--i
when 1
# Insert
mapping.push [null, b[j - 1]]
--j
when 2
# Update
mapping.push [a[i - 1], b[j - 1]]
--i
--j
else
throw new Error "Invalid operation #{track[i][j]} at (#{i}, #{j})"
# Handle epsilon letters.
if i is 0 and j isnt 0
while j > 0
mapping.push [null, b[j - 1]]
--j
if i isnt 0 and j is 0
while i > 0
mapping.push [a[i - 1], null]
--i
return mapping
module.exports = levenshtein
|
[
{
"context": "eUser\n profile:\n name: \"Kallie Emil\"\n username: \"kallie\"\n passw",
"end": 360,
"score": 0.9998605847358704,
"start": 349,
"tag": "NAME",
"value": "Kallie Emil"
},
{
"context": " name: \"Kallie Emil\"\n username: \"kallie\"\n password: \"password\"\n Roles.a",
"end": 391,
"score": 0.9997442364692688,
"start": 385,
"tag": "USERNAME",
"value": "kallie"
},
{
"context": " username: \"kallie\"\n password: \"password\"\n Roles.addUsersToRoles kallieId, [\"tech\"]",
"end": 424,
"score": 0.9995527267456055,
"start": 416,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "eUser\n profile:\n name: \"Isa Tufayl\"\n username: \"isa\"\n password",
"end": 566,
"score": 0.9998540878295898,
"start": 556,
"tag": "NAME",
"value": "Isa Tufayl"
},
{
"context": " name: \"Isa Tufayl\"\n username: \"isa\"\n password: \"password\"\n Roles.a",
"end": 594,
"score": 0.9996705055236816,
"start": 591,
"tag": "USERNAME",
"value": "isa"
},
{
"context": " username: \"isa\"\n password: \"password\"\n Roles.addUsersToRoles isaId, [\"tech\"]\n\n ",
"end": 627,
"score": 0.9995865821838379,
"start": 619,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " name: \"Admin\"\n username: \"admin\"\n password: \"password\"\n Roles.a",
"end": 813,
"score": 0.9995115399360657,
"start": 808,
"tag": "USERNAME",
"value": "admin"
},
{
"context": " username: \"admin\"\n password: \"password\"\n Roles.addUsersToRoles adminId, [\"admin\"]",
"end": 846,
"score": 0.9995728135108948,
"start": 838,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "login\n profile:\n name: \"Tech Sal\"\n username: \"techsal\"\n pass",
"end": 1012,
"score": 0.8763333559036255,
"start": 1004,
"tag": "NAME",
"value": "Tech Sal"
},
{
"context": " name: \"Tech Sal\"\n username: \"techsal\"\n password: \"password\"\n Roles.a",
"end": 1044,
"score": 0.9997428059577942,
"start": 1037,
"tag": "USERNAME",
"value": "techsal"
},
{
"context": " username: \"techsal\"\n password: \"password\"\n Roles.addUsersToRoles techsalId, [\"tech\"",
"end": 1077,
"score": 0.9995222687721252,
"start": 1069,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " kallieId = Meteor.users.findOne({\"username\":\"kallie\"})._id\n adminId = Meteor.users.findOne({\"u",
"end": 1197,
"score": 0.9997362494468689,
"start": 1191,
"tag": "USERNAME",
"value": "kallie"
},
{
"context": " adminId = Meteor.users.findOne({\"username\":\"admin\"})._id\n isaId = Meteor.users.findOne({\"use",
"end": 1262,
"score": 0.9996976852416992,
"start": 1257,
"tag": "USERNAME",
"value": "admin"
},
{
"context": " isaId = Meteor.users.findOne({\"username\":\"isa\"})._id\n techsalId = Meteor.users.findOne({",
"end": 1323,
"score": 0.9997055530548096,
"start": 1320,
"tag": "USERNAME",
"value": "isa"
},
{
"context": " techsalId = Meteor.users.findOne({\"username\":\"techsal\"})._id\n\n Meteor.methods\n \"/fixtures/loa",
"end": 1392,
"score": 0.9997328519821167,
"start": 1385,
"tag": "USERNAME",
"value": "techsal"
},
{
"context": " patients = [\n {\n firstName: \"Makena\"\n lastName: \"Asaph\"\n mrn: \"",
"end": 1852,
"score": 0.9997754096984863,
"start": 1846,
"tag": "NAME",
"value": "Makena"
},
{
"context": " firstName: \"Makena\"\n lastName: \"Asaph\"\n mrn: \"1234567890\"\n dateOf",
"end": 1882,
"score": 0.9996975660324097,
"start": 1877,
"tag": "NAME",
"value": "Asaph"
},
{
"context": "isaId\n }\n {\n firstName: \"Lunete\"\n lastName: \"Margarita\"\n mi",
"end": 2471,
"score": 0.9997973442077637,
"start": 2465,
"tag": "NAME",
"value": "Lunete"
},
{
"context": " firstName: \"Lunete\"\n lastName: \"Margarita\"\n middleName: \"P\"\n mrn: \"13",
"end": 2505,
"score": 0.9997962117195129,
"start": 2496,
"tag": "NAME",
"value": "Margarita"
},
{
"context": " lastName: \"Margarita\"\n middleName: \"P\"\n mrn: \"1357902468\"\n dateOf",
"end": 2533,
"score": 0.9997005462646484,
"start": 2532,
"tag": "NAME",
"value": "P"
},
{
"context": "521K\"\n }\n {\n firstName: \"Yasmeen\"\n lastName: \"Capucine\"\n mrn",
"end": 3142,
"score": 0.9997918605804443,
"start": 3135,
"tag": "NAME",
"value": "Yasmeen"
},
{
"context": " firstName: \"Yasmeen\"\n lastName: \"Capucine\"\n mrn: \"1234567891\"\n dateOf",
"end": 3175,
"score": 0.9997679591178894,
"start": 3167,
"tag": "NAME",
"value": "Capucine"
}
] | server/fixtures.coffee | drobbins/kassebaum | 0 | if process.env.NODE_ENV is "development"
now = new Date().getTime()
hour = 1000 * 60 * 60 # ms * sec * min
day = hour * 24
week = day * 7
year = day * 365
now = now - 10 * hour
if Meteor.users.find().count() is 0
# Create Users
kallieId = Accounts.createUser
profile:
name: "Kallie Emil"
username: "kallie"
password: "password"
Roles.addUsersToRoles kallieId, ["tech"]
isaId = Accounts.createUser
profile:
name: "Isa Tufayl"
username: "isa"
password: "password"
Roles.addUsersToRoles isaId, ["tech"]
adminId = Accounts.createUser #Create admin login
profile:
name: "Admin"
username: "admin"
password: "password"
Roles.addUsersToRoles adminId, ["admin"]
techsalId = Accounts.createUser #Create techsal login
profile:
name: "Tech Sal"
username: "techsal"
password: "password"
Roles.addUsersToRoles techsalId, ["tech"]
else
kallieId = Meteor.users.findOne({"username":"kallie"})._id
adminId = Meteor.users.findOne({"username":"admin"})._id
isaId = Meteor.users.findOne({"username":"isa"})._id
techsalId = Meteor.users.findOne({"username":"techsal"})._id
Meteor.methods
"/fixtures/loadpatients": -> Patients.insert patient for patient in patients
"/fixtures/reloadpatients": ->
Patients.remove {}
Patients.insert patient for patient in patients
"/fixtures/reloadlogs": (count) -> Logs.loadFixture(count)
"/fixtures/removetestuser": (username) -> Meteor.users.remove "username": username
patients = [
{
firstName: "Makena"
lastName: "Asaph"
mrn: "1234567890"
dateOfBirth: 702280800000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192837465"
date: now - (2 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "0912873456"
date: now - (2 * year + 6 * week + 1 * day + 4 * hour)
}
]
shortId: "abcdEF"
added: now + 7 * hour
addedBy: isaId
}
{
firstName: "Lunete"
lastName: "Margarita"
middleName: "P"
mrn: "1357902468"
dateOfBirth: -487623600000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192867465"
date: now - (0 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "6912873456"
date: now - (8 * year + 8 * week + 1 * day + 4 * hour)
}
]
shortId: "EFdsdw"
added: now
addedBy: isaId
value: "p.R521K"
}
{
firstName: "Yasmeen"
lastName: "Capucine"
mrn: "1234567891"
dateOfBirth: 171176400000
instancesOfProcurement: [
{
surgicalPathologyNumber: "1094859925"
date: now - (2 * year + 5 * week + 1 * day + 4 * hour)
}
{
otherNumber: "1993848458"
date: now - (3 * year + 1 * week + 10 * day - 3 * hour)
}
]
shortId: "cbd1rC"
added: now
addedBy: kallieId
}
]
if Patients.find().count() is 0 then Meteor.call "/fixtures/loadpatients"
| 110582 | if process.env.NODE_ENV is "development"
now = new Date().getTime()
hour = 1000 * 60 * 60 # ms * sec * min
day = hour * 24
week = day * 7
year = day * 365
now = now - 10 * hour
if Meteor.users.find().count() is 0
# Create Users
kallieId = Accounts.createUser
profile:
name: "<NAME>"
username: "kallie"
password: "<PASSWORD>"
Roles.addUsersToRoles kallieId, ["tech"]
isaId = Accounts.createUser
profile:
name: "<NAME>"
username: "isa"
password: "<PASSWORD>"
Roles.addUsersToRoles isaId, ["tech"]
adminId = Accounts.createUser #Create admin login
profile:
name: "Admin"
username: "admin"
password: "<PASSWORD>"
Roles.addUsersToRoles adminId, ["admin"]
techsalId = Accounts.createUser #Create techsal login
profile:
name: "<NAME>"
username: "techsal"
password: "<PASSWORD>"
Roles.addUsersToRoles techsalId, ["tech"]
else
kallieId = Meteor.users.findOne({"username":"kallie"})._id
adminId = Meteor.users.findOne({"username":"admin"})._id
isaId = Meteor.users.findOne({"username":"isa"})._id
techsalId = Meteor.users.findOne({"username":"techsal"})._id
Meteor.methods
"/fixtures/loadpatients": -> Patients.insert patient for patient in patients
"/fixtures/reloadpatients": ->
Patients.remove {}
Patients.insert patient for patient in patients
"/fixtures/reloadlogs": (count) -> Logs.loadFixture(count)
"/fixtures/removetestuser": (username) -> Meteor.users.remove "username": username
patients = [
{
firstName: "<NAME>"
lastName: "<NAME>"
mrn: "1234567890"
dateOfBirth: 702280800000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192837465"
date: now - (2 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "0912873456"
date: now - (2 * year + 6 * week + 1 * day + 4 * hour)
}
]
shortId: "abcdEF"
added: now + 7 * hour
addedBy: isaId
}
{
firstName: "<NAME>"
lastName: "<NAME>"
middleName: "<NAME>"
mrn: "1357902468"
dateOfBirth: -487623600000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192867465"
date: now - (0 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "6912873456"
date: now - (8 * year + 8 * week + 1 * day + 4 * hour)
}
]
shortId: "EFdsdw"
added: now
addedBy: isaId
value: "p.R521K"
}
{
firstName: "<NAME>"
lastName: "<NAME>"
mrn: "1234567891"
dateOfBirth: 171176400000
instancesOfProcurement: [
{
surgicalPathologyNumber: "1094859925"
date: now - (2 * year + 5 * week + 1 * day + 4 * hour)
}
{
otherNumber: "1993848458"
date: now - (3 * year + 1 * week + 10 * day - 3 * hour)
}
]
shortId: "cbd1rC"
added: now
addedBy: kallieId
}
]
if Patients.find().count() is 0 then Meteor.call "/fixtures/loadpatients"
| true | if process.env.NODE_ENV is "development"
now = new Date().getTime()
hour = 1000 * 60 * 60 # ms * sec * min
day = hour * 24
week = day * 7
year = day * 365
now = now - 10 * hour
if Meteor.users.find().count() is 0
# Create Users
kallieId = Accounts.createUser
profile:
name: "PI:NAME:<NAME>END_PI"
username: "kallie"
password: "PI:PASSWORD:<PASSWORD>END_PI"
Roles.addUsersToRoles kallieId, ["tech"]
isaId = Accounts.createUser
profile:
name: "PI:NAME:<NAME>END_PI"
username: "isa"
password: "PI:PASSWORD:<PASSWORD>END_PI"
Roles.addUsersToRoles isaId, ["tech"]
adminId = Accounts.createUser #Create admin login
profile:
name: "Admin"
username: "admin"
password: "PI:PASSWORD:<PASSWORD>END_PI"
Roles.addUsersToRoles adminId, ["admin"]
techsalId = Accounts.createUser #Create techsal login
profile:
name: "PI:NAME:<NAME>END_PI"
username: "techsal"
password: "PI:PASSWORD:<PASSWORD>END_PI"
Roles.addUsersToRoles techsalId, ["tech"]
else
kallieId = Meteor.users.findOne({"username":"kallie"})._id
adminId = Meteor.users.findOne({"username":"admin"})._id
isaId = Meteor.users.findOne({"username":"isa"})._id
techsalId = Meteor.users.findOne({"username":"techsal"})._id
Meteor.methods
"/fixtures/loadpatients": -> Patients.insert patient for patient in patients
"/fixtures/reloadpatients": ->
Patients.remove {}
Patients.insert patient for patient in patients
"/fixtures/reloadlogs": (count) -> Logs.loadFixture(count)
"/fixtures/removetestuser": (username) -> Meteor.users.remove "username": username
patients = [
{
firstName: "PI:NAME:<NAME>END_PI"
lastName: "PI:NAME:<NAME>END_PI"
mrn: "1234567890"
dateOfBirth: 702280800000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192837465"
date: now - (2 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "0912873456"
date: now - (2 * year + 6 * week + 1 * day + 4 * hour)
}
]
shortId: "abcdEF"
added: now + 7 * hour
addedBy: isaId
}
{
firstName: "PI:NAME:<NAME>END_PI"
lastName: "PI:NAME:<NAME>END_PI"
middleName: "PI:NAME:<NAME>END_PI"
mrn: "1357902468"
dateOfBirth: -487623600000
instancesOfProcurement: [
{
surgicalPathologyNumber: "0192867465"
date: now - (0 * year + 8 * week + 1 * day + 4 * hour)
}
{
otherNumber: "6912873456"
date: now - (8 * year + 8 * week + 1 * day + 4 * hour)
}
]
shortId: "EFdsdw"
added: now
addedBy: isaId
value: "p.R521K"
}
{
firstName: "PI:NAME:<NAME>END_PI"
lastName: "PI:NAME:<NAME>END_PI"
mrn: "1234567891"
dateOfBirth: 171176400000
instancesOfProcurement: [
{
surgicalPathologyNumber: "1094859925"
date: now - (2 * year + 5 * week + 1 * day + 4 * hour)
}
{
otherNumber: "1993848458"
date: now - (3 * year + 1 * week + 10 * day - 3 * hour)
}
]
shortId: "cbd1rC"
added: now
addedBy: kallieId
}
]
if Patients.find().count() is 0 then Meteor.call "/fixtures/loadpatients"
|
[
{
"context": "rver_name\n for key in ['listeners', 'slow_listeners', 'total_bytes_read', 'total_bytes_sent', 'title'",
"end": 2211,
"score": 0.7276904582977295,
"start": 2202,
"tag": "KEY",
"value": "listeners"
}
] | controllers/analytics/index.coffee | moul/tapas-icecast-analytics | 3 | url = require 'url'
config = require '../../config'
{Admin} = require 'icecast-admin'
lynx = require 'lynx'
metrics = new lynx config.statsd.host, config.statsd.port
metrics.increment "#{config.statsd.prefix}.run"
admins_key = []
admins = {}
tree = {}
isNumber = (n) -> return !isNaN(parseFloat(n)) && isFinite(n)
next_i = 0
updateNextAdmin = ->
metrics.increment "#{config.statsd.prefix}.updateNextAdmin"
clients = exports.tapas.io.rooms['']?.length || 0
metrics.gauge "#{config.statsd.prefix}.io.clients", clients
active = clients > 0
admin_key = admins_key[next_i++ % admins_key.length]
admin = admins[admin_key]
updateAdmin admin, active
exports.open = (app, tapas) ->
exports.tapas = tapas
tapas.io.on 'connection', (socket) ->
metrics.increment "#{config.statsd.prefix}.io.on.connection"
console.log 'new io client'
socket.on 'getTree', (fn = null) ->
metrics.increment "#{config.statsd.prefix}.io.on.getTree"
console.log 'getTree'
fn tree
for group, servers of config.icecast_servers
tree[group] = {}
for server in servers
id = url.parse(server).host
branch = tree[group][id] = {}
admins_key.push id
admin = admins[id] = new Admin
url: server
admin.id = id
admin.branch = branch
updateAdmin admin, true, true
do updateNextAdmin
inactiveLoop = 0
updateAdmin = (admin, active = true, firstTime = false) ->
if active
inactiveLoop = 0
console.log "updateAdmin #{admin.id}"
metric_name = "#{config.statsd.prefix}.icecast.#{admin.id.replace(/[^a-zA-Z0-9]/g,'-')}"
admin.stats (err, data) ->
if err
console.log "Error with #{admin.id}:", err
metrics.increment "#{metric_name}.error"
else
metrics.increment "#{metric_name}.success"
for source in data.icestats.source
server_name = source['server_name'][0]
server_name_clean = server_name.replace(/[^a-zA-Z0-9]/g,'-')
metrics.increment "#{metric_name}.source.#{server_name_clean}.success"
branch = admin.branch[server_name] =
id: admin.id
server_name: server_name
for key in ['listeners', 'slow_listeners', 'total_bytes_read', 'total_bytes_sent', 'title', 'bitrate', 'max_listeners']
branch[key] = source[key]?[0]
if isNumber branch[key]
metrics.gauge "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
#console.log "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
exports.tapas.io.sockets.emit 'updateServer', admin.branch
if not firstTime
setTimeout updateNextAdmin, config.timer
else
if not firstTime
inactiveLoop++
if inactiveLoop > config.inactiveLoopMax
updateAdmin admin, true
else
setTimeout updateNextAdmin, config.timer
exports.index = (req, res) ->
metrics.increment "#{config.statsd.prefix}.page.index"
mounts = req.query.mount?.split(',') || []
for mount in mounts
metrics.increment "#{config.statsd.prefix}.page.index_with_mount.#{mount}"
res.render 'analytics',
mounts: mounts
| 75115 | url = require 'url'
config = require '../../config'
{Admin} = require 'icecast-admin'
lynx = require 'lynx'
metrics = new lynx config.statsd.host, config.statsd.port
metrics.increment "#{config.statsd.prefix}.run"
admins_key = []
admins = {}
tree = {}
isNumber = (n) -> return !isNaN(parseFloat(n)) && isFinite(n)
next_i = 0
updateNextAdmin = ->
metrics.increment "#{config.statsd.prefix}.updateNextAdmin"
clients = exports.tapas.io.rooms['']?.length || 0
metrics.gauge "#{config.statsd.prefix}.io.clients", clients
active = clients > 0
admin_key = admins_key[next_i++ % admins_key.length]
admin = admins[admin_key]
updateAdmin admin, active
exports.open = (app, tapas) ->
exports.tapas = tapas
tapas.io.on 'connection', (socket) ->
metrics.increment "#{config.statsd.prefix}.io.on.connection"
console.log 'new io client'
socket.on 'getTree', (fn = null) ->
metrics.increment "#{config.statsd.prefix}.io.on.getTree"
console.log 'getTree'
fn tree
for group, servers of config.icecast_servers
tree[group] = {}
for server in servers
id = url.parse(server).host
branch = tree[group][id] = {}
admins_key.push id
admin = admins[id] = new Admin
url: server
admin.id = id
admin.branch = branch
updateAdmin admin, true, true
do updateNextAdmin
inactiveLoop = 0
updateAdmin = (admin, active = true, firstTime = false) ->
if active
inactiveLoop = 0
console.log "updateAdmin #{admin.id}"
metric_name = "#{config.statsd.prefix}.icecast.#{admin.id.replace(/[^a-zA-Z0-9]/g,'-')}"
admin.stats (err, data) ->
if err
console.log "Error with #{admin.id}:", err
metrics.increment "#{metric_name}.error"
else
metrics.increment "#{metric_name}.success"
for source in data.icestats.source
server_name = source['server_name'][0]
server_name_clean = server_name.replace(/[^a-zA-Z0-9]/g,'-')
metrics.increment "#{metric_name}.source.#{server_name_clean}.success"
branch = admin.branch[server_name] =
id: admin.id
server_name: server_name
for key in ['listeners', 'slow_<KEY>', 'total_bytes_read', 'total_bytes_sent', 'title', 'bitrate', 'max_listeners']
branch[key] = source[key]?[0]
if isNumber branch[key]
metrics.gauge "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
#console.log "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
exports.tapas.io.sockets.emit 'updateServer', admin.branch
if not firstTime
setTimeout updateNextAdmin, config.timer
else
if not firstTime
inactiveLoop++
if inactiveLoop > config.inactiveLoopMax
updateAdmin admin, true
else
setTimeout updateNextAdmin, config.timer
exports.index = (req, res) ->
metrics.increment "#{config.statsd.prefix}.page.index"
mounts = req.query.mount?.split(',') || []
for mount in mounts
metrics.increment "#{config.statsd.prefix}.page.index_with_mount.#{mount}"
res.render 'analytics',
mounts: mounts
| true | url = require 'url'
config = require '../../config'
{Admin} = require 'icecast-admin'
lynx = require 'lynx'
metrics = new lynx config.statsd.host, config.statsd.port
metrics.increment "#{config.statsd.prefix}.run"
admins_key = []
admins = {}
tree = {}
isNumber = (n) -> return !isNaN(parseFloat(n)) && isFinite(n)
next_i = 0
updateNextAdmin = ->
metrics.increment "#{config.statsd.prefix}.updateNextAdmin"
clients = exports.tapas.io.rooms['']?.length || 0
metrics.gauge "#{config.statsd.prefix}.io.clients", clients
active = clients > 0
admin_key = admins_key[next_i++ % admins_key.length]
admin = admins[admin_key]
updateAdmin admin, active
exports.open = (app, tapas) ->
exports.tapas = tapas
tapas.io.on 'connection', (socket) ->
metrics.increment "#{config.statsd.prefix}.io.on.connection"
console.log 'new io client'
socket.on 'getTree', (fn = null) ->
metrics.increment "#{config.statsd.prefix}.io.on.getTree"
console.log 'getTree'
fn tree
for group, servers of config.icecast_servers
tree[group] = {}
for server in servers
id = url.parse(server).host
branch = tree[group][id] = {}
admins_key.push id
admin = admins[id] = new Admin
url: server
admin.id = id
admin.branch = branch
updateAdmin admin, true, true
do updateNextAdmin
inactiveLoop = 0
updateAdmin = (admin, active = true, firstTime = false) ->
if active
inactiveLoop = 0
console.log "updateAdmin #{admin.id}"
metric_name = "#{config.statsd.prefix}.icecast.#{admin.id.replace(/[^a-zA-Z0-9]/g,'-')}"
admin.stats (err, data) ->
if err
console.log "Error with #{admin.id}:", err
metrics.increment "#{metric_name}.error"
else
metrics.increment "#{metric_name}.success"
for source in data.icestats.source
server_name = source['server_name'][0]
server_name_clean = server_name.replace(/[^a-zA-Z0-9]/g,'-')
metrics.increment "#{metric_name}.source.#{server_name_clean}.success"
branch = admin.branch[server_name] =
id: admin.id
server_name: server_name
for key in ['listeners', 'slow_PI:KEY:<KEY>END_PI', 'total_bytes_read', 'total_bytes_sent', 'title', 'bitrate', 'max_listeners']
branch[key] = source[key]?[0]
if isNumber branch[key]
metrics.gauge "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
#console.log "#{metric_name}.source.#{server_name_clean}.data.#{key}", parseFloat(branch[key])
exports.tapas.io.sockets.emit 'updateServer', admin.branch
if not firstTime
setTimeout updateNextAdmin, config.timer
else
if not firstTime
inactiveLoop++
if inactiveLoop > config.inactiveLoopMax
updateAdmin admin, true
else
setTimeout updateNextAdmin, config.timer
exports.index = (req, res) ->
metrics.increment "#{config.statsd.prefix}.page.index"
mounts = req.query.mount?.split(',') || []
for mount in mounts
metrics.increment "#{config.statsd.prefix}.page.index_with_mount.#{mount}"
res.render 'analytics',
mounts: mounts
|
[
{
"context": "e input has an existing valid time on page load.\n\nTom (tomabuct@me.com)\n\n###\n\n(($) ->\n TIME_FORMAT = \"",
"end": 284,
"score": 0.9229885935783386,
"start": 281,
"tag": "NAME",
"value": "Tom"
},
{
"context": "ut has an existing valid time on page load.\n\nTom (tomabuct@me.com)\n\n###\n\n(($) ->\n TIME_FORMAT = \"h:mm A\"\n\n $.fn.t",
"end": 301,
"score": 0.9999305009841919,
"start": 286,
"tag": "EMAIL",
"value": "tomabuct@me.com"
}
] | app/assets/javascripts/time_input.js.coffee | icanberk/Autolab | 2 | ###
Autolab Time Input 0.0.1 is a jQuery plugin that adds time validation to textfields using Moment.js
Usage:
<script src="moment.js" type="text/javascript">
<input type="text" data-provide="time">
For good UX, make sure the input has an existing valid time on page load.
Tom (tomabuct@me.com)
###
(($) ->
TIME_FORMAT = "h:mm A"
$.fn.time_input = ->
this.filter('input').focus ->
value = $(this).val()
# set value to blank, if current value isn't valid
value = "" unless is_valid value
# save value as previous_value
$(this).data('previous_value', value);
this.filter('input').change ->
value = $(this).val()
# set value to previous value, if current value isn't valid
value = $(this).data('previous_value') unless is_valid value
$(this).val formatted value
is_valid = (s) ->
moment(s, TIME_FORMAT).isValid()
formatted = (s) ->
m = moment(s, TIME_FORMAT)
if m.isValid() then m.format(TIME_FORMAT) else null
# make inputs with data-provide="time" time inputs!
$(-> $('input[data-provide="time"]').time_input())
)(jQuery)
| 212476 | ###
Autolab Time Input 0.0.1 is a jQuery plugin that adds time validation to textfields using Moment.js
Usage:
<script src="moment.js" type="text/javascript">
<input type="text" data-provide="time">
For good UX, make sure the input has an existing valid time on page load.
<NAME> (<EMAIL>)
###
(($) ->
TIME_FORMAT = "h:mm A"
$.fn.time_input = ->
this.filter('input').focus ->
value = $(this).val()
# set value to blank, if current value isn't valid
value = "" unless is_valid value
# save value as previous_value
$(this).data('previous_value', value);
this.filter('input').change ->
value = $(this).val()
# set value to previous value, if current value isn't valid
value = $(this).data('previous_value') unless is_valid value
$(this).val formatted value
is_valid = (s) ->
moment(s, TIME_FORMAT).isValid()
formatted = (s) ->
m = moment(s, TIME_FORMAT)
if m.isValid() then m.format(TIME_FORMAT) else null
# make inputs with data-provide="time" time inputs!
$(-> $('input[data-provide="time"]').time_input())
)(jQuery)
| true | ###
Autolab Time Input 0.0.1 is a jQuery plugin that adds time validation to textfields using Moment.js
Usage:
<script src="moment.js" type="text/javascript">
<input type="text" data-provide="time">
For good UX, make sure the input has an existing valid time on page load.
PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
###
(($) ->
TIME_FORMAT = "h:mm A"
$.fn.time_input = ->
this.filter('input').focus ->
value = $(this).val()
# set value to blank, if current value isn't valid
value = "" unless is_valid value
# save value as previous_value
$(this).data('previous_value', value);
this.filter('input').change ->
value = $(this).val()
# set value to previous value, if current value isn't valid
value = $(this).data('previous_value') unless is_valid value
$(this).val formatted value
is_valid = (s) ->
moment(s, TIME_FORMAT).isValid()
formatted = (s) ->
m = moment(s, TIME_FORMAT)
if m.isValid() then m.format(TIME_FORMAT) else null
# make inputs with data-provide="time" time inputs!
$(-> $('input[data-provide="time"]').time_input())
)(jQuery)
|
[
{
"context": "egory_type\":\"series\",\"chanid\":1791,\"description\":\"Alexia enlists Troy's help to find her father.\",\"endtime",
"end": 3430,
"score": 0.999707818031311,
"start": 3424,
"tag": "NAME",
"value": "Alexia"
},
{
"context": "eries\",\"chanid\":1791,\"description\":\"Alexia enlists Troy's help to find her father.\",\"endtime\":\"2013-03-04",
"end": 3443,
"score": 0.9996379017829895,
"start": 3439,
"tag": "NAME",
"value": "Troy"
}
] | lib/assets/javascripts/use_block.js.coffee | emeyekayee/scheduled_resource | 0 | class @UseBlock
constructor: -> nil
class @ZTimeHeaderDayUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
classes = 'ZTimeHeaderDayRow '
block.css_classes = classes
class @ZTimeHeaderHourUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
# classes = 'ZTimeHeaderHourRow '
# block.css_classes = classes
# Uses these fields of block: title, subtitle, category, category_type,
# Sets these fields of block: label, css_classes
class @StationUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@css_classes block
@label block
block
@label: (block) ->
block.label = block.title
block.label += ":<br> #{block.subtitle}" if block.subtitle
block.label += "<span class='flags'>#{@flags(block)}</span>"
@flags: (block) ->
flags = ''
flags += ' hd' if block.hdtv
flags += ' new' unless block.previouslyshown
flags
@css_classes: (block) ->
block.css_classes = @ct_name(block) + " " + @to_css_class(block.category)
@ct_name: (block) ->
ct = block.category_type || ''
return '' if /unknown/i.test ct
"type_#{ct.replace /[^a-z0-9\-_]+/gi, '_'}"
@to_css_class: (category, clss = null) ->
return clss if (clss = @memo[ category ])
@memo[ category ] = @css_class_search category
@css_class_search: (category) ->
for key, regex of @categories
return ('cat_' + key) if regex.test category
'cat_Unknown'
@memo: {}
@categories:
Action: /\b(action|adven)/i
Adult: /\b(adult|erot)/i
Animals: /\b(animal|tiere)/i
Art_Music: /\b(art|dance|music|cultur)/i
Business: /\b(biz|busine)/i
Children: /\b(child|infan|animation)/i
Comedy: /\b(comed|entertain|sitcom)/i
Crime_Mystery: /\b(crim|myster)/i
Documentary: /\b(doc)/i
Drama: /\b(drama)/i
Educational: /\b(edu|interests)/i
Food: /\b(food|cook|drink)/i
Game: /\b(game)/i
Health_Medical: /\b(health|medic)/i
History: /\b(hist)/i
Horror: /\b(horror)/i
HowTo: /\b(how|home|house|garden)/i
Misc: /\b(special|variety|info|collect)/i
News: /\b(news|current)/i
Reality: /\b(reality)/i
Romance: /\b(romance)/i
SciFi_Fantasy: /\b(fantasy|sci\\w*\\W*fi)/i
Science_Nature: /\b(science|nature|environm)/i
Shopping: /\b(shop)/i
Soaps: /\b(soaps)/i
Spiritual: /\b(spirit|relig)/i
Sports: /\b(sport)/i
Talk: /\b(talk)/i
Travel: /\b(travel)/i
War: /\b(war)/i
Western: /\b(west)/i
# console.log ChannelUseBlock.css_class_search('action')
# console.log ChannelUseBlock.to_css_class('action')
# block =
# category_type: 'series'
# category: 'adventure'
# block = {"airdate":0,"category":"Reality","category_type":"series","chanid":1791,"description":"Alexia enlists Troy's help to find her father.","endtime":"2013-03-04T12:00:00-08:00","first":true,"generic":false,"hdtv":false,"last":true,"listingsource":0,"manualid":0,"originalairdate":"2009-03-28","partnumber":0,"parttotal":0,"pid":"","previouslyshown":1,"programid":"EP010679340014","seriesid":"EP01067934","showtype":"Series","stars":0.0,"starttime":"2013-03-04T11:30:00-08:00","stereo":false,"subtitle":"A Daughter's Mission","subtitled":false,"syndicatedepisodenumber":"204","title":"The Locator"}
# console.log ChannelUseBlock.ct_name block
# console.log ChannelUseBlock.css_classes block
| 76951 | class @UseBlock
constructor: -> nil
class @ZTimeHeaderDayUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
classes = 'ZTimeHeaderDayRow '
block.css_classes = classes
class @ZTimeHeaderHourUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
# classes = 'ZTimeHeaderHourRow '
# block.css_classes = classes
# Uses these fields of block: title, subtitle, category, category_type,
# Sets these fields of block: label, css_classes
class @StationUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@css_classes block
@label block
block
@label: (block) ->
block.label = block.title
block.label += ":<br> #{block.subtitle}" if block.subtitle
block.label += "<span class='flags'>#{@flags(block)}</span>"
@flags: (block) ->
flags = ''
flags += ' hd' if block.hdtv
flags += ' new' unless block.previouslyshown
flags
@css_classes: (block) ->
block.css_classes = @ct_name(block) + " " + @to_css_class(block.category)
@ct_name: (block) ->
ct = block.category_type || ''
return '' if /unknown/i.test ct
"type_#{ct.replace /[^a-z0-9\-_]+/gi, '_'}"
@to_css_class: (category, clss = null) ->
return clss if (clss = @memo[ category ])
@memo[ category ] = @css_class_search category
@css_class_search: (category) ->
for key, regex of @categories
return ('cat_' + key) if regex.test category
'cat_Unknown'
@memo: {}
@categories:
Action: /\b(action|adven)/i
Adult: /\b(adult|erot)/i
Animals: /\b(animal|tiere)/i
Art_Music: /\b(art|dance|music|cultur)/i
Business: /\b(biz|busine)/i
Children: /\b(child|infan|animation)/i
Comedy: /\b(comed|entertain|sitcom)/i
Crime_Mystery: /\b(crim|myster)/i
Documentary: /\b(doc)/i
Drama: /\b(drama)/i
Educational: /\b(edu|interests)/i
Food: /\b(food|cook|drink)/i
Game: /\b(game)/i
Health_Medical: /\b(health|medic)/i
History: /\b(hist)/i
Horror: /\b(horror)/i
HowTo: /\b(how|home|house|garden)/i
Misc: /\b(special|variety|info|collect)/i
News: /\b(news|current)/i
Reality: /\b(reality)/i
Romance: /\b(romance)/i
SciFi_Fantasy: /\b(fantasy|sci\\w*\\W*fi)/i
Science_Nature: /\b(science|nature|environm)/i
Shopping: /\b(shop)/i
Soaps: /\b(soaps)/i
Spiritual: /\b(spirit|relig)/i
Sports: /\b(sport)/i
Talk: /\b(talk)/i
Travel: /\b(travel)/i
War: /\b(war)/i
Western: /\b(west)/i
# console.log ChannelUseBlock.css_class_search('action')
# console.log ChannelUseBlock.to_css_class('action')
# block =
# category_type: 'series'
# category: 'adventure'
# block = {"airdate":0,"category":"Reality","category_type":"series","chanid":1791,"description":"<NAME> enlists <NAME>'s help to find her father.","endtime":"2013-03-04T12:00:00-08:00","first":true,"generic":false,"hdtv":false,"last":true,"listingsource":0,"manualid":0,"originalairdate":"2009-03-28","partnumber":0,"parttotal":0,"pid":"","previouslyshown":1,"programid":"EP010679340014","seriesid":"EP01067934","showtype":"Series","stars":0.0,"starttime":"2013-03-04T11:30:00-08:00","stereo":false,"subtitle":"A Daughter's Mission","subtitled":false,"syndicatedepisodenumber":"204","title":"The Locator"}
# console.log ChannelUseBlock.ct_name block
# console.log ChannelUseBlock.css_classes block
| true | class @UseBlock
constructor: -> nil
class @ZTimeHeaderDayUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
classes = 'ZTimeHeaderDayRow '
block.css_classes = classes
class @ZTimeHeaderHourUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@label block
@css_classes block
block
@label: (block) ->
block.label = block.title
@css_classes: (block) ->
# Could just as well come from server.
# classes = 'ZTimeHeaderHourRow '
# block.css_classes = classes
# Uses these fields of block: title, subtitle, category, category_type,
# Sets these fields of block: label, css_classes
class @StationUseBlock extends UseBlock
constructor: -> nil
@process: (block) ->
@css_classes block
@label block
block
@label: (block) ->
block.label = block.title
block.label += ":<br> #{block.subtitle}" if block.subtitle
block.label += "<span class='flags'>#{@flags(block)}</span>"
@flags: (block) ->
flags = ''
flags += ' hd' if block.hdtv
flags += ' new' unless block.previouslyshown
flags
@css_classes: (block) ->
block.css_classes = @ct_name(block) + " " + @to_css_class(block.category)
@ct_name: (block) ->
ct = block.category_type || ''
return '' if /unknown/i.test ct
"type_#{ct.replace /[^a-z0-9\-_]+/gi, '_'}"
@to_css_class: (category, clss = null) ->
return clss if (clss = @memo[ category ])
@memo[ category ] = @css_class_search category
@css_class_search: (category) ->
for key, regex of @categories
return ('cat_' + key) if regex.test category
'cat_Unknown'
@memo: {}
@categories:
Action: /\b(action|adven)/i
Adult: /\b(adult|erot)/i
Animals: /\b(animal|tiere)/i
Art_Music: /\b(art|dance|music|cultur)/i
Business: /\b(biz|busine)/i
Children: /\b(child|infan|animation)/i
Comedy: /\b(comed|entertain|sitcom)/i
Crime_Mystery: /\b(crim|myster)/i
Documentary: /\b(doc)/i
Drama: /\b(drama)/i
Educational: /\b(edu|interests)/i
Food: /\b(food|cook|drink)/i
Game: /\b(game)/i
Health_Medical: /\b(health|medic)/i
History: /\b(hist)/i
Horror: /\b(horror)/i
HowTo: /\b(how|home|house|garden)/i
Misc: /\b(special|variety|info|collect)/i
News: /\b(news|current)/i
Reality: /\b(reality)/i
Romance: /\b(romance)/i
SciFi_Fantasy: /\b(fantasy|sci\\w*\\W*fi)/i
Science_Nature: /\b(science|nature|environm)/i
Shopping: /\b(shop)/i
Soaps: /\b(soaps)/i
Spiritual: /\b(spirit|relig)/i
Sports: /\b(sport)/i
Talk: /\b(talk)/i
Travel: /\b(travel)/i
War: /\b(war)/i
Western: /\b(west)/i
# console.log ChannelUseBlock.css_class_search('action')
# console.log ChannelUseBlock.to_css_class('action')
# block =
# category_type: 'series'
# category: 'adventure'
# block = {"airdate":0,"category":"Reality","category_type":"series","chanid":1791,"description":"PI:NAME:<NAME>END_PI enlists PI:NAME:<NAME>END_PI's help to find her father.","endtime":"2013-03-04T12:00:00-08:00","first":true,"generic":false,"hdtv":false,"last":true,"listingsource":0,"manualid":0,"originalairdate":"2009-03-28","partnumber":0,"parttotal":0,"pid":"","previouslyshown":1,"programid":"EP010679340014","seriesid":"EP01067934","showtype":"Series","stars":0.0,"starttime":"2013-03-04T11:30:00-08:00","stereo":false,"subtitle":"A Daughter's Mission","subtitled":false,"syndicatedepisodenumber":"204","title":"The Locator"}
# console.log ChannelUseBlock.ct_name block
# console.log ChannelUseBlock.css_classes block
|
[
{
"context": "oller =\n\t\t\tgetLoggedInUserId: sinon.stub().returns(@user._id)\n\t\t@UserUpdater =\n\t\t\taddEmailAddress: sinon.s",
"end": 694,
"score": 0.9955607652664185,
"start": 689,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "LoggedInUserId: sinon.stub().returns(@user._id)\n\t\t@UserUpdater =\n\t\t\taddEmailAddress: sinon.stub()\n\t\t\tremoveEmail",
"end": 714,
"score": 0.8372475504875183,
"start": 702,
"tag": "USERNAME",
"value": "@UserUpdater"
},
{
"context": "escribe 'Add', ->\n\t\tbeforeEach ->\n\t\t\t@newEmail = 'new_email@baz.com'\n\t\t\t@req.body =\n\t\t\t\temail: @newEmail\n\t\t\t\tuniversi",
"end": 1893,
"score": 0.9998655319213867,
"start": 1876,
"tag": "EMAIL",
"value": "new_email@baz.com"
},
{
"context": "escribe 'remove', ->\n\t\tbeforeEach ->\n\t\t\t@email = 'email_to_remove@bar.com'\n\t\t\t@req.body.email = @email\n\t\t\t@EmailHelper.pars",
"end": 3349,
"score": 0.9996830821037292,
"start": 3326,
"tag": "EMAIL",
"value": "email_to_remove@bar.com"
},
{
"context": "ibe 'setDefault', ->\n\t\tbeforeEach ->\n\t\t\t@email = \"email_to_set_default@bar.com\"\n\t\t\t@req.body.email = @email\n\t\t\t@EmailHelper.pars",
"end": 4070,
"score": 0.9990653991699219,
"start": 4042,
"tag": "EMAIL",
"value": "email_to_set_default@bar.com"
},
{
"context": "scribe 'endorse', ->\n\t\tbeforeEach ->\n\t\t\t@email = 'email_to_endorse@bar.com'\n\t\t\t@req.body.email = @email\n\t\t\t@EmailHelper.pars",
"end": 4831,
"score": 0.9997712969779968,
"start": 4807,
"tag": "EMAIL",
"value": "email_to_endorse@bar.com"
},
{
"context": "eturns(@res)\n\t\t\t@next = sinon.stub()\n\t\t\t@token = 'mock-token'\n\t\t\t@req.body = token: @token\n\n\t\tdescribe 'succes",
"end": 5471,
"score": 0.7393967509269714,
"start": 5461,
"tag": "PASSWORD",
"value": "mock-token"
}
] | test/unit/coffee/User/UserEmailsControllerTests.coffee | shyoshyo/web-sharelatex | 1 | sinon = require('sinon')
assertCalledWith = sinon.assert.calledWith
assertNotCalled = sinon.assert.notCalled
chai = require('chai')
should = chai.should()
assert = chai.assert
modulePath = "../../../../app/js/Features/User/UserEmailsController.js"
SandboxedModule = require('sandboxed-module')
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
Errors = require("../../../../app/js/Features/Errors/Errors")
describe "UserEmailsController", ->
beforeEach ->
@req = new MockRequest()
@user =
_id: 'mock-user-id'
@UserGetter =
getUserFullEmails: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
@UserUpdater =
addEmailAddress: sinon.stub()
removeEmailAddress: sinon.stub()
setDefaultEmailAddress: sinon.stub()
updateV1AndSetDefaultEmailAddress: sinon.stub()
@EmailHelper =
parseEmail: sinon.stub()
@endorseAffiliation = sinon.stub().yields()
@UserEmailsController = SandboxedModule.require modulePath, requires:
"../Authentication/AuthenticationController": @AuthenticationController
"./UserGetter": @UserGetter
"./UserUpdater": @UserUpdater
"../Helpers/EmailHelper": @EmailHelper
"./UserEmailsConfirmationHandler": @UserEmailsConfirmationHandler = {}
"../Institutions/InstitutionsAPI": endorseAffiliation: @endorseAffiliation
"../Errors/Errors": Errors
"logger-sharelatex":
log: -> console.log(arguments)
err: ->
describe 'List', ->
beforeEach ->
it 'lists emails', (done) ->
fullEmails = [{some: 'data'}]
@UserGetter.getUserFullEmails.callsArgWith 1, null, fullEmails
@UserEmailsController.list @req,
json: (response) =>
assert.deepEqual response, fullEmails
assertCalledWith @UserGetter.getUserFullEmails, @user._id
done()
describe 'Add', ->
beforeEach ->
@newEmail = 'new_email@baz.com'
@req.body =
email: @newEmail
university: { name: 'University Name' }
department: 'Department'
role: 'Role'
@EmailHelper.parseEmail.returns @newEmail
@UserEmailsConfirmationHandler.sendConfirmationEmail = sinon.stub().yields()
@UserUpdater.addEmailAddress.callsArgWith 3, null
it 'adds new email', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @EmailHelper.parseEmail, @newEmail
assertCalledWith @UserUpdater.addEmailAddress, @user._id, @newEmail
affiliationOptions = @UserUpdater.addEmailAddress.lastCall.args[2]
Object.keys(affiliationOptions).length.should.equal 3
affiliationOptions.university.should.equal @req.body.university
affiliationOptions.department.should.equal @req.body.department
affiliationOptions.role.should.equal @req.body.role
done()
it 'sends an email confirmation', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @UserEmailsConfirmationHandler.sendConfirmationEmail, @user._id, @newEmail
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.addEmailAddress
done()
describe 'remove', ->
beforeEach ->
@email = 'email_to_remove@bar.com'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'removes email', (done) ->
@UserUpdater.removeEmailAddress.callsArgWith 2, null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.removeEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.removeEmailAddress
done()
describe 'setDefault', ->
beforeEach ->
@email = "email_to_set_default@bar.com"
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'sets default email', (done) ->
@UserUpdater.updateV1AndSetDefaultEmailAddress.callsArgWith 2, null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.updateV1AndSetDefaultEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.setDefaultEmailAddress
done()
describe 'endorse', ->
beforeEach ->
@email = 'email_to_endorse@bar.com'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'endorses affiliation', (done) ->
@req.body.role = 'Role'
@req.body.department = 'Department'
@UserEmailsController.endorse @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @endorseAffiliation, @user._id, @email, 'Role', 'Department'
done()
describe 'confirm', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields()
@res =
sendStatus: sinon.stub()
json: sinon.stub()
@res.status = sinon.stub().returns(@res)
@next = sinon.stub()
@token = 'mock-token'
@req.body = token: @token
describe 'successfully', ->
beforeEach ->
@UserEmailsController.confirm @req, @res, @next
it 'should confirm the email from the token', ->
@UserEmailsConfirmationHandler.confirmEmailFromToken
.calledWith(@token)
.should.equal true
it 'should return a 200 status', ->
@res.sendStatus.calledWith(200).should.equal true
describe 'without a token', ->
beforeEach ->
@req.body.token = null
@UserEmailsController.confirm @req, @res, @next
it 'should return a 422 status', ->
@res.sendStatus.calledWith(422).should.equal true
describe 'when confirming fails', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields(
new Errors.NotFoundError('not found')
)
@UserEmailsController.confirm @req, @res, @next
it 'should return a 404 error code with a message', ->
@res.status.calledWith(404).should.equal true
@res.json.calledWith({
message: 'Sorry, your confirmation token is invalid or has expired. Please request a new email confirmation link.'
}).should.equal true
| 208011 | sinon = require('sinon')
assertCalledWith = sinon.assert.calledWith
assertNotCalled = sinon.assert.notCalled
chai = require('chai')
should = chai.should()
assert = chai.assert
modulePath = "../../../../app/js/Features/User/UserEmailsController.js"
SandboxedModule = require('sandboxed-module')
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
Errors = require("../../../../app/js/Features/Errors/Errors")
describe "UserEmailsController", ->
beforeEach ->
@req = new MockRequest()
@user =
_id: 'mock-user-id'
@UserGetter =
getUserFullEmails: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
@UserUpdater =
addEmailAddress: sinon.stub()
removeEmailAddress: sinon.stub()
setDefaultEmailAddress: sinon.stub()
updateV1AndSetDefaultEmailAddress: sinon.stub()
@EmailHelper =
parseEmail: sinon.stub()
@endorseAffiliation = sinon.stub().yields()
@UserEmailsController = SandboxedModule.require modulePath, requires:
"../Authentication/AuthenticationController": @AuthenticationController
"./UserGetter": @UserGetter
"./UserUpdater": @UserUpdater
"../Helpers/EmailHelper": @EmailHelper
"./UserEmailsConfirmationHandler": @UserEmailsConfirmationHandler = {}
"../Institutions/InstitutionsAPI": endorseAffiliation: @endorseAffiliation
"../Errors/Errors": Errors
"logger-sharelatex":
log: -> console.log(arguments)
err: ->
describe 'List', ->
beforeEach ->
it 'lists emails', (done) ->
fullEmails = [{some: 'data'}]
@UserGetter.getUserFullEmails.callsArgWith 1, null, fullEmails
@UserEmailsController.list @req,
json: (response) =>
assert.deepEqual response, fullEmails
assertCalledWith @UserGetter.getUserFullEmails, @user._id
done()
describe 'Add', ->
beforeEach ->
@newEmail = '<EMAIL>'
@req.body =
email: @newEmail
university: { name: 'University Name' }
department: 'Department'
role: 'Role'
@EmailHelper.parseEmail.returns @newEmail
@UserEmailsConfirmationHandler.sendConfirmationEmail = sinon.stub().yields()
@UserUpdater.addEmailAddress.callsArgWith 3, null
it 'adds new email', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @EmailHelper.parseEmail, @newEmail
assertCalledWith @UserUpdater.addEmailAddress, @user._id, @newEmail
affiliationOptions = @UserUpdater.addEmailAddress.lastCall.args[2]
Object.keys(affiliationOptions).length.should.equal 3
affiliationOptions.university.should.equal @req.body.university
affiliationOptions.department.should.equal @req.body.department
affiliationOptions.role.should.equal @req.body.role
done()
it 'sends an email confirmation', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @UserEmailsConfirmationHandler.sendConfirmationEmail, @user._id, @newEmail
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.addEmailAddress
done()
describe 'remove', ->
beforeEach ->
@email = '<EMAIL>'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'removes email', (done) ->
@UserUpdater.removeEmailAddress.callsArgWith 2, null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.removeEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.removeEmailAddress
done()
describe 'setDefault', ->
beforeEach ->
@email = "<EMAIL>"
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'sets default email', (done) ->
@UserUpdater.updateV1AndSetDefaultEmailAddress.callsArgWith 2, null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.updateV1AndSetDefaultEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.setDefaultEmailAddress
done()
describe 'endorse', ->
beforeEach ->
@email = '<EMAIL>'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'endorses affiliation', (done) ->
@req.body.role = 'Role'
@req.body.department = 'Department'
@UserEmailsController.endorse @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @endorseAffiliation, @user._id, @email, 'Role', 'Department'
done()
describe 'confirm', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields()
@res =
sendStatus: sinon.stub()
json: sinon.stub()
@res.status = sinon.stub().returns(@res)
@next = sinon.stub()
@token = '<PASSWORD>'
@req.body = token: @token
describe 'successfully', ->
beforeEach ->
@UserEmailsController.confirm @req, @res, @next
it 'should confirm the email from the token', ->
@UserEmailsConfirmationHandler.confirmEmailFromToken
.calledWith(@token)
.should.equal true
it 'should return a 200 status', ->
@res.sendStatus.calledWith(200).should.equal true
describe 'without a token', ->
beforeEach ->
@req.body.token = null
@UserEmailsController.confirm @req, @res, @next
it 'should return a 422 status', ->
@res.sendStatus.calledWith(422).should.equal true
describe 'when confirming fails', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields(
new Errors.NotFoundError('not found')
)
@UserEmailsController.confirm @req, @res, @next
it 'should return a 404 error code with a message', ->
@res.status.calledWith(404).should.equal true
@res.json.calledWith({
message: 'Sorry, your confirmation token is invalid or has expired. Please request a new email confirmation link.'
}).should.equal true
| true | sinon = require('sinon')
assertCalledWith = sinon.assert.calledWith
assertNotCalled = sinon.assert.notCalled
chai = require('chai')
should = chai.should()
assert = chai.assert
modulePath = "../../../../app/js/Features/User/UserEmailsController.js"
SandboxedModule = require('sandboxed-module')
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
Errors = require("../../../../app/js/Features/Errors/Errors")
describe "UserEmailsController", ->
beforeEach ->
@req = new MockRequest()
@user =
_id: 'mock-user-id'
@UserGetter =
getUserFullEmails: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
@UserUpdater =
addEmailAddress: sinon.stub()
removeEmailAddress: sinon.stub()
setDefaultEmailAddress: sinon.stub()
updateV1AndSetDefaultEmailAddress: sinon.stub()
@EmailHelper =
parseEmail: sinon.stub()
@endorseAffiliation = sinon.stub().yields()
@UserEmailsController = SandboxedModule.require modulePath, requires:
"../Authentication/AuthenticationController": @AuthenticationController
"./UserGetter": @UserGetter
"./UserUpdater": @UserUpdater
"../Helpers/EmailHelper": @EmailHelper
"./UserEmailsConfirmationHandler": @UserEmailsConfirmationHandler = {}
"../Institutions/InstitutionsAPI": endorseAffiliation: @endorseAffiliation
"../Errors/Errors": Errors
"logger-sharelatex":
log: -> console.log(arguments)
err: ->
describe 'List', ->
beforeEach ->
it 'lists emails', (done) ->
fullEmails = [{some: 'data'}]
@UserGetter.getUserFullEmails.callsArgWith 1, null, fullEmails
@UserEmailsController.list @req,
json: (response) =>
assert.deepEqual response, fullEmails
assertCalledWith @UserGetter.getUserFullEmails, @user._id
done()
describe 'Add', ->
beforeEach ->
@newEmail = 'PI:EMAIL:<EMAIL>END_PI'
@req.body =
email: @newEmail
university: { name: 'University Name' }
department: 'Department'
role: 'Role'
@EmailHelper.parseEmail.returns @newEmail
@UserEmailsConfirmationHandler.sendConfirmationEmail = sinon.stub().yields()
@UserUpdater.addEmailAddress.callsArgWith 3, null
it 'adds new email', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @EmailHelper.parseEmail, @newEmail
assertCalledWith @UserUpdater.addEmailAddress, @user._id, @newEmail
affiliationOptions = @UserUpdater.addEmailAddress.lastCall.args[2]
Object.keys(affiliationOptions).length.should.equal 3
affiliationOptions.university.should.equal @req.body.university
affiliationOptions.department.should.equal @req.body.department
affiliationOptions.role.should.equal @req.body.role
done()
it 'sends an email confirmation', (done) ->
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @UserEmailsConfirmationHandler.sendConfirmationEmail, @user._id, @newEmail
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.add @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.addEmailAddress
done()
describe 'remove', ->
beforeEach ->
@email = 'PI:EMAIL:<EMAIL>END_PI'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'removes email', (done) ->
@UserUpdater.removeEmailAddress.callsArgWith 2, null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.removeEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.remove @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.removeEmailAddress
done()
describe 'setDefault', ->
beforeEach ->
@email = "PI:EMAIL:<EMAIL>END_PI"
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'sets default email', (done) ->
@UserUpdater.updateV1AndSetDefaultEmailAddress.callsArgWith 2, null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 200
assertCalledWith @EmailHelper.parseEmail, @email
assertCalledWith @UserUpdater.updateV1AndSetDefaultEmailAddress, @user._id, @email
done()
it 'handles email parse error', (done) ->
@EmailHelper.parseEmail.returns null
@UserEmailsController.setDefault @req,
sendStatus: (code) =>
code.should.equal 422
assertNotCalled @UserUpdater.setDefaultEmailAddress
done()
describe 'endorse', ->
beforeEach ->
@email = 'PI:EMAIL:<EMAIL>END_PI'
@req.body.email = @email
@EmailHelper.parseEmail.returns @email
it 'endorses affiliation', (done) ->
@req.body.role = 'Role'
@req.body.department = 'Department'
@UserEmailsController.endorse @req,
sendStatus: (code) =>
code.should.equal 204
assertCalledWith @endorseAffiliation, @user._id, @email, 'Role', 'Department'
done()
describe 'confirm', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields()
@res =
sendStatus: sinon.stub()
json: sinon.stub()
@res.status = sinon.stub().returns(@res)
@next = sinon.stub()
@token = 'PI:PASSWORD:<PASSWORD>END_PI'
@req.body = token: @token
describe 'successfully', ->
beforeEach ->
@UserEmailsController.confirm @req, @res, @next
it 'should confirm the email from the token', ->
@UserEmailsConfirmationHandler.confirmEmailFromToken
.calledWith(@token)
.should.equal true
it 'should return a 200 status', ->
@res.sendStatus.calledWith(200).should.equal true
describe 'without a token', ->
beforeEach ->
@req.body.token = null
@UserEmailsController.confirm @req, @res, @next
it 'should return a 422 status', ->
@res.sendStatus.calledWith(422).should.equal true
describe 'when confirming fails', ->
beforeEach ->
@UserEmailsConfirmationHandler.confirmEmailFromToken = sinon.stub().yields(
new Errors.NotFoundError('not found')
)
@UserEmailsController.confirm @req, @res, @next
it 'should return a 404 error code with a message', ->
@res.status.calledWith(404).should.equal true
@res.json.calledWith({
message: 'Sorry, your confirmation token is invalid or has expired. Please request a new email confirmation link.'
}).should.equal true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.