entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": " adapters: [ { type: \"http_in\", url: \"http://127.0.0.1:8888\" } ]\n }\n hActor = new Actor topolo",
"end": 1864,
"score": 0.9996939897537231,
"start": 1855,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ssage\", {goodBye:'worldPOST'}, {hea... | test/hHttpAdapter.coffee | fredpottier/hubiquitus | 2 | #
# * Copyright (c) Novedia Group 2012.
# *
# * This file is part of Hubiquitus
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to deal
# * in the Software without restriction, including without limitation the rights
# * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# * of the Software, and to permit persons to whom the Software is furnished to do so,
# * subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in all copies
# * or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# *
# * You should have received a copy of the MIT License along with Hubiquitus.
# * If not, see <http://opensource.org/licenses/mit-license.php>.
#
should = require("should")
validator = require "../lib/validator"
describe "hHttpAdapter", ->
hActor = undefined
hMessage = undefined
config = require("./_config")
hResultStatus = require("../lib/codes").hResultStatus
Actor = require "../lib/actors/hactor"
describe "Http_inbound", ->
http = require "http"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ { type: "http_in", url: "http://127.0.0.1:8888" } ]
}
hActor = new Actor topology
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should receive the http POST request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldPOST'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/'
method: 'POST'
req = http.request options, (res) ->
req.write myHMessage
req.end()
it "should receive the http GET request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldGET'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/hmessage='+myHMessage.replace(/\s/g, '');
http.get options, (res) ->
describe "Http_outbound", ->
http = require "http"
qs = require "querystring"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ {type: "http_out", url: "127.0.0.1", targetActorAid :"urn:localhost:httpOutMochaTest" ,path: "/" ,port: 8989 } ]
}
hActor = new Actor(topology)
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should send http request", (done) ->
server = http.createServer (req, res) =>
body = undefined
req.on "data", (data) ->
body = data
req.on "end", =>
result = validator.validateHMessage JSON.parse( body.toString('utf8') )
unless result.valid
console.log "hMessage not conform : " + JSON.stringify(result)
#@owner.log "hMessage not conform : " + JSON.stringify(result)
else
done()
server.listen 8989, "127.0.0.1"
hActor.send hActor.buildMessage("urn:localhost:httpOutMochaTest", "hHttpMessage", {hello:"world"})
| 162247 | #
# * Copyright (c) Novedia Group 2012.
# *
# * This file is part of Hubiquitus
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to deal
# * in the Software without restriction, including without limitation the rights
# * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# * of the Software, and to permit persons to whom the Software is furnished to do so,
# * subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in all copies
# * or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# *
# * You should have received a copy of the MIT License along with Hubiquitus.
# * If not, see <http://opensource.org/licenses/mit-license.php>.
#
should = require("should")
validator = require "../lib/validator"
describe "hHttpAdapter", ->
hActor = undefined
hMessage = undefined
config = require("./_config")
hResultStatus = require("../lib/codes").hResultStatus
Actor = require "../lib/actors/hactor"
describe "Http_inbound", ->
http = require "http"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ { type: "http_in", url: "http://127.0.0.1:8888" } ]
}
hActor = new Actor topology
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should receive the http POST request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldPOST'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/'
method: 'POST'
req = http.request options, (res) ->
req.write myHMessage
req.end()
it "should receive the http GET request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldGET'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/hmessage='+myHMessage.replace(/\s/g, '');
http.get options, (res) ->
describe "Http_outbound", ->
http = require "http"
qs = require "querystring"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ {type: "http_out", url: "127.0.0.1", targetActorAid :"urn:localhost:httpOutMochaTest" ,path: "/" ,port: 8989 } ]
}
hActor = new Actor(topology)
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should send http request", (done) ->
server = http.createServer (req, res) =>
body = undefined
req.on "data", (data) ->
body = data
req.on "end", =>
result = validator.validateHMessage JSON.parse( body.toString('utf8') )
unless result.valid
console.log "hMessage not conform : " + JSON.stringify(result)
<EMAIL> "hMessage not conform : " + JSON.stringify(result)
else
done()
server.listen 8989, "127.0.0.1"
hActor.send hActor.buildMessage("urn:localhost:httpOutMochaTest", "hHttpMessage", {hello:"world"})
| true | #
# * Copyright (c) Novedia Group 2012.
# *
# * This file is part of Hubiquitus
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to deal
# * in the Software without restriction, including without limitation the rights
# * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# * of the Software, and to permit persons to whom the Software is furnished to do so,
# * subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in all copies
# * or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# *
# * You should have received a copy of the MIT License along with Hubiquitus.
# * If not, see <http://opensource.org/licenses/mit-license.php>.
#
should = require("should")
validator = require "../lib/validator"
describe "hHttpAdapter", ->
hActor = undefined
hMessage = undefined
config = require("./_config")
hResultStatus = require("../lib/codes").hResultStatus
Actor = require "../lib/actors/hactor"
describe "Http_inbound", ->
http = require "http"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ { type: "http_in", url: "http://127.0.0.1:8888" } ]
}
hActor = new Actor topology
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should receive the http POST request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldPOST'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/'
method: 'POST'
req = http.request options, (res) ->
req.write myHMessage
req.end()
it "should receive the http GET request", (done) ->
myHMessage = JSON.stringify(hActor.buildMessage(hActor.actor, "hHttpMessage", {goodBye:'worldGET'}, {headers:{ 'host: "127.0.0.1:8888",
connection: "keep-alive",
transfer-encoding: "chunked"' }}))
hActor.onMessage = (hMessage) ->
hMessage.type.should.be.equal('hHttpMessage')
hMessage.should.have.property("headers").and.be.an.instanceOf(Object)
hMessage.should.have.property("payload").and.be.an.instanceOf(Object)
done()
options =
hostname: "127.0.0.1"
port: 8888
path: '/hmessage='+myHMessage.replace(/\s/g, '');
http.get options, (res) ->
describe "Http_outbound", ->
http = require "http"
qs = require "querystring"
before () ->
topology = {
actor: "urn:localhost:actor",
type: "hactor",
properties: {},
adapters: [ {type: "http_out", url: "127.0.0.1", targetActorAid :"urn:localhost:httpOutMochaTest" ,path: "/" ,port: 8989 } ]
}
hActor = new Actor(topology)
hMessage = hActor.h_buildSignal(hActor.actor, "start", {})
hMessage.sent = new Date().getTime()
hActor.h_onMessageInternal(hMessage)
after () ->
hActor.h_tearDown()
hActor = null
it "should send http request", (done) ->
server = http.createServer (req, res) =>
body = undefined
req.on "data", (data) ->
body = data
req.on "end", =>
result = validator.validateHMessage JSON.parse( body.toString('utf8') )
unless result.valid
console.log "hMessage not conform : " + JSON.stringify(result)
PI:EMAIL:<EMAIL>END_PI "hMessage not conform : " + JSON.stringify(result)
else
done()
server.listen 8989, "127.0.0.1"
hActor.send hActor.buildMessage("urn:localhost:httpOutMochaTest", "hHttpMessage", {hello:"world"})
|
[
{
"context": " in context of object', ->\n @object.first = 'Jonas'\n @object.last = 'Nicklas'\n definePrope",
"end": 2420,
"score": 0.9997893571853638,
"start": 2415,
"tag": "NAME",
"value": "Jonas"
},
{
"context": " @object.first = 'Jonas'\n @object.last = 'Ni... | test/property.spec.coffee | varvet/serenade.js | 5 | require './spec_helper'
{extend} = Build
{defineProperty} = Serenade
describe 'Serenade.defineProperty', ->
beforeEach ->
@object = {}
it 'does not bleed over between objects with same prototype', ->
@inst1 = Object.create(@object)
@inst2 = Object.create(@object)
defineProperty @object, 'name', serialize: true
defineProperty @inst1, 'age', serialize: true
defineProperty @inst2, 'height', serialize: true
expect(Object.keys(@inst1)).to.include('age')
expect(Object.keys(@inst2)).not.to.include('age')
it 'can be redefined', ->
defineProperty @object, 'name', get: -> "foo"
defineProperty @object, 'name', get: -> "bar"
expect(@object.name).to.eql("bar")
it 'is listed as an own property', ->
defineProperty @object, 'name'
expect(Object.keys(@object)).to.eql(["name"])
expect(prop for prop of @object).to.eql(["name"])
@child = Object.create(@object)
expect(Object.keys(@child)).to.eql([])
it 'adopts own property status when redefined', ->
defineProperty @object, 'name'
@child = Object.create(@object)
@child.name = "bar"
expect(Object.keys(@child)).to.eql(["name"])
describe '#set', ->
beforeEach ->
defineProperty @object, ("foo")
it 'sets that property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'triggers a change event if it is defined', ->
Serenade.defineEvent(@object, "changed")
expect(=> @object.foo = 23).to.triggerEvent(@object.changed)
it 'triggers a change event for this property', ->
expect(=> @object.foo = 23).to.triggerEvent(@object.foo_property, with: [undefined, 23])
expect(=> @object.foo = 32).to.triggerEvent(@object.foo_property, with: [23, 32])
it 'uses a custom setter', ->
setValue = null
defineProperty @object, 'foo', set: (value) -> setValue = value
@object.foo = 42
expect(setValue).to.eql(42)
it 'consumes assigned functions and makes them getters', ->
defineProperty @object, 'foo'
@object.foo = -> 42
expect(@object.foo).to.eql(42)
describe '#get', ->
it 'reads an existing property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'uses a custom getter', ->
defineProperty @object, 'foo', get: -> 42
expect(@object.foo).to.eql(42)
it 'runs custom getter in context of object', ->
@object.first = 'Jonas'
@object.last = 'Nicklas'
defineProperty @object, 'fullName', get: -> [@first, @last].join(' ')
expect(@object.fullName).to.eql('Jonas Nicklas')
describe '#format', ->
it 'defaults to value', ->
defineProperty @object, 'foo', value: 42
expect(@object.foo_property.format()).to.eql(42)
it 'uses a custom formatter', ->
defineProperty @object, 'foo', value: 12, format: (val) -> val + "px"
expect(@object.foo_property.format()).to.eql("12px")
it 'runs formatter in object context', ->
@object.unit = "em"
defineProperty @object, 'foo', value: 12, format: (val) -> val + @unit
expect(@object.foo_property.format()).to.eql("12em")
describe 'enumerable', ->
it 'defaults to true', ->
defineProperty @object, 'foo'
expect(Object.keys(@object)).to.include('foo')
it 'can be set to false', ->
defineProperty @object, 'foo', enumerable: false
expect(Object.keys(@object)).not.to.include('foo')
it 'can be set to true', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.include('foo')
it 'adds no other enumerable properties', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.eql(['foo'])
describe 'serialize', ->
it 'will setup a setter method for that name', ->
defineProperty @object, 'fooBar', serialize: 'foo_bar'
@object.foo_bar = 56
expect(@object.foo_bar).to.eql(56)
expect(@object.fooBar).to.eql(56)
describe "with `value` option", ->
it 'can be given a value', ->
defineProperty @object, 'name', value: "Jonas"
expect(@object.name).to.eql("Jonas")
it 'can set up default value', ->
defineProperty @object, 'name', value: "foobar"
expect(@object.name).to.eql("foobar")
@object.name = "baz"
expect(@object.name).to.eql("baz")
@object.name = undefined
expect(@object.name).to.eql(undefined)
it 'can set up falsy default values', ->
defineProperty @object, 'name', value: null
expect(@object.name).to.equal(null)
it 'ignores default when custom getter given', ->
defineProperty @object, 'name', value: "bar", get: -> "foo"
expect(@object.name).to.eql("foo")
describe "with `cache` option", ->
it "returns values from cache", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "Jonas"
expect(@object.name).to.eql("Jonas")
expect(@object.name).to.eql("Jonas")
expect(hitCount).to.eql(1)
it "does not make cache enumerable", ->
defineProperty @object, "name", cache: true, get: -> "Jonas"
expect(@object.name).to.eql("Jonas")
expect(Object.keys(@object)).to.eql(["name"])
it "resets cache when change event triggered", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "Jonas"
expect(@object.name).to.eql("Jonas")
expect(@object.name).to.eql("Jonas")
expect(hitCount).to.eql(1)
@object.name_property.trigger()
expect(@object.name).to.eql("Jonas")
expect(@object.name).to.eql("Jonas")
expect(hitCount).to.eql(2)
it "resets cache before attached events are fired", ->
@object.__hitCount = 0
defineProperty @object, "hitCount", cache: true, get: -> ++@__hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.hitCount_property.trigger()
expect(@object.result).to.eql(2)
it "resets cache before attached global events are fired", ->
defineProperty @object, "foo", value: { __hitCount: 0 }
defineProperty @object.foo, "hitCount", cache: true, get: -> ++@__hitCount
defineProperty @object, "hitCount", dependsOn: "foo.hitCount", get: -> @foo.hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.foo.hitCount_property.trigger()
expect(@object.result).to.eql(2)
describe "with `changed` option", ->
it "triggers a change event if value of property has changed if option not given", ->
defineProperty @object, "name"
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "jonas").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "kim").to.triggerEvent(@object.name_property)
it "triggers a change event if changed option evaluates to true", ->
defineProperty @object, "name", value: "jonas", changed: (oldVal, newVal) -> oldVal is newVal
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "kim").not.to.triggerEvent(@object.name_property)
it "always triggers a change event the first time a property is changed when a function is given since we don't know the initial value", ->
defineProperty @object, "name", changed: -> false
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "kim").not.to.triggerEvent(@object.name_property)
it "does not trigger dependencies when not changed", ->
defineProperty @object, "name", changed: (oldVal, newVal) -> oldVal isnt newVal
defineProperty @object, "bigName", dependsOn: "name", get: -> @name?.toUpperCase()
expect(=> @object.name = "jonas").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "jonas").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "kim").to.triggerEvent(@object.bigName_property)
it "always triggers a change event when mutable object is assigned", ->
obj = {}
defineProperty @object, "name", value: obj
expect(=> @object.name = {}).to.triggerEvent(@object.name_property)
it "does not trigger when computed property has not changed", ->
defineProperty @object, "name"
defineProperty @object, "bigName",
dependsOn: "name"
get: -> @name?.toUpperCase()
changed: (oldVal, newVal) -> oldVal isnt newVal
expect(=> @object.name = "jonas").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "jonas").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "kim").to.triggerEvent(@object.bigName_property)
it "never triggers a change event when option is false", ->
defineProperty @object, "name", changed: false
expect(=> @object.name = "jonas").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "jonas").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "kim").not.to.triggerEvent(@object.name_property)
it "always triggers a change event when option is true", ->
defineProperty @object, "name", changed: true
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "jonas").to.triggerEvent(@object.name_property)
expect(=> @object.name = "kim").to.triggerEvent(@object.name_property)
describe "with `async` option", ->
it "dispatches a change event for this property asynchronously", (done) ->
defineProperty @object, "foo", async: true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "optimizes multiple change events for a property into one", (done) ->
@object.num = 0
defineProperty @object, "foo", value: 12, async: true
@object.foo_property.resolve()
@object.foo_property.bind (before, after) -> @result = "#{before}:#{after}"
@object.foo = 23
@object.foo = 15
@object.foo = 45
expect(=> @object.result).to.become("12:45", done)
describe "when Serenade.async is true", ->
it "dispatches change event asynchronously", (done) ->
defineProperty @object, "foo"
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "stays asynchronous when async option is true", (done) ->
defineProperty @object, "foo", async: true
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "can be made synchronous", ->
defineProperty @object, "foo", async: false
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).to.be.ok
| 180652 | require './spec_helper'
{extend} = Build
{defineProperty} = Serenade
describe 'Serenade.defineProperty', ->
beforeEach ->
@object = {}
it 'does not bleed over between objects with same prototype', ->
@inst1 = Object.create(@object)
@inst2 = Object.create(@object)
defineProperty @object, 'name', serialize: true
defineProperty @inst1, 'age', serialize: true
defineProperty @inst2, 'height', serialize: true
expect(Object.keys(@inst1)).to.include('age')
expect(Object.keys(@inst2)).not.to.include('age')
it 'can be redefined', ->
defineProperty @object, 'name', get: -> "foo"
defineProperty @object, 'name', get: -> "bar"
expect(@object.name).to.eql("bar")
it 'is listed as an own property', ->
defineProperty @object, 'name'
expect(Object.keys(@object)).to.eql(["name"])
expect(prop for prop of @object).to.eql(["name"])
@child = Object.create(@object)
expect(Object.keys(@child)).to.eql([])
it 'adopts own property status when redefined', ->
defineProperty @object, 'name'
@child = Object.create(@object)
@child.name = "bar"
expect(Object.keys(@child)).to.eql(["name"])
describe '#set', ->
beforeEach ->
defineProperty @object, ("foo")
it 'sets that property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'triggers a change event if it is defined', ->
Serenade.defineEvent(@object, "changed")
expect(=> @object.foo = 23).to.triggerEvent(@object.changed)
it 'triggers a change event for this property', ->
expect(=> @object.foo = 23).to.triggerEvent(@object.foo_property, with: [undefined, 23])
expect(=> @object.foo = 32).to.triggerEvent(@object.foo_property, with: [23, 32])
it 'uses a custom setter', ->
setValue = null
defineProperty @object, 'foo', set: (value) -> setValue = value
@object.foo = 42
expect(setValue).to.eql(42)
it 'consumes assigned functions and makes them getters', ->
defineProperty @object, 'foo'
@object.foo = -> 42
expect(@object.foo).to.eql(42)
describe '#get', ->
it 'reads an existing property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'uses a custom getter', ->
defineProperty @object, 'foo', get: -> 42
expect(@object.foo).to.eql(42)
it 'runs custom getter in context of object', ->
@object.first = '<NAME>'
@object.last = '<NAME>'
defineProperty @object, 'fullName', get: -> [@first, @last].join(' ')
expect(@object.fullName).to.eql('<NAME>')
describe '#format', ->
it 'defaults to value', ->
defineProperty @object, 'foo', value: 42
expect(@object.foo_property.format()).to.eql(42)
it 'uses a custom formatter', ->
defineProperty @object, 'foo', value: 12, format: (val) -> val + "px"
expect(@object.foo_property.format()).to.eql("12px")
it 'runs formatter in object context', ->
@object.unit = "em"
defineProperty @object, 'foo', value: 12, format: (val) -> val + @unit
expect(@object.foo_property.format()).to.eql("12em")
describe 'enumerable', ->
it 'defaults to true', ->
defineProperty @object, 'foo'
expect(Object.keys(@object)).to.include('foo')
it 'can be set to false', ->
defineProperty @object, 'foo', enumerable: false
expect(Object.keys(@object)).not.to.include('foo')
it 'can be set to true', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.include('foo')
it 'adds no other enumerable properties', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.eql(['foo'])
describe 'serialize', ->
it 'will setup a setter method for that name', ->
defineProperty @object, 'fooBar', serialize: 'foo_bar'
@object.foo_bar = 56
expect(@object.foo_bar).to.eql(56)
expect(@object.fooBar).to.eql(56)
describe "with `value` option", ->
it 'can be given a value', ->
defineProperty @object, 'name', value: "<NAME>"
expect(@object.name).to.eql("<NAME>")
it 'can set up default value', ->
defineProperty @object, 'name', value: "foobar"
expect(@object.name).to.eql("foobar")
@object.name = "<NAME>"
expect(@object.name).to.eql("baz")
@object.name = undefined
expect(@object.name).to.eql(undefined)
it 'can set up falsy default values', ->
defineProperty @object, 'name', value: null
expect(@object.name).to.equal(null)
it 'ignores default when custom getter given', ->
defineProperty @object, 'name', value: "bar", get: -> "foo"
expect(@object.name).to.eql("foo")
describe "with `cache` option", ->
it "returns values from cache", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "<NAME>"
expect(@object.name).to.eql("<NAME>")
expect(@object.name).to.eql("<NAME>")
expect(hitCount).to.eql(1)
it "does not make cache enumerable", ->
defineProperty @object, "name", cache: true, get: -> "<NAME>"
expect(@object.name).to.eql("<NAME>")
expect(Object.keys(@object)).to.eql(["name"])
it "resets cache when change event triggered", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "<NAME>"
expect(@object.name).to.eql("<NAME>")
expect(@object.name).to.eql("<NAME>")
expect(hitCount).to.eql(1)
@object.name_property.trigger()
expect(@object.name).to.eql("<NAME>")
expect(@object.name).to.eql("<NAME>")
expect(hitCount).to.eql(2)
it "resets cache before attached events are fired", ->
@object.__hitCount = 0
defineProperty @object, "hitCount", cache: true, get: -> ++@__hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.hitCount_property.trigger()
expect(@object.result).to.eql(2)
it "resets cache before attached global events are fired", ->
defineProperty @object, "foo", value: { __hitCount: 0 }
defineProperty @object.foo, "hitCount", cache: true, get: -> ++@__hitCount
defineProperty @object, "hitCount", dependsOn: "foo.hitCount", get: -> @foo.hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.foo.hitCount_property.trigger()
expect(@object.result).to.eql(2)
describe "with `changed` option", ->
it "triggers a change event if value of property has changed if option not given", ->
defineProperty @object, "name"
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
it "triggers a change event if changed option evaluates to true", ->
defineProperty @object, "name", value: "<NAME>", changed: (oldVal, newVal) -> oldVal is newVal
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
it "always triggers a change event the first time a property is changed when a function is given since we don't know the initial value", ->
defineProperty @object, "name", changed: -> false
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
it "does not trigger dependencies when not changed", ->
defineProperty @object, "name", changed: (oldVal, newVal) -> oldVal isnt newVal
defineProperty @object, "bigName", dependsOn: "name", get: -> @name?.toUpperCase()
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.bigName_property)
it "always triggers a change event when mutable object is assigned", ->
obj = {}
defineProperty @object, "name", value: obj
expect(=> @object.name = {}).to.triggerEvent(@object.name_property)
it "does not trigger when computed property has not changed", ->
defineProperty @object, "name"
defineProperty @object, "bigName",
dependsOn: "name"
get: -> @name?.toUpperCase()
changed: (oldVal, newVal) -> oldVal isnt newVal
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.bigName_property)
it "never triggers a change event when option is false", ->
defineProperty @object, "name", changed: false
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").not.to.triggerEvent(@object.name_property)
it "always triggers a change event when option is true", ->
defineProperty @object, "name", changed: true
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
expect(=> @object.name = "<NAME>").to.triggerEvent(@object.name_property)
describe "with `async` option", ->
it "dispatches a change event for this property asynchronously", (done) ->
defineProperty @object, "foo", async: true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "optimizes multiple change events for a property into one", (done) ->
@object.num = 0
defineProperty @object, "foo", value: 12, async: true
@object.foo_property.resolve()
@object.foo_property.bind (before, after) -> @result = "#{before}:#{after}"
@object.foo = 23
@object.foo = 15
@object.foo = 45
expect(=> @object.result).to.become("12:45", done)
describe "when Serenade.async is true", ->
it "dispatches change event asynchronously", (done) ->
defineProperty @object, "foo"
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "stays asynchronous when async option is true", (done) ->
defineProperty @object, "foo", async: true
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "can be made synchronous", ->
defineProperty @object, "foo", async: false
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).to.be.ok
| true | require './spec_helper'
{extend} = Build
{defineProperty} = Serenade
describe 'Serenade.defineProperty', ->
beforeEach ->
@object = {}
it 'does not bleed over between objects with same prototype', ->
@inst1 = Object.create(@object)
@inst2 = Object.create(@object)
defineProperty @object, 'name', serialize: true
defineProperty @inst1, 'age', serialize: true
defineProperty @inst2, 'height', serialize: true
expect(Object.keys(@inst1)).to.include('age')
expect(Object.keys(@inst2)).not.to.include('age')
it 'can be redefined', ->
defineProperty @object, 'name', get: -> "foo"
defineProperty @object, 'name', get: -> "bar"
expect(@object.name).to.eql("bar")
it 'is listed as an own property', ->
defineProperty @object, 'name'
expect(Object.keys(@object)).to.eql(["name"])
expect(prop for prop of @object).to.eql(["name"])
@child = Object.create(@object)
expect(Object.keys(@child)).to.eql([])
it 'adopts own property status when redefined', ->
defineProperty @object, 'name'
@child = Object.create(@object)
@child.name = "bar"
expect(Object.keys(@child)).to.eql(["name"])
describe '#set', ->
beforeEach ->
defineProperty @object, ("foo")
it 'sets that property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'triggers a change event if it is defined', ->
Serenade.defineEvent(@object, "changed")
expect(=> @object.foo = 23).to.triggerEvent(@object.changed)
it 'triggers a change event for this property', ->
expect(=> @object.foo = 23).to.triggerEvent(@object.foo_property, with: [undefined, 23])
expect(=> @object.foo = 32).to.triggerEvent(@object.foo_property, with: [23, 32])
it 'uses a custom setter', ->
setValue = null
defineProperty @object, 'foo', set: (value) -> setValue = value
@object.foo = 42
expect(setValue).to.eql(42)
it 'consumes assigned functions and makes them getters', ->
defineProperty @object, 'foo'
@object.foo = -> 42
expect(@object.foo).to.eql(42)
describe '#get', ->
it 'reads an existing property', ->
@object.foo = 23
expect(@object.foo).to.eql(23)
it 'uses a custom getter', ->
defineProperty @object, 'foo', get: -> 42
expect(@object.foo).to.eql(42)
it 'runs custom getter in context of object', ->
@object.first = 'PI:NAME:<NAME>END_PI'
@object.last = 'PI:NAME:<NAME>END_PI'
defineProperty @object, 'fullName', get: -> [@first, @last].join(' ')
expect(@object.fullName).to.eql('PI:NAME:<NAME>END_PI')
describe '#format', ->
it 'defaults to value', ->
defineProperty @object, 'foo', value: 42
expect(@object.foo_property.format()).to.eql(42)
it 'uses a custom formatter', ->
defineProperty @object, 'foo', value: 12, format: (val) -> val + "px"
expect(@object.foo_property.format()).to.eql("12px")
it 'runs formatter in object context', ->
@object.unit = "em"
defineProperty @object, 'foo', value: 12, format: (val) -> val + @unit
expect(@object.foo_property.format()).to.eql("12em")
describe 'enumerable', ->
it 'defaults to true', ->
defineProperty @object, 'foo'
expect(Object.keys(@object)).to.include('foo')
it 'can be set to false', ->
defineProperty @object, 'foo', enumerable: false
expect(Object.keys(@object)).not.to.include('foo')
it 'can be set to true', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.include('foo')
it 'adds no other enumerable properties', ->
defineProperty @object, 'foo', enumerable: true
expect(Object.keys(@object)).to.eql(['foo'])
describe 'serialize', ->
it 'will setup a setter method for that name', ->
defineProperty @object, 'fooBar', serialize: 'foo_bar'
@object.foo_bar = 56
expect(@object.foo_bar).to.eql(56)
expect(@object.fooBar).to.eql(56)
describe "with `value` option", ->
it 'can be given a value', ->
defineProperty @object, 'name', value: "PI:NAME:<NAME>END_PI"
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
it 'can set up default value', ->
defineProperty @object, 'name', value: "foobar"
expect(@object.name).to.eql("foobar")
@object.name = "PI:NAME:<NAME>END_PI"
expect(@object.name).to.eql("baz")
@object.name = undefined
expect(@object.name).to.eql(undefined)
it 'can set up falsy default values', ->
defineProperty @object, 'name', value: null
expect(@object.name).to.equal(null)
it 'ignores default when custom getter given', ->
defineProperty @object, 'name', value: "bar", get: -> "foo"
expect(@object.name).to.eql("foo")
describe "with `cache` option", ->
it "returns values from cache", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "PI:NAME:<NAME>END_PI"
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(hitCount).to.eql(1)
it "does not make cache enumerable", ->
defineProperty @object, "name", cache: true, get: -> "PI:NAME:<NAME>END_PI"
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(Object.keys(@object)).to.eql(["name"])
it "resets cache when change event triggered", ->
hitCount = 0
defineProperty @object, "name", cache: true, get: -> hitCount++; "PI:NAME:<NAME>END_PI"
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(hitCount).to.eql(1)
@object.name_property.trigger()
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(@object.name).to.eql("PI:NAME:<NAME>END_PI")
expect(hitCount).to.eql(2)
it "resets cache before attached events are fired", ->
@object.__hitCount = 0
defineProperty @object, "hitCount", cache: true, get: -> ++@__hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.hitCount_property.trigger()
expect(@object.result).to.eql(2)
it "resets cache before attached global events are fired", ->
defineProperty @object, "foo", value: { __hitCount: 0 }
defineProperty @object.foo, "hitCount", cache: true, get: -> ++@__hitCount
defineProperty @object, "hitCount", dependsOn: "foo.hitCount", get: -> @foo.hitCount
@object.hitCount_property.bind -> @result = @hitCount
expect(@object.hitCount).to.eql(1)
expect(@object.hitCount).to.eql(1)
@object.foo.hitCount_property.trigger()
expect(@object.result).to.eql(2)
describe "with `changed` option", ->
it "triggers a change event if value of property has changed if option not given", ->
defineProperty @object, "name"
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
it "triggers a change event if changed option evaluates to true", ->
defineProperty @object, "name", value: "PI:NAME:<NAME>END_PI", changed: (oldVal, newVal) -> oldVal is newVal
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
it "always triggers a change event the first time a property is changed when a function is given since we don't know the initial value", ->
defineProperty @object, "name", changed: -> false
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
it "does not trigger dependencies when not changed", ->
defineProperty @object, "name", changed: (oldVal, newVal) -> oldVal isnt newVal
defineProperty @object, "bigName", dependsOn: "name", get: -> @name?.toUpperCase()
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.bigName_property)
it "always triggers a change event when mutable object is assigned", ->
obj = {}
defineProperty @object, "name", value: obj
expect(=> @object.name = {}).to.triggerEvent(@object.name_property)
it "does not trigger when computed property has not changed", ->
defineProperty @object, "name"
defineProperty @object, "bigName",
dependsOn: "name"
get: -> @name?.toUpperCase()
changed: (oldVal, newVal) -> oldVal isnt newVal
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.bigName_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.bigName_property)
it "never triggers a change event when option is false", ->
defineProperty @object, "name", changed: false
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").not.to.triggerEvent(@object.name_property)
it "always triggers a change event when option is true", ->
defineProperty @object, "name", changed: true
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
expect(=> @object.name = "PI:NAME:<NAME>END_PI").to.triggerEvent(@object.name_property)
describe "with `async` option", ->
it "dispatches a change event for this property asynchronously", (done) ->
defineProperty @object, "foo", async: true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "optimizes multiple change events for a property into one", (done) ->
@object.num = 0
defineProperty @object, "foo", value: 12, async: true
@object.foo_property.resolve()
@object.foo_property.bind (before, after) -> @result = "#{before}:#{after}"
@object.foo = 23
@object.foo = 15
@object.foo = 45
expect(=> @object.result).to.become("12:45", done)
describe "when Serenade.async is true", ->
it "dispatches change event asynchronously", (done) ->
defineProperty @object, "foo"
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "stays asynchronous when async option is true", (done) ->
defineProperty @object, "foo", async: true
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).not.to.be.ok
expect(=> @object.result).to.become(true, done)
it "can be made synchronous", ->
defineProperty @object, "foo", async: false
Serenade.async = true
@object.foo_property.bind -> @result = true
@object.foo = 23
expect(@object.result).to.be.ok
|
[
{
"context": "do fill = (item = 'Bob') ->\n $('#content').append \"#{item}\"\n console.d",
"end": 22,
"score": 0.9994577765464783,
"start": 19,
"tag": "NAME",
"value": "Bob"
}
] | components/coffee/script.coffee | planetoftheweb/coffeescript | 5 | do fill = (item = 'Bob') ->
$('#content').append "#{item}"
console.dir @fill
fill | 124545 | do fill = (item = '<NAME>') ->
$('#content').append "#{item}"
console.dir @fill
fill | true | do fill = (item = 'PI:NAME:<NAME>END_PI') ->
$('#content').append "#{item}"
console.dir @fill
fill |
[
{
"context": "s.cache\"\n ]\n projectHome: \"~/src/github.com/aegypius/\"\n telemetryConsent: \"limited\"\n uriHandlerR",
"end": 2284,
"score": 0.9995814561843872,
"start": 2276,
"tag": "USERNAME",
"value": "aegypius"
},
{
"context": "t\":\n panelHeight: 108\n metrics... | home/.atom/config.cson | aegypius/dotfiles | 1 | "*":
_dart:
_firstRun: true
_version: "1.0.1"
"atom-autocomplete-php": {}
"atom-beautify":
apex: {}
arduino: {}
c: {}
cfml: {}
coffeescript: {}
cpp: {}
cs: {}
css: {}
d: {}
ejs: {}
erb: {}
fortran: {}
general:
_analyticsUserId: "a6669e24-585f-4be5-abcf-1009aef1b8a8"
gherkin: {}
handlebars: {}
html:
brace_style: "expand"
jade: {}
java: {}
js: {}
json: {}
jsx: {}
latex: {}
less: {}
marko: {}
mustache: {}
objectivec: {}
pawn: {}
perl: {}
php: {}
python: {}
riot: {}
ruby: {}
rust: {}
sass: {}
scss: {}
spacebars: {}
sql: {}
svg: {}
swig: {}
tss: {}
twig: {}
typescript: {}
vala: {}
visualforce: {}
xml:
brace_style: "expand"
default_beautifier: "JS Beautify"
indent_inner_html: true
xtemplate: {}
yaml:
disabled: true
"atom-ide-ui":
"atom-ide-diagnostics-ui": {}
use:
"atom-ide-refactor": "never"
"autocomplete-emojis": {}
"autocomplete-plus":
autoActivationDelay: 500
"compatibility-reporter":
browsers: "last 1 version, last 3 IE versions, last 1 Samsung version, last 1 Android version"
excludeVcsIgnoredFiles: false
core:
autoHideMenuBar: true
closeDeletedFileTabs: true
customFileTypes:
"source.ini": [
".buckconfig"
".flowconfig"
".hgrc"
]
"source.json": [
"BUCK.autodeps"
]
"source.python": [
"BUCK"
]
"source.yaml": [
"neon"
"yaml.dist"
"yml.dist"
]
"text.html.basic": [
"thtm"
]
disabledPackages: [
"language-twig"
"linter-jshint"
"linter-jsxhint"
]
followSymlinks: false
ignoredNames: [
".git"
".hg"
".svn"
".wercker"
"_site"
".DS_Store"
"Thumbs.db"
"app/cache"
"_cache"
"_tmp"
"_tools"
"cgi-bin"
".vagrant"
"userfiles"
".nyc_output"
".capistrano"
".serenata"
"_builds"
"_projects"
"_steps"
".rules"
"**/var/cache"
"**/.php_cs.cache"
]
projectHome: "~/src/github.com/aegypius/"
telemetryConsent: "limited"
uriHandlerRegistration: "always"
"custom-folds":
areRegionsFoldedOnLoad: true
"disable-keybindings":
communityPackages: [
"regex-railroad-diagram"
]
docblockr:
per_section_indent: true
editor:
fontFamily: "Fira Code, Source Code Pro"
invisibles: {}
scrollPastEnd: true
showIndentGuide: true
showInvisibles: true
emmet: {}
"exception-reporting":
userId: "042b8423-000c-3604-2877-fa18bc8ce717"
"file-icons": {}
"file-types":
Cargofile$: "source.coffee"
gotpl: "source.gotemplate"
thtm: "source.html"
"find-and-replace":
projectSearchResultsPaneSplitDirection: "right"
"fuzzy-finder":
ignoredNames: [
"app/cache*"
"node_modules"
"vendor"
]
scoringSystem: "fast"
"git-diff": {}
"language-gentoo":
updateManifestOnSave: true
linter:
ignoredLinterErrors: [
"jsonlint"
"php"
"phpcs"
]
subtleLinterErrors: [
"jscs"
]
"linter-docker": {}
"linter-eslint":
disableWhenNoEslintrcFileInPath: true
fixOnSave: true
useGlobalEslint: true
"linter-js-standard-engine":
enabledProjects: 3
"linter-jscs":
harmony: true
onlyConfig: true
"linter-less":
includePath: [
"vendor/bower_components"
]
"linter-phpcs":
disableWhenNoConfigFile: true
"linter-ui-default":
panelHeight: 108
metrics:
userId: "5afe01d925f56ca54ea8945b2cce677f8755be3a"
"mocha-test-runner":
nodeBinaryPath: "/usr/bin/node"
nuclide:
installRecommendedPackages: true
"nuclide-file-tree":
allowKeyboardPrefixNavigation: false
showOpenFiles: false
showUncommittedChanges: false
"one-dark-ui": {}
"php-cs-fixer":
executablePath: "/home/laurentn/.local/bin/php-cs-fixer"
executeOnSave: true
"php-ide-serenata":
core: {}
general:
doNotAskForSupport: true
doNotShowProjectChangeMessage: true
linting:
showMissingDocs: false
storagePath: "/home/laurentn/.cache/php-ide-serenata"
"php-integrator-base":
general:
indexContinuously: false
linting:
showMissingDocs: false
memoryLimit: 2048
"php-integrator-base:confirmed-upgrade-message": true
"php-integrator-linter":
validateDocblockCorrectness: false
pigments:
groupPaletteColors: "by file"
markerType: "native-dot"
mergeColorDuplicates: true
sortPaletteColors: "by color"
"project-manager":
sortBy: "group"
react: {}
"release-notes":
viewedVersion: "0.1.0"
"spell-check":
locales: [
"en-US"
"fr-FR"
]
"split-diff":
diffWords: true
ignoreWhitespace: true
leftEditorColor: "red"
rightEditorColor: "green"
scrollSyncType: "Vertical + Horizontal"
syncHorizontalScroll: true
"standard-formatter":
checkStyleDevDependencies: true
formatOnSave: true
style: "semi-standard"
tablr:
csvEditor:
header: true
tableEditor: {}
tabs:
enableVcsColoring: true
usePreviewTabs: true
"todo-show":
ignoreThesePaths: [
"node_modules"
"vendor"
"bower_components"
"web"
"lib/3rdParties"
]
"tool-bar": {}
"tree-view":
alwaysOpenExisting: true
hideIgnoredNames: true
wakatime:
apikey: "Saved in your ~/.wakatime.cfg file"
"wakatime-hidden":
lastInit: 1492510437
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
ignoreWhitespaceOnCurrentLine: false
removeTrailingWhitespace: false
| 139091 | "*":
_dart:
_firstRun: true
_version: "1.0.1"
"atom-autocomplete-php": {}
"atom-beautify":
apex: {}
arduino: {}
c: {}
cfml: {}
coffeescript: {}
cpp: {}
cs: {}
css: {}
d: {}
ejs: {}
erb: {}
fortran: {}
general:
_analyticsUserId: "a6669e24-585f-4be5-abcf-1009aef1b8a8"
gherkin: {}
handlebars: {}
html:
brace_style: "expand"
jade: {}
java: {}
js: {}
json: {}
jsx: {}
latex: {}
less: {}
marko: {}
mustache: {}
objectivec: {}
pawn: {}
perl: {}
php: {}
python: {}
riot: {}
ruby: {}
rust: {}
sass: {}
scss: {}
spacebars: {}
sql: {}
svg: {}
swig: {}
tss: {}
twig: {}
typescript: {}
vala: {}
visualforce: {}
xml:
brace_style: "expand"
default_beautifier: "JS Beautify"
indent_inner_html: true
xtemplate: {}
yaml:
disabled: true
"atom-ide-ui":
"atom-ide-diagnostics-ui": {}
use:
"atom-ide-refactor": "never"
"autocomplete-emojis": {}
"autocomplete-plus":
autoActivationDelay: 500
"compatibility-reporter":
browsers: "last 1 version, last 3 IE versions, last 1 Samsung version, last 1 Android version"
excludeVcsIgnoredFiles: false
core:
autoHideMenuBar: true
closeDeletedFileTabs: true
customFileTypes:
"source.ini": [
".buckconfig"
".flowconfig"
".hgrc"
]
"source.json": [
"BUCK.autodeps"
]
"source.python": [
"BUCK"
]
"source.yaml": [
"neon"
"yaml.dist"
"yml.dist"
]
"text.html.basic": [
"thtm"
]
disabledPackages: [
"language-twig"
"linter-jshint"
"linter-jsxhint"
]
followSymlinks: false
ignoredNames: [
".git"
".hg"
".svn"
".wercker"
"_site"
".DS_Store"
"Thumbs.db"
"app/cache"
"_cache"
"_tmp"
"_tools"
"cgi-bin"
".vagrant"
"userfiles"
".nyc_output"
".capistrano"
".serenata"
"_builds"
"_projects"
"_steps"
".rules"
"**/var/cache"
"**/.php_cs.cache"
]
projectHome: "~/src/github.com/aegypius/"
telemetryConsent: "limited"
uriHandlerRegistration: "always"
"custom-folds":
areRegionsFoldedOnLoad: true
"disable-keybindings":
communityPackages: [
"regex-railroad-diagram"
]
docblockr:
per_section_indent: true
editor:
fontFamily: "Fira Code, Source Code Pro"
invisibles: {}
scrollPastEnd: true
showIndentGuide: true
showInvisibles: true
emmet: {}
"exception-reporting":
userId: "042b8423-000c-3604-2877-fa18bc8ce717"
"file-icons": {}
"file-types":
Cargofile$: "source.coffee"
gotpl: "source.gotemplate"
thtm: "source.html"
"find-and-replace":
projectSearchResultsPaneSplitDirection: "right"
"fuzzy-finder":
ignoredNames: [
"app/cache*"
"node_modules"
"vendor"
]
scoringSystem: "fast"
"git-diff": {}
"language-gentoo":
updateManifestOnSave: true
linter:
ignoredLinterErrors: [
"jsonlint"
"php"
"phpcs"
]
subtleLinterErrors: [
"jscs"
]
"linter-docker": {}
"linter-eslint":
disableWhenNoEslintrcFileInPath: true
fixOnSave: true
useGlobalEslint: true
"linter-js-standard-engine":
enabledProjects: 3
"linter-jscs":
harmony: true
onlyConfig: true
"linter-less":
includePath: [
"vendor/bower_components"
]
"linter-phpcs":
disableWhenNoConfigFile: true
"linter-ui-default":
panelHeight: 108
metrics:
userId: "5<KEY>"
"mocha-test-runner":
nodeBinaryPath: "/usr/bin/node"
nuclide:
installRecommendedPackages: true
"nuclide-file-tree":
allowKeyboardPrefixNavigation: false
showOpenFiles: false
showUncommittedChanges: false
"one-dark-ui": {}
"php-cs-fixer":
executablePath: "/home/laurentn/.local/bin/php-cs-fixer"
executeOnSave: true
"php-ide-serenata":
core: {}
general:
doNotAskForSupport: true
doNotShowProjectChangeMessage: true
linting:
showMissingDocs: false
storagePath: "/home/laurentn/.cache/php-ide-serenata"
"php-integrator-base":
general:
indexContinuously: false
linting:
showMissingDocs: false
memoryLimit: 2048
"php-integrator-base:confirmed-upgrade-message": true
"php-integrator-linter":
validateDocblockCorrectness: false
pigments:
groupPaletteColors: "by file"
markerType: "native-dot"
mergeColorDuplicates: true
sortPaletteColors: "by color"
"project-manager":
sortBy: "group"
react: {}
"release-notes":
viewedVersion: "0.1.0"
"spell-check":
locales: [
"en-US"
"fr-FR"
]
"split-diff":
diffWords: true
ignoreWhitespace: true
leftEditorColor: "red"
rightEditorColor: "green"
scrollSyncType: "Vertical + Horizontal"
syncHorizontalScroll: true
"standard-formatter":
checkStyleDevDependencies: true
formatOnSave: true
style: "semi-standard"
tablr:
csvEditor:
header: true
tableEditor: {}
tabs:
enableVcsColoring: true
usePreviewTabs: true
"todo-show":
ignoreThesePaths: [
"node_modules"
"vendor"
"bower_components"
"web"
"lib/3rdParties"
]
"tool-bar": {}
"tree-view":
alwaysOpenExisting: true
hideIgnoredNames: true
wakatime:
apikey: "Saved in your ~/.wakatime.cfg file"
"wakatime-hidden":
lastInit: 1492510437
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
ignoreWhitespaceOnCurrentLine: false
removeTrailingWhitespace: false
| true | "*":
_dart:
_firstRun: true
_version: "1.0.1"
"atom-autocomplete-php": {}
"atom-beautify":
apex: {}
arduino: {}
c: {}
cfml: {}
coffeescript: {}
cpp: {}
cs: {}
css: {}
d: {}
ejs: {}
erb: {}
fortran: {}
general:
_analyticsUserId: "a6669e24-585f-4be5-abcf-1009aef1b8a8"
gherkin: {}
handlebars: {}
html:
brace_style: "expand"
jade: {}
java: {}
js: {}
json: {}
jsx: {}
latex: {}
less: {}
marko: {}
mustache: {}
objectivec: {}
pawn: {}
perl: {}
php: {}
python: {}
riot: {}
ruby: {}
rust: {}
sass: {}
scss: {}
spacebars: {}
sql: {}
svg: {}
swig: {}
tss: {}
twig: {}
typescript: {}
vala: {}
visualforce: {}
xml:
brace_style: "expand"
default_beautifier: "JS Beautify"
indent_inner_html: true
xtemplate: {}
yaml:
disabled: true
"atom-ide-ui":
"atom-ide-diagnostics-ui": {}
use:
"atom-ide-refactor": "never"
"autocomplete-emojis": {}
"autocomplete-plus":
autoActivationDelay: 500
"compatibility-reporter":
browsers: "last 1 version, last 3 IE versions, last 1 Samsung version, last 1 Android version"
excludeVcsIgnoredFiles: false
core:
autoHideMenuBar: true
closeDeletedFileTabs: true
customFileTypes:
"source.ini": [
".buckconfig"
".flowconfig"
".hgrc"
]
"source.json": [
"BUCK.autodeps"
]
"source.python": [
"BUCK"
]
"source.yaml": [
"neon"
"yaml.dist"
"yml.dist"
]
"text.html.basic": [
"thtm"
]
disabledPackages: [
"language-twig"
"linter-jshint"
"linter-jsxhint"
]
followSymlinks: false
ignoredNames: [
".git"
".hg"
".svn"
".wercker"
"_site"
".DS_Store"
"Thumbs.db"
"app/cache"
"_cache"
"_tmp"
"_tools"
"cgi-bin"
".vagrant"
"userfiles"
".nyc_output"
".capistrano"
".serenata"
"_builds"
"_projects"
"_steps"
".rules"
"**/var/cache"
"**/.php_cs.cache"
]
projectHome: "~/src/github.com/aegypius/"
telemetryConsent: "limited"
uriHandlerRegistration: "always"
"custom-folds":
areRegionsFoldedOnLoad: true
"disable-keybindings":
communityPackages: [
"regex-railroad-diagram"
]
docblockr:
per_section_indent: true
editor:
fontFamily: "Fira Code, Source Code Pro"
invisibles: {}
scrollPastEnd: true
showIndentGuide: true
showInvisibles: true
emmet: {}
"exception-reporting":
userId: "042b8423-000c-3604-2877-fa18bc8ce717"
"file-icons": {}
"file-types":
Cargofile$: "source.coffee"
gotpl: "source.gotemplate"
thtm: "source.html"
"find-and-replace":
projectSearchResultsPaneSplitDirection: "right"
"fuzzy-finder":
ignoredNames: [
"app/cache*"
"node_modules"
"vendor"
]
scoringSystem: "fast"
"git-diff": {}
"language-gentoo":
updateManifestOnSave: true
linter:
ignoredLinterErrors: [
"jsonlint"
"php"
"phpcs"
]
subtleLinterErrors: [
"jscs"
]
"linter-docker": {}
"linter-eslint":
disableWhenNoEslintrcFileInPath: true
fixOnSave: true
useGlobalEslint: true
"linter-js-standard-engine":
enabledProjects: 3
"linter-jscs":
harmony: true
onlyConfig: true
"linter-less":
includePath: [
"vendor/bower_components"
]
"linter-phpcs":
disableWhenNoConfigFile: true
"linter-ui-default":
panelHeight: 108
metrics:
userId: "5PI:KEY:<KEY>END_PI"
"mocha-test-runner":
nodeBinaryPath: "/usr/bin/node"
nuclide:
installRecommendedPackages: true
"nuclide-file-tree":
allowKeyboardPrefixNavigation: false
showOpenFiles: false
showUncommittedChanges: false
"one-dark-ui": {}
"php-cs-fixer":
executablePath: "/home/laurentn/.local/bin/php-cs-fixer"
executeOnSave: true
"php-ide-serenata":
core: {}
general:
doNotAskForSupport: true
doNotShowProjectChangeMessage: true
linting:
showMissingDocs: false
storagePath: "/home/laurentn/.cache/php-ide-serenata"
"php-integrator-base":
general:
indexContinuously: false
linting:
showMissingDocs: false
memoryLimit: 2048
"php-integrator-base:confirmed-upgrade-message": true
"php-integrator-linter":
validateDocblockCorrectness: false
pigments:
groupPaletteColors: "by file"
markerType: "native-dot"
mergeColorDuplicates: true
sortPaletteColors: "by color"
"project-manager":
sortBy: "group"
react: {}
"release-notes":
viewedVersion: "0.1.0"
"spell-check":
locales: [
"en-US"
"fr-FR"
]
"split-diff":
diffWords: true
ignoreWhitespace: true
leftEditorColor: "red"
rightEditorColor: "green"
scrollSyncType: "Vertical + Horizontal"
syncHorizontalScroll: true
"standard-formatter":
checkStyleDevDependencies: true
formatOnSave: true
style: "semi-standard"
tablr:
csvEditor:
header: true
tableEditor: {}
tabs:
enableVcsColoring: true
usePreviewTabs: true
"todo-show":
ignoreThesePaths: [
"node_modules"
"vendor"
"bower_components"
"web"
"lib/3rdParties"
]
"tool-bar": {}
"tree-view":
alwaysOpenExisting: true
hideIgnoredNames: true
wakatime:
apikey: "Saved in your ~/.wakatime.cfg file"
"wakatime-hidden":
lastInit: 1492510437
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
ignoreWhitespaceOnCurrentLine: false
removeTrailingWhitespace: false
|
[
{
"context": "# (C) 2015-present Antoine Catton <devel at antoine dot catton dot fr>\n#\n# This fil",
"end": 33,
"score": 0.9998884201049805,
"start": 19,
"tag": "NAME",
"value": "Antoine Catton"
}
] | src/script.coffee | acatton/mhc.js | 0 | # (C) 2015-present Antoine Catton <devel at antoine dot catton dot fr>
#
# This file is part of mhc.js.
#
# mhc.js is a free software: you can redistribute it and/or modify it under the
# terms of the MIT License as published by the Open Source Initiative.
#
# mhc.js is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the MIT License for more details.
#
# You should have received a copy of the MIT License along with mhc.js. If not,
# see <http://opensource.org/licenses/MIT>.
BYTE_LENGTH = 8
ASCII_LEN = 7
ASCII_MASK = ( 1 << ASCII_LEN ) - 1
BASE_64 = 64
UTF8_MAX_BIT_AMOUNT = 31
UTF8_BYTE_BITS = 6
UTF8_BYTE_MASK = ( 1 << UTF8_BYTE_BITS ) - 1
UTF8_EXTRA_BITS = 0x80
uint8ArrayFrom = (arr) ->
ret = new Uint8Array arr.length
ret.set arr
return ret
baseLog = (n, base) -> ( Math.log n ) / ( Math.log base )
utf8Encode = (val) ->
countBits = (n) -> 1 + Math.floor baseLog n, 2
bitsAmount = countBits val
if bitsAmount <= ASCII_LEN
[val]
else
if bitsAmount > UTF8_MAX_BIT_AMOUNT
throw new Error(
"UTF-8 doesn't support characters encoded on more than " +
UTF8_MAX_BIT_AMOUNT + "bits"
)
byteAmount = 0
bytes = []
lastByteBitAmount = () -> BYTE_LENGTH - 2 - byteAmount
lastByteExtraBits = () ->
extraBits = (1 << byteAmount) - 1
extraBits <<= 2
extraBits |= 0x2
extraBits <<= lastByteBitAmount()
extraBits
while countBits(val) > lastByteBitAmount()
bytes.unshift ( val & UTF8_BYTE_MASK ) | UTF8_EXTRA_BITS
val >>= UTF8_BYTE_BITS
byteAmount++
bytes.unshift ( val | lastByteExtraBits() )
return bytes
strToUint8Array = (str) ->
# TODO: Replace me by Uint8Array.from in Firefox 38
uint8ArrayFrom str.split('').reduce (accumulator, element) ->
accumulator.concat utf8Encode element.charCodeAt(0)
, []
sha1Uint8Array = (arr) ->
( window.crypto.subtle.digest "SHA-1", arr.buffer ).then (result) ->
new Uint8Array result
arrayToHex = (arr) ->
toHex = (i) -> i.toString(16)
reduce = (arr, callback, initialValue) ->
acc = initialValue
for i in [0..arr.length-1]
acc = callback(acc, arr[i])
return acc
reduce arr, (accumulator, element) ->
accumulator + if element > 16
toHex element
else
"0" + toHex element
, ""
HASHCASH_DEFAULT_VERSION = 1
HASHCASH_DEFAULT_BITS = 20 # This is a default recommended amount of bits
HASHCASH_DEFAULT_RAND_LEN = 16
hashcashDate = (d) ->
toTwoNums = (n) ->
n = n % 100
if n < 10
"0" + n
else
"" + n
[d.getFullYear(), d.getMonth(), d.getHours(), d.getMinutes(),
d.getSeconds()].reduce (accumulator, elem) ->
accumulator + toTwoNums elem
, ""
hashcashToString = (version, bits, date, resource, extension, rand, counter) ->
version ?= HASHCASH_DEFAULT_VERSION
bits ?= HASHCASH_DEFAULT_BITS
date ?= new Date()
date = if typeof date is "string" then date else hashcashDate date
resource ?= ""
extension ?= ""
counter ?= ""
return [version, bits, date, resource, extension, rand, counter].join ':'
intToBase64Char = (i) ->
charOffset = (chr, offset) -> String.fromCharCode chr.charCodeAt(0) + offset
if 0 <= i <= 25
charOffset 'A', i
else if 26 <= i <= 51
charOffset 'a', i - 26
else if 52 <= i <= 61
charOffset '0', i - 52
else if i == 62
'+'
else if i == 63
'/'
else
throw new Error("intToBase64Char is supposed to get an ingeter " +
"between 0 and 63")
randomBase64String = (len) ->
(intToBase64Char 0 | 63 * Math.random() for i in [1..len]).join ''
strToHexDigest = (str) ->
(sha1Uint8Array strToUint8Array str).then (arr) -> arrayToHex arr
toResource = (val) ->
# This is not cryptographically safe
#
# The goal of this function is to have a safe and stable string encoding
# of a resource.
hashKeyValue = (key, value) ->
Promise.all([key, value].map toResource) \
.then (resources) ->
Promise.all resources.map strToHexDigest
.then (digests) ->
strToHexDigest digests.join ''
hashArray = (arr) ->
Promise.all( arr.map toResource ) \
.then (resources) ->
Promise.all( resources.map strToHexDigest )
.then (hashes) ->
hashes.sort()
strToHexDigest hashes.join ''
hashObject = (obj) ->
Promise.all( hashKeyValue key, value for own key, value of obj \
when typeof value isnt "function" ) \
.then (arr) -> hashArray arr
stringToResource = (str) ->
if str.match /:/
strToHexDigest str
else
new Promise (resolve, reject) -> resolve str
numberToResource = (n) -> new Promise (resolve, reject) -> resolve "" + n
arrayToResource = (arr) -> hashArray arr
objectToResource = (obj) -> hashObject obj
if typeof val is "string"
stringToResource val
else if typeof val is "number"
numberToResource val
else if val.isArray
arrayToResource val
else if typeof val is "object"
objectToResource val
else
throw new Error("toResource only supports strings, numbers and objects")
class window.Hashcash
constructor: (str) ->
[@version, @bits, @date, @resource, @extension, @rand, @counter] = \
str.split(':')
@bits ?= HASHCASH_DEFAULT_BITS
@bits = @bits | 0
toString: () ->
hashcashToString @version, @bits, @date, @resource, @extension, @rand, \
@counter
findSolution: () ->
stringOfCounter = (counter) -> ( counter.map intToBase64Char ).join ''
hashcashStringOfCounter = (counter) =>
hashcashToString @version, @bits, @date, @resource, @extension, \
@rand, stringOfCounter counter
isValid = (counter) =>
str = hashcashStringOfCounter counter
( sha1Uint8Array strToUint8Array str ).then (result) =>
( uint8ArrayNullBitCount result ) >= @bits
incrementCounter = (counter) ->
for i, val of counter
counter[i] = (val + 1) % BASE_64
if counter[i] > 0
return counter
counter.push 0
return counter
hashcashFromCounter = (counter) ->
new Hashcash hashcashStringOfCounter counter
new Promise (resolve, reject) ->
counter = [0]
check = (counter) ->
isValid(counter).then (counterIsValid) ->
if counterIsValid
resolve hashcashFromCounter counter
else
check incrementCounter counter
check counter
window.Hashcash.fromResource = (resource) ->
rand = randomBase64String HASHCASH_DEFAULT_RAND_LEN
new Hashcash hashcashToString null, null, null, resource, null, rand, null
headNullBitCount = (n, length) ->
if n == 0
length
else
length - (1 + Math.floor baseLog n, 2)
uint8ArrayNullBitCount = (arr) ->
acc = 0
for i in [0..arr.length-1]
value = arr[i]
acc += headNullBitCount value, BYTE_LENGTH
if value > 0
break
return acc
| 24478 | # (C) 2015-present <NAME> <devel at antoine dot catton dot fr>
#
# This file is part of mhc.js.
#
# mhc.js is a free software: you can redistribute it and/or modify it under the
# terms of the MIT License as published by the Open Source Initiative.
#
# mhc.js is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the MIT License for more details.
#
# You should have received a copy of the MIT License along with mhc.js. If not,
# see <http://opensource.org/licenses/MIT>.
BYTE_LENGTH = 8
ASCII_LEN = 7
ASCII_MASK = ( 1 << ASCII_LEN ) - 1
BASE_64 = 64
UTF8_MAX_BIT_AMOUNT = 31
UTF8_BYTE_BITS = 6
UTF8_BYTE_MASK = ( 1 << UTF8_BYTE_BITS ) - 1
UTF8_EXTRA_BITS = 0x80
uint8ArrayFrom = (arr) ->
ret = new Uint8Array arr.length
ret.set arr
return ret
baseLog = (n, base) -> ( Math.log n ) / ( Math.log base )
utf8Encode = (val) ->
countBits = (n) -> 1 + Math.floor baseLog n, 2
bitsAmount = countBits val
if bitsAmount <= ASCII_LEN
[val]
else
if bitsAmount > UTF8_MAX_BIT_AMOUNT
throw new Error(
"UTF-8 doesn't support characters encoded on more than " +
UTF8_MAX_BIT_AMOUNT + "bits"
)
byteAmount = 0
bytes = []
lastByteBitAmount = () -> BYTE_LENGTH - 2 - byteAmount
lastByteExtraBits = () ->
extraBits = (1 << byteAmount) - 1
extraBits <<= 2
extraBits |= 0x2
extraBits <<= lastByteBitAmount()
extraBits
while countBits(val) > lastByteBitAmount()
bytes.unshift ( val & UTF8_BYTE_MASK ) | UTF8_EXTRA_BITS
val >>= UTF8_BYTE_BITS
byteAmount++
bytes.unshift ( val | lastByteExtraBits() )
return bytes
strToUint8Array = (str) ->
# TODO: Replace me by Uint8Array.from in Firefox 38
uint8ArrayFrom str.split('').reduce (accumulator, element) ->
accumulator.concat utf8Encode element.charCodeAt(0)
, []
sha1Uint8Array = (arr) ->
( window.crypto.subtle.digest "SHA-1", arr.buffer ).then (result) ->
new Uint8Array result
arrayToHex = (arr) ->
toHex = (i) -> i.toString(16)
reduce = (arr, callback, initialValue) ->
acc = initialValue
for i in [0..arr.length-1]
acc = callback(acc, arr[i])
return acc
reduce arr, (accumulator, element) ->
accumulator + if element > 16
toHex element
else
"0" + toHex element
, ""
HASHCASH_DEFAULT_VERSION = 1
HASHCASH_DEFAULT_BITS = 20 # This is a default recommended amount of bits
HASHCASH_DEFAULT_RAND_LEN = 16
hashcashDate = (d) ->
toTwoNums = (n) ->
n = n % 100
if n < 10
"0" + n
else
"" + n
[d.getFullYear(), d.getMonth(), d.getHours(), d.getMinutes(),
d.getSeconds()].reduce (accumulator, elem) ->
accumulator + toTwoNums elem
, ""
hashcashToString = (version, bits, date, resource, extension, rand, counter) ->
version ?= HASHCASH_DEFAULT_VERSION
bits ?= HASHCASH_DEFAULT_BITS
date ?= new Date()
date = if typeof date is "string" then date else hashcashDate date
resource ?= ""
extension ?= ""
counter ?= ""
return [version, bits, date, resource, extension, rand, counter].join ':'
intToBase64Char = (i) ->
charOffset = (chr, offset) -> String.fromCharCode chr.charCodeAt(0) + offset
if 0 <= i <= 25
charOffset 'A', i
else if 26 <= i <= 51
charOffset 'a', i - 26
else if 52 <= i <= 61
charOffset '0', i - 52
else if i == 62
'+'
else if i == 63
'/'
else
throw new Error("intToBase64Char is supposed to get an ingeter " +
"between 0 and 63")
randomBase64String = (len) ->
(intToBase64Char 0 | 63 * Math.random() for i in [1..len]).join ''
strToHexDigest = (str) ->
(sha1Uint8Array strToUint8Array str).then (arr) -> arrayToHex arr
toResource = (val) ->
# This is not cryptographically safe
#
# The goal of this function is to have a safe and stable string encoding
# of a resource.
hashKeyValue = (key, value) ->
Promise.all([key, value].map toResource) \
.then (resources) ->
Promise.all resources.map strToHexDigest
.then (digests) ->
strToHexDigest digests.join ''
hashArray = (arr) ->
Promise.all( arr.map toResource ) \
.then (resources) ->
Promise.all( resources.map strToHexDigest )
.then (hashes) ->
hashes.sort()
strToHexDigest hashes.join ''
hashObject = (obj) ->
Promise.all( hashKeyValue key, value for own key, value of obj \
when typeof value isnt "function" ) \
.then (arr) -> hashArray arr
stringToResource = (str) ->
if str.match /:/
strToHexDigest str
else
new Promise (resolve, reject) -> resolve str
numberToResource = (n) -> new Promise (resolve, reject) -> resolve "" + n
arrayToResource = (arr) -> hashArray arr
objectToResource = (obj) -> hashObject obj
if typeof val is "string"
stringToResource val
else if typeof val is "number"
numberToResource val
else if val.isArray
arrayToResource val
else if typeof val is "object"
objectToResource val
else
throw new Error("toResource only supports strings, numbers and objects")
class window.Hashcash
constructor: (str) ->
[@version, @bits, @date, @resource, @extension, @rand, @counter] = \
str.split(':')
@bits ?= HASHCASH_DEFAULT_BITS
@bits = @bits | 0
toString: () ->
hashcashToString @version, @bits, @date, @resource, @extension, @rand, \
@counter
findSolution: () ->
stringOfCounter = (counter) -> ( counter.map intToBase64Char ).join ''
hashcashStringOfCounter = (counter) =>
hashcashToString @version, @bits, @date, @resource, @extension, \
@rand, stringOfCounter counter
isValid = (counter) =>
str = hashcashStringOfCounter counter
( sha1Uint8Array strToUint8Array str ).then (result) =>
( uint8ArrayNullBitCount result ) >= @bits
incrementCounter = (counter) ->
for i, val of counter
counter[i] = (val + 1) % BASE_64
if counter[i] > 0
return counter
counter.push 0
return counter
hashcashFromCounter = (counter) ->
new Hashcash hashcashStringOfCounter counter
new Promise (resolve, reject) ->
counter = [0]
check = (counter) ->
isValid(counter).then (counterIsValid) ->
if counterIsValid
resolve hashcashFromCounter counter
else
check incrementCounter counter
check counter
window.Hashcash.fromResource = (resource) ->
rand = randomBase64String HASHCASH_DEFAULT_RAND_LEN
new Hashcash hashcashToString null, null, null, resource, null, rand, null
headNullBitCount = (n, length) ->
if n == 0
length
else
length - (1 + Math.floor baseLog n, 2)
uint8ArrayNullBitCount = (arr) ->
acc = 0
for i in [0..arr.length-1]
value = arr[i]
acc += headNullBitCount value, BYTE_LENGTH
if value > 0
break
return acc
| true | # (C) 2015-present PI:NAME:<NAME>END_PI <devel at antoine dot catton dot fr>
#
# This file is part of mhc.js.
#
# mhc.js is a free software: you can redistribute it and/or modify it under the
# terms of the MIT License as published by the Open Source Initiative.
#
# mhc.js is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the MIT License for more details.
#
# You should have received a copy of the MIT License along with mhc.js. If not,
# see <http://opensource.org/licenses/MIT>.
BYTE_LENGTH = 8
ASCII_LEN = 7
ASCII_MASK = ( 1 << ASCII_LEN ) - 1
BASE_64 = 64
UTF8_MAX_BIT_AMOUNT = 31
UTF8_BYTE_BITS = 6
UTF8_BYTE_MASK = ( 1 << UTF8_BYTE_BITS ) - 1
UTF8_EXTRA_BITS = 0x80
uint8ArrayFrom = (arr) ->
ret = new Uint8Array arr.length
ret.set arr
return ret
baseLog = (n, base) -> ( Math.log n ) / ( Math.log base )
utf8Encode = (val) ->
countBits = (n) -> 1 + Math.floor baseLog n, 2
bitsAmount = countBits val
if bitsAmount <= ASCII_LEN
[val]
else
if bitsAmount > UTF8_MAX_BIT_AMOUNT
throw new Error(
"UTF-8 doesn't support characters encoded on more than " +
UTF8_MAX_BIT_AMOUNT + "bits"
)
byteAmount = 0
bytes = []
lastByteBitAmount = () -> BYTE_LENGTH - 2 - byteAmount
lastByteExtraBits = () ->
extraBits = (1 << byteAmount) - 1
extraBits <<= 2
extraBits |= 0x2
extraBits <<= lastByteBitAmount()
extraBits
while countBits(val) > lastByteBitAmount()
bytes.unshift ( val & UTF8_BYTE_MASK ) | UTF8_EXTRA_BITS
val >>= UTF8_BYTE_BITS
byteAmount++
bytes.unshift ( val | lastByteExtraBits() )
return bytes
strToUint8Array = (str) ->
# TODO: Replace me by Uint8Array.from in Firefox 38
uint8ArrayFrom str.split('').reduce (accumulator, element) ->
accumulator.concat utf8Encode element.charCodeAt(0)
, []
sha1Uint8Array = (arr) ->
( window.crypto.subtle.digest "SHA-1", arr.buffer ).then (result) ->
new Uint8Array result
arrayToHex = (arr) ->
toHex = (i) -> i.toString(16)
reduce = (arr, callback, initialValue) ->
acc = initialValue
for i in [0..arr.length-1]
acc = callback(acc, arr[i])
return acc
reduce arr, (accumulator, element) ->
accumulator + if element > 16
toHex element
else
"0" + toHex element
, ""
HASHCASH_DEFAULT_VERSION = 1
HASHCASH_DEFAULT_BITS = 20 # This is a default recommended amount of bits
HASHCASH_DEFAULT_RAND_LEN = 16
hashcashDate = (d) ->
toTwoNums = (n) ->
n = n % 100
if n < 10
"0" + n
else
"" + n
[d.getFullYear(), d.getMonth(), d.getHours(), d.getMinutes(),
d.getSeconds()].reduce (accumulator, elem) ->
accumulator + toTwoNums elem
, ""
hashcashToString = (version, bits, date, resource, extension, rand, counter) ->
version ?= HASHCASH_DEFAULT_VERSION
bits ?= HASHCASH_DEFAULT_BITS
date ?= new Date()
date = if typeof date is "string" then date else hashcashDate date
resource ?= ""
extension ?= ""
counter ?= ""
return [version, bits, date, resource, extension, rand, counter].join ':'
intToBase64Char = (i) ->
charOffset = (chr, offset) -> String.fromCharCode chr.charCodeAt(0) + offset
if 0 <= i <= 25
charOffset 'A', i
else if 26 <= i <= 51
charOffset 'a', i - 26
else if 52 <= i <= 61
charOffset '0', i - 52
else if i == 62
'+'
else if i == 63
'/'
else
throw new Error("intToBase64Char is supposed to get an ingeter " +
"between 0 and 63")
randomBase64String = (len) ->
(intToBase64Char 0 | 63 * Math.random() for i in [1..len]).join ''
strToHexDigest = (str) ->
(sha1Uint8Array strToUint8Array str).then (arr) -> arrayToHex arr
toResource = (val) ->
# This is not cryptographically safe
#
# The goal of this function is to have a safe and stable string encoding
# of a resource.
hashKeyValue = (key, value) ->
Promise.all([key, value].map toResource) \
.then (resources) ->
Promise.all resources.map strToHexDigest
.then (digests) ->
strToHexDigest digests.join ''
hashArray = (arr) ->
Promise.all( arr.map toResource ) \
.then (resources) ->
Promise.all( resources.map strToHexDigest )
.then (hashes) ->
hashes.sort()
strToHexDigest hashes.join ''
hashObject = (obj) ->
Promise.all( hashKeyValue key, value for own key, value of obj \
when typeof value isnt "function" ) \
.then (arr) -> hashArray arr
stringToResource = (str) ->
if str.match /:/
strToHexDigest str
else
new Promise (resolve, reject) -> resolve str
numberToResource = (n) -> new Promise (resolve, reject) -> resolve "" + n
arrayToResource = (arr) -> hashArray arr
objectToResource = (obj) -> hashObject obj
if typeof val is "string"
stringToResource val
else if typeof val is "number"
numberToResource val
else if val.isArray
arrayToResource val
else if typeof val is "object"
objectToResource val
else
throw new Error("toResource only supports strings, numbers and objects")
class window.Hashcash
constructor: (str) ->
[@version, @bits, @date, @resource, @extension, @rand, @counter] = \
str.split(':')
@bits ?= HASHCASH_DEFAULT_BITS
@bits = @bits | 0
toString: () ->
hashcashToString @version, @bits, @date, @resource, @extension, @rand, \
@counter
findSolution: () ->
stringOfCounter = (counter) -> ( counter.map intToBase64Char ).join ''
hashcashStringOfCounter = (counter) =>
hashcashToString @version, @bits, @date, @resource, @extension, \
@rand, stringOfCounter counter
isValid = (counter) =>
str = hashcashStringOfCounter counter
( sha1Uint8Array strToUint8Array str ).then (result) =>
( uint8ArrayNullBitCount result ) >= @bits
incrementCounter = (counter) ->
for i, val of counter
counter[i] = (val + 1) % BASE_64
if counter[i] > 0
return counter
counter.push 0
return counter
hashcashFromCounter = (counter) ->
new Hashcash hashcashStringOfCounter counter
new Promise (resolve, reject) ->
counter = [0]
check = (counter) ->
isValid(counter).then (counterIsValid) ->
if counterIsValid
resolve hashcashFromCounter counter
else
check incrementCounter counter
check counter
window.Hashcash.fromResource = (resource) ->
rand = randomBase64String HASHCASH_DEFAULT_RAND_LEN
new Hashcash hashcashToString null, null, null, resource, null, rand, null
headNullBitCount = (n, length) ->
if n == 0
length
else
length - (1 + Math.floor baseLog n, 2)
uint8ArrayNullBitCount = (arr) ->
acc = 0
for i in [0..arr.length-1]
value = arr[i]
acc += headNullBitCount value, BYTE_LENGTH
if value > 0
break
return acc
|
[
{
"context": "# Copyright (c) 2014 sobataro <sobatarooo@gmail.com>\n# Released under the MIT l",
"end": 29,
"score": 0.9272405505180359,
"start": 21,
"tag": "USERNAME",
"value": "sobataro"
},
{
"context": "# Copyright (c) 2014 sobataro <sobatarooo@gmail.com>\n# Released under the MIT... | src/pull_request_tuple.coffee | sobataro/robotaro | 2 | # Copyright (c) 2014 sobataro <sobatarooo@gmail.com>
# Released under the MIT license
# http://opensource.org/licenses/mit-license.php
Q = require("q")
config = require("config")
class PullRequestTuple
constructor: (issue, githubot) ->
@issue = issue
@_reposMustWatchStatuses = config.get("github.reposMustWatchStatuses")
@_githubot = githubot
@_lastComment = null
@_lastCommentDate = new Date(0)
@_lgtmCount = 0
@_lastStatus = null
fetch: () ->
d = Q.defer()
Q.when()
.then () => @_fetchComment()
.then () => @_fetchPullRequest()
.then () => @_fetchStatuses()
.done (tuple) ->
process.stdout.write(".")
d.resolve tuple
d.promise
_fetchComment: () ->
d = Q.defer()
if @issue.comments > 0
@_githubot.get @issue.comments_url, (comments) =>
@comments = comments
d.resolve this
else
@comments = []
d.resolve this
d.promise
_fetchPullRequest: () ->
d = Q.defer()
@_githubot.get @issue.pull_request.url, (pullRequest) =>
@pullRequest = pullRequest
d.resolve this
d.promise
_fetchStatuses: () ->
d = Q.defer()
if @_mustWatchStatuses()
@_githubot.get @pullRequest.statuses_url, (statuses) =>
@statuses = statuses
d.resolve this
else
d.resolve this
d.promise
makeIndicators: () ->
for comment in @comments
date = new Date(comment.created_at)
if comment.body? and @_lastCommentDate <= date
@_lastComment = comment
@_lastCommentDate = date
if @_isLGTM comment
@_lgtmCount = @_lgtmCount + 1
if @_mustWatchStatuses()
lastStatusUpdated = new Date(0)
for status in @statuses
date = new Date(status.updated_at)
if lastStatusUpdated < date
lastStatusUpdated = date
@_lastStatus = status
_mustWatchStatuses: () ->
array = @issue.url.split("/")
repo = array[array.length - 3] # https://api.github.com/repos/[org-name]/[repo-name]/issues/[number]
@_reposMustWatchStatuses.indexOf(repo) >= 0
_getLastCommenter: () ->
@_lastComment?.user.login
_getPullRequestAuthor: () ->
@pullRequest?.user.login
_isLastStatusSuccessOrPending: () ->
return true if not @_mustWatchStatuses()
state = @_lastStatus?.state
state == "success" or state == "pending"
# if PR is labeled as "wip" or "do not merge", it is ok to be ignored...
_isWIP: () ->
for label in @issue.labels
l = label.name.toLowerCase().replace(/\ /g, "")
return true if l == "wip" or l == "donotmerge" or l == "dontmerge"
return @pullRequest.title.toLowerCase().search(/wip/) > -1
_isLGTM: (comment) ->
comment?.body?.toLowerCase().replace(/(.)\1{2,}/gi, "$1").replace(/\s/g, "")
.search(/lgtm|loksgodtome|lgtm/) > -1
_isOld: (date) ->
return true if not date?
now = new Date()
now - date >= config.get("waitAndSeeMinutes") * 60 * 1000
# check if this PR has ignored and not commented
isNotCommented: () ->
@comments?.length == 0 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has two or more LGTM and last comment is LGTM, but is not merged
isUnmergedLGTM: () ->
@_lgtmCount >= 2 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has any comments, but seems to be ignored
isHalfwayReviewedAndForgotten: () ->
@comments?.length > 0 and @_isOld(@_lastCommentDate) and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR is conflicting or not
isConflicting: () ->
@pullRequest.mergeable == false and not @_isWIP()
# text formatter
makeReplyToPRCreaterCommenterText: () ->
lastCommenter = @_getLastCommenter()
prAuthor = @_getPullRequestAuthor()
return "@channel" if lastCommenter == prAuthor
return "@#{lastCommenter} @#{prAuthor}"
makeReplyToPRCreaterText: () ->
"@#{@_getPullRequestAuthor()}"
makeDescriptionText: () ->
"#{@pullRequest.title} (#{@pullRequest.html_url})"
module.exports = PullRequestTuple
| 91152 | # Copyright (c) 2014 sobataro <<EMAIL>>
# Released under the MIT license
# http://opensource.org/licenses/mit-license.php
Q = require("q")
config = require("config")
class PullRequestTuple
constructor: (issue, githubot) ->
@issue = issue
@_reposMustWatchStatuses = config.get("github.reposMustWatchStatuses")
@_githubot = githubot
@_lastComment = null
@_lastCommentDate = new Date(0)
@_lgtmCount = 0
@_lastStatus = null
fetch: () ->
d = Q.defer()
Q.when()
.then () => @_fetchComment()
.then () => @_fetchPullRequest()
.then () => @_fetchStatuses()
.done (tuple) ->
process.stdout.write(".")
d.resolve tuple
d.promise
_fetchComment: () ->
d = Q.defer()
if @issue.comments > 0
@_githubot.get @issue.comments_url, (comments) =>
@comments = comments
d.resolve this
else
@comments = []
d.resolve this
d.promise
_fetchPullRequest: () ->
d = Q.defer()
@_githubot.get @issue.pull_request.url, (pullRequest) =>
@pullRequest = pullRequest
d.resolve this
d.promise
_fetchStatuses: () ->
d = Q.defer()
if @_mustWatchStatuses()
@_githubot.get @pullRequest.statuses_url, (statuses) =>
@statuses = statuses
d.resolve this
else
d.resolve this
d.promise
makeIndicators: () ->
for comment in @comments
date = new Date(comment.created_at)
if comment.body? and @_lastCommentDate <= date
@_lastComment = comment
@_lastCommentDate = date
if @_isLGTM comment
@_lgtmCount = @_lgtmCount + 1
if @_mustWatchStatuses()
lastStatusUpdated = new Date(0)
for status in @statuses
date = new Date(status.updated_at)
if lastStatusUpdated < date
lastStatusUpdated = date
@_lastStatus = status
_mustWatchStatuses: () ->
array = @issue.url.split("/")
repo = array[array.length - 3] # https://api.github.com/repos/[org-name]/[repo-name]/issues/[number]
@_reposMustWatchStatuses.indexOf(repo) >= 0
_getLastCommenter: () ->
@_lastComment?.user.login
_getPullRequestAuthor: () ->
@pullRequest?.user.login
_isLastStatusSuccessOrPending: () ->
return true if not @_mustWatchStatuses()
state = @_lastStatus?.state
state == "success" or state == "pending"
# if PR is labeled as "wip" or "do not merge", it is ok to be ignored...
_isWIP: () ->
for label in @issue.labels
l = label.name.toLowerCase().replace(/\ /g, "")
return true if l == "wip" or l == "donotmerge" or l == "dontmerge"
return @pullRequest.title.toLowerCase().search(/wip/) > -1
_isLGTM: (comment) ->
comment?.body?.toLowerCase().replace(/(.)\1{2,}/gi, "$1").replace(/\s/g, "")
.search(/lgtm|loksgodtome|lgtm/) > -1
_isOld: (date) ->
return true if not date?
now = new Date()
now - date >= config.get("waitAndSeeMinutes") * 60 * 1000
# check if this PR has ignored and not commented
isNotCommented: () ->
@comments?.length == 0 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has two or more LGTM and last comment is LGTM, but is not merged
isUnmergedLGTM: () ->
@_lgtmCount >= 2 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has any comments, but seems to be ignored
isHalfwayReviewedAndForgotten: () ->
@comments?.length > 0 and @_isOld(@_lastCommentDate) and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR is conflicting or not
isConflicting: () ->
@pullRequest.mergeable == false and not @_isWIP()
# text formatter
makeReplyToPRCreaterCommenterText: () ->
lastCommenter = @_getLastCommenter()
prAuthor = @_getPullRequestAuthor()
return "@channel" if lastCommenter == prAuthor
return "@#{lastCommenter} @#{prAuthor}"
makeReplyToPRCreaterText: () ->
"@#{@_getPullRequestAuthor()}"
makeDescriptionText: () ->
"#{@pullRequest.title} (#{@pullRequest.html_url})"
module.exports = PullRequestTuple
| true | # Copyright (c) 2014 sobataro <PI:EMAIL:<EMAIL>END_PI>
# Released under the MIT license
# http://opensource.org/licenses/mit-license.php
Q = require("q")
config = require("config")
class PullRequestTuple
constructor: (issue, githubot) ->
@issue = issue
@_reposMustWatchStatuses = config.get("github.reposMustWatchStatuses")
@_githubot = githubot
@_lastComment = null
@_lastCommentDate = new Date(0)
@_lgtmCount = 0
@_lastStatus = null
fetch: () ->
d = Q.defer()
Q.when()
.then () => @_fetchComment()
.then () => @_fetchPullRequest()
.then () => @_fetchStatuses()
.done (tuple) ->
process.stdout.write(".")
d.resolve tuple
d.promise
_fetchComment: () ->
d = Q.defer()
if @issue.comments > 0
@_githubot.get @issue.comments_url, (comments) =>
@comments = comments
d.resolve this
else
@comments = []
d.resolve this
d.promise
_fetchPullRequest: () ->
d = Q.defer()
@_githubot.get @issue.pull_request.url, (pullRequest) =>
@pullRequest = pullRequest
d.resolve this
d.promise
_fetchStatuses: () ->
d = Q.defer()
if @_mustWatchStatuses()
@_githubot.get @pullRequest.statuses_url, (statuses) =>
@statuses = statuses
d.resolve this
else
d.resolve this
d.promise
makeIndicators: () ->
for comment in @comments
date = new Date(comment.created_at)
if comment.body? and @_lastCommentDate <= date
@_lastComment = comment
@_lastCommentDate = date
if @_isLGTM comment
@_lgtmCount = @_lgtmCount + 1
if @_mustWatchStatuses()
lastStatusUpdated = new Date(0)
for status in @statuses
date = new Date(status.updated_at)
if lastStatusUpdated < date
lastStatusUpdated = date
@_lastStatus = status
_mustWatchStatuses: () ->
array = @issue.url.split("/")
repo = array[array.length - 3] # https://api.github.com/repos/[org-name]/[repo-name]/issues/[number]
@_reposMustWatchStatuses.indexOf(repo) >= 0
_getLastCommenter: () ->
@_lastComment?.user.login
_getPullRequestAuthor: () ->
@pullRequest?.user.login
_isLastStatusSuccessOrPending: () ->
return true if not @_mustWatchStatuses()
state = @_lastStatus?.state
state == "success" or state == "pending"
# if PR is labeled as "wip" or "do not merge", it is ok to be ignored...
_isWIP: () ->
for label in @issue.labels
l = label.name.toLowerCase().replace(/\ /g, "")
return true if l == "wip" or l == "donotmerge" or l == "dontmerge"
return @pullRequest.title.toLowerCase().search(/wip/) > -1
_isLGTM: (comment) ->
comment?.body?.toLowerCase().replace(/(.)\1{2,}/gi, "$1").replace(/\s/g, "")
.search(/lgtm|loksgodtome|lgtm/) > -1
_isOld: (date) ->
return true if not date?
now = new Date()
now - date >= config.get("waitAndSeeMinutes") * 60 * 1000
# check if this PR has ignored and not commented
isNotCommented: () ->
@comments?.length == 0 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has two or more LGTM and last comment is LGTM, but is not merged
isUnmergedLGTM: () ->
@_lgtmCount >= 2 and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR has any comments, but seems to be ignored
isHalfwayReviewedAndForgotten: () ->
@comments?.length > 0 and @_isOld(@_lastCommentDate) and not @_isWIP() and @_isLastStatusSuccessOrPending()
# check if this PR is conflicting or not
isConflicting: () ->
@pullRequest.mergeable == false and not @_isWIP()
# text formatter
makeReplyToPRCreaterCommenterText: () ->
lastCommenter = @_getLastCommenter()
prAuthor = @_getPullRequestAuthor()
return "@channel" if lastCommenter == prAuthor
return "@#{lastCommenter} @#{prAuthor}"
makeReplyToPRCreaterText: () ->
"@#{@_getPullRequestAuthor()}"
makeDescriptionText: () ->
"#{@pullRequest.title} (#{@pullRequest.html_url})"
module.exports = PullRequestTuple
|
[
{
"context": "###\n# models.coffee\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# This module pr",
"end": 42,
"score": 0.9997896552085876,
"start": 31,
"tag": "NAME",
"value": "Dan Nichols"
}
] | lib/models.coffee | dlnichols/h_media | 0 | ###
# models.coffee
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# This module preloads our Mongoose models, and allows exporting them to an
# object. This saves us having to require every model individually.
###
'use strict'
# External libs
_s = require 'underscore.string'
fs = require 'fs'
path = require 'path'
debug = require('debug') 'hMedia:models'
modelsPath = path.join __dirname, 'models'
###
# requireWithContext
#
# Take a filename, convert it to a string useable by require, and require the
# module. If given a context, export each model to a property on the context
# object.
###
requireModel = (moduleName, context) ->
return unless moduleName?
return unless /^(.*)\.(js|coffee)$/.test moduleName
return unless fs.existsSync moduleName
exportName = _s.capitalize(path.basename(moduleName).split('.')[0])
debug 'Loading ' + exportName + ' from ' + moduleName + '...'
module = require moduleName
context[exportName] = module
###
# Load our models
###
module.exports = (context) ->
models = context or {}
debug 'Loading models...'
files = fs.readdirSync modelsPath
requireModel path.join(modelsPath, file), models for file in files
models
| 13391 | ###
# models.coffee
#
# © 2014 <NAME>
# See LICENSE for more details
#
# This module preloads our Mongoose models, and allows exporting them to an
# object. This saves us having to require every model individually.
###
'use strict'
# External libs
_s = require 'underscore.string'
fs = require 'fs'
path = require 'path'
debug = require('debug') 'hMedia:models'
modelsPath = path.join __dirname, 'models'
###
# requireWithContext
#
# Take a filename, convert it to a string useable by require, and require the
# module. If given a context, export each model to a property on the context
# object.
###
requireModel = (moduleName, context) ->
return unless moduleName?
return unless /^(.*)\.(js|coffee)$/.test moduleName
return unless fs.existsSync moduleName
exportName = _s.capitalize(path.basename(moduleName).split('.')[0])
debug 'Loading ' + exportName + ' from ' + moduleName + '...'
module = require moduleName
context[exportName] = module
###
# Load our models
###
module.exports = (context) ->
models = context or {}
debug 'Loading models...'
files = fs.readdirSync modelsPath
requireModel path.join(modelsPath, file), models for file in files
models
| true | ###
# models.coffee
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# This module preloads our Mongoose models, and allows exporting them to an
# object. This saves us having to require every model individually.
###
'use strict'
# External libs
_s = require 'underscore.string'
fs = require 'fs'
path = require 'path'
debug = require('debug') 'hMedia:models'
modelsPath = path.join __dirname, 'models'
###
# requireWithContext
#
# Take a filename, convert it to a string useable by require, and require the
# module. If given a context, export each model to a property on the context
# object.
###
requireModel = (moduleName, context) ->
return unless moduleName?
return unless /^(.*)\.(js|coffee)$/.test moduleName
return unless fs.existsSync moduleName
exportName = _s.capitalize(path.basename(moduleName).split('.')[0])
debug 'Loading ' + exportName + ' from ' + moduleName + '...'
module = require moduleName
context[exportName] = module
###
# Load our models
###
module.exports = (context) ->
models = context or {}
debug 'Loading models...'
files = fs.readdirSync modelsPath
requireModel path.join(modelsPath, file), models for file in files
models
|
[
{
"context": "sion 1.0.0\n@file Render.js\n@author Welington Sampaio (http://welington.zaez.net/)\n@contact http://",
"end": 139,
"score": 0.9998940229415894,
"start": 122,
"tag": "NAME",
"value": "Welington Sampaio"
}
] | vendor/assets/javascripts/joker/Render.coffee | zaeznet/joker-rails | 0 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file Render.js
@author Welington Sampaio (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.Render extends Joker.Core
@debugPrefix: "Joker_Render"
@className : "Joker_Render"
defaultMethod : undefined
###
Constructor method
###
constructor: ->
super
@debug "Inicializando o Render"
@setDefaults()
@setEvents()
executeScript: (scripts)->
if Object.isArray(scripts)
for script in scripts
eval script
else if Object.isString(scripts)
eval script
else
throw 'Formato enviado não é vealido. Deve ser Array ou String.'
separateScript: (val) ->
regex = /<script\b[^>]*>([\s\S]*?)<\/script>/gmi
result = while scripts = regex.exec val
scripts
result2 = for script in result
val = val.replace script[0], ''
script[1]
[
val,
result2
]
formatTitle: (title)->
title = "#{title} | #{Joker.appName}" if Joker.appName?
title
###
Event triggered when a link to "jrender" is triggered
@param Event
@returns Boolean false
###
linkClickRender: (e)->
@debug "Evento de clique disparados para o elemento: ", e.currentTarget
el = e.currentTarget
if Object.isString(el.dataset.jrender) and el.dataset.jrender != "true"
push = false
target = @libSupport("[data-yield-for=#{el.dataset.jrender}]")
else
push = true
target = @getRenderContainer()
title = if el.dataset.jrenderTitle? then el.dataset.jrenderTitle else document.title
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.getAttribute('href')}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
var newData, target, jwindow;
newData = _this.separateScript(data)
target = _this.libSupport("[data-yield-for=#{el.dataset.jrender}]");
target.empty().html(newData[0]);
_this.executeScript(newData[1]);
jwindow = JokerUtils.getObject(target.closest('.jwindow').attr('id'));
setTimeout(function(){
jwindow.setCenter();
jwindow.setScroll();
}, 10);
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.getAttribute('href')}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(title)
url : el.getAttribute('href')
, false)
false
###
Event triggered when a link to "jwindow" is triggered
@param Event
@returns Boolean false
###
linkClickWindow: (e)->
el = @libSupport e.currentTarget
callback = ""
if el.data "jrender-callback"
callback = ", callbacks: { onCreate: function(w){eval('#{el.data "jrender-callback"}(w)') } }"
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.attr 'href'}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
new Joker.Window({
content: data,
title: "#{el.data "jrender-title"}"#{callback}
});
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.attr 'href'}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(el.data "jrender-title")
url : el.attr('href')
, false)
false
###
Metodo que faz o laod do conteudo html
@param Object obj
@param Boolean add_push
###
load: (obj, add_push=true)->
return undefined unless obj?
eval obj.script
@pushState obj, obj.title, obj.url if add_push
###
Retorna o container default
###
getRenderContainer: ->
@libSupport "[data-yield]"
pushState: (obj)->
history.pushState obj, obj.title, obj.url
renderTo: (target, url)->
new Joker.Ajax
url: url
data: "format=joker"
async: false
callbacks:
success: (data, textStatus, jqXHR)=>
@libSupport("[data-yield-for=#{target}]").empty().html(data)
error: ( jqXHR, textStatus )=>
if (jqXHR.status != 403)
new Joker.Alert
message: "Ocorreu um erro ao solicitar a pagina: #{url}"
type: Joker.Alert.TYPE_ERROR
###
Sets the values of the standard rendering engine
###
setDefaults: ->
@debug "Definindo as configuracoes padroes"
@defaultMethod = "GET"
###
Sets all events from the elements
###
setEvents: ->
@unsetEvents()
@debug "Setando os eventos"
@libSupport(document).on('click.render', '[data-jrender]', @libSupport.proxy(@linkClickRender,@))
@libSupport(document).on('click.render', '[data-jwindow]', @libSupport.proxy(@linkClickWindow,@))
window.onpopstate = (config)=> @load config.state, false
###
Removes all events from the elements with
namespace .render
###
unsetEvents: ->
@debug "Removendo os eventos"
@libSupport(document).off '.render'
###
@type [Joker.Render]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.Render]
###
@getInstance: ->
Joker.Render.instance = new Joker.Render() unless Joker.Render.instance?
Joker.Render.instance | 105652 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file Render.js
@author <NAME> (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.Render extends Joker.Core
@debugPrefix: "Joker_Render"
@className : "Joker_Render"
defaultMethod : undefined
###
Constructor method
###
constructor: ->
super
@debug "Inicializando o Render"
@setDefaults()
@setEvents()
executeScript: (scripts)->
if Object.isArray(scripts)
for script in scripts
eval script
else if Object.isString(scripts)
eval script
else
throw 'Formato enviado não é vealido. Deve ser Array ou String.'
separateScript: (val) ->
regex = /<script\b[^>]*>([\s\S]*?)<\/script>/gmi
result = while scripts = regex.exec val
scripts
result2 = for script in result
val = val.replace script[0], ''
script[1]
[
val,
result2
]
formatTitle: (title)->
title = "#{title} | #{Joker.appName}" if Joker.appName?
title
###
Event triggered when a link to "jrender" is triggered
@param Event
@returns Boolean false
###
linkClickRender: (e)->
@debug "Evento de clique disparados para o elemento: ", e.currentTarget
el = e.currentTarget
if Object.isString(el.dataset.jrender) and el.dataset.jrender != "true"
push = false
target = @libSupport("[data-yield-for=#{el.dataset.jrender}]")
else
push = true
target = @getRenderContainer()
title = if el.dataset.jrenderTitle? then el.dataset.jrenderTitle else document.title
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.getAttribute('href')}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
var newData, target, jwindow;
newData = _this.separateScript(data)
target = _this.libSupport("[data-yield-for=#{el.dataset.jrender}]");
target.empty().html(newData[0]);
_this.executeScript(newData[1]);
jwindow = JokerUtils.getObject(target.closest('.jwindow').attr('id'));
setTimeout(function(){
jwindow.setCenter();
jwindow.setScroll();
}, 10);
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.getAttribute('href')}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(title)
url : el.getAttribute('href')
, false)
false
###
Event triggered when a link to "jwindow" is triggered
@param Event
@returns Boolean false
###
linkClickWindow: (e)->
el = @libSupport e.currentTarget
callback = ""
if el.data "jrender-callback"
callback = ", callbacks: { onCreate: function(w){eval('#{el.data "jrender-callback"}(w)') } }"
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.attr 'href'}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
new Joker.Window({
content: data,
title: "#{el.data "jrender-title"}"#{callback}
});
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.attr 'href'}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(el.data "jrender-title")
url : el.attr('href')
, false)
false
###
Metodo que faz o laod do conteudo html
@param Object obj
@param Boolean add_push
###
load: (obj, add_push=true)->
return undefined unless obj?
eval obj.script
@pushState obj, obj.title, obj.url if add_push
###
Retorna o container default
###
getRenderContainer: ->
@libSupport "[data-yield]"
pushState: (obj)->
history.pushState obj, obj.title, obj.url
renderTo: (target, url)->
new Joker.Ajax
url: url
data: "format=joker"
async: false
callbacks:
success: (data, textStatus, jqXHR)=>
@libSupport("[data-yield-for=#{target}]").empty().html(data)
error: ( jqXHR, textStatus )=>
if (jqXHR.status != 403)
new Joker.Alert
message: "Ocorreu um erro ao solicitar a pagina: #{url}"
type: Joker.Alert.TYPE_ERROR
###
Sets the values of the standard rendering engine
###
setDefaults: ->
@debug "Definindo as configuracoes padroes"
@defaultMethod = "GET"
###
Sets all events from the elements
###
setEvents: ->
@unsetEvents()
@debug "Setando os eventos"
@libSupport(document).on('click.render', '[data-jrender]', @libSupport.proxy(@linkClickRender,@))
@libSupport(document).on('click.render', '[data-jwindow]', @libSupport.proxy(@linkClickWindow,@))
window.onpopstate = (config)=> @load config.state, false
###
Removes all events from the elements with
namespace .render
###
unsetEvents: ->
@debug "Removendo os eventos"
@libSupport(document).off '.render'
###
@type [Joker.Render]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.Render]
###
@getInstance: ->
Joker.Render.instance = new Joker.Render() unless Joker.Render.instance?
Joker.Render.instance | true | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file Render.js
@author PI:NAME:<NAME>END_PI (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.Render extends Joker.Core
@debugPrefix: "Joker_Render"
@className : "Joker_Render"
defaultMethod : undefined
###
Constructor method
###
constructor: ->
super
@debug "Inicializando o Render"
@setDefaults()
@setEvents()
executeScript: (scripts)->
if Object.isArray(scripts)
for script in scripts
eval script
else if Object.isString(scripts)
eval script
else
throw 'Formato enviado não é vealido. Deve ser Array ou String.'
separateScript: (val) ->
regex = /<script\b[^>]*>([\s\S]*?)<\/script>/gmi
result = while scripts = regex.exec val
scripts
result2 = for script in result
val = val.replace script[0], ''
script[1]
[
val,
result2
]
formatTitle: (title)->
title = "#{title} | #{Joker.appName}" if Joker.appName?
title
###
Event triggered when a link to "jrender" is triggered
@param Event
@returns Boolean false
###
linkClickRender: (e)->
@debug "Evento de clique disparados para o elemento: ", e.currentTarget
el = e.currentTarget
if Object.isString(el.dataset.jrender) and el.dataset.jrender != "true"
push = false
target = @libSupport("[data-yield-for=#{el.dataset.jrender}]")
else
push = true
target = @getRenderContainer()
title = if el.dataset.jrenderTitle? then el.dataset.jrenderTitle else document.title
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.getAttribute('href')}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
var newData, target, jwindow;
newData = _this.separateScript(data)
target = _this.libSupport("[data-yield-for=#{el.dataset.jrender}]");
target.empty().html(newData[0]);
_this.executeScript(newData[1]);
jwindow = JokerUtils.getObject(target.closest('.jwindow').attr('id'));
setTimeout(function(){
jwindow.setCenter();
jwindow.setScroll();
}, 10);
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.getAttribute('href')}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(title)
url : el.getAttribute('href')
, false)
false
###
Event triggered when a link to "jwindow" is triggered
@param Event
@returns Boolean false
###
linkClickWindow: (e)->
el = @libSupport e.currentTarget
callback = ""
if el.data "jrender-callback"
callback = ", callbacks: { onCreate: function(w){eval('#{el.data "jrender-callback"}(w)') } }"
@load(
script: """
_this = this;
xhr = new Joker.Ajax({
url: "#{el.attr 'href'}",
data: "format=joker",
async: false,
callbacks: {
success: function (data, textStatus, jqXHR) {
new Joker.Window({
content: data,
title: "#{el.data "jrender-title"}"#{callback}
});
},
error: function ( jqXHR, textStatus ) {
add_push = false;
if (jqXHR.status != 403) {
new Joker.Alert({
message: "Ocorreu um erro ao solicitar a pagina: #{el.attr 'href'}",
type: Joker.Alert.TYPE_ERROR
});
}
}
}
});
"""
title: @formatTitle(el.data "jrender-title")
url : el.attr('href')
, false)
false
###
Metodo que faz o laod do conteudo html
@param Object obj
@param Boolean add_push
###
load: (obj, add_push=true)->
return undefined unless obj?
eval obj.script
@pushState obj, obj.title, obj.url if add_push
###
Retorna o container default
###
getRenderContainer: ->
@libSupport "[data-yield]"
pushState: (obj)->
history.pushState obj, obj.title, obj.url
renderTo: (target, url)->
new Joker.Ajax
url: url
data: "format=joker"
async: false
callbacks:
success: (data, textStatus, jqXHR)=>
@libSupport("[data-yield-for=#{target}]").empty().html(data)
error: ( jqXHR, textStatus )=>
if (jqXHR.status != 403)
new Joker.Alert
message: "Ocorreu um erro ao solicitar a pagina: #{url}"
type: Joker.Alert.TYPE_ERROR
###
Sets the values of the standard rendering engine
###
setDefaults: ->
@debug "Definindo as configuracoes padroes"
@defaultMethod = "GET"
###
Sets all events from the elements
###
setEvents: ->
@unsetEvents()
@debug "Setando os eventos"
@libSupport(document).on('click.render', '[data-jrender]', @libSupport.proxy(@linkClickRender,@))
@libSupport(document).on('click.render', '[data-jwindow]', @libSupport.proxy(@linkClickWindow,@))
window.onpopstate = (config)=> @load config.state, false
###
Removes all events from the elements with
namespace .render
###
unsetEvents: ->
@debug "Removendo os eventos"
@libSupport(document).off '.render'
###
@type [Joker.Render]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.Render]
###
@getInstance: ->
Joker.Render.instance = new Joker.Render() unless Joker.Render.instance?
Joker.Render.instance |
[
{
"context": "ted: now\n\t\tmodified: now\n\t}\n\tif name\n\t\tuser.name = name\n\tuser.services = {weixin: {openid: []}}\n\tif openi",
"end": 381,
"score": 0.6561440229415894,
"start": 377,
"tag": "USERNAME",
"value": "name"
},
{
"context": "Id\n\nWXMini.newSpaceUser = (userId, spac... | creator/packages/steedos-weixin/lib/wx_mini.coffee | yicone/steedos-platform | 42 | request = Npm.require("request")
crypto = Npm.require('crypto')
@WXMini = {}
WXMini.newUser = (appId, openid, unionid, name, locale, phoneNumber)->
now = new Date
user_id = Creator.getCollection("users")._makeNewID()
user = {
_id: user_id
steedos_id: phoneNumber || user_id
locale: locale
is_deleted: false
created: now
modified: now
}
if name
user.name = name
user.services = {weixin: {openid: []}}
if openid
user.services.weixin.openid.push {_id: openid, appid: appId}
if unionid
user.services.weixin.unionid = unionid
userId = Creator.getCollection("users").direct.insert(user)
return userId
WXMini.newSpace = (userId, spaceName)->
now = new Date
space_id = Creator.getCollection("spaces")._makeNewID()
space = {
_id: space_id
space: space_id
name: spaceName
owner: userId
admins: [userId]
is_deleted: false
created: now
created_by: userId
modified: now
modified_by: userId
}
Creator.getCollection("spaces").direct.insert(space)
return space_id
WXMini.newOrganization = (userId, spaceId, orgName)->
now = new Date
org = {
space: spaceId
name: orgName
fullname: orgName
users: [userId]
created: now
created_by: userId
modified: now
modified_by: userId
}
orgId = Creator.getCollection("organizations").direct.insert(org)
return orgId
WXMini.newSpaceUser = (userId, spaceId, orgId, userName, profile, mobile)->
now = new Date
spaceUser = {
user: userId
space: spaceId
profile: profile
mobile: mobile
organization: orgId
organizations: [orgId]
user_accepted: true
name: userName
created: now
created_by: userId
modified: now
modified_by: userId
}
spaceUserId = Creator.getCollection("space_users").direct.insert(spaceUser)
return spaceUserId
WXMini.addUserToSpace = (userId, spaceId, userName, profile)->
console.log("addUserToSpace", userId, spaceId, userName)
space = Creator.getCollection("spaces").findOne({_id: spaceId})
if space
#将用户添加到space的根部门下
root_org = Creator.getCollection("organizations").findOne({space: space._id, parent: null}, {fields: {_id: 1}})
if root_org
Creator.getCollection("organizations").direct.update({_id: root_org._id}, {$push: {users: userId}})
# 新增一条space_user
WXMini.newSpaceUser(userId, spaceId, root_org._id, userName, profile)
else
throw new Meteor.Error(500, "工作区#{spaceId},未找到根部门")
else
throw new Meteor.Error(500, "无效的space: #{spaceId}")
WXMini.updateUser = (userId, options)->
if options.$set.mobile
options.$set.phone = {number: "+86" + options.$set.mobile, mobile: options.$set.mobile, verified:true, modified:new Date()}
# 同步头像avatar/profile.avatar字段值到头像URLavatarUrl
profileAvatar = options.$set.profile?.avatar or options.$set["profile.avatar"]
if options.$set.avatar
options.$set.avatarUrl = "/api/files/avatars/" + options.$set.avatar
else if profileAvatar
user = Creator.getCollection("users").findOne({_id: userId}, fields: {avatarUrl: 1})
unless user.avatarUrl
options.$set.avatarUrl = profileAvatar
Creator.getCollection("users").direct.update({_id: userId}, options)
if Creator.getCollection("vip_customers")
c_options = {$set: {}}
if options.$set.mobile
c_options.$set.mobile = options.$set.mobile
if options.$set.name
c_options.$set.name = options.$set.name
if !_.isEmpty(c_options.$set)
Creator.getCollection("vip_customers").update({owner: userId}, c_options, {multi: true})
# 微信相关接口 #
# 获取access_token
WXMini.getNewAccessTokenSync = (appId, secret) ->
accessToken = ""
resData = Meteor.wrapAsync((appId, secret, cb) ->
request.get {
url:"https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=#{appId}&secret=#{secret}"
}, (err, httpResponse, body) ->
cb err, httpResponse, body
if err
console.error('get access_token failed:', err)
return
if httpResponse.statusCode == 200
return body
)(appId, secret)
if resData
body = JSON.parse(resData.body)
if body
if body.access_token
accessToken = body.access_token
else if body.errcode
console.error body.errmsg
return accessToken
# 发送模板消息
WXMini.sendTemplateMessage = (appId, data) ->
accessToken = Creator.getCollection("vip_apps").findOne(appId)?.access_token
if not accessToken
console.error 'Access_token not found'
return
options = {
data: data
}
url = "https://api.weixin.qq.com/cgi-bin/message/wxopen/template/send?access_token=#{accessToken}"
HTTP.call 'POST', url, options, (error, result) ->
if error
console.error error.stack
return
if (result && result.data && result.data.errcode)
console.error result.data
return
return
WXMini.getTempToken = (userId, secret)->
if userId
now = parseInt(new Date().getTime()/1000).toString()
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = userId + c
else if len >= 32
key32 = userId.slice(0,32)
cipher = crypto.createCipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
console.log('now', now)
cipheredMsg = Buffer.concat([cipher.update(new Buffer(now, 'utf8')), cipher.final()])
steedos_token = cipheredMsg.toString('base64')
return steedos_token
WXMini.decipherToken = (token, userId, secret)->
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = userId + c
else if len >= 32
key32 = userId.slice(0,32)
decipher = crypto.createDecipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
decoded = decipher.update(token, 'base64', 'utf8');
decoded += decipher.final('utf8');
return decoded | 158331 | request = Npm.require("request")
crypto = Npm.require('crypto')
@WXMini = {}
WXMini.newUser = (appId, openid, unionid, name, locale, phoneNumber)->
now = new Date
user_id = Creator.getCollection("users")._makeNewID()
user = {
_id: user_id
steedos_id: phoneNumber || user_id
locale: locale
is_deleted: false
created: now
modified: now
}
if name
user.name = name
user.services = {weixin: {openid: []}}
if openid
user.services.weixin.openid.push {_id: openid, appid: appId}
if unionid
user.services.weixin.unionid = unionid
userId = Creator.getCollection("users").direct.insert(user)
return userId
WXMini.newSpace = (userId, spaceName)->
now = new Date
space_id = Creator.getCollection("spaces")._makeNewID()
space = {
_id: space_id
space: space_id
name: spaceName
owner: userId
admins: [userId]
is_deleted: false
created: now
created_by: userId
modified: now
modified_by: userId
}
Creator.getCollection("spaces").direct.insert(space)
return space_id
WXMini.newOrganization = (userId, spaceId, orgName)->
now = new Date
org = {
space: spaceId
name: orgName
fullname: orgName
users: [userId]
created: now
created_by: userId
modified: now
modified_by: userId
}
orgId = Creator.getCollection("organizations").direct.insert(org)
return orgId
WXMini.newSpaceUser = (userId, spaceId, orgId, userName, profile, mobile)->
now = new Date
spaceUser = {
user: userId
space: spaceId
profile: profile
mobile: mobile
organization: orgId
organizations: [orgId]
user_accepted: true
name: userName
created: now
created_by: userId
modified: now
modified_by: userId
}
spaceUserId = Creator.getCollection("space_users").direct.insert(spaceUser)
return spaceUserId
WXMini.addUserToSpace = (userId, spaceId, userName, profile)->
console.log("addUserToSpace", userId, spaceId, userName)
space = Creator.getCollection("spaces").findOne({_id: spaceId})
if space
#将用户添加到space的根部门下
root_org = Creator.getCollection("organizations").findOne({space: space._id, parent: null}, {fields: {_id: 1}})
if root_org
Creator.getCollection("organizations").direct.update({_id: root_org._id}, {$push: {users: userId}})
# 新增一条space_user
WXMini.newSpaceUser(userId, spaceId, root_org._id, userName, profile)
else
throw new Meteor.Error(500, "工作区#{spaceId},未找到根部门")
else
throw new Meteor.Error(500, "无效的space: #{spaceId}")
WXMini.updateUser = (userId, options)->
if options.$set.mobile
options.$set.phone = {number: "+86" + options.$set.mobile, mobile: options.$set.mobile, verified:true, modified:new Date()}
# 同步头像avatar/profile.avatar字段值到头像URLavatarUrl
profileAvatar = options.$set.profile?.avatar or options.$set["profile.avatar"]
if options.$set.avatar
options.$set.avatarUrl = "/api/files/avatars/" + options.$set.avatar
else if profileAvatar
user = Creator.getCollection("users").findOne({_id: userId}, fields: {avatarUrl: 1})
unless user.avatarUrl
options.$set.avatarUrl = profileAvatar
Creator.getCollection("users").direct.update({_id: userId}, options)
if Creator.getCollection("vip_customers")
c_options = {$set: {}}
if options.$set.mobile
c_options.$set.mobile = options.$set.mobile
if options.$set.name
c_options.$set.name = options.$set.name
if !_.isEmpty(c_options.$set)
Creator.getCollection("vip_customers").update({owner: userId}, c_options, {multi: true})
# 微信相关接口 #
# 获取access_token
WXMini.getNewAccessTokenSync = (appId, secret) ->
accessToken = ""
resData = Meteor.wrapAsync((appId, secret, cb) ->
request.get {
url:"https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=#{appId}&secret=#{secret}"
}, (err, httpResponse, body) ->
cb err, httpResponse, body
if err
console.error('get access_token failed:', err)
return
if httpResponse.statusCode == 200
return body
)(appId, secret)
if resData
body = JSON.parse(resData.body)
if body
if body.access_token
accessToken = body.access_token
else if body.errcode
console.error body.errmsg
return accessToken
# 发送模板消息
WXMini.sendTemplateMessage = (appId, data) ->
accessToken = Creator.getCollection("vip_apps").findOne(appId)?.access_token
if not accessToken
console.error 'Access_token not found'
return
options = {
data: data
}
url = "https://api.weixin.qq.com/cgi-bin/message/wxopen/template/send?access_token=#{accessToken}"
HTTP.call 'POST', url, options, (error, result) ->
if error
console.error error.stack
return
if (result && result.data && result.data.errcode)
console.error result.data
return
return
WXMini.getTempToken = (userId, secret)->
if userId
now = parseInt(new Date().getTime()/1000).toString()
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = userId + c
else if len >= 32
key32 = userId.slice(0,32)
cipher = crypto.createCipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
console.log('now', now)
cipheredMsg = Buffer.concat([cipher.update(new Buffer(now, 'utf8')), cipher.final()])
steedos_token = cipheredMsg.toString('base64')
return steedos_token
WXMini.decipherToken = (token, userId, secret)->
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = <KEY> + c
else if len >= 32
key32 = userId.slice(0,32)
decipher = crypto.createDecipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
decoded = decipher.update(token, 'base64', 'utf8');
decoded += decipher.final('utf8');
return decoded | true | request = Npm.require("request")
crypto = Npm.require('crypto')
@WXMini = {}
WXMini.newUser = (appId, openid, unionid, name, locale, phoneNumber)->
now = new Date
user_id = Creator.getCollection("users")._makeNewID()
user = {
_id: user_id
steedos_id: phoneNumber || user_id
locale: locale
is_deleted: false
created: now
modified: now
}
if name
user.name = name
user.services = {weixin: {openid: []}}
if openid
user.services.weixin.openid.push {_id: openid, appid: appId}
if unionid
user.services.weixin.unionid = unionid
userId = Creator.getCollection("users").direct.insert(user)
return userId
WXMini.newSpace = (userId, spaceName)->
now = new Date
space_id = Creator.getCollection("spaces")._makeNewID()
space = {
_id: space_id
space: space_id
name: spaceName
owner: userId
admins: [userId]
is_deleted: false
created: now
created_by: userId
modified: now
modified_by: userId
}
Creator.getCollection("spaces").direct.insert(space)
return space_id
WXMini.newOrganization = (userId, spaceId, orgName)->
now = new Date
org = {
space: spaceId
name: orgName
fullname: orgName
users: [userId]
created: now
created_by: userId
modified: now
modified_by: userId
}
orgId = Creator.getCollection("organizations").direct.insert(org)
return orgId
WXMini.newSpaceUser = (userId, spaceId, orgId, userName, profile, mobile)->
now = new Date
spaceUser = {
user: userId
space: spaceId
profile: profile
mobile: mobile
organization: orgId
organizations: [orgId]
user_accepted: true
name: userName
created: now
created_by: userId
modified: now
modified_by: userId
}
spaceUserId = Creator.getCollection("space_users").direct.insert(spaceUser)
return spaceUserId
WXMini.addUserToSpace = (userId, spaceId, userName, profile)->
console.log("addUserToSpace", userId, spaceId, userName)
space = Creator.getCollection("spaces").findOne({_id: spaceId})
if space
#将用户添加到space的根部门下
root_org = Creator.getCollection("organizations").findOne({space: space._id, parent: null}, {fields: {_id: 1}})
if root_org
Creator.getCollection("organizations").direct.update({_id: root_org._id}, {$push: {users: userId}})
# 新增一条space_user
WXMini.newSpaceUser(userId, spaceId, root_org._id, userName, profile)
else
throw new Meteor.Error(500, "工作区#{spaceId},未找到根部门")
else
throw new Meteor.Error(500, "无效的space: #{spaceId}")
WXMini.updateUser = (userId, options)->
if options.$set.mobile
options.$set.phone = {number: "+86" + options.$set.mobile, mobile: options.$set.mobile, verified:true, modified:new Date()}
# 同步头像avatar/profile.avatar字段值到头像URLavatarUrl
profileAvatar = options.$set.profile?.avatar or options.$set["profile.avatar"]
if options.$set.avatar
options.$set.avatarUrl = "/api/files/avatars/" + options.$set.avatar
else if profileAvatar
user = Creator.getCollection("users").findOne({_id: userId}, fields: {avatarUrl: 1})
unless user.avatarUrl
options.$set.avatarUrl = profileAvatar
Creator.getCollection("users").direct.update({_id: userId}, options)
if Creator.getCollection("vip_customers")
c_options = {$set: {}}
if options.$set.mobile
c_options.$set.mobile = options.$set.mobile
if options.$set.name
c_options.$set.name = options.$set.name
if !_.isEmpty(c_options.$set)
Creator.getCollection("vip_customers").update({owner: userId}, c_options, {multi: true})
# 微信相关接口 #
# 获取access_token
WXMini.getNewAccessTokenSync = (appId, secret) ->
accessToken = ""
resData = Meteor.wrapAsync((appId, secret, cb) ->
request.get {
url:"https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=#{appId}&secret=#{secret}"
}, (err, httpResponse, body) ->
cb err, httpResponse, body
if err
console.error('get access_token failed:', err)
return
if httpResponse.statusCode == 200
return body
)(appId, secret)
if resData
body = JSON.parse(resData.body)
if body
if body.access_token
accessToken = body.access_token
else if body.errcode
console.error body.errmsg
return accessToken
# 发送模板消息
WXMini.sendTemplateMessage = (appId, data) ->
accessToken = Creator.getCollection("vip_apps").findOne(appId)?.access_token
if not accessToken
console.error 'Access_token not found'
return
options = {
data: data
}
url = "https://api.weixin.qq.com/cgi-bin/message/wxopen/template/send?access_token=#{accessToken}"
HTTP.call 'POST', url, options, (error, result) ->
if error
console.error error.stack
return
if (result && result.data && result.data.errcode)
console.error result.data
return
return
WXMini.getTempToken = (userId, secret)->
if userId
now = parseInt(new Date().getTime()/1000).toString()
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = userId + c
else if len >= 32
key32 = userId.slice(0,32)
cipher = crypto.createCipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
console.log('now', now)
cipheredMsg = Buffer.concat([cipher.update(new Buffer(now, 'utf8')), cipher.final()])
steedos_token = cipheredMsg.toString('base64')
return steedos_token
WXMini.decipherToken = (token, userId, secret)->
key32 = ""
len = userId.length
iv = secret
if len < 32
c = ""
i = 0
m = 32 - len
while i < m
c = " " + c
i++
key32 = PI:KEY:<KEY>END_PI + c
else if len >= 32
key32 = userId.slice(0,32)
decipher = crypto.createDecipheriv('aes-256-cbc', new Buffer(key32, 'utf8'), new Buffer(iv, 'utf8'))
decoded = decipher.update(token, 'base64', 'utf8');
decoded += decipher.final('utf8');
return decoded |
[
{
"context": "[\n {\n name: 'Dr. Brown'\n signal: 3606\n minAmplitude: 200000\n",
"end": 19,
"score": 0.9696065187454224,
"start": 17,
"tag": "NAME",
"value": "Dr"
},
{
"context": "[\n {\n name: 'Dr. Brown'\n signal: 3606\n minAmplitude: 200000\n },\n ",
"end... | people.cson | octoblu/chords | 0 | [
{
name: 'Dr. Brown'
signal: 3606
minAmplitude: 200000
},
{
name: 'Dr. Evil'
signal: 3620
minAmplitude: 200000
},
{
name: 'Dr. Who'
signal: 3608
minAmplitude: 700000
},
{
name: 'Dr. Gonzo'
signal: 3654
minAmplitude: 60000
}
{
name: 'Dr. Moreau'
signal: 3655
minAmplitude: 60000
}
]
| 150627 | [
{
name: '<NAME>. <NAME>'
signal: 3606
minAmplitude: 200000
},
{
name: '<NAME>. <NAME>'
signal: 3620
minAmplitude: 200000
},
{
name: '<NAME>. <NAME>'
signal: 3608
minAmplitude: 700000
},
{
name: '<NAME>. <NAME>'
signal: 3654
minAmplitude: 60000
}
{
name: '<NAME>. <NAME>'
signal: 3655
minAmplitude: 60000
}
]
| true | [
{
name: 'PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI'
signal: 3606
minAmplitude: 200000
},
{
name: 'PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI'
signal: 3620
minAmplitude: 200000
},
{
name: 'PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI'
signal: 3608
minAmplitude: 700000
},
{
name: 'PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI'
signal: 3654
minAmplitude: 60000
}
{
name: 'PI:NAME:<NAME>END_PI. PI:NAME:<NAME>END_PI'
signal: 3655
minAmplitude: 60000
}
]
|
[
{
"context": "false\n\n auth = chap.createAuth({username: 'x'})\n .updateUser({nonce: 'y', nextnonce",
"end": 1804,
"score": 0.522697389125824,
"start": 1803,
"tag": "USERNAME",
"value": "x"
},
{
"context": "false\n\n auth = chap.createAuth({username: 'x', cn... | spec/authenticate.spec.coffee | kixxauth/CHAP | 2 | crypto = require 'crypto'
chap = require '../lib/chap-server'
describe 'Authenticator.validate() invalid parameters', ->
it 'should throw an error if the username string is not available', ->
err = false
auth = chap.createAuth().updateUser()
try
auth.validate()
catch e
err = e
expect(typeof err).toBe 'object'
expect(err.message).toBe 'A user.username string must be provided to authenticate'
return
describe 'create a new user with authenticate()', ->
it 'should "create" a new user', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser()
.validate()
nonce = null
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(typeof user.nonce).toBe 'string'
expect(user.nonce.length).toBe 40
nonce = user.nonce
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe null
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe nonce
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.USER_NA
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe true
return
describe 'authenticate() should deny authentication without creds', ->
it 'should deny authentication', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser({nonce: 'y', nextnonce: 'z'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'y'
expect(newUser.nextnonce).toBe 'z'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.MISSING_CREDS
expect(newUser.username).toBe 'x'
return
expect(testPersistCalled).toBe false
return
describe 'autheticate() should create a new passkey for a user without one', ->
it 'should create a passkey and authenticate', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x', cnonce: 'c', response: 'd'})
.updateUser({nonce: 'a', nextnonce: 'b'})
.validate()
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(user.nonce).toBe 'b'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe 'c'
expect(typeof user.authenticated).toBe 'undefined'
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'b'
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.SET_PASSKEY
expect(testPersistCalled).toBe true
expect(newUser.username).toBe 'x'
return
describe 'authenticate() authentication', ->
it 'should not authenticate if the passkey was not modified', ->
nonce = 'a'
nextnonce = 'b'
response = chap.sha1(nonce)
cnonce = chap.sha1(chap.sha1(nextnonce))
user =
username: 'x'
cnonce: cnonce
response: response
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: nonce, nextnonce: nextnonce, passkey: 'y'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'a'
expect(newUser.nextnonce).toBe 'b'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.UNMODIFIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should not authenticate if the computed passkey does not match', ->
user =
username: 'x'
cnonce: 'x'
response: 'x'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: 'x', nextnonce: 'x', passkey: 'x'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'x'
expect(newUser.nextnonce).toBe 'x'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.DENIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should authenticate if the computed passkey matches', ->
user =
username: 'a'
cnonce: 'd'
response: 'e'
storedUser =
passkey: '58e6b3a414a1e090dfc6029add0f3555ccba127f'
nonce: 'b'
nextnonce: 'c'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser(storedUser)
.validate()
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'a'
expect(user.nonce).toBe 'c'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
expect(user.passkey).toBe 'd'
return
newUser = auth.authenticate(testPersist)
expect(newUser.username).toBe 'a'
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.OK
expect(newUser.nonce).toBe 'c'
expect(typeof newUser.nextnonce).toBe 'string'
expect(newUser.nextnonce.length).toBe 40
expect(testPersistCalled).toBe true
return
| 108372 | crypto = require 'crypto'
chap = require '../lib/chap-server'
describe 'Authenticator.validate() invalid parameters', ->
it 'should throw an error if the username string is not available', ->
err = false
auth = chap.createAuth().updateUser()
try
auth.validate()
catch e
err = e
expect(typeof err).toBe 'object'
expect(err.message).toBe 'A user.username string must be provided to authenticate'
return
describe 'create a new user with authenticate()', ->
it 'should "create" a new user', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser()
.validate()
nonce = null
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(typeof user.nonce).toBe 'string'
expect(user.nonce.length).toBe 40
nonce = user.nonce
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe null
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe nonce
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.USER_NA
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe true
return
describe 'authenticate() should deny authentication without creds', ->
it 'should deny authentication', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser({nonce: 'y', nextnonce: 'z'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'y'
expect(newUser.nextnonce).toBe 'z'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.MISSING_CREDS
expect(newUser.username).toBe 'x'
return
expect(testPersistCalled).toBe false
return
describe 'autheticate() should create a new passkey for a user without one', ->
it 'should create a passkey and authenticate', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x', cnonce: 'c', response: 'd'})
.updateUser({nonce: 'a', nextnonce: 'b'})
.validate()
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(user.nonce).toBe 'b'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe 'c'
expect(typeof user.authenticated).toBe 'undefined'
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'b'
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.SET_PASSKEY
expect(testPersistCalled).toBe true
expect(newUser.username).toBe 'x'
return
describe 'authenticate() authentication', ->
it 'should not authenticate if the passkey was not modified', ->
nonce = 'a'
nextnonce = 'b'
response = chap.sha1(nonce)
cnonce = chap.sha1(chap.sha1(nextnonce))
user =
username: 'x'
cnonce: cnonce
response: response
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: nonce, nextnonce: nextnonce, passkey: 'y'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'a'
expect(newUser.nextnonce).toBe 'b'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.UNMODIFIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should not authenticate if the computed passkey does not match', ->
user =
username: 'x'
cnonce: 'x'
response: 'x'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: 'x', nextnonce: 'x', passkey: 'x'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'x'
expect(newUser.nextnonce).toBe 'x'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.DENIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should authenticate if the computed passkey matches', ->
user =
username: 'a'
cnonce: 'd'
response: 'e'
storedUser =
passkey: '<KEY>'
nonce: 'b'
nextnonce: 'c'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser(storedUser)
.validate()
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'a'
expect(user.nonce).toBe 'c'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
expect(user.passkey).toBe 'd'
return
newUser = auth.authenticate(testPersist)
expect(newUser.username).toBe 'a'
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.OK
expect(newUser.nonce).toBe 'c'
expect(typeof newUser.nextnonce).toBe 'string'
expect(newUser.nextnonce.length).toBe 40
expect(testPersistCalled).toBe true
return
| true | crypto = require 'crypto'
chap = require '../lib/chap-server'
describe 'Authenticator.validate() invalid parameters', ->
it 'should throw an error if the username string is not available', ->
err = false
auth = chap.createAuth().updateUser()
try
auth.validate()
catch e
err = e
expect(typeof err).toBe 'object'
expect(err.message).toBe 'A user.username string must be provided to authenticate'
return
describe 'create a new user with authenticate()', ->
it 'should "create" a new user', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser()
.validate()
nonce = null
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(typeof user.nonce).toBe 'string'
expect(user.nonce.length).toBe 40
nonce = user.nonce
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe null
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe nonce
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.USER_NA
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe true
return
describe 'authenticate() should deny authentication without creds', ->
it 'should deny authentication', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x'})
.updateUser({nonce: 'y', nextnonce: 'z'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'y'
expect(newUser.nextnonce).toBe 'z'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.MISSING_CREDS
expect(newUser.username).toBe 'x'
return
expect(testPersistCalled).toBe false
return
describe 'autheticate() should create a new passkey for a user without one', ->
it 'should create a passkey and authenticate', ->
testPersistCalled = false
auth = chap.createAuth({username: 'x', cnonce: 'c', response: 'd'})
.updateUser({nonce: 'a', nextnonce: 'b'})
.validate()
nextnonce = null
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'x'
expect(user.nonce).toBe 'b'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
nextnonce = user.nextnonce
expect(user.passkey).toBe 'c'
expect(typeof user.authenticated).toBe 'undefined'
return
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'b'
expect(newUser.nextnonce).toBe nextnonce
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.SET_PASSKEY
expect(testPersistCalled).toBe true
expect(newUser.username).toBe 'x'
return
describe 'authenticate() authentication', ->
it 'should not authenticate if the passkey was not modified', ->
nonce = 'a'
nextnonce = 'b'
response = chap.sha1(nonce)
cnonce = chap.sha1(chap.sha1(nextnonce))
user =
username: 'x'
cnonce: cnonce
response: response
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: nonce, nextnonce: nextnonce, passkey: 'y'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'a'
expect(newUser.nextnonce).toBe 'b'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.UNMODIFIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should not authenticate if the computed passkey does not match', ->
user =
username: 'x'
cnonce: 'x'
response: 'x'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser({nonce: 'x', nextnonce: 'x', passkey: 'x'})
.validate()
testPersist = (user) ->
return testPersistCalled = true
newUser = auth.authenticate(testPersist)
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.nonce).toBe 'x'
expect(newUser.nextnonce).toBe 'x'
expect(newUser.authenticated).toBe false
expect(newUser.message).toBe chap.DENIED
expect(newUser.username).toBe 'x'
expect(testPersistCalled).toBe false
return
it 'should authenticate if the computed passkey matches', ->
user =
username: 'a'
cnonce: 'd'
response: 'e'
storedUser =
passkey: 'PI:KEY:<KEY>END_PI'
nonce: 'b'
nextnonce: 'c'
testPersistCalled = false
auth = chap.createAuth(user)
.updateUser(storedUser)
.validate()
testPersist = (user) ->
testPersistCalled = true
expect(user.username).toBe 'a'
expect(user.nonce).toBe 'c'
expect(typeof user.nextnonce).toBe 'string'
expect(user.nextnonce.length).toBe 40
expect(user.passkey).toBe 'd'
return
newUser = auth.authenticate(testPersist)
expect(newUser.username).toBe 'a'
expect(typeof newUser.passkey).toBe 'undefined'
expect(newUser.authenticated).toBe true
expect(newUser.message).toBe chap.OK
expect(newUser.nonce).toBe 'c'
expect(typeof newUser.nextnonce).toBe 'string'
expect(newUser.nextnonce.length).toBe 40
expect(testPersistCalled).toBe true
return
|
[
{
"context": "s.headers.common['Authorization'] = 'Token token=\"8433f4313d49d1a89821e115579eed18\"'\n $httpProvider.defaults.headers.post['Authoriz",
"end": 1325,
"score": 0.9819422960281372,
"start": 1293,
"tag": "PASSWORD",
"value": "8433f4313d49d1a89821e115579eed18"
},
{
"contex... | app/coffeescript/app.coffee | hoitomt/wager-tagger-ui | 0 | # FastClick.attach(document.body)
window.addEventListener 'load', ->
FastClick.attach(document.body);
, false
if window.location.hostname is "localhost"
API_SERVER = "http://localhost:3000/api/v1"
else
API_SERVER = "https://wager-tagger.herokuapp.com/api/v1"
SEASON_START_DATE = '2017-07-01'
SEASON_STOP_DATE = '2018-06-30'
app = angular.module "wagerTagger", ['ticketsControllers',
'homeControllers',
'financeControllers',
'financeServices'
'syncServices',
'ticketServices',
'ticketTagServices',
'tagServices',
'ngResource',
'ngRoute']
app.config ['$routeProvider', ($routeProvider) ->
$routeProvider.
when('/', {
templateUrl: 'public/templates/finances.html',
controller: 'FinanceController'
}).
when('/tickets', {
templateUrl: 'public/templates/tickets.html',
controller: 'HomeController'
}).
when('/sync', {
templateUrl: 'public/templates/sync.html',
controller: 'TicketsController'
}).
when('/finances', {
}).
otherwise({
redirectTo: '/'
})
]
app.config ($httpProvider) ->
$httpProvider.defaults.headers.common['Authorization'] = 'Token token="8433f4313d49d1a89821e115579eed18"'
$httpProvider.defaults.headers.post['Authorization'] = 'Token token="8433f4313d49d1a89821e115579eed18"'
| 215886 | # FastClick.attach(document.body)
window.addEventListener 'load', ->
FastClick.attach(document.body);
, false
if window.location.hostname is "localhost"
API_SERVER = "http://localhost:3000/api/v1"
else
API_SERVER = "https://wager-tagger.herokuapp.com/api/v1"
SEASON_START_DATE = '2017-07-01'
SEASON_STOP_DATE = '2018-06-30'
app = angular.module "wagerTagger", ['ticketsControllers',
'homeControllers',
'financeControllers',
'financeServices'
'syncServices',
'ticketServices',
'ticketTagServices',
'tagServices',
'ngResource',
'ngRoute']
app.config ['$routeProvider', ($routeProvider) ->
$routeProvider.
when('/', {
templateUrl: 'public/templates/finances.html',
controller: 'FinanceController'
}).
when('/tickets', {
templateUrl: 'public/templates/tickets.html',
controller: 'HomeController'
}).
when('/sync', {
templateUrl: 'public/templates/sync.html',
controller: 'TicketsController'
}).
when('/finances', {
}).
otherwise({
redirectTo: '/'
})
]
app.config ($httpProvider) ->
$httpProvider.defaults.headers.common['Authorization'] = 'Token token="<PASSWORD>"'
$httpProvider.defaults.headers.post['Authorization'] = 'Token token="<PASSWORD>"'
| true | # FastClick.attach(document.body)
window.addEventListener 'load', ->
FastClick.attach(document.body);
, false
if window.location.hostname is "localhost"
API_SERVER = "http://localhost:3000/api/v1"
else
API_SERVER = "https://wager-tagger.herokuapp.com/api/v1"
SEASON_START_DATE = '2017-07-01'
SEASON_STOP_DATE = '2018-06-30'
app = angular.module "wagerTagger", ['ticketsControllers',
'homeControllers',
'financeControllers',
'financeServices'
'syncServices',
'ticketServices',
'ticketTagServices',
'tagServices',
'ngResource',
'ngRoute']
app.config ['$routeProvider', ($routeProvider) ->
$routeProvider.
when('/', {
templateUrl: 'public/templates/finances.html',
controller: 'FinanceController'
}).
when('/tickets', {
templateUrl: 'public/templates/tickets.html',
controller: 'HomeController'
}).
when('/sync', {
templateUrl: 'public/templates/sync.html',
controller: 'TicketsController'
}).
when('/finances', {
}).
otherwise({
redirectTo: '/'
})
]
app.config ($httpProvider) ->
$httpProvider.defaults.headers.common['Authorization'] = 'Token token="PI:PASSWORD:<PASSWORD>END_PI"'
$httpProvider.defaults.headers.post['Authorization'] = 'Token token="PI:PASSWORD:<PASSWORD>END_PI"'
|
[
{
"context": " @joyent = options?.joyent or ''\n\n @password = @randomPassword()\n @new_password = true\n @confirmed = optio",
"end": 6234,
"score": 0.9938519597053528,
"start": 6219,
"tag": "PASSWORD",
"value": "@randomPassword"
},
{
"context": "(' ')[0]\n\n resetPassword... | models/models.coffee | InfinitelLoop/website-3 | 2 | Mongo = require('./mongo').Mongo
http = require('./http')
sys = require 'sys'
crypto = require 'crypto'
require '../public/javascripts/Math.uuid'
nko = {}
md5 = (str) ->
hash = crypto.createHash 'md5'
hash.update str
hash.digest 'hex'
validEmail = (email) ->
/^[a-zA-Z0-9+._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/.test email
escapeURL = require('querystring').escape
parseURL = require('url').parse
class Team
serializable_attributes: ['score']
build: (options) ->
@name = options?.name or ''
@createdAt = new Date()
@application = options?.application or ''
@description = options?.description or ''
@colophon = options?.colophon or ''
@link = options?.link or ''
@url = options?.url or ''
@joyentCode = options?.joyentCode or ''
@lastDeployedTo = options?.lastDeployedTo or ''
@lastDeployedAt = options?.lastDeployedAt
@deployHeads = options?.deployHeads or []
constructor: (options, fn) ->
@build options
@token = Math.uuid()
@setMembers options?.emails, fn
# TODO DRY
authKey: ->
@id() + ':' + @token
hasMember: (member) ->
return false unless member?
_.include _.invoke(@members, 'id'), member.id()
emails: ->
_.pluck @members, 'email'
validate: ->
errors = []
errors.push 'Must have team name' unless @name
errors.push 'Team needs at least one member' unless @members?.length
errors.concat _.compact _.flatten [member.validate() for member in @members]
beforeSave: (fn) ->
@generateSlug =>
@generateDeploySlugs()
threads = @members.length
return fn() unless threads
for member in @members
member.save (error, res) ->
fn() if --threads is 0
beforeInstantiate: (fn) ->
query = { _id: { $in: _.pluck @members, '_id' }}
Person.all query, (error, members) =>
@members = members
@invited = _.select @members, (m) -> m.name == ''
fn()
setMembers: (emails, fn) ->
emails = _.compact emails or []
@members = or []
oldEmails = @emails()
keepEmails = _.intersect emails, oldEmails
@members = _.select @members, (m) ->
_.include keepEmails, m.email
newEmails = _.without emails, oldEmails...
threads = newEmails.length
return process.nextTick fn unless threads
for email in newEmails
Person.firstOrNew { email: email }, (error, member) =>
@members.push member
member.type or= 'Participant'
member.inviteTo this, ->
fn() if --threads is 0
generateSlug: (fn, attempt) ->
@slug = attempt || @name.toLowerCase().replace(/\W+/g, '-').replace(/^-|-$/, '')
Team.fromParam @slug, (error, existing) =>
if !existing? or existing.id() == @id()
fn() # no conflicts
else
@generateSlug fn, @slug + '-' # try with another -
generateDeploySlugs: ->
@joyentSlug or= @slug.replace(/^(\d)/, 'ko-$1').replace(/_/g, '-').substring(0, 30)
@herokuSlug or= 'nko-' + @slug.replace(/_/g, '-').substring(0, 26)
nko.Team = Team
class ScoreCalculator
calculate: (fn) ->
@select =>
@merge()
@zero()
@calcConfirmed()
@average()
@calcFinal()
@calcPopularity()
@calcOverall()
fn @scores
select: (fn) ->
threads = 4
@where {}, (error, all) =>
@all = all
fn() if --threads is 0
@where { confirmed: false }, (error, unconfirmed) =>
@unconfirmed = unconfirmed
fn() if --threads is 0
@where { confirmed: true }, (error, confirmed) =>
@confirmed = confirmed
fn() if --threads is 0
Person.all { type: 'Judge' }, (error, judges) =>
judge_ids = _.pluck(judges, '_id');
@where { 'person._id': { $in: judge_ids }}, (error, judged) =>
@judged = judged
fn() if --threads is 0
merge: ->
@scores = {}
for type in @types
for score in this[type]
@scores[score['team._id']] ||= {}
@scores[score['team._id']][type] = score
zero: ->
for k, score of @scores
for type in @types
score[type] ?= {}
delete score[type]['team._id']
for dimension in @dimensions
score[type][dimension] ?= 0
calcConfirmed: ->
for k, score of @scores
for dimension in @dimensions
score.confirmed[dimension] -= score.judged[dimension]
average: ->
for k, score of @scores
for type in @types
for dimension in @dimensions[0..3]
score[type][dimension] = score[type][dimension] / score[type].popularity
calcFinal: ->
for k, score of @scores
score.final = {}
for dimension in @dimensions[0..3]
score.final[dimension] = (score.confirmed[dimension] || 0) + (score.judged[dimension] || 0)
calcPopularity: (scores) ->
popularities = for k, score of @scores
{ key: k, popularity: (score.confirmed?.popularity || 0) }
popularities.sort (a, b) -> b.popularity - a.popularity
rank = popularities.length
for popularity in popularities
score = @scores[popularity.key]
score.final.popularity = 2 + 8 * (rank-- / popularities.length)
calcOverall: ->
for k, score of @scores
score.overall = 0
for dimension in @dimensions
score.overall += score.final[dimension]
where: (cond, fn) ->
initial = {}
for k in @dimensions
initial[k] = 0
Vote.group {
cond: cond
keys: ['team._id']
initial: initial
reduce: (row, memo) ->
memo.popularity += 1
# must be hard coded (passed as a string to mongo)
for dimension in ['utility', 'design', 'innovation', 'completeness']
memo[dimension] += parseInt row[dimension]
}, fn
dimensions: ['utility', 'design', 'innovation', 'completeness', 'popularity']
types: ['unconfirmed', 'confirmed', 'judged', 'all']
_.extend ScoreCalculator, {
calculate: (fn) ->
(new ScoreCalculator).calculate fn
}
nko.ScoreCalculator = ScoreCalculator
class Person
constructor: (options) ->
@name = options?.name or ''
@email = options?.email or ''
@link = options?.link or ''
@github = options?.github or ''
@heroku = options?.heroku or ''
@joyent = options?.joyent or ''
@password = @randomPassword()
@new_password = true
@confirmed = options?.confirmed or false
@confirmKey = Math.uuid()
@type = options?.type # 'Judge', 'Voter', 'Participant'
@description = options?.description or ''
@signature = options?.signature or ''
@token = Math.uuid()
@calculateHashes()
admin: ->
@confirmed and /\@nodeknockout\.com$/.test(@email)
displayName: ->
@name or @email.replace(/\@.*$/,'')
firstName: ->
@displayName().split(' ')[0]
resetPassword: (fn) ->
@password = @randomPassword()
@new_password = true
@calculateHashes()
@save (error, res) =>
# TODO get this into a view
@sendEmail "Password reset for Node.js Knockout", """
Hi,
You (or somebody like you) reset the password for this email address.
Here are your new credentials:
email: #{@email}
password: #{@password}
Thanks!
The Node.js Knockout Organizers
""", fn
inviteTo: (team, fn) ->
@sendEmail "You've been invited to Node.js Knockout", """
Hi,
You've been invited to the #{team.name} Node.js Knockout team!
Here are your credentials:
email: #{@email}
#{if @password then 'password: ' + @password else 'and whatever password you already set'}
You still need to complete your registration.
Please sign in at: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@password} to do so.
Thanks!
The Node.js Knockout Organizers
Node.js Knockout is a 48-hour programming contest using node.js from Aug 28-29, 2010.
""", fn
welcomeVoter: (fn) ->
# TODO get this into a view
@sendEmail "Thanks for voting in Node.js Knockout", """
Hi,
You (or somebody like you) used this email address to vote in Node.js Knockout, so we created an account for you.
Here are your credentials:
email: #{@email}
password: #{@password}
Please sign in to confirm your votes: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@password}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
notifyAboutReply: (vote, reply, fn) ->
@sendEmail "#{reply.person.name} Replied to your Node.js Knockout Vote", """
Hi,
#{reply.person.name} replied to #{if @id() is vote.person.id() then 'your' else 'a'} vote for #{vote.team.name}, writing:
"#{reply.body}"
You can respond at: http://nodeknockout.com/teams/#{vote.team.toParam()}##{vote.id()}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
"""
sendConfirmKey: (fn) ->
@sendEmail "Confirm your Node.js Knockout Email", """
Hi,
You (or somebody like you) requested we resend your Node.js Knockout email confirmation code.
Your confirmaton code is: #{@confirmKey}
You can confirm your email at: http://nodeknockout.com/people/#{@toParam()}/confirm?confirmKey=#{@confirmKey}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
sendEmail: (subject, message, fn) ->
http.post 'http://www.postalgone.com/mail',
{ sender: '"Node.js Knockout" <mail@nodeknockout.com>',
from: 'all@nodeknockout.com',
to: @email,
subject: subject,
body: message }, (error, body, response) ->
fn()
confirmVotes: (fn) ->
return fn() if Date.now() > Date.UTC(2010, 8, 3, 7, 0, 0)
Vote.updateAll { 'person._id': @_id, confirmed: false }, { $set: { confirmed: true }}, fn
loadTeams: (fn) ->
Team.all { 'members._id': @_id }, (error, teams) =>
fn error if error?
@teams = teams or []
fn(error, @teams)
loadVotes: (fn) ->
Vote.all { 'person._id': @_id }, (error, votes) =>
fn error if error?
@votes = votes or []
Vote.loadTeams votes, (error, teams) =>
fn error if error?
fn null, @votes
authKey: ->
@id() + ':' + @token
logout: (fn) ->
@token = null
@save fn
validate: ->
['Invalid email address'] unless validEmail @email
beforeSave: (fn) ->
@email = @email?.trim()?.toLowerCase()
@calculateHashes()
fn()
setPassword: (password) ->
# overwrite the default password
@passwordHash = md5 password
@password = ''
calculateHashes: ->
@emailHash = md5 @email
@passwordHash = md5 @password if @password
# http://e-huned.com/2008/10/13/random-pronounceable-strings-in-ruby/
randomPassword: ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('')
vowels = 'aeiou'.split('')
consonants = _.without alphabet, vowels...
syllables = for i in [0..2]
consonants[Math.floor consonants.length * Math.random()] +
vowels[Math.floor vowels.length * Math.random()] +
alphabet[Math.floor alphabet.length * Math.random()]
syllables.join ''
login: (fn) ->
@token = Math.uuid()
@confirmed ?= true # grandfather old people in
if @new_password or @verifiedConfirmKey
confirm_votes = true
@confirmed = true
@confirmKey = Math.uuid()
@new_password = false
delete @verifiedConfirmKey
@save (errors, resp) =>
if confirm_votes
# TODO flash "your votes have been confirmed"
@confirmVotes (errors) =>
fn errors, this
else
fn null, this
_.extend Person, {
login: (credentials, fn) ->
return fn ['Invalid email address'] unless validEmail credentials.email
@first { email: credentials.email.trim().toLowerCase() }, (error, person) ->
return fn ['Unknown email'] unless person?
return fn ['Invalid password'] unless person.passwordHash is md5 credentials.password
person.login fn
firstByAuthKey: (authKey, fn) ->
[id, token] = authKey.split ':' if authKey?
return fn null, null unless id and token
query = Mongo.queryify id
query.token = token
@first query, fn
}
nko.Person = Person
class Vote
updateable_attributes: ['utility', 'design', 'innovation', 'completeness', 'comment']
constructor: (options, request) ->
@team = options?.team
@utility = parseInt options?.utility
@design = parseInt options?.design
@innovation = parseInt options?.innovation
@completeness = parseInt options?.completeness
@comment = options?.comment
@person = options?.person
@email = options?.email?.trim()?.toLowerCase() || @person?.email
@confirmed = !! @person?.confirmed
@remoteAddress = request?.socket?.remoteAddress
@remotePort = request?.socket?.remotePort
@userAgent = request?.headers?['user-agent']
@referer = request?.headers?['referer']
@accept = request?.headers?['accept']
@requestAt = options?.requestAt
@renderAt = options?.renderAt
@hoverAt = options?.hoverAt
@responseAt = options?.responseAt
@createdAt = @updatedAt = new Date()
beforeSave: (fn) ->
if !@person?
Person.firstOrNew { email: @email }, (error, voter) =>
return fn ['Unauthorized'] unless voter.isNew()
return fn ['"+" not allowed in voter email address'] if @email.split('@')[0].match /\+/
@person = voter
@person.type = 'Voter'
@person.save (error, person) =>
return fn error if error?
@person.welcomeVoter fn
else
@confirmed = !! @person?.confirmed
if @isNew()
@checkDuplicate fn
else
@updatedAt = new Date()
fn()
checkDuplicate: (fn) ->
Vote.firstByTeamAndPerson @team, @person, (errors, existing) =>
return fn errors if errors?.length
return fn ["Duplicate"] if existing?
fn()
beforeInstantiate: (fn) ->
Person.first { _id: @person._id }, (error, voter) =>
@person = voter
Reply.all { 'vote._id': @_id }, { sort: [['createdAt', 1]]}, (error, replies) =>
@replies = replies || []
fn()
instantiateReplyers: ->
pool = _.inject @team.members, {}, ((memo, person) -> memo[person.id()] = person; memo)
pool[@person.id()] = @person
@replies ||= []
for reply in @replies
reply.person = pool[reply.person.id()]
notifyPeopleAboutReply: (reply) ->
for m in @team.members when m.id() isnt reply.person.id()
m.notifyAboutReply this, reply, ->
if reply.person.id() isnt @person.id()
@person.notifyAboutReply this, reply, ->
validate: ->
errors = []
errors.push 'Invalid vote. Ballot stuffing attempt?' if @isNew() and @looksFishy()
for dimension in [ 'Utility', 'Design', 'Innovation', 'Completeness' ]
errors.push "#{dimension} should be between 1 and 5 stars" unless 1 <= this[dimension.toLowerCase()] <= 5
errors.push 'Invalid email address' unless validEmail @email
errors
looksFishy: ->
(!@userAgent or
!(parseURL(@referer).hostname in ['nodeknockout.com', 'localhost', 'knockout.no.de', 'pulp.local']) or
!(@requestAt < @responseAt) or !(@renderAt < @hoverAt))
_.extend Vote, {
firstByTeamAndPerson: (team, person, fn) ->
Vote.first { 'team._id': team._id, 'person._id': person._id }, fn
loadTeams: (votes, fn) ->
teamIds = _(votes).chain().pluck('team').pluck('_id').value()
Team.all { _id: { $in: teamIds }}, (error, teams) ->
fn error if error?
# TODO gross
teamHash = _.inject teams, {}, ((memo, team) -> memo[team._id.id] = team; memo)
for vote in votes
vote.team = teamHash[vote.team._id.id]
vote.instantiateReplyers()
fn null, teams
}
nko.Vote = Vote
class Reply
constructor: (options) ->
@person = options?.person
@vote = options?.vote
@body = options.body || ''
@createdAt = @updatedAt = new Date()
validate: ->
['Reply cannot be blank'] unless @body
nko.Reply = Reply
Mongo.blessAll nko
nko.Mongo = Mongo
Team::toParam = -> @slug
Team.fromParam = (id, options, fn) ->
if id.length == 24
@first { '$or': [ { slug: id }, Mongo.queryify(id) ] }, options, fn
else
@first { slug: id }, options, fn
_.extend exports, nko
| 144045 | Mongo = require('./mongo').Mongo
http = require('./http')
sys = require 'sys'
crypto = require 'crypto'
require '../public/javascripts/Math.uuid'
nko = {}
md5 = (str) ->
hash = crypto.createHash 'md5'
hash.update str
hash.digest 'hex'
validEmail = (email) ->
/^[a-zA-Z0-9+._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/.test email
escapeURL = require('querystring').escape
parseURL = require('url').parse
class Team
serializable_attributes: ['score']
build: (options) ->
@name = options?.name or ''
@createdAt = new Date()
@application = options?.application or ''
@description = options?.description or ''
@colophon = options?.colophon or ''
@link = options?.link or ''
@url = options?.url or ''
@joyentCode = options?.joyentCode or ''
@lastDeployedTo = options?.lastDeployedTo or ''
@lastDeployedAt = options?.lastDeployedAt
@deployHeads = options?.deployHeads or []
constructor: (options, fn) ->
@build options
@token = Math.uuid()
@setMembers options?.emails, fn
# TODO DRY
authKey: ->
@id() + ':' + @token
hasMember: (member) ->
return false unless member?
_.include _.invoke(@members, 'id'), member.id()
emails: ->
_.pluck @members, 'email'
validate: ->
errors = []
errors.push 'Must have team name' unless @name
errors.push 'Team needs at least one member' unless @members?.length
errors.concat _.compact _.flatten [member.validate() for member in @members]
beforeSave: (fn) ->
@generateSlug =>
@generateDeploySlugs()
threads = @members.length
return fn() unless threads
for member in @members
member.save (error, res) ->
fn() if --threads is 0
beforeInstantiate: (fn) ->
query = { _id: { $in: _.pluck @members, '_id' }}
Person.all query, (error, members) =>
@members = members
@invited = _.select @members, (m) -> m.name == ''
fn()
setMembers: (emails, fn) ->
emails = _.compact emails or []
@members = or []
oldEmails = @emails()
keepEmails = _.intersect emails, oldEmails
@members = _.select @members, (m) ->
_.include keepEmails, m.email
newEmails = _.without emails, oldEmails...
threads = newEmails.length
return process.nextTick fn unless threads
for email in newEmails
Person.firstOrNew { email: email }, (error, member) =>
@members.push member
member.type or= 'Participant'
member.inviteTo this, ->
fn() if --threads is 0
generateSlug: (fn, attempt) ->
@slug = attempt || @name.toLowerCase().replace(/\W+/g, '-').replace(/^-|-$/, '')
Team.fromParam @slug, (error, existing) =>
if !existing? or existing.id() == @id()
fn() # no conflicts
else
@generateSlug fn, @slug + '-' # try with another -
generateDeploySlugs: ->
@joyentSlug or= @slug.replace(/^(\d)/, 'ko-$1').replace(/_/g, '-').substring(0, 30)
@herokuSlug or= 'nko-' + @slug.replace(/_/g, '-').substring(0, 26)
nko.Team = Team
class ScoreCalculator
calculate: (fn) ->
@select =>
@merge()
@zero()
@calcConfirmed()
@average()
@calcFinal()
@calcPopularity()
@calcOverall()
fn @scores
select: (fn) ->
threads = 4
@where {}, (error, all) =>
@all = all
fn() if --threads is 0
@where { confirmed: false }, (error, unconfirmed) =>
@unconfirmed = unconfirmed
fn() if --threads is 0
@where { confirmed: true }, (error, confirmed) =>
@confirmed = confirmed
fn() if --threads is 0
Person.all { type: 'Judge' }, (error, judges) =>
judge_ids = _.pluck(judges, '_id');
@where { 'person._id': { $in: judge_ids }}, (error, judged) =>
@judged = judged
fn() if --threads is 0
merge: ->
@scores = {}
for type in @types
for score in this[type]
@scores[score['team._id']] ||= {}
@scores[score['team._id']][type] = score
zero: ->
for k, score of @scores
for type in @types
score[type] ?= {}
delete score[type]['team._id']
for dimension in @dimensions
score[type][dimension] ?= 0
calcConfirmed: ->
for k, score of @scores
for dimension in @dimensions
score.confirmed[dimension] -= score.judged[dimension]
average: ->
for k, score of @scores
for type in @types
for dimension in @dimensions[0..3]
score[type][dimension] = score[type][dimension] / score[type].popularity
calcFinal: ->
for k, score of @scores
score.final = {}
for dimension in @dimensions[0..3]
score.final[dimension] = (score.confirmed[dimension] || 0) + (score.judged[dimension] || 0)
calcPopularity: (scores) ->
popularities = for k, score of @scores
{ key: k, popularity: (score.confirmed?.popularity || 0) }
popularities.sort (a, b) -> b.popularity - a.popularity
rank = popularities.length
for popularity in popularities
score = @scores[popularity.key]
score.final.popularity = 2 + 8 * (rank-- / popularities.length)
calcOverall: ->
for k, score of @scores
score.overall = 0
for dimension in @dimensions
score.overall += score.final[dimension]
where: (cond, fn) ->
initial = {}
for k in @dimensions
initial[k] = 0
Vote.group {
cond: cond
keys: ['team._id']
initial: initial
reduce: (row, memo) ->
memo.popularity += 1
# must be hard coded (passed as a string to mongo)
for dimension in ['utility', 'design', 'innovation', 'completeness']
memo[dimension] += parseInt row[dimension]
}, fn
dimensions: ['utility', 'design', 'innovation', 'completeness', 'popularity']
types: ['unconfirmed', 'confirmed', 'judged', 'all']
_.extend ScoreCalculator, {
calculate: (fn) ->
(new ScoreCalculator).calculate fn
}
nko.ScoreCalculator = ScoreCalculator
class Person
constructor: (options) ->
@name = options?.name or ''
@email = options?.email or ''
@link = options?.link or ''
@github = options?.github or ''
@heroku = options?.heroku or ''
@joyent = options?.joyent or ''
@password = <PASSWORD>()
@new_password = true
@confirmed = options?.confirmed or false
@confirmKey = Math.uuid()
@type = options?.type # 'Judge', 'Voter', 'Participant'
@description = options?.description or ''
@signature = options?.signature or ''
@token = Math.uuid()
@calculateHashes()
admin: ->
@confirmed and /\@nodeknockout\.com$/.test(@email)
displayName: ->
@name or @email.replace(/\@.*$/,'')
firstName: ->
@displayName().split(' ')[0]
resetPassword: (fn) ->
@password = <PASSWORD>()
@new_password = true
@calculateHashes()
@save (error, res) =>
# TODO get this into a view
@sendEmail "Password reset for Node.js Knockout", """
Hi,
You (or somebody like you) reset the password for this email address.
Here are your new credentials:
email: #{@email}
password: <PASSWORD>}
Thanks!
The Node.js Knockout Organizers
""", fn
inviteTo: (team, fn) ->
@sendEmail "You've been invited to Node.js Knockout", """
Hi,
You've been invited to the #{team.name} Node.js Knockout team!
Here are your credentials:
email: #{@email}
#{if @password then 'password: ' + @password else 'and whatever password you already set'}
You still need to complete your registration.
Please sign in at: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@password} to do so.
Thanks!
The Node.js Knockout Organizers
Node.js Knockout is a 48-hour programming contest using node.js from Aug 28-29, 2010.
""", fn
welcomeVoter: (fn) ->
# TODO get this into a view
@sendEmail "Thanks for voting in Node.js Knockout", """
Hi,
You (or somebody like you) used this email address to vote in Node.js Knockout, so we created an account for you.
Here are your credentials:
email: #{@email}
password: <PASSWORD>}
Please sign in to confirm your votes: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@<PASSWORD>}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
notifyAboutReply: (vote, reply, fn) ->
@sendEmail "#{reply.person.name} Replied to your Node.js Knockout Vote", """
Hi,
#{reply.person.name} replied to #{if @id() is vote.person.id() then 'your' else 'a'} vote for #{vote.team.name}, writing:
"#{reply.body}"
You can respond at: http://nodeknockout.com/teams/#{vote.team.toParam()}##{vote.id()}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
"""
sendConfirmKey: (fn) ->
@sendEmail "Confirm your Node.js Knockout Email", """
Hi,
You (or somebody like you) requested we resend your Node.js Knockout email confirmation code.
Your confirmaton code is: #{@confirmKey}
You can confirm your email at: http://nodeknockout.com/people/#{@toParam()}/confirm?confirmKey=#{@confirmKey}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
sendEmail: (subject, message, fn) ->
http.post 'http://www.postalgone.com/mail',
{ sender: '"Node.js Knockout" <<EMAIL>>',
from: '<EMAIL>',
to: @email,
subject: subject,
body: message }, (error, body, response) ->
fn()
confirmVotes: (fn) ->
return fn() if Date.now() > Date.UTC(2010, 8, 3, 7, 0, 0)
Vote.updateAll { 'person._id': @_id, confirmed: false }, { $set: { confirmed: true }}, fn
loadTeams: (fn) ->
Team.all { 'members._id': @_id }, (error, teams) =>
fn error if error?
@teams = teams or []
fn(error, @teams)
loadVotes: (fn) ->
Vote.all { 'person._id': @_id }, (error, votes) =>
fn error if error?
@votes = votes or []
Vote.loadTeams votes, (error, teams) =>
fn error if error?
fn null, @votes
authKey: ->
@id() + ':' + @token
logout: (fn) ->
@token = null
@save fn
validate: ->
['Invalid email address'] unless validEmail @email
beforeSave: (fn) ->
@email = @email?.trim()?.toLowerCase()
@calculateHashes()
fn()
setPassword: (password) ->
# overwrite the default password
@passwordHash = md5 password
@password = ''
calculateHashes: ->
@emailHash = md5 @email
@passwordHash = md5 @password if @password
# http://e-huned.com/2008/10/13/random-pronounceable-strings-in-ruby/
randomPassword: ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('')
vowels = 'aeiou'.split('')
consonants = _.without alphabet, vowels...
syllables = for i in [0..2]
consonants[Math.floor consonants.length * Math.random()] +
vowels[Math.floor vowels.length * Math.random()] +
alphabet[Math.floor alphabet.length * Math.random()]
syllables.join ''
login: (fn) ->
@token = <KEY>()
@confirmed ?= true # grandfather old people in
if @new_password or @verifiedConfirmKey
confirm_votes = true
@confirmed = true
@confirmKey = <KEY>()
@new_password = false
delete @verifiedConfirmKey
@save (errors, resp) =>
if confirm_votes
# TODO flash "your votes have been confirmed"
@confirmVotes (errors) =>
fn errors, this
else
fn null, this
_.extend Person, {
login: (credentials, fn) ->
return fn ['Invalid email address'] unless validEmail credentials.email
@first { email: credentials.email.trim().toLowerCase() }, (error, person) ->
return fn ['Unknown email'] unless person?
return fn ['Invalid password'] unless person.passwordHash is md5 credentials.password
person.login fn
firstByAuthKey: (authKey, fn) ->
[id, token] = authKey.split ':' if authKey?
return fn null, null unless id and token
query = Mongo.queryify id
query.token = token
@first query, fn
}
nko.Person = Person
class Vote
updateable_attributes: ['utility', 'design', 'innovation', 'completeness', 'comment']
constructor: (options, request) ->
@team = options?.team
@utility = parseInt options?.utility
@design = parseInt options?.design
@innovation = parseInt options?.innovation
@completeness = parseInt options?.completeness
@comment = options?.comment
@person = options?.person
@email = options?.email?.trim()?.toLowerCase() || @person?.email
@confirmed = !! @person?.confirmed
@remoteAddress = request?.socket?.remoteAddress
@remotePort = request?.socket?.remotePort
@userAgent = request?.headers?['user-agent']
@referer = request?.headers?['referer']
@accept = request?.headers?['accept']
@requestAt = options?.requestAt
@renderAt = options?.renderAt
@hoverAt = options?.hoverAt
@responseAt = options?.responseAt
@createdAt = @updatedAt = new Date()
beforeSave: (fn) ->
if !@person?
Person.firstOrNew { email: @email }, (error, voter) =>
return fn ['Unauthorized'] unless voter.isNew()
return fn ['"+" not allowed in voter email address'] if @email.split('@')[0].match /\+/
@person = voter
@person.type = 'Voter'
@person.save (error, person) =>
return fn error if error?
@person.welcomeVoter fn
else
@confirmed = !! @person?.confirmed
if @isNew()
@checkDuplicate fn
else
@updatedAt = new Date()
fn()
checkDuplicate: (fn) ->
Vote.firstByTeamAndPerson @team, @person, (errors, existing) =>
return fn errors if errors?.length
return fn ["Duplicate"] if existing?
fn()
beforeInstantiate: (fn) ->
Person.first { _id: @person._id }, (error, voter) =>
@person = voter
Reply.all { 'vote._id': @_id }, { sort: [['createdAt', 1]]}, (error, replies) =>
@replies = replies || []
fn()
instantiateReplyers: ->
pool = _.inject @team.members, {}, ((memo, person) -> memo[person.id()] = person; memo)
pool[@person.id()] = @person
@replies ||= []
for reply in @replies
reply.person = pool[reply.person.id()]
notifyPeopleAboutReply: (reply) ->
for m in @team.members when m.id() isnt reply.person.id()
m.notifyAboutReply this, reply, ->
if reply.person.id() isnt @person.id()
@person.notifyAboutReply this, reply, ->
validate: ->
errors = []
errors.push 'Invalid vote. Ballot stuffing attempt?' if @isNew() and @looksFishy()
for dimension in [ 'Utility', 'Design', 'Innovation', 'Completeness' ]
errors.push "#{dimension} should be between 1 and 5 stars" unless 1 <= this[dimension.toLowerCase()] <= 5
errors.push 'Invalid email address' unless validEmail @email
errors
looksFishy: ->
(!@userAgent or
!(parseURL(@referer).hostname in ['nodeknockout.com', 'localhost', 'knockout.no.de', 'pulp.local']) or
!(@requestAt < @responseAt) or !(@renderAt < @hoverAt))
_.extend Vote, {
firstByTeamAndPerson: (team, person, fn) ->
Vote.first { 'team._id': team._id, 'person._id': person._id }, fn
loadTeams: (votes, fn) ->
teamIds = _(votes).chain().pluck('team').pluck('_id').value()
Team.all { _id: { $in: teamIds }}, (error, teams) ->
fn error if error?
# TODO gross
teamHash = _.inject teams, {}, ((memo, team) -> memo[team._id.id] = team; memo)
for vote in votes
vote.team = teamHash[vote.team._id.id]
vote.instantiateReplyers()
fn null, teams
}
nko.Vote = Vote
class Reply
constructor: (options) ->
@person = options?.person
@vote = options?.vote
@body = options.body || ''
@createdAt = @updatedAt = new Date()
validate: ->
['Reply cannot be blank'] unless @body
nko.Reply = Reply
Mongo.blessAll nko
nko.Mongo = Mongo
Team::toParam = -> @slug
Team.fromParam = (id, options, fn) ->
if id.length == 24
@first { '$or': [ { slug: id }, Mongo.queryify(id) ] }, options, fn
else
@first { slug: id }, options, fn
_.extend exports, nko
| true | Mongo = require('./mongo').Mongo
http = require('./http')
sys = require 'sys'
crypto = require 'crypto'
require '../public/javascripts/Math.uuid'
nko = {}
md5 = (str) ->
hash = crypto.createHash 'md5'
hash.update str
hash.digest 'hex'
validEmail = (email) ->
/^[a-zA-Z0-9+._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/.test email
escapeURL = require('querystring').escape
parseURL = require('url').parse
class Team
serializable_attributes: ['score']
build: (options) ->
@name = options?.name or ''
@createdAt = new Date()
@application = options?.application or ''
@description = options?.description or ''
@colophon = options?.colophon or ''
@link = options?.link or ''
@url = options?.url or ''
@joyentCode = options?.joyentCode or ''
@lastDeployedTo = options?.lastDeployedTo or ''
@lastDeployedAt = options?.lastDeployedAt
@deployHeads = options?.deployHeads or []
constructor: (options, fn) ->
@build options
@token = Math.uuid()
@setMembers options?.emails, fn
# TODO DRY
authKey: ->
@id() + ':' + @token
hasMember: (member) ->
return false unless member?
_.include _.invoke(@members, 'id'), member.id()
emails: ->
_.pluck @members, 'email'
validate: ->
errors = []
errors.push 'Must have team name' unless @name
errors.push 'Team needs at least one member' unless @members?.length
errors.concat _.compact _.flatten [member.validate() for member in @members]
beforeSave: (fn) ->
@generateSlug =>
@generateDeploySlugs()
threads = @members.length
return fn() unless threads
for member in @members
member.save (error, res) ->
fn() if --threads is 0
beforeInstantiate: (fn) ->
query = { _id: { $in: _.pluck @members, '_id' }}
Person.all query, (error, members) =>
@members = members
@invited = _.select @members, (m) -> m.name == ''
fn()
setMembers: (emails, fn) ->
emails = _.compact emails or []
@members = or []
oldEmails = @emails()
keepEmails = _.intersect emails, oldEmails
@members = _.select @members, (m) ->
_.include keepEmails, m.email
newEmails = _.without emails, oldEmails...
threads = newEmails.length
return process.nextTick fn unless threads
for email in newEmails
Person.firstOrNew { email: email }, (error, member) =>
@members.push member
member.type or= 'Participant'
member.inviteTo this, ->
fn() if --threads is 0
generateSlug: (fn, attempt) ->
@slug = attempt || @name.toLowerCase().replace(/\W+/g, '-').replace(/^-|-$/, '')
Team.fromParam @slug, (error, existing) =>
if !existing? or existing.id() == @id()
fn() # no conflicts
else
@generateSlug fn, @slug + '-' # try with another -
generateDeploySlugs: ->
@joyentSlug or= @slug.replace(/^(\d)/, 'ko-$1').replace(/_/g, '-').substring(0, 30)
@herokuSlug or= 'nko-' + @slug.replace(/_/g, '-').substring(0, 26)
nko.Team = Team
class ScoreCalculator
calculate: (fn) ->
@select =>
@merge()
@zero()
@calcConfirmed()
@average()
@calcFinal()
@calcPopularity()
@calcOverall()
fn @scores
select: (fn) ->
threads = 4
@where {}, (error, all) =>
@all = all
fn() if --threads is 0
@where { confirmed: false }, (error, unconfirmed) =>
@unconfirmed = unconfirmed
fn() if --threads is 0
@where { confirmed: true }, (error, confirmed) =>
@confirmed = confirmed
fn() if --threads is 0
Person.all { type: 'Judge' }, (error, judges) =>
judge_ids = _.pluck(judges, '_id');
@where { 'person._id': { $in: judge_ids }}, (error, judged) =>
@judged = judged
fn() if --threads is 0
merge: ->
@scores = {}
for type in @types
for score in this[type]
@scores[score['team._id']] ||= {}
@scores[score['team._id']][type] = score
zero: ->
for k, score of @scores
for type in @types
score[type] ?= {}
delete score[type]['team._id']
for dimension in @dimensions
score[type][dimension] ?= 0
calcConfirmed: ->
for k, score of @scores
for dimension in @dimensions
score.confirmed[dimension] -= score.judged[dimension]
average: ->
for k, score of @scores
for type in @types
for dimension in @dimensions[0..3]
score[type][dimension] = score[type][dimension] / score[type].popularity
calcFinal: ->
for k, score of @scores
score.final = {}
for dimension in @dimensions[0..3]
score.final[dimension] = (score.confirmed[dimension] || 0) + (score.judged[dimension] || 0)
calcPopularity: (scores) ->
popularities = for k, score of @scores
{ key: k, popularity: (score.confirmed?.popularity || 0) }
popularities.sort (a, b) -> b.popularity - a.popularity
rank = popularities.length
for popularity in popularities
score = @scores[popularity.key]
score.final.popularity = 2 + 8 * (rank-- / popularities.length)
calcOverall: ->
for k, score of @scores
score.overall = 0
for dimension in @dimensions
score.overall += score.final[dimension]
where: (cond, fn) ->
initial = {}
for k in @dimensions
initial[k] = 0
Vote.group {
cond: cond
keys: ['team._id']
initial: initial
reduce: (row, memo) ->
memo.popularity += 1
# must be hard coded (passed as a string to mongo)
for dimension in ['utility', 'design', 'innovation', 'completeness']
memo[dimension] += parseInt row[dimension]
}, fn
dimensions: ['utility', 'design', 'innovation', 'completeness', 'popularity']
types: ['unconfirmed', 'confirmed', 'judged', 'all']
_.extend ScoreCalculator, {
calculate: (fn) ->
(new ScoreCalculator).calculate fn
}
nko.ScoreCalculator = ScoreCalculator
class Person
constructor: (options) ->
@name = options?.name or ''
@email = options?.email or ''
@link = options?.link or ''
@github = options?.github or ''
@heroku = options?.heroku or ''
@joyent = options?.joyent or ''
@password = PI:PASSWORD:<PASSWORD>END_PI()
@new_password = true
@confirmed = options?.confirmed or false
@confirmKey = Math.uuid()
@type = options?.type # 'Judge', 'Voter', 'Participant'
@description = options?.description or ''
@signature = options?.signature or ''
@token = Math.uuid()
@calculateHashes()
admin: ->
@confirmed and /\@nodeknockout\.com$/.test(@email)
displayName: ->
@name or @email.replace(/\@.*$/,'')
firstName: ->
@displayName().split(' ')[0]
resetPassword: (fn) ->
@password = PI:PASSWORD:<PASSWORD>END_PI()
@new_password = true
@calculateHashes()
@save (error, res) =>
# TODO get this into a view
@sendEmail "Password reset for Node.js Knockout", """
Hi,
You (or somebody like you) reset the password for this email address.
Here are your new credentials:
email: #{@email}
password: PI:PASSWORD:<PASSWORD>END_PI}
Thanks!
The Node.js Knockout Organizers
""", fn
inviteTo: (team, fn) ->
@sendEmail "You've been invited to Node.js Knockout", """
Hi,
You've been invited to the #{team.name} Node.js Knockout team!
Here are your credentials:
email: #{@email}
#{if @password then 'password: ' + @password else 'and whatever password you already set'}
You still need to complete your registration.
Please sign in at: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@password} to do so.
Thanks!
The Node.js Knockout Organizers
Node.js Knockout is a 48-hour programming contest using node.js from Aug 28-29, 2010.
""", fn
welcomeVoter: (fn) ->
# TODO get this into a view
@sendEmail "Thanks for voting in Node.js Knockout", """
Hi,
You (or somebody like you) used this email address to vote in Node.js Knockout, so we created an account for you.
Here are your credentials:
email: #{@email}
password: PI:PASSWORD:<PASSWORD>END_PI}
Please sign in to confirm your votes: http://nodeknockout.com/login?email=#{escapeURL @email}&password=#{@PI:PASSWORD:<PASSWORD>END_PI}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
notifyAboutReply: (vote, reply, fn) ->
@sendEmail "#{reply.person.name} Replied to your Node.js Knockout Vote", """
Hi,
#{reply.person.name} replied to #{if @id() is vote.person.id() then 'your' else 'a'} vote for #{vote.team.name}, writing:
"#{reply.body}"
You can respond at: http://nodeknockout.com/teams/#{vote.team.toParam()}##{vote.id()}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
"""
sendConfirmKey: (fn) ->
@sendEmail "Confirm your Node.js Knockout Email", """
Hi,
You (or somebody like you) requested we resend your Node.js Knockout email confirmation code.
Your confirmaton code is: #{@confirmKey}
You can confirm your email at: http://nodeknockout.com/people/#{@toParam()}/confirm?confirmKey=#{@confirmKey}
Thanks!
The Node.js Knockout Organizers
http://nodeknockout.com/
""", fn
sendEmail: (subject, message, fn) ->
http.post 'http://www.postalgone.com/mail',
{ sender: '"Node.js Knockout" <PI:EMAIL:<EMAIL>END_PI>',
from: 'PI:EMAIL:<EMAIL>END_PI',
to: @email,
subject: subject,
body: message }, (error, body, response) ->
fn()
confirmVotes: (fn) ->
return fn() if Date.now() > Date.UTC(2010, 8, 3, 7, 0, 0)
Vote.updateAll { 'person._id': @_id, confirmed: false }, { $set: { confirmed: true }}, fn
loadTeams: (fn) ->
Team.all { 'members._id': @_id }, (error, teams) =>
fn error if error?
@teams = teams or []
fn(error, @teams)
loadVotes: (fn) ->
Vote.all { 'person._id': @_id }, (error, votes) =>
fn error if error?
@votes = votes or []
Vote.loadTeams votes, (error, teams) =>
fn error if error?
fn null, @votes
authKey: ->
@id() + ':' + @token
logout: (fn) ->
@token = null
@save fn
validate: ->
['Invalid email address'] unless validEmail @email
beforeSave: (fn) ->
@email = @email?.trim()?.toLowerCase()
@calculateHashes()
fn()
setPassword: (password) ->
# overwrite the default password
@passwordHash = md5 password
@password = ''
calculateHashes: ->
@emailHash = md5 @email
@passwordHash = md5 @password if @password
# http://e-huned.com/2008/10/13/random-pronounceable-strings-in-ruby/
randomPassword: ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('')
vowels = 'aeiou'.split('')
consonants = _.without alphabet, vowels...
syllables = for i in [0..2]
consonants[Math.floor consonants.length * Math.random()] +
vowels[Math.floor vowels.length * Math.random()] +
alphabet[Math.floor alphabet.length * Math.random()]
syllables.join ''
login: (fn) ->
@token = PI:KEY:<KEY>END_PI()
@confirmed ?= true # grandfather old people in
if @new_password or @verifiedConfirmKey
confirm_votes = true
@confirmed = true
@confirmKey = PI:KEY:<KEY>END_PI()
@new_password = false
delete @verifiedConfirmKey
@save (errors, resp) =>
if confirm_votes
# TODO flash "your votes have been confirmed"
@confirmVotes (errors) =>
fn errors, this
else
fn null, this
_.extend Person, {
login: (credentials, fn) ->
return fn ['Invalid email address'] unless validEmail credentials.email
@first { email: credentials.email.trim().toLowerCase() }, (error, person) ->
return fn ['Unknown email'] unless person?
return fn ['Invalid password'] unless person.passwordHash is md5 credentials.password
person.login fn
firstByAuthKey: (authKey, fn) ->
[id, token] = authKey.split ':' if authKey?
return fn null, null unless id and token
query = Mongo.queryify id
query.token = token
@first query, fn
}
nko.Person = Person
class Vote
updateable_attributes: ['utility', 'design', 'innovation', 'completeness', 'comment']
constructor: (options, request) ->
@team = options?.team
@utility = parseInt options?.utility
@design = parseInt options?.design
@innovation = parseInt options?.innovation
@completeness = parseInt options?.completeness
@comment = options?.comment
@person = options?.person
@email = options?.email?.trim()?.toLowerCase() || @person?.email
@confirmed = !! @person?.confirmed
@remoteAddress = request?.socket?.remoteAddress
@remotePort = request?.socket?.remotePort
@userAgent = request?.headers?['user-agent']
@referer = request?.headers?['referer']
@accept = request?.headers?['accept']
@requestAt = options?.requestAt
@renderAt = options?.renderAt
@hoverAt = options?.hoverAt
@responseAt = options?.responseAt
@createdAt = @updatedAt = new Date()
beforeSave: (fn) ->
if !@person?
Person.firstOrNew { email: @email }, (error, voter) =>
return fn ['Unauthorized'] unless voter.isNew()
return fn ['"+" not allowed in voter email address'] if @email.split('@')[0].match /\+/
@person = voter
@person.type = 'Voter'
@person.save (error, person) =>
return fn error if error?
@person.welcomeVoter fn
else
@confirmed = !! @person?.confirmed
if @isNew()
@checkDuplicate fn
else
@updatedAt = new Date()
fn()
checkDuplicate: (fn) ->
Vote.firstByTeamAndPerson @team, @person, (errors, existing) =>
return fn errors if errors?.length
return fn ["Duplicate"] if existing?
fn()
beforeInstantiate: (fn) ->
Person.first { _id: @person._id }, (error, voter) =>
@person = voter
Reply.all { 'vote._id': @_id }, { sort: [['createdAt', 1]]}, (error, replies) =>
@replies = replies || []
fn()
instantiateReplyers: ->
pool = _.inject @team.members, {}, ((memo, person) -> memo[person.id()] = person; memo)
pool[@person.id()] = @person
@replies ||= []
for reply in @replies
reply.person = pool[reply.person.id()]
notifyPeopleAboutReply: (reply) ->
for m in @team.members when m.id() isnt reply.person.id()
m.notifyAboutReply this, reply, ->
if reply.person.id() isnt @person.id()
@person.notifyAboutReply this, reply, ->
validate: ->
errors = []
errors.push 'Invalid vote. Ballot stuffing attempt?' if @isNew() and @looksFishy()
for dimension in [ 'Utility', 'Design', 'Innovation', 'Completeness' ]
errors.push "#{dimension} should be between 1 and 5 stars" unless 1 <= this[dimension.toLowerCase()] <= 5
errors.push 'Invalid email address' unless validEmail @email
errors
looksFishy: ->
(!@userAgent or
!(parseURL(@referer).hostname in ['nodeknockout.com', 'localhost', 'knockout.no.de', 'pulp.local']) or
!(@requestAt < @responseAt) or !(@renderAt < @hoverAt))
_.extend Vote, {
firstByTeamAndPerson: (team, person, fn) ->
Vote.first { 'team._id': team._id, 'person._id': person._id }, fn
loadTeams: (votes, fn) ->
teamIds = _(votes).chain().pluck('team').pluck('_id').value()
Team.all { _id: { $in: teamIds }}, (error, teams) ->
fn error if error?
# TODO gross
teamHash = _.inject teams, {}, ((memo, team) -> memo[team._id.id] = team; memo)
for vote in votes
vote.team = teamHash[vote.team._id.id]
vote.instantiateReplyers()
fn null, teams
}
nko.Vote = Vote
class Reply
constructor: (options) ->
@person = options?.person
@vote = options?.vote
@body = options.body || ''
@createdAt = @updatedAt = new Date()
validate: ->
['Reply cannot be blank'] unless @body
nko.Reply = Reply
Mongo.blessAll nko
nko.Mongo = Mongo
Team::toParam = -> @slug
Team.fromParam = (id, options, fn) ->
if id.length == 24
@first { '$or': [ { slug: id }, Mongo.queryify(id) ] }, options, fn
else
@first { slug: id }, options, fn
_.extend exports, nko
|
[
{
"context": " <div>\n On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:\n </div>\n\n ",
"end": 683,
"score": 0.9998258948326111,
"start": 680,
"tag": "NAME",
"value": "Bob"
},
{
"context": "iv>\n On 11-Apr-2011, at 6:54 PM, Bob <bob@ex... | test/planerHtml.test.coffee | acterry/planer | 1 | {expect} = require 'chai'
fs = require 'fs'
jsdom = require 'jsdom'
path = require 'path'
planer = require '../src/planer'
describe 'planer#extractFromHtml', ->
before 'configure jsdom', ->
jsdom.defaultDocumentFeatures = {
FetchExternalResources: false,
ProcessExternalResources: false
}
@dom = new jsdom.JSDOM().window.document
it 'should return an empty body when given an empty body', ->
msgBody = ''
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('')
it 'should return a the text of a message with splitter inside blockqouote', ->
msgBody = """Reply
<blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
</div>
<div>
Test
</div>
</blockquote>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'should return a the text of a message with splitter outside blockqouote', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
</div>
<blockquote>
<div>
Test
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n\n</body></html>')
it 'should not be fooled by a regular blockquote', ->
msgBody = """Reply
<blockquote>Regular</blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
</div>
<blockquote>
<div>
<blockquote>Nested</blockquote>
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal("<html><body>Reply\n<blockquote> Regular </blockquote>\n\n</body></html>")
it 'should not be fooled by a regular blockquote', ->
msgBody = """
<html>
<body>
Reply
<div>
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
</div>
<div>
Test
</div>
</body>
</html>
"""
reply = """
<html><body>
Reply
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'handles invalid html', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
<blockquote>
<div>
Test
</div>
</blockquote>
</div>
<div/>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n<div> </div></body></html>')
it 'handles gmail quotes', ->
msgBody = """Reply
<div class="gmail_quote">
<div class="gmail_quote">
On 11-Apr-2011, at 6:54 PM, Bob <bob@example.com> wrote:
<div>
Test
</div>
</div>
</div>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'does not miss a disclaimer after a blockquote', ->
msgBody = """
<html>
<body>
<div>
<div>
message
</div>
<blockquote>
Quote
</blockquote>
</div>
<div>
disclaimer
</div>
</body>
</html>
"""
reply = "<html><body>\n <div>\n <div>\n message\n </div>\n \n </div>\n <div>\n disclaimer\n </div>\n \n</body></html>"
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "Date:"', ->
msgBody = """
<div>
message<br>
<div>
<hr>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:bob@example.com">bob@example.com</a><br>
From: <a href="mailto:rob@example.com">rob@example.com</a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div>
</div>
"""
reply = '<html><body><div>\n message<br>\n \n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "From:"', ->
msgBody = """<div>
message<br>
<div>
<hr>
From: <a href="mailto:bob@example.com">bob@example.com</a><br>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:rob@example.com">rob@example.com</a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div></div>
"""
reply = '<html><body><div>\nmessage<br>\n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'is not fooled if the reply shared a div with the quotation', ->
msgBody = """
<body>
<div>
Blah<br><br>
<hr>Date: Tue, 22 May 2012 18:29:16 -0600<br>
To: xx@hotmail.ca<br>
From: quickemail@ashleymadison.com<br>
Subject: You Have New Mail From x!<br><br>
</div>
</body>"""
reply = """
<html><body>
<div>
Blah<br><br>
</div>
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
describe 'examples from files', ->
absolutePath = (relativePath) ->
path.join(__dirname, relativePath)
it 'handles emails with numerous microsoft namespaces', ->
replySnippet = 'Lorem ipsum dolor sit amet'
originalMsgSnippet = 'Odio et pretium rutrum neque'
msgBody = fs.readFileSync(absolutePath('examples/html/microsoft-namespaces.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 365', ->
replySnippet = "I really hope that you're doing well!"
originalMsgSnippet = 'Do you like the holidays?'
msgBody = fs.readFileSync(absolutePath('examples/html/office-365.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from various Outlook versions', ->
replySnippet = 'We can talk tomorrow.'
originalMsgSnippet = "We'd love to set up a quick phone call with you"
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-mixed.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 American', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 International', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 American', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 International', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Apple iOS Mail', ->
replySnippet = "html reply"
originalMsgSnippet = 'original message from GMail'
msgBody = fs.readFileSync(absolutePath('examples/html/iosMail.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails reply chains involving multiple email clients', ->
replySnippet = "Here is the answer"
originalMsgSnippet = 'I am having trouble'
msgBody = fs.readFileSync(absolutePath('examples/html/mixedEmailClientReplyChain.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails Yahoo replies using the yahooo_quoted class', ->
replySnippet = "such a big problem"
originalMsgSnippet = 'new process'
msgBody = fs.readFileSync(absolutePath('examples/html/yahooMail2020.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
| 96005 | {expect} = require 'chai'
fs = require 'fs'
jsdom = require 'jsdom'
path = require 'path'
planer = require '../src/planer'
describe 'planer#extractFromHtml', ->
before 'configure jsdom', ->
jsdom.defaultDocumentFeatures = {
FetchExternalResources: false,
ProcessExternalResources: false
}
@dom = new jsdom.JSDOM().window.document
it 'should return an empty body when given an empty body', ->
msgBody = ''
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('')
it 'should return a the text of a message with splitter inside blockqouote', ->
msgBody = """Reply
<blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
</div>
<div>
Test
</div>
</blockquote>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'should return a the text of a message with splitter outside blockqouote', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
</div>
<blockquote>
<div>
Test
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n\n</body></html>')
it 'should not be fooled by a regular blockquote', ->
msgBody = """Reply
<blockquote>Regular</blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
</div>
<blockquote>
<div>
<blockquote>Nested</blockquote>
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal("<html><body>Reply\n<blockquote> Regular </blockquote>\n\n</body></html>")
it 'should not be fooled by a regular blockquote', ->
msgBody = """
<html>
<body>
Reply
<div>
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
</div>
<div>
Test
</div>
</body>
</html>
"""
reply = """
<html><body>
Reply
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'handles invalid html', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
<blockquote>
<div>
Test
</div>
</blockquote>
</div>
<div/>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n<div> </div></body></html>')
it 'handles gmail quotes', ->
msgBody = """Reply
<div class="gmail_quote">
<div class="gmail_quote">
On 11-Apr-2011, at 6:54 PM, <NAME> <<EMAIL>> wrote:
<div>
Test
</div>
</div>
</div>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'does not miss a disclaimer after a blockquote', ->
msgBody = """
<html>
<body>
<div>
<div>
message
</div>
<blockquote>
Quote
</blockquote>
</div>
<div>
disclaimer
</div>
</body>
</html>
"""
reply = "<html><body>\n <div>\n <div>\n message\n </div>\n \n </div>\n <div>\n disclaimer\n </div>\n \n</body></html>"
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "Date:"', ->
msgBody = """
<div>
message<br>
<div>
<hr>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:<EMAIL>"><EMAIL></a><br>
From: <a href="mailto:<EMAIL>"><EMAIL></a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div>
</div>
"""
reply = '<html><body><div>\n message<br>\n \n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "From:"', ->
msgBody = """<div>
message<br>
<div>
<hr>
From: <a href="mailto:<EMAIL>"><EMAIL></a><br>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:<EMAIL>"><EMAIL></a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div></div>
"""
reply = '<html><body><div>\nmessage<br>\n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'is not fooled if the reply shared a div with the quotation', ->
msgBody = """
<body>
<div>
Blah<br><br>
<hr>Date: Tue, 22 May 2012 18:29:16 -0600<br>
To: <EMAIL><br>
From: <EMAIL><br>
Subject: You Have New Mail From x!<br><br>
</div>
</body>"""
reply = """
<html><body>
<div>
Blah<br><br>
</div>
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
describe 'examples from files', ->
absolutePath = (relativePath) ->
path.join(__dirname, relativePath)
it 'handles emails with numerous microsoft namespaces', ->
replySnippet = 'Lorem ipsum dolor sit amet'
originalMsgSnippet = 'Odio et pretium rutrum neque'
msgBody = fs.readFileSync(absolutePath('examples/html/microsoft-namespaces.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 365', ->
replySnippet = "I really hope that you're doing well!"
originalMsgSnippet = 'Do you like the holidays?'
msgBody = fs.readFileSync(absolutePath('examples/html/office-365.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from various Outlook versions', ->
replySnippet = 'We can talk tomorrow.'
originalMsgSnippet = "We'd love to set up a quick phone call with you"
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-mixed.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 American', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 International', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 American', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 International', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Apple iOS Mail', ->
replySnippet = "html reply"
originalMsgSnippet = 'original message from GMail'
msgBody = fs.readFileSync(absolutePath('examples/html/iosMail.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails reply chains involving multiple email clients', ->
replySnippet = "Here is the answer"
originalMsgSnippet = 'I am having trouble'
msgBody = fs.readFileSync(absolutePath('examples/html/mixedEmailClientReplyChain.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails Yahoo replies using the yahooo_quoted class', ->
replySnippet = "such a big problem"
originalMsgSnippet = 'new process'
msgBody = fs.readFileSync(absolutePath('examples/html/yahooMail2020.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
| true | {expect} = require 'chai'
fs = require 'fs'
jsdom = require 'jsdom'
path = require 'path'
planer = require '../src/planer'
describe 'planer#extractFromHtml', ->
before 'configure jsdom', ->
jsdom.defaultDocumentFeatures = {
FetchExternalResources: false,
ProcessExternalResources: false
}
@dom = new jsdom.JSDOM().window.document
it 'should return an empty body when given an empty body', ->
msgBody = ''
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('')
it 'should return a the text of a message with splitter inside blockqouote', ->
msgBody = """Reply
<blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
</div>
<div>
Test
</div>
</blockquote>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'should return a the text of a message with splitter outside blockqouote', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
</div>
<blockquote>
<div>
Test
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n\n</body></html>')
it 'should not be fooled by a regular blockquote', ->
msgBody = """Reply
<blockquote>Regular</blockquote>
<div>
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
</div>
<blockquote>
<div>
<blockquote>Nested</blockquote>
</div>
</blockquote>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal("<html><body>Reply\n<blockquote> Regular </blockquote>\n\n</body></html>")
it 'should not be fooled by a regular blockquote', ->
msgBody = """
<html>
<body>
Reply
<div>
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
</div>
<div>
Test
</div>
</body>
</html>
"""
reply = """
<html><body>
Reply
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'handles invalid html', ->
msgBody = """Reply
<div>
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
<blockquote>
<div>
Test
</div>
</blockquote>
</div>
<div/>
"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n<div> </div></body></html>')
it 'handles gmail quotes', ->
msgBody = """Reply
<div class="gmail_quote">
<div class="gmail_quote">
On 11-Apr-2011, at 6:54 PM, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> wrote:
<div>
Test
</div>
</div>
</div>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal('<html><body>Reply\n</body></html>')
it 'does not miss a disclaimer after a blockquote', ->
msgBody = """
<html>
<body>
<div>
<div>
message
</div>
<blockquote>
Quote
</blockquote>
</div>
<div>
disclaimer
</div>
</body>
</html>
"""
reply = "<html><body>\n <div>\n <div>\n message\n </div>\n \n </div>\n <div>\n disclaimer\n </div>\n \n</body></html>"
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "Date:"', ->
msgBody = """
<div>
message<br>
<div>
<hr>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:PI:EMAIL:<EMAIL>END_PI">PI:EMAIL:<EMAIL>END_PI</a><br>
From: <a href="mailto:PI:EMAIL:<EMAIL>END_PI">PI:EMAIL:<EMAIL>END_PI</a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div>
</div>
"""
reply = '<html><body><div>\n message<br>\n \n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'removes the tag with a quotation block that starts with "From:"', ->
msgBody = """<div>
message<br>
<div>
<hr>
From: <a href="mailto:PI:EMAIL:<EMAIL>END_PI">PI:EMAIL:<EMAIL>END_PI</a><br>
Date: Fri, 23 Mar 2012 12:35:31 -0600<br>
To: <a href="mailto:PI:EMAIL:<EMAIL>END_PI">PI:EMAIL:<EMAIL>END_PI</a><br>
Subject: You Have New Mail From Mary!<br><br>
text
</div></div>
"""
reply = '<html><body><div>\nmessage<br>\n</div></body></html>'
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
it 'is not fooled if the reply shared a div with the quotation', ->
msgBody = """
<body>
<div>
Blah<br><br>
<hr>Date: Tue, 22 May 2012 18:29:16 -0600<br>
To: PI:EMAIL:<EMAIL>END_PI<br>
From: PI:EMAIL:<EMAIL>END_PI<br>
Subject: You Have New Mail From x!<br><br>
</div>
</body>"""
reply = """
<html><body>
<div>
Blah<br><br>
</div>
</body></html>"""
expect(planer.extractFromHtml(msgBody, @dom)).to.equal(reply)
describe 'examples from files', ->
absolutePath = (relativePath) ->
path.join(__dirname, relativePath)
it 'handles emails with numerous microsoft namespaces', ->
replySnippet = 'Lorem ipsum dolor sit amet'
originalMsgSnippet = 'Odio et pretium rutrum neque'
msgBody = fs.readFileSync(absolutePath('examples/html/microsoft-namespaces.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 365', ->
replySnippet = "I really hope that you're doing well!"
originalMsgSnippet = 'Do you like the holidays?'
msgBody = fs.readFileSync(absolutePath('examples/html/office-365.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from various Outlook versions', ->
replySnippet = 'We can talk tomorrow.'
originalMsgSnippet = "We'd love to set up a quick phone call with you"
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-mixed.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 American', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2007/2010 International', ->
replySnippet = "OK by me"
originalMsgSnippet = 'further revised'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2010-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 American', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-american.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Office 2013/2016/2019 International', ->
replySnippet = "That time works for me."
originalMsgSnippet = 'I can meet tomorrow.'
msgBody = fs.readFileSync(absolutePath('examples/html/outlook-2016-international.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails from Apple iOS Mail', ->
replySnippet = "html reply"
originalMsgSnippet = 'original message from GMail'
msgBody = fs.readFileSync(absolutePath('examples/html/iosMail.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails reply chains involving multiple email clients', ->
replySnippet = "Here is the answer"
originalMsgSnippet = 'I am having trouble'
msgBody = fs.readFileSync(absolutePath('examples/html/mixedEmailClientReplyChain.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
it 'handles emails Yahoo replies using the yahooo_quoted class', ->
replySnippet = "such a big problem"
originalMsgSnippet = 'new process'
msgBody = fs.readFileSync(absolutePath('examples/html/yahooMail2020.html'), 'utf8')
expect(msgBody).to.contain(replySnippet)
expect(msgBody).to.contain(originalMsgSnippet)
extractedHtml = planer.extractFromHtml(msgBody, @dom)
expect(extractedHtml).to.exist
expect(extractedHtml).to.contain(replySnippet)
expect(extractedHtml).not.to.contain(originalMsgSnippet)
|
[
{
"context": "idirectional visual effects\n#\n# Copyright (C) 2011 Nikolay Nemshilov\n#\nclass Fx.Twin extends Fx.Style\n\n #\n # Hidding",
"end": 95,
"score": 0.9998877048492432,
"start": 78,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/fx/src/fx/twin.coffee | lovely-io/lovely.io-stl | 2 | #
# An abstract class for bidirectional visual effects
#
# Copyright (C) 2011 Nikolay Nemshilov
#
class Fx.Twin extends Fx.Style
#
# Hidding the element after the effect
#
# @return {Fx.Twin} this
#
finish: ->
if @direction is 'out'
# calling 'prototype' to prevent circular calls from subclasses
Element.prototype.hide.call(@element)
super
# protected
#
# Picking up the direction
#
setDirection: (direction)->
if !direction or direction is 'toggle'
direction = if @element.visible() then 'out' else 'in'
@direction = direction
return;
| 54851 | #
# An abstract class for bidirectional visual effects
#
# Copyright (C) 2011 <NAME>
#
class Fx.Twin extends Fx.Style
#
# Hidding the element after the effect
#
# @return {Fx.Twin} this
#
finish: ->
if @direction is 'out'
# calling 'prototype' to prevent circular calls from subclasses
Element.prototype.hide.call(@element)
super
# protected
#
# Picking up the direction
#
setDirection: (direction)->
if !direction or direction is 'toggle'
direction = if @element.visible() then 'out' else 'in'
@direction = direction
return;
| true | #
# An abstract class for bidirectional visual effects
#
# Copyright (C) 2011 PI:NAME:<NAME>END_PI
#
class Fx.Twin extends Fx.Style
#
# Hidding the element after the effect
#
# @return {Fx.Twin} this
#
finish: ->
if @direction is 'out'
# calling 'prototype' to prevent circular calls from subclasses
Element.prototype.hide.call(@element)
super
# protected
#
# Picking up the direction
#
setDirection: (direction)->
if !direction or direction is 'toggle'
direction = if @element.visible() then 'out' else 'in'
@direction = direction
return;
|
[
{
"context": "'*'\n 'anon': [ 'public' ]\n 'user': [\n 'user'\n 'translator'\n 'programmer'\n 'man",
"end": 201,
"score": 0.6115214228630066,
"start": 197,
"tag": "USERNAME",
"value": "user"
},
{
"context": "': [ 'public' ]\n 'user': [\n 'user'\n ... | common/permissionsConfig.coffee | Contactis/translation-manager | 0 | module.exports =
roles: [
'public'
'user'
'translator'
'programmer'
'manager'
'admin'
]
accessLevels:
'public': '*'
'anon': [ 'public' ]
'user': [
'user'
'translator'
'programmer'
'manager'
'admin'
]
'translator': [
'translator'
'admin'
]
'programmer': [
'programmer'
'admin'
]
'manager': [
'manager'
'admin'
]
'admin': [
'admin'
]
'translator-only': [ 'translator' ]
'programmer-only': [ 'programmer' ]
'manager-only': [ 'manager' ]
'admin-only': [ 'admin' ]
| 181746 | module.exports =
roles: [
'public'
'user'
'translator'
'programmer'
'manager'
'admin'
]
accessLevels:
'public': '*'
'anon': [ 'public' ]
'user': [
'user'
'<NAME>'
'<NAME>'
'manager'
'admin'
]
'translator': [
'translator'
'admin'
]
'programmer': [
'programmer'
'admin'
]
'manager': [
'manager'
'admin'
]
'admin': [
'admin'
]
'translator-only': [ 'translator' ]
'programmer-only': [ 'programmer' ]
'manager-only': [ 'manager' ]
'admin-only': [ 'admin' ]
| true | module.exports =
roles: [
'public'
'user'
'translator'
'programmer'
'manager'
'admin'
]
accessLevels:
'public': '*'
'anon': [ 'public' ]
'user': [
'user'
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
'manager'
'admin'
]
'translator': [
'translator'
'admin'
]
'programmer': [
'programmer'
'admin'
]
'manager': [
'manager'
'admin'
]
'admin': [
'admin'
]
'translator-only': [ 'translator' ]
'programmer-only': [ 'programmer' ]
'manager-only': [ 'manager' ]
'admin-only': [ 'admin' ]
|
[
{
"context": "mEvents(\n -> Bacon.once({ bacon: Bacon.once(\"sir francis\")}).flatMap(\".bacon\")\n [\"sir francis\"], semi",
"end": 2386,
"score": 0.9969730973243713,
"start": 2375,
"tag": "NAME",
"value": "sir francis"
},
{
"context": "n.once(\"sir francis\")}).flatMap(\"... | spec/specs/flatmap.coffee | phadej/bacon.js | 0 | # build-dependencies: concat
#
describe "EventStream.flatMap", ->
describe "should spawn new stream for each value and collect results into a single stream", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
series(t(2), [value, error(), value])
[1, 2, error(), error(), 1, 2], semiunstable)
describe "should pass source errors through to the result", ->
expectStreamEvents(
-> series(1, [error(), 1]).flatMap (value) ->
later(t(1), value)
[error(), 1])
describe "should work with a spawned stream responding synchronously", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value))
[1, 2], unstable)
expectStreamEvents(
-> series(1, [1,2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
[1, "lol", 2, "lol"], unstable)
describe "should work with a source stream responding synchronously", ->
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
Bacon.once(value)
[1, 2], semiunstable)
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
fromArray([value, value*10])
[1, 10, 2, 20], semiunstable)
expectStreamEvents(
-> Bacon.once(1).flatMap (value) ->
later(0, value)
[1], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap(Bacon.constant)
[1,2], semiunstable)
describe "Works also when f returns a constant value instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> x)
[1,2], semiunstable)
describe "Works also when f returns an Error instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
[new Bacon.Error(1), new Bacon.Error(2)], semiunstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
["bacon"])
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
["bacon"])
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMap(".bacon")
["sir francis"], semiunstable)
expectStreamEvents(
-> Bacon.once({ bacon: "sir francis"}).flatMap(".bacon")
["sir francis"], semiunstable)
expectStreamEvents(
->
glorify = (x, y) -> fromArray([x, y])
Bacon.once("francis").flatMap(glorify, "sir")
["sir", "francis"], semiunstable)
it "toString", ->
expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
describe "Property.flatMap", ->
describe "should spawn new stream for all events including Init", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
series(1, [1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
[1,2], semiunstable)
expectPropertyEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
[1,2], semiunstable)
describe "works for synchronous source", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
fromArray([1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
describe "EventStream.flatMapFirst", ->
describe "spawns new streams and ignores source events until current spawned stream has ended", ->
expectStreamEvents(
-> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
series(1, ["a" + value, "b" + value, "c" + value])
["a2", "b2", "c2", "a6", "b6", "c6"], semiunstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMapFirst(".bacon")
["sir francis"])
expectStreamEvents(
-> Bacon.once({ bacon: "sir francis"}).flatMapFirst(".bacon")
["sir francis"])
it "toString", ->
expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
| 91558 | # build-dependencies: concat
#
describe "EventStream.flatMap", ->
describe "should spawn new stream for each value and collect results into a single stream", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
series(t(2), [value, error(), value])
[1, 2, error(), error(), 1, 2], semiunstable)
describe "should pass source errors through to the result", ->
expectStreamEvents(
-> series(1, [error(), 1]).flatMap (value) ->
later(t(1), value)
[error(), 1])
describe "should work with a spawned stream responding synchronously", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value))
[1, 2], unstable)
expectStreamEvents(
-> series(1, [1,2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
[1, "lol", 2, "lol"], unstable)
describe "should work with a source stream responding synchronously", ->
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
Bacon.once(value)
[1, 2], semiunstable)
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
fromArray([value, value*10])
[1, 10, 2, 20], semiunstable)
expectStreamEvents(
-> Bacon.once(1).flatMap (value) ->
later(0, value)
[1], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap(Bacon.constant)
[1,2], semiunstable)
describe "Works also when f returns a constant value instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> x)
[1,2], semiunstable)
describe "Works also when f returns an Error instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
[new Bacon.Error(1), new Bacon.Error(2)], semiunstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
["bacon"])
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
["bacon"])
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMap(".bacon")
["<NAME>"], semiunstable)
expectStreamEvents(
-> Bacon.once({ bacon: "<NAME>"}).flatMap(".bacon")
["<NAME>"], semiunstable)
expectStreamEvents(
->
glorify = (x, y) -> fromArray([x, y])
Bacon.once("fr<NAME>").flatMap(glorify, "<NAME>")
["<NAME>", "<NAME>"], semiunstable)
it "toString", ->
expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
describe "Property.flatMap", ->
describe "should spawn new stream for all events including Init", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
series(1, [1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
[1,2], semiunstable)
expectPropertyEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
[1,2], semiunstable)
describe "works for synchronous source", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
fromArray([1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
describe "EventStream.flatMapFirst", ->
describe "spawns new streams and ignores source events until current spawned stream has ended", ->
expectStreamEvents(
-> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
series(1, ["a" + value, "b" + value, "c" + value])
["a2", "b2", "c2", "a6", "b6", "c6"], semiunstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMapFirst(".bacon")
["<NAME>"])
expectStreamEvents(
-> Bacon.once({ bacon: "<NAME>"}).flatMapFirst(".bacon")
["<NAME>"])
it "toString", ->
expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
| true | # build-dependencies: concat
#
describe "EventStream.flatMap", ->
describe "should spawn new stream for each value and collect results into a single stream", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
series(t(2), [value, error(), value])
[1, 2, error(), error(), 1, 2], semiunstable)
describe "should pass source errors through to the result", ->
expectStreamEvents(
-> series(1, [error(), 1]).flatMap (value) ->
later(t(1), value)
[error(), 1])
describe "should work with a spawned stream responding synchronously", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value))
[1, 2], unstable)
expectStreamEvents(
-> series(1, [1,2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
[1, "lol", 2, "lol"], unstable)
describe "should work with a source stream responding synchronously", ->
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
Bacon.once(value)
[1, 2], semiunstable)
expectStreamEvents(
-> fromArray([1, 2]).flatMap (value) ->
fromArray([value, value*10])
[1, 10, 2, 20], semiunstable)
expectStreamEvents(
-> Bacon.once(1).flatMap (value) ->
later(0, value)
[1], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap(Bacon.constant)
[1,2], semiunstable)
describe "Works also when f returns a constant value instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> x)
[1,2], semiunstable)
describe "Works also when f returns an Error instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
[new Bacon.Error(1), new Bacon.Error(2)], semiunstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
["bacon"])
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
["bacon"])
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMap(".bacon")
["PI:NAME:<NAME>END_PI"], semiunstable)
expectStreamEvents(
-> Bacon.once({ bacon: "PI:NAME:<NAME>END_PI"}).flatMap(".bacon")
["PI:NAME:<NAME>END_PI"], semiunstable)
expectStreamEvents(
->
glorify = (x, y) -> fromArray([x, y])
Bacon.once("frPI:NAME:<NAME>END_PI").flatMap(glorify, "PI:NAME:<NAME>END_PI")
["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"], semiunstable)
it "toString", ->
expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
describe "Property.flatMap", ->
describe "should spawn new stream for all events including Init", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
series(1, [1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], semiunstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
[1,2], semiunstable)
expectPropertyEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
[1,2], semiunstable)
describe "works for synchronous source", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
fromArray([1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
describe "EventStream.flatMapFirst", ->
describe "spawns new streams and ignores source events until current spawned stream has ended", ->
expectStreamEvents(
-> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
series(1, ["a" + value, "b" + value, "c" + value])
["a2", "b2", "c2", "a6", "b6", "c6"], semiunstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMapFirst(".bacon")
["PI:NAME:<NAME>END_PI"])
expectStreamEvents(
-> Bacon.once({ bacon: "PI:NAME:<NAME>END_PI"}).flatMapFirst(".bacon")
["PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
|
[
{
"context": " data:\n email: email\n password: password\n headers:\n 'Content-Type': 'applicati",
"end": 1421,
"score": 0.9986249208450317,
"start": 1413,
"tag": "PASSWORD",
"value": "password"
}
] | src/fleep.coffee | anroots/fleep-stream | 0 | Client = require('node-rest-client').Client
fleepClient = new Client
fleepClient.registerMethod 'login', 'https://fleep.io/api/account/login', 'POST'
fleepClient.registerMethod 'poll', 'https://fleep.io/api/account/poll', 'POST'
fleepClient.registerMethod 'sync', 'https://fleep.io/api/account/sync', 'POST'
fleepClient.registerMethod 'sync_conversations', 'https://fleep.io/api/account/sync_conversations', 'POST'
class Fleep
constructor: (emitter) ->
@ticket
@cookie
@horizon = 0
@emitter = emitter
poll: ->
args =
data:
ticket: @ticket
event_horizon: @horizon
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.poll args, (data) =>
@horizon = data.event_horizon
for event in data.stream
@emitter.emit event.mk_rec_type, event, @
@poll args
sync: (sync_cursor = null) ->
args =
data:
ticket: @ticket
sync_cursor: sync_cursor
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.sync_conversations args, (data) =>
for event in data.stream
@emitter.emit event.mk_rec_type, event, @ unless event.mk_rec_type is 'message'
if data.sync_cursor
@sync data.sync_cursor
login: (email, password) ->
args =
data:
email: email
password: password
headers:
'Content-Type': 'application/json'
fleepClient.methods.login args, (data, response) =>
@ticket = data.ticket
@cookie = response.headers['set-cookie'][0]
args =
data:
ticket: @ticket
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
@sync()
fleepClient.methods.sync args, (data) =>
@horizon = data.event_horizon
@emitter.emit 'login.complete', @
module.exports = Fleep | 213373 | Client = require('node-rest-client').Client
fleepClient = new Client
fleepClient.registerMethod 'login', 'https://fleep.io/api/account/login', 'POST'
fleepClient.registerMethod 'poll', 'https://fleep.io/api/account/poll', 'POST'
fleepClient.registerMethod 'sync', 'https://fleep.io/api/account/sync', 'POST'
fleepClient.registerMethod 'sync_conversations', 'https://fleep.io/api/account/sync_conversations', 'POST'
class Fleep
constructor: (emitter) ->
@ticket
@cookie
@horizon = 0
@emitter = emitter
poll: ->
args =
data:
ticket: @ticket
event_horizon: @horizon
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.poll args, (data) =>
@horizon = data.event_horizon
for event in data.stream
@emitter.emit event.mk_rec_type, event, @
@poll args
sync: (sync_cursor = null) ->
args =
data:
ticket: @ticket
sync_cursor: sync_cursor
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.sync_conversations args, (data) =>
for event in data.stream
@emitter.emit event.mk_rec_type, event, @ unless event.mk_rec_type is 'message'
if data.sync_cursor
@sync data.sync_cursor
login: (email, password) ->
args =
data:
email: email
password: <PASSWORD>
headers:
'Content-Type': 'application/json'
fleepClient.methods.login args, (data, response) =>
@ticket = data.ticket
@cookie = response.headers['set-cookie'][0]
args =
data:
ticket: @ticket
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
@sync()
fleepClient.methods.sync args, (data) =>
@horizon = data.event_horizon
@emitter.emit 'login.complete', @
module.exports = Fleep | true | Client = require('node-rest-client').Client
fleepClient = new Client
fleepClient.registerMethod 'login', 'https://fleep.io/api/account/login', 'POST'
fleepClient.registerMethod 'poll', 'https://fleep.io/api/account/poll', 'POST'
fleepClient.registerMethod 'sync', 'https://fleep.io/api/account/sync', 'POST'
fleepClient.registerMethod 'sync_conversations', 'https://fleep.io/api/account/sync_conversations', 'POST'
class Fleep
constructor: (emitter) ->
@ticket
@cookie
@horizon = 0
@emitter = emitter
poll: ->
args =
data:
ticket: @ticket
event_horizon: @horizon
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.poll args, (data) =>
@horizon = data.event_horizon
for event in data.stream
@emitter.emit event.mk_rec_type, event, @
@poll args
sync: (sync_cursor = null) ->
args =
data:
ticket: @ticket
sync_cursor: sync_cursor
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
fleepClient.methods.sync_conversations args, (data) =>
for event in data.stream
@emitter.emit event.mk_rec_type, event, @ unless event.mk_rec_type is 'message'
if data.sync_cursor
@sync data.sync_cursor
login: (email, password) ->
args =
data:
email: email
password: PI:PASSWORD:<PASSWORD>END_PI
headers:
'Content-Type': 'application/json'
fleepClient.methods.login args, (data, response) =>
@ticket = data.ticket
@cookie = response.headers['set-cookie'][0]
args =
data:
ticket: @ticket
headers:
'Content-Type': 'application/json'
'Cookie': @cookie
@sync()
fleepClient.methods.sync args, (data) =>
@horizon = data.event_horizon
@emitter.emit 'login.complete', @
module.exports = Fleep |
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998874664306641,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/taskboard/sortable.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/taskboard/sortable.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
module = angular.module("taigaBacklog")
#############################################################################
## Sortable Directive
#############################################################################
TaskboardSortableDirective = ($repo, $rs, $rootscope) ->
link = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el
deleteElement = (itemEl) ->
# Completelly remove item and its scope from dom
itemEl.scope().$destroy()
itemEl.off()
itemEl.remove()
tdom.sortable({
handle: ".taskboard-task-inner",
dropOnEmpty: true
connectWith: ".taskboard-tasks-box"
revert: 400
})
tdom.on "sortstop", (event, ui) ->
parentEl = ui.item.parent()
itemEl = ui.item
itemTask = itemEl.scope().task
itemIndex = itemEl.index()
newParentScope = parentEl.scope()
oldUsId = if oldParentScope.us then oldParentScope.us.id else null
oldStatusId = oldParentScope.st.id
newUsId = if newParentScope.us then newParentScope.us.id else null
newStatusId = newParentScope.st.id
if newStatusId != oldStatusId or newUsId != oldUsId
deleteElement(itemEl)
$scope.$apply ->
$rootscope.$broadcast("taskboard:task:move", itemTask, newUsId, newStatusId, itemIndex)
ui.item.find('a').removeClass('noclick')
tdom.on "sortstart", (event, ui) ->
oldParentScope = ui.item.parent().scope()
ui.item.find('a').addClass('noclick')
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgTaskboardSortable", [
"$tgRepo",
"$tgResources",
"$rootScope",
TaskboardSortableDirective
])
| 14331 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/taskboard/sortable.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
module = angular.module("taigaBacklog")
#############################################################################
## Sortable Directive
#############################################################################
TaskboardSortableDirective = ($repo, $rs, $rootscope) ->
link = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el
deleteElement = (itemEl) ->
# Completelly remove item and its scope from dom
itemEl.scope().$destroy()
itemEl.off()
itemEl.remove()
tdom.sortable({
handle: ".taskboard-task-inner",
dropOnEmpty: true
connectWith: ".taskboard-tasks-box"
revert: 400
})
tdom.on "sortstop", (event, ui) ->
parentEl = ui.item.parent()
itemEl = ui.item
itemTask = itemEl.scope().task
itemIndex = itemEl.index()
newParentScope = parentEl.scope()
oldUsId = if oldParentScope.us then oldParentScope.us.id else null
oldStatusId = oldParentScope.st.id
newUsId = if newParentScope.us then newParentScope.us.id else null
newStatusId = newParentScope.st.id
if newStatusId != oldStatusId or newUsId != oldUsId
deleteElement(itemEl)
$scope.$apply ->
$rootscope.$broadcast("taskboard:task:move", itemTask, newUsId, newStatusId, itemIndex)
ui.item.find('a').removeClass('noclick')
tdom.on "sortstart", (event, ui) ->
oldParentScope = ui.item.parent().scope()
ui.item.find('a').addClass('noclick')
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgTaskboardSortable", [
"$tgRepo",
"$tgResources",
"$rootScope",
TaskboardSortableDirective
])
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/taskboard/sortable.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
module = angular.module("taigaBacklog")
#############################################################################
## Sortable Directive
#############################################################################
TaskboardSortableDirective = ($repo, $rs, $rootscope) ->
link = ($scope, $el, $attrs) ->
oldParentScope = null
newParentScope = null
itemEl = null
tdom = $el
deleteElement = (itemEl) ->
# Completelly remove item and its scope from dom
itemEl.scope().$destroy()
itemEl.off()
itemEl.remove()
tdom.sortable({
handle: ".taskboard-task-inner",
dropOnEmpty: true
connectWith: ".taskboard-tasks-box"
revert: 400
})
tdom.on "sortstop", (event, ui) ->
parentEl = ui.item.parent()
itemEl = ui.item
itemTask = itemEl.scope().task
itemIndex = itemEl.index()
newParentScope = parentEl.scope()
oldUsId = if oldParentScope.us then oldParentScope.us.id else null
oldStatusId = oldParentScope.st.id
newUsId = if newParentScope.us then newParentScope.us.id else null
newStatusId = newParentScope.st.id
if newStatusId != oldStatusId or newUsId != oldUsId
deleteElement(itemEl)
$scope.$apply ->
$rootscope.$broadcast("taskboard:task:move", itemTask, newUsId, newStatusId, itemIndex)
ui.item.find('a').removeClass('noclick')
tdom.on "sortstart", (event, ui) ->
oldParentScope = ui.item.parent().scope()
ui.item.find('a').addClass('noclick')
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgTaskboardSortable", [
"$tgRepo",
"$tgResources",
"$rootScope",
TaskboardSortableDirective
])
|
[
{
"context": "# Tabs based on this script from Jack Moore: http://www.jacklmoore.com/notes/jquery-tabs/\n$('",
"end": 43,
"score": 0.9760231375694275,
"start": 33,
"tag": "NAME",
"value": "Jack Moore"
}
] | app/_src/tabs.coffee | caitlynmayers/livewires | 0 | # Tabs based on this script from Jack Moore: http://www.jacklmoore.com/notes/jquery-tabs/
$('.tabs').each ->
# Find the tabs
$links = $(@).find('a')
# If the location.hash matches one of the links, use that as the active tab.
# If no match is found, use the first link as the initial active tab.
$active = $($links.filter('[href=\'' + location.hash + '\']')[0] or $links[0])
$active.addClass 'active'
$content = $($active.attr('href'))
# Hide the remaining content
$links.not($active).each ->
$($(@).attr('href')).hide()
# Bind the click event handler
$(@).on 'click', 'a', (e) ->
# Make the old tab inactive.
$active.removeClass 'active'
$content.hide()
# Update the variables with the new link and content
$active = $(@)
$content = $($(@).attr('href'))
# Make the tab active.
$active.addClass 'active'
$content.show()
# Prevent the anchor's default click action
e.preventDefault() | 22248 | # Tabs based on this script from <NAME>: http://www.jacklmoore.com/notes/jquery-tabs/
$('.tabs').each ->
# Find the tabs
$links = $(@).find('a')
# If the location.hash matches one of the links, use that as the active tab.
# If no match is found, use the first link as the initial active tab.
$active = $($links.filter('[href=\'' + location.hash + '\']')[0] or $links[0])
$active.addClass 'active'
$content = $($active.attr('href'))
# Hide the remaining content
$links.not($active).each ->
$($(@).attr('href')).hide()
# Bind the click event handler
$(@).on 'click', 'a', (e) ->
# Make the old tab inactive.
$active.removeClass 'active'
$content.hide()
# Update the variables with the new link and content
$active = $(@)
$content = $($(@).attr('href'))
# Make the tab active.
$active.addClass 'active'
$content.show()
# Prevent the anchor's default click action
e.preventDefault() | true | # Tabs based on this script from PI:NAME:<NAME>END_PI: http://www.jacklmoore.com/notes/jquery-tabs/
$('.tabs').each ->
# Find the tabs
$links = $(@).find('a')
# If the location.hash matches one of the links, use that as the active tab.
# If no match is found, use the first link as the initial active tab.
$active = $($links.filter('[href=\'' + location.hash + '\']')[0] or $links[0])
$active.addClass 'active'
$content = $($active.attr('href'))
# Hide the remaining content
$links.not($active).each ->
$($(@).attr('href')).hide()
# Bind the click event handler
$(@).on 'click', 'a', (e) ->
# Make the old tab inactive.
$active.removeClass 'active'
$content.hide()
# Update the variables with the new link and content
$active = $(@)
$content = $($(@).attr('href'))
# Make the tab active.
$active.addClass 'active'
$content.show()
# Prevent the anchor's default click action
e.preventDefault() |
[
{
"context": "et et sw=2 ts=2 sts=2 ff=unix fenc=utf8:\n# Author: Binux<i@binux.me>\n# http://binux.me\n# Created o",
"end": 64,
"score": 0.9583234786987305,
"start": 59,
"tag": "USERNAME",
"value": "Binux"
},
{
"context": "w=2 ts=2 sts=2 ff=unix fenc=utf8:\n# Author: Binux<i... | web/static/coffee/utils.coffee | dhbowen1/qiandao | 0 | # vim: set et sw=2 ts=2 sts=2 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-08-03 07:42:45
define (require) ->
require '/static/node_components'
RegExp.escape = (s) ->
s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')
url = node_url
tough = node_tough
querystring = node_querystring
exports =
cookie_parse: (cookie_string) ->
cookie = {}
for each in cookie_string?.split(';')
index = each.indexOf('=')
index = if index < 0 then each.length else index
key = each[..index]
value = each[index+1..]
cookie[decodeURIComponent(key)] = decodeURIComponent(value)
return cookie
cookie_unparse: (cookie) ->
(encodeURIComponent(key)+'='+encodeURIComponent(value) for key, value in cookie).join(';')
url_parse: node_url.parse
url_unparse: node_url.format
querystring_parse: node_querystring.parse
querystring_unparse: node_querystring.stringify
querystring_unparse_with_variables: (obj) ->
query = node_querystring.stringify(obj)
replace_list = {}
for key, value of obj
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(key)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(value)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
if node_querystring.stringify(replace_list)
console.log('The replace_list is',replace_list)
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return query
querystring_parse_with_variables: (query) ->
replace_list = {}
re = /{{\s*([\w]+)[^}]*?\s*\|urlencode}}/g
_query = decodeURIComponent(query)
while m = re.exec(_query)
replace_list[encodeURIComponent(m[0])] = m[0][..-13]+'}}'
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return exports.querystring_parse(query)
CookieJar: node_tough.CookieJar
Cookie: node_tough.Cookie
dict2list: (dict) ->
({name: k, value: v} for k, v of dict)
list2dict: (list) ->
dict = {}
if list
for each in list
dict[each.name] = each.value
return dict
get_public_suffix: node_tough.getPublicSuffix
get_domain: (url) ->
exports.get_public_suffix exports.url_parse(url).hostname
debounce: (func, wait, immediate) ->
timestamp = 0
timeout = 0
later = () ->
last = (new Date().getTime()) - timestamp
if 0 < last < wait
timeout = setTimeout(later, wait - last)
else
timeout = null
if not immediate
result = func.apply(context, args)
if not timeout
context = args = null
return () ->
context = this
args = arguments
timestamp = (new Date().getTime())
callNow = immediate and not timeout
if not timeout
timeout = setTimeout(later, wait)
if callNow
result = func.apply(context, args)
context = args = null
return result
storage:
set: (key, value) ->
if not window.localStorage?
return false
try
return window.localStorage.setItem(key, angular.toJson(value))
catch error
return null
get: (key) ->
if not window.localStorage?
return null
try
return angular.fromJson(window.localStorage.getItem(key))
catch error
return null
del: (key) ->
if not window.localStorage?
return false
try
return window.localStorage.removeItem(key)
catch error
return null
tpl2har: (tpl) ->
return {
log:
creator:
name: 'binux'
version: 'qiandao'
entries: ({
comment: en.comment
checked: true
startedDateTime: (new Date()).toISOString()
time: 1
request:
method: en.request.method
url: en.request.url
headers: ({
name: x.name
value: x.value
checked: true
} for x in en.request.headers or [])
queryString: []
cookies: ({
name: x.name
value: x.value
checked: true
} for x in en.request.cookies or [])
headersSize: -1
bodySize: if en.request.data then en.request.data.length else 0
postData:
mimeType: en.request.mimeType
text: en.request.data
response: {}
cache: {}
timings: {}
connections: "0"
pageref: "page_0"
success_asserts: en.rule?.success_asserts
failed_asserts: en.rule?.failed_asserts
extract_variables: en.rule?.extract_variables
} for en in tpl)
pages: []
version: '1.2'
}
return exports
| 56654 | # vim: set et sw=2 ts=2 sts=2 ff=unix fenc=utf8:
# Author: Binux<<EMAIL>>
# http://binux.me
# Created on 2014-08-03 07:42:45
define (require) ->
require '/static/node_components'
RegExp.escape = (s) ->
s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')
url = node_url
tough = node_tough
querystring = node_querystring
exports =
cookie_parse: (cookie_string) ->
cookie = {}
for each in cookie_string?.split(';')
index = each.indexOf('=')
index = if index < 0 then each.length else index
key = each[..index]
value = each[index+1..]
cookie[decodeURIComponent(key)] = decodeURIComponent(value)
return cookie
cookie_unparse: (cookie) ->
(encodeURIComponent(key)+'='+encodeURIComponent(value) for key, value in cookie).join(';')
url_parse: node_url.parse
url_unparse: node_url.format
querystring_parse: node_querystring.parse
querystring_unparse: node_querystring.stringify
querystring_unparse_with_variables: (obj) ->
query = node_querystring.stringify(obj)
replace_list = {}
for key, value of obj
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(key)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(value)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
if node_querystring.stringify(replace_list)
console.log('The replace_list is',replace_list)
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return query
querystring_parse_with_variables: (query) ->
replace_list = {}
re = /{{\s*([\w]+)[^}]*?\s*\|urlencode}}/g
_query = decodeURIComponent(query)
while m = re.exec(_query)
replace_list[encodeURIComponent(m[0])] = m[0][..-13]+'}}'
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return exports.querystring_parse(query)
CookieJar: node_tough.CookieJar
Cookie: node_tough.Cookie
dict2list: (dict) ->
({name: k, value: v} for k, v of dict)
list2dict: (list) ->
dict = {}
if list
for each in list
dict[each.name] = each.value
return dict
get_public_suffix: node_tough.getPublicSuffix
get_domain: (url) ->
exports.get_public_suffix exports.url_parse(url).hostname
debounce: (func, wait, immediate) ->
timestamp = 0
timeout = 0
later = () ->
last = (new Date().getTime()) - timestamp
if 0 < last < wait
timeout = setTimeout(later, wait - last)
else
timeout = null
if not immediate
result = func.apply(context, args)
if not timeout
context = args = null
return () ->
context = this
args = arguments
timestamp = (new Date().getTime())
callNow = immediate and not timeout
if not timeout
timeout = setTimeout(later, wait)
if callNow
result = func.apply(context, args)
context = args = null
return result
storage:
set: (key, value) ->
if not window.localStorage?
return false
try
return window.localStorage.setItem(key, angular.toJson(value))
catch error
return null
get: (key) ->
if not window.localStorage?
return null
try
return angular.fromJson(window.localStorage.getItem(key))
catch error
return null
del: (key) ->
if not window.localStorage?
return false
try
return window.localStorage.removeItem(key)
catch error
return null
tpl2har: (tpl) ->
return {
log:
creator:
name: 'binux'
version: 'qiandao'
entries: ({
comment: en.comment
checked: true
startedDateTime: (new Date()).toISOString()
time: 1
request:
method: en.request.method
url: en.request.url
headers: ({
name: x.name
value: x.value
checked: true
} for x in en.request.headers or [])
queryString: []
cookies: ({
name: x.name
value: x.value
checked: true
} for x in en.request.cookies or [])
headersSize: -1
bodySize: if en.request.data then en.request.data.length else 0
postData:
mimeType: en.request.mimeType
text: en.request.data
response: {}
cache: {}
timings: {}
connections: "0"
pageref: "page_0"
success_asserts: en.rule?.success_asserts
failed_asserts: en.rule?.failed_asserts
extract_variables: en.rule?.extract_variables
} for en in tpl)
pages: []
version: '1.2'
}
return exports
| true | # vim: set et sw=2 ts=2 sts=2 ff=unix fenc=utf8:
# Author: Binux<PI:EMAIL:<EMAIL>END_PI>
# http://binux.me
# Created on 2014-08-03 07:42:45
define (require) ->
require '/static/node_components'
RegExp.escape = (s) ->
s.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')
url = node_url
tough = node_tough
querystring = node_querystring
exports =
cookie_parse: (cookie_string) ->
cookie = {}
for each in cookie_string?.split(';')
index = each.indexOf('=')
index = if index < 0 then each.length else index
key = each[..index]
value = each[index+1..]
cookie[decodeURIComponent(key)] = decodeURIComponent(value)
return cookie
cookie_unparse: (cookie) ->
(encodeURIComponent(key)+'='+encodeURIComponent(value) for key, value in cookie).join(';')
url_parse: node_url.parse
url_unparse: node_url.format
querystring_parse: node_querystring.parse
querystring_unparse: node_querystring.stringify
querystring_unparse_with_variables: (obj) ->
query = node_querystring.stringify(obj)
replace_list = {}
for key, value of obj
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(key)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
re = /{{\s*([\w]+)[^}]*?\s*}}/g
while m = re.exec(value)
if m[0].slice(-12) != '|urlencode}}'
replace_list[encodeURIComponent(m[0])] = m[0][..-3] + '|urlencode}}'
else
replace_list[encodeURIComponent(m[0])] = m[0]
if node_querystring.stringify(replace_list)
console.log('The replace_list is',replace_list)
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return query
querystring_parse_with_variables: (query) ->
replace_list = {}
re = /{{\s*([\w]+)[^}]*?\s*\|urlencode}}/g
_query = decodeURIComponent(query)
while m = re.exec(_query)
replace_list[encodeURIComponent(m[0])] = m[0][..-13]+'}}'
for key, value of replace_list
query = query.replace(new RegExp(RegExp.escape(key), 'g'), value)
return exports.querystring_parse(query)
CookieJar: node_tough.CookieJar
Cookie: node_tough.Cookie
dict2list: (dict) ->
({name: k, value: v} for k, v of dict)
list2dict: (list) ->
dict = {}
if list
for each in list
dict[each.name] = each.value
return dict
get_public_suffix: node_tough.getPublicSuffix
get_domain: (url) ->
exports.get_public_suffix exports.url_parse(url).hostname
debounce: (func, wait, immediate) ->
timestamp = 0
timeout = 0
later = () ->
last = (new Date().getTime()) - timestamp
if 0 < last < wait
timeout = setTimeout(later, wait - last)
else
timeout = null
if not immediate
result = func.apply(context, args)
if not timeout
context = args = null
return () ->
context = this
args = arguments
timestamp = (new Date().getTime())
callNow = immediate and not timeout
if not timeout
timeout = setTimeout(later, wait)
if callNow
result = func.apply(context, args)
context = args = null
return result
storage:
set: (key, value) ->
if not window.localStorage?
return false
try
return window.localStorage.setItem(key, angular.toJson(value))
catch error
return null
get: (key) ->
if not window.localStorage?
return null
try
return angular.fromJson(window.localStorage.getItem(key))
catch error
return null
del: (key) ->
if not window.localStorage?
return false
try
return window.localStorage.removeItem(key)
catch error
return null
tpl2har: (tpl) ->
return {
log:
creator:
name: 'binux'
version: 'qiandao'
entries: ({
comment: en.comment
checked: true
startedDateTime: (new Date()).toISOString()
time: 1
request:
method: en.request.method
url: en.request.url
headers: ({
name: x.name
value: x.value
checked: true
} for x in en.request.headers or [])
queryString: []
cookies: ({
name: x.name
value: x.value
checked: true
} for x in en.request.cookies or [])
headersSize: -1
bodySize: if en.request.data then en.request.data.length else 0
postData:
mimeType: en.request.mimeType
text: en.request.data
response: {}
cache: {}
timings: {}
connections: "0"
pageref: "page_0"
success_asserts: en.rule?.success_asserts
failed_asserts: en.rule?.failed_asserts
extract_variables: en.rule?.extract_variables
} for en in tpl)
pages: []
version: '1.2'
}
return exports
|
[
{
"context": "jects-->list projects\\ncreate user-->create user <username> <pwd> <email>\\nprovide permission to user for pr",
"end": 1615,
"score": 0.998633623123169,
"start": 1607,
"tag": "USERNAME",
"value": "username"
},
{
"context": "EATE,ADMIN,SYS_ADMIN)\\ndelete user-->delete ... | scripts/bitbucket5.11.1/scripts-hipchat/bitbucket.coffee | akash1233/OnBot_Demo | 4 | #-------------------------------------------------------------------------------
# Copyright 2018 Cognizant Technology Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
request =require('request')
fs = require('fs')
eindex = require('./index')
listproject = require('./listproject.js')
listrepo = require('./listrepo.js')
createuser = require('./createuser.js')
deleteuser = require('./deleteuser.js')
createproj = require('./createproject.js')
deleteproj = require('./deleteproject.js')
createrepo = require('./createrepo.js')
deleterepo = require('./deleterepo.js')
createbranch = require('./createbranch.js')
deletebranch = require('./deletebranch.js')
projpermission = require('./projpermission.js')
userpermission = require('./userpermission.js')
readjson = require ('./readjson.js');
generate_id = require('./mongoConnt');
module.exports = (robot) ->
robot.respond /help/, (res) ->
res.send "list repo-->list repo <prjkey>\nlist projects-->list projects\ncreate user-->create user <username> <pwd> <email>\nprovide permission to user for project-->project permission <permission> to <user> for <prjkey>(possible permissions::PROJECT_READ,PROJECT_WRITE,PROJECT_ADMIN)\ngrant user permission--><user> premission <permission>(possible permissions::LICENSED_USER,PROJECT_CREATE,ADMIN,SYS_ADMIN)\ndelete user-->delete user <username>\ncreate project-->create project <prjkey> with <prjname> desc <prjdecsc>\ndelete project-->delete project <prj key>\ncreate repo-->create repo <prjkey> <reponame>\ndelete repo-->delete repo <reposlug> in <prjkey>\ncreate branch-->create branch <branchname> in <prjkey> repo <reposlug> from <frombranch>\ndelete branch--> delete branch <branchname> from <prjkey> in <reposlug>"
robot.respond /list repo (.*)/, (res) ->
projectkey=res.match[1]
listrepo.listrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD,projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /list projects/, (res) ->
listproject.listproject process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /create user (.*) (.*) (.*)/, (res) ->
user=res.match[1]
userpassword=res.match[2]
emailaddress=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateuser",user:user,userpassword:userpassword,emailaddress:emailaddress}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
userpassword = request.body.userpassword;
emailaddress = request.body.emailaddress;
# Call from create_project file for project creation
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Create bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket user.';
robot.respond /delete user (.*)/, (res) ->
user=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteuser",user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
# Call from create_project file for project creation
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Delete bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket user.';
robot.respond /create project (.*) with (.*) desc (.*)/, (res) ->
projectkey=res.match[1]
projectname=res.match[2]
description=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateproj",projectkey:projectkey,projectname:projectname,description:description}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket project '+projectname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
projectname = request.body.projectname;
description = request.body.description;
# Call from create_project file for project creation
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket project.';
robot.respond /delete project (.*)/, (res) ->
projectkey=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteproj",projectkey:projectkey}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete project '+projectkey+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
# Call from create_project file for project creation
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket project.';
robot.respond /create repo (.*) (.*)/, (res) ->
projectkey=res.match[1]
reponame=res.match[2]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreaterepo.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreaterepo",projectkey:projectkey,reponame:reponame}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create repo '+reponame+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreaterepo.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreaterepo.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo created ';
actionmsg = 'bitbucket repo created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreaterepo', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating repo';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reponame = request.body.reponame;
# Call from create_project file for project creation
createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo created ';
actionmsg = 'bitbucket repo created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create repo request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to creating bitbucket repo.';
robot.respond /delete repo (.*) in (.*)/, (res) ->
projectkey=res.match[2]
reponame=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleterepo.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleterepo",projectkey:projectkey,reponame:reponame}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete repo '+reponame+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleterepo.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleterepo.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo deleted ';
actionmsg = 'bitbucket repo deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleterepo', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting repo';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reponame = request.body.reponame;
# Call from create_project file for project creation
deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo deleted ';
actionmsg = 'bitbucket repo deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete repo request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket repo.';
robot.respond /create branch (.*) in (.*) repo (.*) from (.*)/, (res) ->
branchname=res.match[1]
projectkey=res.match[2]
reposlug=res.match[3]
frombranch=res.match[4]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreatebranch.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreatebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname,frombranch:frombranch}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create branch '+branchname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreatebranch.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreatebranch.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch created ';
actionmsg = 'bitbucket branch created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreatebranch', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating branch';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reposlug = request.body.reposlug;
branchname = request.body.branchname;
frombranch = request.body.frombranch;
# Call from create_project file for project creation
createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch created ';
actionmsg = 'bitbucket branch created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create branch request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for creating bitbucket branch.';
robot.respond /delete branch (.*) from (.*) in (.*)/, (res) ->
branchname=res.match[1]
projectkey=res.match[2]
reposlug=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeletebranch.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeletebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete branch '+branchname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeletebranch.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeletebranch.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch deleted ';
actionmsg = 'bitbucket branch deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeletebranch', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting branch';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reposlug = request.body.reposlug;
branchname = request.body.branchname;
# Call from create_project file for project creation
deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch deleted ';
actionmsg = 'bitbucket branch deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete branch request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket branch.';
robot.respond /project permission (.*) to (.*) for (.*)/, (res) ->
permission=res.match[1]
user=res.match[2]
projectkey=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketprojpermission.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketprojpermission",projectkey:projectkey,permission:permission,user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: project permission '+permission+' to user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketprojpermission.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketprojpermission.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketprojpermission', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for providing project permission to user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
user = request.body.user;
permission = request.body.permission;
# Call from create_project file for project creation
projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="providing project permsiion to user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing project permission to user.';
robot.respond /(.*) permission (.*)/, (res) ->
user=res.match[1]
permission=res.match[2]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketuserpermission.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketuserpermission",permission:permission,user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: permission '+permission+' to user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketuserpermission.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketuserpermission.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketuserpermission', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for providing permission to user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
permission = request.body.permission;
# Call from create_project file for project creation
userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="providing permsiion to user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing permission to user.';
| 107137 | #-------------------------------------------------------------------------------
# Copyright 2018 Cognizant Technology Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
request =require('request')
fs = require('fs')
eindex = require('./index')
listproject = require('./listproject.js')
listrepo = require('./listrepo.js')
createuser = require('./createuser.js')
deleteuser = require('./deleteuser.js')
createproj = require('./createproject.js')
deleteproj = require('./deleteproject.js')
createrepo = require('./createrepo.js')
deleterepo = require('./deleterepo.js')
createbranch = require('./createbranch.js')
deletebranch = require('./deletebranch.js')
projpermission = require('./projpermission.js')
userpermission = require('./userpermission.js')
readjson = require ('./readjson.js');
generate_id = require('./mongoConnt');
module.exports = (robot) ->
robot.respond /help/, (res) ->
res.send "list repo-->list repo <prjkey>\nlist projects-->list projects\ncreate user-->create user <username> <pwd> <email>\nprovide permission to user for project-->project permission <permission> to <user> for <prjkey>(possible permissions::PROJECT_READ,PROJECT_WRITE,PROJECT_ADMIN)\ngrant user permission--><user> premission <permission>(possible permissions::LICENSED_USER,PROJECT_CREATE,ADMIN,SYS_ADMIN)\ndelete user-->delete user <username>\ncreate project-->create project <prjkey> with <prjname> desc <prjdecsc>\ndelete project-->delete project <prj key>\ncreate repo-->create repo <prjkey> <reponame>\ndelete repo-->delete repo <reposlug> in <prjkey>\ncreate branch-->create branch <branchname> in <prjkey> repo <reposlug> from <frombranch>\ndelete branch--> delete branch <branchname> from <prjkey> in <reposlug>"
robot.respond /list repo (.*)/, (res) ->
projectkey=res.match[1]
listrepo.listrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD,projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /list projects/, (res) ->
listproject.listproject process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /create user (.*) (.*) (.*)/, (res) ->
user=res.match[1]
userpassword=res.match[2]
emailaddress=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateuser",user:user,userpassword:<PASSWORD>,emailaddress:emailaddress}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
userpassword = request.body.userpassword;
emailaddress = request.body.emailaddress;
# Call from create_project file for project creation
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Create bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket user.';
robot.respond /delete user (.*)/, (res) ->
user=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteuser",user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
# Call from create_project file for project creation
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Delete bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket user.';
robot.respond /create project (.*) with (.*) desc (.*)/, (res) ->
projectkey=res.match[1]
projectname=res.match[2]
description=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateproj",projectkey:projectkey,projectname:projectname,description:description}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket project '+projectname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
projectname = request.body.projectname;
description = request.body.description;
# Call from create_project file for project creation
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket project.';
robot.respond /delete project (.*)/, (res) ->
projectkey=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteproj",projectkey:projectkey}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete project '+projectkey+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
# Call from create_project file for project creation
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket project.';
robot.respond /create repo (.*) (.*)/, (res) ->
projectkey=res.match[1]
reponame=res.match[2]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreaterepo.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreaterepo",projectkey:projectkey,reponame:reponame}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create repo '+reponame+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreaterepo.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreaterepo.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo created ';
actionmsg = 'bitbucket repo created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreaterepo', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating repo';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reponame = request.body.reponame;
# Call from create_project file for project creation
createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo created ';
actionmsg = 'bitbucket repo created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create repo request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to creating bitbucket repo.';
robot.respond /delete repo (.*) in (.*)/, (res) ->
projectkey=res.match[2]
reponame=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleterepo.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleterepo",projectkey:projectkey,reponame:reponame}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete repo '+reponame+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleterepo.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleterepo.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo deleted ';
actionmsg = 'bitbucket repo deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleterepo', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting repo';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reponame = request.body.reponame;
# Call from create_project file for project creation
deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket repo deleted ';
actionmsg = 'bitbucket repo deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete repo request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket repo.';
robot.respond /create branch (.*) in (.*) repo (.*) from (.*)/, (res) ->
branchname=res.match[1]
projectkey=res.match[2]
reposlug=res.match[3]
frombranch=res.match[4]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreatebranch.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreatebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname,frombranch:frombranch}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create branch '+branchname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreatebranch.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreatebranch.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch created ';
actionmsg = 'bitbucket branch created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreatebranch', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating branch';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reposlug = request.body.reposlug;
branchname = request.body.branchname;
frombranch = request.body.frombranch;
# Call from create_project file for project creation
createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch created ';
actionmsg = 'bitbucket branch created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create branch request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for creating bitbucket branch.';
robot.respond /delete branch (.*) from (.*) in (.*)/, (res) ->
branchname=res.match[1]
projectkey=res.match[2]
reposlug=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeletebranch.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeletebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete branch '+branchname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeletebranch.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeletebranch.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch deleted ';
actionmsg = 'bitbucket branch deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeletebranch', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting branch';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
reposlug = request.body.reposlug;
branchname = request.body.branchname;
# Call from create_project file for project creation
deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket branch deleted ';
actionmsg = 'bitbucket branch deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete branch request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket branch.';
robot.respond /project permission (.*) to (.*) for (.*)/, (res) ->
permission=res.match[1]
user=res.match[2]
projectkey=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketprojpermission.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketprojpermission",projectkey:projectkey,permission:permission,user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: project permission '+permission+' to user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketprojpermission.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketprojpermission.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketprojpermission', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for providing project permission to user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
user = request.body.user;
permission = request.body.permission;
# Call from create_project file for project creation
projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="providing project permsiion to user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing project permission to user.';
robot.respond /(.*) permission (.*)/, (res) ->
user=res.match[1]
permission=res.match[2]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketuserpermission.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketuserpermission",permission:permission,user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: permission '+permission+' to user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketuserpermission.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketuserpermission.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketuserpermission', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for providing permission to user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
permission = request.body.permission;
# Call from create_project file for project creation
userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="providing permsiion to user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing permission to user.';
| true | #-------------------------------------------------------------------------------
# Copyright 2018 Cognizant Technology Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
request =require('request')
fs = require('fs')
eindex = require('./index')
listproject = require('./listproject.js')
listrepo = require('./listrepo.js')
createuser = require('./createuser.js')
deleteuser = require('./deleteuser.js')
createproj = require('./createproject.js')
deleteproj = require('./deleteproject.js')
createrepo = require('./createrepo.js')
deleterepo = require('./deleterepo.js')
createbranch = require('./createbranch.js')
deletebranch = require('./deletebranch.js')
projpermission = require('./projpermission.js')
userpermission = require('./userpermission.js')
readjson = require ('./readjson.js');
generate_id = require('./mongoConnt');
module.exports = (robot) ->
  # "help" — list every supported bitbucket command and its expected syntax.
  robot.respond /help/, (res) ->
    res.send "list repo-->list repo <prjkey>\nlist projects-->list projects\ncreate user-->create user <username> <pwd> <email>\nprovide permission to user for project-->project permission <permission> to <user> for <prjkey>(possible permissions::PROJECT_READ,PROJECT_WRITE,PROJECT_ADMIN)\ngrant user permission--><user> premission <permission>(possible permissions::LICENSED_USER,PROJECT_CREATE,ADMIN,SYS_ADMIN)\ndelete user-->delete user <username>\ncreate project-->create project <prjkey> with <prjname> desc <prjdecsc>\ndelete project-->delete project <prj key>\ncreate repo-->create repo <prjkey> <reponame>\ndelete repo-->delete repo <reposlug> in <prjkey>\ncreate branch-->create branch <branchname> in <prjkey> repo <reposlug> from <frombranch>\ndelete branch--> delete branch <branchname> from <prjkey> in <reposlug>"
robot.respond /list repo (.*)/, (res) ->
projectkey=res.match[1]
listrepo.listrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD,projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /list projects/, (res) ->
listproject.listproject process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.respond /create user (.*) (.*) (.*)/, (res) ->
user=res.match[1]
userpassword=res.match[2]
emailaddress=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateuser",user:user,userpassword:PI:PASSWORD:<PASSWORD>END_PI,emailaddress:emailaddress}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
userpassword = request.body.userpassword;
emailaddress = request.body.emailaddress;
# Call from create_project file for project creation
createuser.createuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, userpassword, emailaddress, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user created ';
actionmsg = 'bitbucket user created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Create bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket user.';
robot.respond /delete user (.*)/, (res) ->
user=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteuser.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteuser",user:user}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete bitbucket user '+user+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteuser.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteuser.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteuser', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting of user';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
user = request.body.user;
# Call from create_project file for project creation
deleteuser.deleteuser process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket user deleted ';
actionmsg = 'bitbucket user deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="Delete bitbucket user request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket user.';
robot.respond /create project (.*) with (.*) desc (.*)/, (res) ->
projectkey=res.match[1]
projectname=res.match[2]
description=res.match[3]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketcreateproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreateproj",projectkey:projectkey,projectname:projectname,description:description}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create bitbucket project '+projectname+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketcreateproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketcreateproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketcreateproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the creating project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
projectname = request.body.projectname;
description = request.body.description;
# Call from create_project file for project creation
createproj.createproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, projectname, description, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project created ';
actionmsg = 'bitbucket project created ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="create project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to create bitbucket project.';
robot.respond /delete project (.*)/, (res) ->
projectkey=res.match[1]
readjson.readworkflow_coffee (error,stdout,stderr) ->
#Action Flow with workflow flag
if stdout.bitbucketdeleteproj.workflowflag == true
#Generate Random Ticket Number
generate_id.getNextSequence (err,id) ->
tckid=id
payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleteproj",projectkey:projectkey}
message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete project '+projectkey+'\n approve or reject the request'
robot.messageRoom(stdout.bitbucketdeleteproj.adminid, message);
res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleteproj.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
#Casual workflow
else
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
robot.router.post '/bitbucketdeleteproj', (request, response) ->
data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
if data_http.action == "Approved"
dt='Your request is approved by '+data_http.approver+' for the deleting project';
# Approved Message send to the user chat room
robot.messageRoom data_http.userid, dt;
projectkey = request.body.projectkey;
# Call from create_project file for project creation
deleteproj.deleteproj process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, (error,stdout,stderr) ->
if(stdout)
console.log(stdout)
res.send stdout
setTimeout (->eindex.passData stdout),1000
message = 'bitbucket project deleted ';
actionmsg = 'bitbucket project deleted ';
statusmsg = 'Success';
eindex.wallData botname, message, actionmsg, statusmsg;
if(stderr)
console.log(stderr)
res.send stderr
setTimeout (->eindex.passData stderr),1000
if(error)
console.log(error)
res.send error
setTimeout (->eindex.passData error),1000
else
dt="delete project request was rejected by "+data_http.approver
setTimeout (->eindex.passData dt),1000
# Rejected Message send to the user chat room
robot.messageRoom data_http.userid, dt;
robot.messageRoom data_http.userid, 'Sorry, You are not authorized to delete bitbucket project.';
  # "create repo <projectkey> <reponame>" — queued for admin approval when
  # the workflow flag is set, executed immediately otherwise.
  robot.respond /create repo (.*) (.*)/, (res) ->
    projectkey=res.match[1]
    reponame=res.match[2]
    readjson.readworkflow_coffee (error,stdout,stderr) ->
      #Action Flow with workflow flag
      if stdout.bitbucketcreaterepo.workflowflag == true
        #Generate Random Ticket Number
        generate_id.getNextSequence (err,id) ->
          tckid=id
          payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreaterepo",projectkey:projectkey,reponame:reponame}
          message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create repo '+reponame+'\n approve or reject the request'
          robot.messageRoom(stdout.bitbucketcreaterepo.adminid, message);
          res.send 'Your request is waiting for approval by '+stdout.bitbucketcreaterepo.admin
          dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
          #Insert into Mongo with Payload
          generate_id.add_in_mongo dataToInsert
      #Casual workflow
      else
        # Direct flow: create the repository right away.
        createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
          if(stdout)
            console.log(stdout)
            res.send stdout
            setTimeout (->eindex.passData stdout),1000
            message = 'bitbucket repo created ';
            actionmsg = 'bitbucket repo created ';
            statusmsg = 'Success';
            # NOTE(review): `botname` is never assigned in this file — presumably
            # meant to be process.env.HUBOT_NAME; confirm before relying on it.
            eindex.wallData botname, message, actionmsg, statusmsg;
          if(stderr)
            console.log(stderr)
            res.send stderr
            setTimeout (->eindex.passData stderr),1000
          if(error)
            console.log(error)
            res.send error
            setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "create repo" tickets.
robot.router.post '/bitbucketcreaterepo', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for the creating repo';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`. (data_http IS request.body
      # when no payload was posted.)
      projectkey = data_http.projectkey;
      reponame = data_http.reponame;
      # Call from create_project file for project creation
      createrepo.createrepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket repo created ';
          actionmsg = 'bitbucket repo created ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      dt="create repo request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized to creating bitbucket repo.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
# Chat command "delete repo <reponame> in <projectkey>".
# Workflow mode raises an approval ticket; otherwise the repo is deleted
# immediately via the Bitbucket shell helper.
robot.respond /delete repo (.*) in (.*)/, (res) ->
  projectkey=res.match[2]
  reponame=res.match[1]
  readjson.readworkflow_coffee (error,stdout,stderr) ->
    #Action Flow with workflow flag
    if stdout.bitbucketdeleterepo.workflowflag == true
      #Generate Random Ticket Number
      generate_id.getNextSequence (err,id) ->
        tckid=id
        # Ticket payload echoed back to the /bitbucketdeleterepo webhook.
        payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeleterepo",projectkey:projectkey,reponame:reponame}
        message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete repo '+reponame+'\n approve or reject the request'
        robot.messageRoom(stdout.bitbucketdeleterepo.adminid, message);
        res.send 'Your request is waiting for approval by '+stdout.bitbucketdeleterepo.admin
        dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
        #Insert into Mongo with Payload
        generate_id.add_in_mongo dataToInsert
      #Casual workflow
    else
      # No approval needed: delete now and relay the outcome to chat/wall.
      deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          res.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket repo deleted ';
          actionmsg = 'bitbucket repo deleted ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          res.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          res.send error
          setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "delete repo" tickets.
robot.router.post '/bitbucketdeleterepo', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for the deleting repo';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`.
      projectkey = data_http.projectkey;
      reponame = data_http.reponame;
      # Call from create_project file for project creation
      deleterepo.deleterepo process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reponame, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket repo deleted ';
          actionmsg = 'bitbucket repo deleted ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      dt="delete repo request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket repo.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
# Chat command "create branch <branch> in <projectkey> repo <reposlug> from <branch>".
# Workflow mode raises an approval ticket; otherwise the branch is created
# immediately via the Bitbucket shell helper.
robot.respond /create branch (.*) in (.*) repo (.*) from (.*)/, (res) ->
  branchname=res.match[1]
  projectkey=res.match[2]
  reposlug=res.match[3]
  frombranch=res.match[4]
  readjson.readworkflow_coffee (error,stdout,stderr) ->
    #Action Flow with workflow flag
    if stdout.bitbucketcreatebranch.workflowflag == true
      #Generate Random Ticket Number
      generate_id.getNextSequence (err,id) ->
        tckid=id
        # Ticket payload echoed back to the /bitbucketcreatebranch webhook.
        payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketcreatebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname,frombranch:frombranch}
        message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: create branch '+branchname+'\n approve or reject the request'
        robot.messageRoom(stdout.bitbucketcreatebranch.adminid, message);
        res.send 'Your request is waiting for approval by '+stdout.bitbucketcreatebranch.admin
        dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
        #Insert into Mongo with Payload
        generate_id.add_in_mongo dataToInsert
      #Casual workflow
    else
      createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          res.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket branch created ';
          actionmsg = 'bitbucket branch created ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          res.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          res.send error
          setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "create branch" tickets.
robot.router.post '/bitbucketcreatebranch', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for the creating branch';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`.
      projectkey = data_http.projectkey;
      reposlug = data_http.reposlug;
      branchname = data_http.branchname;
      frombranch = data_http.frombranch;
      # Call from create_project file for project creation
      createbranch.createbranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, frombranch, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket branch created ';
          actionmsg = 'bitbucket branch created ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      dt="create branch request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized for creating bitbucket branch.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
# Chat command "delete branch <branch> from <projectkey> in <reposlug>".
# Workflow mode raises an approval ticket; otherwise the branch is deleted
# immediately via the Bitbucket shell helper.
robot.respond /delete branch (.*) from (.*) in (.*)/, (res) ->
  branchname=res.match[1]
  projectkey=res.match[2]
  reposlug=res.match[3]
  readjson.readworkflow_coffee (error,stdout,stderr) ->
    #Action Flow with workflow flag
    if stdout.bitbucketdeletebranch.workflowflag == true
      #Generate Random Ticket Number
      generate_id.getNextSequence (err,id) ->
        tckid=id
        # Ticket payload echoed back to the /bitbucketdeletebranch webhook.
        payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketdeletebranch",projectkey:projectkey,reposlug:reposlug,branchname:branchname}
        message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: delete branch '+branchname+'\n approve or reject the request'
        robot.messageRoom(stdout.bitbucketdeletebranch.adminid, message);
        res.send 'Your request is waiting for approval by '+stdout.bitbucketdeletebranch.admin
        dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
        #Insert into Mongo with Payload
        generate_id.add_in_mongo dataToInsert
      #Casual workflow
    else
      deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          res.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket branch deleted ';
          actionmsg = 'bitbucket branch deleted ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          res.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          res.send error
          setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "delete branch" tickets.
robot.router.post '/bitbucketdeletebranch', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for the deleting branch';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`.
      projectkey = data_http.projectkey;
      reposlug = data_http.reposlug;
      branchname = data_http.branchname;
      # Call from create_project file for project creation
      deletebranch.deletebranch process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, reposlug, branchname, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
          message = 'bitbucket branch deleted ';
          actionmsg = 'bitbucket branch deleted ';
          statusmsg = 'Success';
          eindex.wallData botname, message, actionmsg, statusmsg;
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      dt="delete branch request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized for deleting bitbucket branch.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
# Chat command "project permission <permission> to <user> for <projectkey>".
# Grants a project-level Bitbucket permission, directly or via an approval
# ticket depending on the workflow flag.
robot.respond /project permission (.*) to (.*) for (.*)/, (res) ->
  permission=res.match[1]
  user=res.match[2]
  projectkey=res.match[3]
  readjson.readworkflow_coffee (error,stdout,stderr) ->
    #Action Flow with workflow flag
    if stdout.bitbucketprojpermission.workflowflag == true
      #Generate Random Ticket Number
      generate_id.getNextSequence (err,id) ->
        tckid=id
        # Ticket payload echoed back to the /bitbucketprojpermission webhook.
        payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketprojpermission",projectkey:projectkey,permission:permission,user:user}
        message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: project permission '+permission+' to user '+user+'\n approve or reject the request'
        robot.messageRoom(stdout.bitbucketprojpermission.adminid, message);
        res.send 'Your request is waiting for approval by '+stdout.bitbucketprojpermission.admin
        dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
        #Insert into Mongo with Payload
        generate_id.add_in_mongo dataToInsert
      #Casual workflow
    else
      # Note: unlike the repo/branch handlers, this one does not post a
      # success entry to the event wall.
      projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          res.send stdout
          setTimeout (->eindex.passData stdout),1000
        if(stderr)
          console.log(stderr)
          res.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          res.send error
          setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "project permission" tickets.
robot.router.post '/bitbucketprojpermission', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for providing project permission to user';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`.
      projectkey = data_http.projectkey;
      user = data_http.user;
      permission = data_http.permission;
      # Call from create_project file for project creation
      projpermission.projpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, projectkey, user, permission, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      # FIX: corrected "permsiion" typo in the user-facing message.
      dt="providing project permission to user request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing project permission to user.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
# Chat command "<user> permission <permission>" -- global Bitbucket
# permission for a user.
# NOTE(review): this pattern also matches the more specific
# "project permission ... to ... for ..." command above; listener ordering
# appears to be relied upon -- confirm intended.
robot.respond /(.*) permission (.*)/, (res) ->
  user=res.match[1]
  permission=res.match[2]
  readjson.readworkflow_coffee (error,stdout,stderr) ->
    #Action Flow with workflow flag
    if stdout.bitbucketuserpermission.workflowflag == true
      #Generate Random Ticket Number
      generate_id.getNextSequence (err,id) ->
        tckid=id
        # Ticket payload echoed back to the /bitbucketuserpermission webhook.
        payload={botname:process.env.HUBOT_NAME,username:res.message.user.name,userid:res.message.user.id,podIp:process.env.MY_POD_IP,"callback_id":"bitbucketuserpermission",permission:permission,user:user}
        message='Ticket Id : '+tckid+'\n Raised By: '+res.message.user.name+'\n Command: permission '+permission+' to user '+user+'\n approve or reject the request'
        robot.messageRoom(stdout.bitbucketuserpermission.adminid, message);
        res.send 'Your request is waiting for approval by '+stdout.bitbucketuserpermission.admin
        dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
        #Insert into Mongo with Payload
        generate_id.add_in_mongo dataToInsert
      #Casual workflow
    else
      userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          res.send stdout
          setTimeout (->eindex.passData stdout),1000
        if(stderr)
          console.log(stderr)
          res.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          res.send error
          setTimeout (->eindex.passData error),1000
# HTTP webhook hit by the approval UI for "user permission" tickets.
robot.router.post '/bitbucketuserpermission', (request, response) ->
    # Normalise the two posting styles: JSON string under `payload`, or
    # the fields directly on the body.
    data_http = if request.body.payload? then JSON.parse request.body.payload else request.body
    if data_http.action == "Approved"
      dt='Your request is approved by '+data_http.approver+' for providing permission to user';
      # Approved Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      # FIX: read from the parsed payload -- request.body lacks these keys
      # when the data was wrapped in `payload`.
      user = data_http.user;
      permission = data_http.permission;
      # Call from create_project file for project creation
      userpermission.userpermission process.env.BITBUCKET_URL, process.env.USERNAME, process.env.PASSWORD, user, permission, (error,stdout,stderr) ->
        if(stdout)
          console.log(stdout)
          # FIX: reply via the Express `response` object -- the original
          # referenced `res`, which is not defined in this handler.
          response.send stdout
          setTimeout (->eindex.passData stdout),1000
        if(stderr)
          console.log(stderr)
          response.send stderr
          setTimeout (->eindex.passData stderr),1000
        if(error)
          console.log(error)
          response.send error
          setTimeout (->eindex.passData error),1000
    else
      # FIX: corrected "permsiion" typo in the user-facing message.
      dt="providing permission to user request was rejected by "+data_http.approver
      setTimeout (->eindex.passData dt),1000
      # Rejected Message send to the user chat room
      robot.messageRoom data_http.userid, dt;
      robot.messageRoom data_http.userid, 'Sorry, You are not authorized for providing permission to user.';
      # Acknowledge the HTTP caller so the request does not hang open.
      response.send dt
|
[
{
"context": " new Date(startDate)\n while d < endDate\n key = \"#{d.getFullYear()}-#{d.getMonth()}\"\n found = months.find (m) => m.getKey() == key\n",
"end": 303,
"score": 0.9272151589393616,
"start": 267,
"tag": "KEY",
"value": "\"#{d.getFullYear()}-#{d.getMonth()}\""
}
] | src/components/calendar/setupMonths.coffee | brianshaler/kerplunk-location-calendar | 0 | Month = require './month'
module.exports = setupMonths = (regl, months, startDate, endDate, config) ->
  # Ensure `months` holds one Month instance per calendar month in
  # [startDate, endDate), then lay the visible ones out on the page grid.
  {
    monthColumns
    pageMargin
    squareSize
    squareMargin
    textHeight
    width
  } = config
  cursor = new Date(startDate)
  # NOTE(review): setMonth keeps the day-of-month, so a start date on the
  # 29th-31st can overflow a short month and skip it -- confirm callers
  # always pass an early day-of-month.
  until cursor >= endDate
    monthKey = "#{cursor.getFullYear()}-#{cursor.getMonth()}"
    alreadyPresent = months.some (m) -> m.getKey() == monthKey
    months.push Month(regl, new Date(cursor), config) unless alreadyPresent
    cursor.setMonth(cursor.getMonth() + 1)
  # Chronological order, oldest first.
  months.sort (left, right) -> if left.getDate() > right.getDate() then 1 else -1
  visible = months.filter (m) -> m.getDate() >= startDate and m.getDate() < endDate
  visible.forEach (month, index) ->
    row = Math.floor(index / monthColumns)
    col = index - row * monthColumns
    columnWidth = Math.floor(width / monthColumns)
    originX = pageMargin[3] + col * columnWidth
    originY = pageMargin[0] + row * (squareSize + squareMargin) * 7.5 + textHeight
    month.setConfig(config)
    month.updateCalendarPosition(originX, originY)
| 29314 | Month = require './month'
module.exports = setupMonths = (regl, months, startDate, endDate, config) ->
{
monthColumns
pageMargin
squareSize
squareMargin
textHeight
width
} = config
d = new Date(startDate)
while d < endDate
    key = "#{d.getFullYear()}-#{d.getMonth()}"
found = months.find (m) => m.getKey() == key
if !found
months.push(Month(regl, new Date(d), config))
d.setMonth(d.getMonth() + 1)
months.sort (a, b) -> if a.getDate() > b.getDate() then 1 else -1
# console.log('setupMonths', monthColumns, startDate, endDate)
months
.filter (m) => m.getDate() >= startDate and m.getDate() < endDate
.forEach (month, index) ->
calMonthRow = Math.floor(index / monthColumns)
calMonthCol = index - calMonthRow * monthColumns
colWidth = Math.floor(width / monthColumns)
calMonthX = pageMargin[3] + calMonthCol * colWidth
calMonthY = pageMargin[0] + calMonthRow * (squareSize + squareMargin) * 7.5 + textHeight
month.setConfig(config)
month.updateCalendarPosition(calMonthX, calMonthY)
| true | Month = require './month'
module.exports = setupMonths = (regl, months, startDate, endDate, config) ->
{
monthColumns
pageMargin
squareSize
squareMargin
textHeight
width
} = config
d = new Date(startDate)
while d < endDate
    key = "#{d.getFullYear()}-#{d.getMonth()}"
found = months.find (m) => m.getKey() == key
if !found
months.push(Month(regl, new Date(d), config))
d.setMonth(d.getMonth() + 1)
months.sort (a, b) -> if a.getDate() > b.getDate() then 1 else -1
# console.log('setupMonths', monthColumns, startDate, endDate)
months
.filter (m) => m.getDate() >= startDate and m.getDate() < endDate
.forEach (month, index) ->
calMonthRow = Math.floor(index / monthColumns)
calMonthCol = index - calMonthRow * monthColumns
colWidth = Math.floor(width / monthColumns)
calMonthX = pageMargin[3] + calMonthCol * colWidth
calMonthY = pageMargin[0] + calMonthRow * (squareSize + squareMargin) * 7.5 + textHeight
month.setConfig(config)
month.updateCalendarPosition(calMonthX, calMonthY)
|
[
{
"context": " user: {\n id: 1,\n email: \"voter1@example.com\",\n name: \"Voter1\",\n gravatar_ha",
"end": 805,
"score": 0.9999073147773743,
"start": 787,
"tag": "EMAIL",
"value": "voter1@example.com"
},
{
"context": " email: \"voter1@exa... | test/spec/services/api_loaders_spec.coffee | Spokenvote/spokenvote | 21 | describe 'API Loaders Tests', ->
  # Shared angular-mocks handles, re-injected before each spec below.
  beforeEach module 'spokenvote'
  $httpBackend = undefined
  $rootScope = undefined
  $route = undefined
  # Canned server responses reused across the loader suites in this file.
  returnedProposalList =
    id: 15,
    related_proposals: [
      {
        id: 49,
        statement: "Voter 1 has something better that is the max length you can make a proposal so Voter 2 can test supporting that max length text layout here.",
        user_id: 1,
        created_at: "2013-05-16 02:08:32 UTC",
        votes_count: 2,
        ancestry: "14/15",
        created_by: null,
        hub_id: 2,
        votes_in_tree: 6,
        votes_percentage: 33,
        is_editable: false,
        has_support: true,
        current_user_support: true,
        related_proposals_count: 1,
        user: {
          id: 1,
          email: "voter1@example.com",
          name: "Voter1",
          gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
          facebook_auth: null
        },
        hub: {
          id: 2,
          group_name: "Marriage Equality",
          formatted_location: "Solapur, Maharashtra, India",
          full_hub: "Marriage Equality - Solapur, Maharashtra, India"
        },
        votes: [
          {
            id: 63,
            comment: "ghghfdfdfd",
            username: "Kim ManAuth Miller",
            created_at: "2013-02-10 16:34:28 UTC",
            user_id: 42,
            email: "kimardenmiller@gmail.com",
            gravatar_hash: "3423a20950bd9efcc25a2e7657ff990c",
            facebook_auth: null,
            updated_at: "Over a year ago"
          },
          {
            id: 71,
            comment: "dfkdfdfjdjfk",
            username: "Voter1",
            created_at: "2013-05-16 02:08:32 UTC",
            user_id: 1,
            email: "voter1@example.com",
            gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
            facebook_auth: null,
            updated_at: "Over a year ago"
          }
        ]
      }
    ]
  # Minimal proposal used by the RelatedVoteInTreeLoader specs.
  clicked_proposal =
    id: 17
    statement: 'My proposal statement'
    votes: [
      id: 22
      comment: 'Why you should vote for this proposal']
  # Single-proposal payload used by the ProposalLoader specs.
  returnedProposalResponse =
    ancestry: null
    created_at: "2015-07-10T20:56:49.970Z"
    created_by: null
    hub_id: 1
    id: 258
    statement: "My Proposal"
    supporting_statement: null
    updated_at: "2015-07-10T22:46:48.516Z"
    user_id: 44
    votes_count: 1
  # CurrentUserLoader: resolves the signed-in user from GET /currentuser.
  describe "CurrentUserLoader should respond to requests", ->
    currentUserLoader = undefined
    CurrentUser = undefined
    returnedUser =
      email: "term...mail.com"
      facebook_auth: "1014514417"
      first_name: "Kim"
      gravatar_hash: "bbd5398ccde904d92ba0d5b8fc6c7344"
      id: 44
      'is_admin?': false
      name: "Kim Miller"
      username: "Kim Miller"
    beforeEach module ($provide) ->
      -> $provide.value '$route',
        current:
          params:
            hub: 1
            filter: 'active'
            user: 42
    beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentUserLoader_) ->
      $httpBackend = _$httpBackend_
      $rootScope = _$rootScope_
      $route = _$route_
      currentUserLoader = _CurrentUserLoader_
      CurrentUser = -> {}
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should load the current USER", ->
      $httpBackend.expectGET '/currentuser'
      .respond returnedUser
      promise = currentUserLoader()
      user = undefined
      promise.then (current_user) ->
        user = current_user
      $httpBackend.flush()
      expect user instanceof Object
      .toBeTruthy()
      expect user
      .toEqual jasmine.objectContaining returnedUser
    it 'should return a promise', ->
      $httpBackend.expectGET '/currentuser'
      .respond returnedUser
      promise = currentUserLoader()
      user = undefined
      promise.then (data) ->
        user = data
      $httpBackend.flush()
      expect user.$resolved
      .toEqual true
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/currentuser'
      .respond 500
      promise = currentUserLoader()
      user = undefined
      promise.then (fruits) ->
        user = fruits
      , (reason) ->
        user = reason
      $httpBackend.flush()
      expect user
      .toContain 'Unable'
  # CurrentHubLoader: resolves the hub named in the route params via GET /hubs/:id.
  describe "CurrentHubLoader should respond to requests", ->
    currentHubLoader = undefined
    returnedHub =
      id: 1
      group_name: "Hacker Dojo"
      description: "Hacker Dojo"
      created_at: "2013-02-10T00:01:58.914Z"
      updated_at: "2013-02-10T00:01:58.914Z"
    beforeEach module ($provide) ->
      -> $provide.value '$route',
        current:
          params:
            hub: 1
            filter: 'active'
            user: 42
    beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentHubLoader_) ->
      $httpBackend = _$httpBackend_
      $rootScope = _$rootScope_
      $route = _$route_
      currentHubLoader = _CurrentHubLoader_
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should load the current hub", ->
      $httpBackend.expectGET '/hubs/1'
      .respond returnedHub
      promise = currentHubLoader()
      hub = undefined
      promise.then (data) ->
        hub = data
      $httpBackend.flush()
      expect hub instanceof Object
      .toBeTruthy()
      expect hub
      .toEqual jasmine.objectContaining returnedHub
    it "should return a promise", ->
      $httpBackend.expectGET '/hubs/1'
      .respond returnedHub
      promise = currentHubLoader()
      hub = undefined
      promise.then (data) ->
        hub = data
      $httpBackend.flush()
      expect hub.$resolved
      .toEqual true
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/hubs/1'
      .respond 500
      promise = currentHubLoader()
      proposal = undefined
      promise.then (fruits) ->
        proposal = fruits
      , (reason) ->
        proposal = reason
      $httpBackend.flush()
      expect proposal
      .toContain 'Unable'
    it "should reject a missing hub filter", ->
      # No HTTP expectation here: the loader must fail before any request.
      $route.current.params.hub = null
      promise = currentHubLoader()
      proposal = undefined
      promise.then (fruits) ->
        proposal = fruits
      , (reason) ->
        proposal = reason
      $rootScope.$apply()
      expect proposal
      .toContain 'No Hub ID'
  # SelectHubLoader: typeahead-style hub search via GET /hubs?hub_filter=...
  describe "SelectHubLoader should respond to requests", ->
    selectHubLoader = undefined
    returnHubsList = [
      id: 1
      group_name: "Hacker Dojo"
    ,
      id: 321
      group_name: "Hacker Doggies"
    ,
      id: 676
      group_name: "Hacker Dummies"
    ]
    beforeEach inject (_$httpBackend_, _$rootScope_, SelectHubLoader) ->
      $httpBackend = _$httpBackend_
      $rootScope = _$rootScope_
      selectHubLoader = SelectHubLoader
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should load the current hub", ->
      $httpBackend.expectGET '/hubs?hub_filter=ha'
      .respond returnHubsList
      hub_filter = 'ha'
      promise = selectHubLoader(hub_filter)
      hub = undefined
      promise.then (data) ->
        hub = data
      $httpBackend.flush()
      expect hub instanceof Object
      .toBeTruthy()
      expect hub
      .toEqual jasmine.objectContaining returnHubsList
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/hubs?hub_filter=ha'
      .respond 500
      hub_filter = 'ha'
      promise = selectHubLoader hub_filter
      proposal = undefined
      promise.then (fruits) ->
        console.log 'fruits: ', fruits
        proposal = fruits
      , (reason) ->
        proposal = reason
      $httpBackend.flush()
      expect proposal
      .toContain 'Unable'
    it "should reject a missing hub filter", ->
      # No HTTP expectation here: the loader must fail before any request.
      hub_filter = null
      promise = selectHubLoader hub_filter
      proposal = undefined
      promise.then (fruits) ->
        proposal = fruits
      , (reason) ->
        proposal = reason
      $rootScope.$apply()
      expect proposal
      .toContain 'No Hub ID'
  # ProposalLoader: fetches the proposal named in the route via GET /proposals/:id.
  describe "ProposalLoader should load a proposal", ->
    proposalLoader = undefined
    beforeEach module ($provide) ->
      -> $provide.value '$route',
        current:
          params:
            proposalId: 23
    beforeEach inject (_$httpBackend_, ProposalLoader) ->
      $httpBackend = _$httpBackend_
      proposalLoader = ProposalLoader
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should have proposalLoader defined", ->
      $httpBackend.expectGET '/proposals/23'
      .respond returnedProposalResponse
      expect proposalLoader()
      .toBeDefined()
      $httpBackend.flush()
    it "should return a promise", ->
      $httpBackend.expectGET '/proposals/23'
      .respond returnedProposalResponse
      promise = proposalLoader()
      proposals = undefined
      promise.then (data) ->
        proposals = data
      $httpBackend.flush()
      expect proposals.$resolved
      .toEqual true
    it "should return a proposal via promise", ->
      $httpBackend.expectGET '/proposals/23'
      .respond returnedProposalResponse
      promise = proposalLoader()
      proposal = undefined
      promise.then (data) ->
        proposal = data
      $httpBackend.flush()
      expect proposal instanceof Object
      .toBeTruthy()
      expect proposal
      .toEqual jasmine.objectContaining returnedProposalResponse
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/proposals/23'
      .respond 500
      promise = proposalLoader()
      proposal = undefined
      promise.then (fruits) ->
        proposal = fruits
      , (reason) ->
        proposal = reason
      $httpBackend.flush()
      expect proposal
      .toContain 'Unable'
  # MultiProposalLoader: fetches a filtered proposal list via GET /proposals?...
  describe "MultiProposalLoader should load three proposals", ->
    multiProposalLoader = undefined
    beforeEach module ($provide) ->
      -> $provide.value '$route',
        current:
          params:
            hub: 1
            filter: 'active'
            user: 42
    beforeEach inject (_$httpBackend_, MultiProposalLoader) ->
      $httpBackend = _$httpBackend_
      multiProposalLoader = MultiProposalLoader
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should load list of proposals", ->
      $httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
      .respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
      expect multiProposalLoader()
      .toBeDefined()
      $httpBackend.flush()
    it "should return a promise", ->
      $httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
      .respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
      promise = multiProposalLoader()
      expect promise.then instanceof Object
      .toBeTruthy()
      $httpBackend.flush()
    it "should return an array via promise", ->
      $httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
      .respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
      promise = multiProposalLoader()
      proposals = undefined
      promise.then (data) ->
        proposals = data
      $httpBackend.flush()
      expect proposals instanceof Array
      .toBeTruthy()
      expect proposals
      .toEqual [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
      .respond 500
      promise = multiProposalLoader()
      proposals = undefined
      promise.then (fruits) ->
        proposals = fruits
      , (reason) ->
        proposals = reason
      $httpBackend.flush()
      expect proposals
      .toContain 'Unable'
  # RelatedProposalsLoader: fetches siblings of the routed proposal via
  # GET /proposals/:id/related_proposals.
  describe "RelatedProposalsLoader should load three proposals", ->
    RelatedProposalsLoader = undefined
    beforeEach module ($provide) ->
      -> $provide.value '$route',
        current:
          params:
            proposalId: 15
            # hub: 1
            # filter: 'active'
            # user: 42
    beforeEach inject (_$httpBackend_, _RelatedProposalsLoader_) ->
      $httpBackend = _$httpBackend_
      RelatedProposalsLoader = _RelatedProposalsLoader_
    afterEach ->
      $httpBackend.verifyNoOutstandingExpectation()
      $httpBackend.verifyNoOutstandingRequest()
    it "should load list of RELATED proposals", ->
      $httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
      .respond returnedProposalList
      expect RelatedProposalsLoader()
      .toBeDefined()
      $httpBackend.flush()
    it "should return a promise", ->
      $httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
      .respond returnedProposalList
      promise = RelatedProposalsLoader()
      expect promise.then instanceof Object
      .toBeTruthy()
      $httpBackend.flush()
    it "should return an Object of Related Proposals via promise", ->
      $httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
      .respond returnedProposalList
      promise = RelatedProposalsLoader()
      proposals = undefined
      promise.then (data) ->
        proposals = data
      $httpBackend.flush()
      expect proposals instanceof Object
      .toBeTruthy()
      expect proposals[0]
      .toEqual returnedProposalList[0]
    it "should reject the promise and respond with error", ->
      $httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
      .respond 500
      promise = RelatedProposalsLoader()
      proposals = undefined
      promise.then (fruits) ->
        proposals = fruits
      , (reason) ->
        proposals = reason
      $httpBackend.flush()
      expect proposals
      .toContain 'Unable'
describe "RelatedVoteInTreeLoader should load three proposals", ->
RelatedVoteInTreeLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 15
beforeEach inject (_$httpBackend_, _RelatedVoteInTreeLoader_) ->
$httpBackend = _$httpBackend_
RelatedVoteInTreeLoader = _RelatedVoteInTreeLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should see Related VOTE IN TREE proposals defined", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
expect RelatedVoteInTreeLoader clicked_proposal
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an Object of Related VOTE IN TREE via promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Object
.toBeTruthy()
expect proposals[0]
.toEqual returnedProposalList[0]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond 500
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
| 213100 | describe 'API Loaders Tests', ->
beforeEach module 'spokenvote'
$httpBackend = undefined
$rootScope = undefined
$route = undefined
returnedProposalList =
id: 15,
related_proposals: [
{
id: 49,
statement: "Voter 1 has something better that is the max length you can make a proposal so Voter 2 can test supporting that max length text layout here.",
user_id: 1,
created_at: "2013-05-16 02:08:32 UTC",
votes_count: 2,
ancestry: "14/15",
created_by: null,
hub_id: 2,
votes_in_tree: 6,
votes_percentage: 33,
is_editable: false,
has_support: true,
current_user_support: true,
related_proposals_count: 1,
user: {
id: 1,
email: "<EMAIL>",
name: "<NAME>",
gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
facebook_auth: null
},
hub: {
id: 2,
group_name: "Marriage Equality",
formatted_location: "Solapur, Maharashtra, India",
full_hub: "Marriage Equality - Solapur, Maharashtra, India"
},
votes: [
{
id: 63,
comment: "ghghfdfdfd",
username: "<NAME>",
created_at: "2013-02-10 16:34:28 UTC",
user_id: 42,
email: "<EMAIL>",
gravatar_hash: "3423a20950bd9efcc25a2e7657ff990c",
facebook_auth: null,
updated_at: "Over a year ago"
},
{
id: 71,
comment: "dfkdfdfjdjfk",
username: "Voter1",
created_at: "2013-05-16 02:08:32 UTC",
user_id: 1,
email: "<EMAIL>",
gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
facebook_auth: null,
updated_at: "Over a year ago"
}
]
}
]
clicked_proposal =
id: 17
statement: 'My proposal statement'
votes: [
id: 22
comment: 'Why you should vote for this proposal']
returnedProposalResponse =
ancestry: null
created_at: "2015-07-10T20:56:49.970Z"
created_by: null
hub_id: 1
id: 258
statement: "My Proposal"
supporting_statement: null
updated_at: "2015-07-10T22:46:48.516Z"
user_id: 44
votes_count: 1
describe "CurrentUserLoader should respond to requests", ->
currentUserLoader = undefined
CurrentUser = undefined
returnedUser =
email: "term...mail.com"
facebook_auth: "1014514417"
first_name: "<NAME>"
gravatar_hash: "bbd5398ccde904d92ba0d5b8fc6c7344"
id: 44
'is_admin?': false
name: "<NAME>"
username: "Kim Miller"
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentUserLoader_) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
$route = _$route_
currentUserLoader = _CurrentUserLoader_
CurrentUser = -> {}
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current USER", ->
$httpBackend.expectGET '/currentuser'
.respond returnedUser
promise = currentUserLoader()
user = undefined
promise.then (current_user) ->
user = current_user
$httpBackend.flush()
expect user instanceof Object
.toBeTruthy()
expect user
.toEqual jasmine.objectContaining returnedUser
it 'should return a promise', ->
$httpBackend.expectGET '/currentuser'
.respond returnedUser
promise = currentUserLoader()
user = undefined
promise.then (data) ->
user = data
$httpBackend.flush()
expect user.$resolved
.toEqual true
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/currentuser'
.respond 500
promise = currentUserLoader()
user = undefined
promise.then (fruits) ->
user = fruits
, (reason) ->
user = reason
$httpBackend.flush()
expect user
.toContain 'Unable'
describe "CurrentHubLoader should respond to requests", ->
currentHubLoader = undefined
returnedHub =
id: 1
group_name: "Hacker Dojo"
description: "Hacker Dojo"
created_at: "2013-02-10T00:01:58.914Z"
updated_at: "2013-02-10T00:01:58.914Z"
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentHubLoader_) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
$route = _$route_
currentHubLoader = _CurrentHubLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current hub", ->
$httpBackend.expectGET '/hubs/1'
.respond returnedHub
promise = currentHubLoader()
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub instanceof Object
.toBeTruthy()
expect hub
.toEqual jasmine.objectContaining returnedHub
it "should return a promise", ->
$httpBackend.expectGET '/hubs/1'
.respond returnedHub
promise = currentHubLoader()
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub.$resolved
.toEqual true
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/hubs/1'
.respond 500
promise = currentHubLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
it "should reject a missing hub filter", ->
$route.current.params.hub = null
promise = currentHubLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$rootScope.$apply()
expect proposal
.toContain 'No Hub ID'
describe "SelectHubLoader should respond to requests", ->
selectHubLoader = undefined
returnHubsList = [
id: 1
group_name: "Hacker Dojo"
,
id: 321
group_name: "Hacker Doggies"
,
id: 676
group_name: "Hacker Dummies"
]
beforeEach inject (_$httpBackend_, _$rootScope_, SelectHubLoader) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
selectHubLoader = SelectHubLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current hub", ->
$httpBackend.expectGET '/hubs?hub_filter=ha'
.respond returnHubsList
hub_filter = 'ha'
promise = selectHubLoader(hub_filter)
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub instanceof Object
.toBeTruthy()
expect hub
.toEqual jasmine.objectContaining returnHubsList
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/hubs?hub_filter=ha'
.respond 500
hub_filter = 'ha'
promise = selectHubLoader hub_filter
proposal = undefined
promise.then (fruits) ->
console.log 'fruits: ', fruits
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
it "should reject a missing hub filter", ->
hub_filter = null
promise = selectHubLoader hub_filter
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$rootScope.$apply()
expect proposal
.toContain 'No Hub ID'
describe "ProposalLoader should load a proposal", ->
proposalLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 23
beforeEach inject (_$httpBackend_, ProposalLoader) ->
$httpBackend = _$httpBackend_
proposalLoader = ProposalLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should have proposalLoader defined", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
expect proposalLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
promise = proposalLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals.$resolved
.toEqual true
it "should return a proposal via promise", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
promise = proposalLoader()
proposal = undefined
promise.then (data) ->
proposal = data
$httpBackend.flush()
expect proposal instanceof Object
.toBeTruthy()
expect proposal
.toEqual jasmine.objectContaining returnedProposalResponse
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/23'
.respond 500
promise = proposalLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
describe "MultiProposalLoader should load three proposals", ->
multiProposalLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, MultiProposalLoader) ->
$httpBackend = _$httpBackend_
multiProposalLoader = MultiProposalLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load list of proposals", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
expect multiProposalLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
promise = multiProposalLoader()
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an array via promise", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
promise = multiProposalLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Array
.toBeTruthy()
expect proposals
.toEqual [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond 500
promise = multiProposalLoader()
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
describe "RelatedProposalsLoader should load three proposals", ->
RelatedProposalsLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 15
# hub: 1
# filter: 'active'
# user: 42
beforeEach inject (_$httpBackend_, _RelatedProposalsLoader_) ->
$httpBackend = _$httpBackend_
RelatedProposalsLoader = _RelatedProposalsLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load list of RELATED proposals", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
expect RelatedProposalsLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
promise = RelatedProposalsLoader()
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an Object of Related Proposals via promise", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
promise = RelatedProposalsLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Object
.toBeTruthy()
expect proposals[0]
.toEqual returnedProposalList[0]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond 500
promise = RelatedProposalsLoader()
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
describe "RelatedVoteInTreeLoader should load three proposals", ->
RelatedVoteInTreeLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 15
beforeEach inject (_$httpBackend_, _RelatedVoteInTreeLoader_) ->
$httpBackend = _$httpBackend_
RelatedVoteInTreeLoader = _RelatedVoteInTreeLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should see Related VOTE IN TREE proposals defined", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
expect RelatedVoteInTreeLoader clicked_proposal
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an Object of Related VOTE IN TREE via promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Object
.toBeTruthy()
expect proposals[0]
.toEqual returnedProposalList[0]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond 500
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
| true | describe 'API Loaders Tests', ->
beforeEach module 'spokenvote'
$httpBackend = undefined
$rootScope = undefined
$route = undefined
returnedProposalList =
id: 15,
related_proposals: [
{
id: 49,
statement: "Voter 1 has something better that is the max length you can make a proposal so Voter 2 can test supporting that max length text layout here.",
user_id: 1,
created_at: "2013-05-16 02:08:32 UTC",
votes_count: 2,
ancestry: "14/15",
created_by: null,
hub_id: 2,
votes_in_tree: 6,
votes_percentage: 33,
is_editable: false,
has_support: true,
current_user_support: true,
related_proposals_count: 1,
user: {
id: 1,
email: "PI:EMAIL:<EMAIL>END_PI",
name: "PI:NAME:<NAME>END_PI",
gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
facebook_auth: null
},
hub: {
id: 2,
group_name: "Marriage Equality",
formatted_location: "Solapur, Maharashtra, India",
full_hub: "Marriage Equality - Solapur, Maharashtra, India"
},
votes: [
{
id: 63,
comment: "ghghfdfdfd",
username: "PI:NAME:<NAME>END_PI",
created_at: "2013-02-10 16:34:28 UTC",
user_id: 42,
email: "PI:EMAIL:<EMAIL>END_PI",
gravatar_hash: "3423a20950bd9efcc25a2e7657ff990c",
facebook_auth: null,
updated_at: "Over a year ago"
},
{
id: 71,
comment: "dfkdfdfjdjfk",
username: "Voter1",
created_at: "2013-05-16 02:08:32 UTC",
user_id: 1,
email: "PI:EMAIL:<EMAIL>END_PI",
gravatar_hash: "d7c85daf2d627cc0d173d3bcff09a326",
facebook_auth: null,
updated_at: "Over a year ago"
}
]
}
]
clicked_proposal =
id: 17
statement: 'My proposal statement'
votes: [
id: 22
comment: 'Why you should vote for this proposal']
returnedProposalResponse =
ancestry: null
created_at: "2015-07-10T20:56:49.970Z"
created_by: null
hub_id: 1
id: 258
statement: "My Proposal"
supporting_statement: null
updated_at: "2015-07-10T22:46:48.516Z"
user_id: 44
votes_count: 1
describe "CurrentUserLoader should respond to requests", ->
currentUserLoader = undefined
CurrentUser = undefined
returnedUser =
email: "term...mail.com"
facebook_auth: "1014514417"
first_name: "PI:NAME:<NAME>END_PI"
gravatar_hash: "bbd5398ccde904d92ba0d5b8fc6c7344"
id: 44
'is_admin?': false
name: "PI:NAME:<NAME>END_PI"
username: "Kim Miller"
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentUserLoader_) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
$route = _$route_
currentUserLoader = _CurrentUserLoader_
CurrentUser = -> {}
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current USER", ->
$httpBackend.expectGET '/currentuser'
.respond returnedUser
promise = currentUserLoader()
user = undefined
promise.then (current_user) ->
user = current_user
$httpBackend.flush()
expect user instanceof Object
.toBeTruthy()
expect user
.toEqual jasmine.objectContaining returnedUser
it 'should return a promise', ->
$httpBackend.expectGET '/currentuser'
.respond returnedUser
promise = currentUserLoader()
user = undefined
promise.then (data) ->
user = data
$httpBackend.flush()
expect user.$resolved
.toEqual true
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/currentuser'
.respond 500
promise = currentUserLoader()
user = undefined
promise.then (fruits) ->
user = fruits
, (reason) ->
user = reason
$httpBackend.flush()
expect user
.toContain 'Unable'
describe "CurrentHubLoader should respond to requests", ->
currentHubLoader = undefined
returnedHub =
id: 1
group_name: "Hacker Dojo"
description: "Hacker Dojo"
created_at: "2013-02-10T00:01:58.914Z"
updated_at: "2013-02-10T00:01:58.914Z"
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, _$rootScope_, _$route_, _CurrentHubLoader_) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
$route = _$route_
currentHubLoader = _CurrentHubLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current hub", ->
$httpBackend.expectGET '/hubs/1'
.respond returnedHub
promise = currentHubLoader()
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub instanceof Object
.toBeTruthy()
expect hub
.toEqual jasmine.objectContaining returnedHub
it "should return a promise", ->
$httpBackend.expectGET '/hubs/1'
.respond returnedHub
promise = currentHubLoader()
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub.$resolved
.toEqual true
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/hubs/1'
.respond 500
promise = currentHubLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
it "should reject a missing hub filter", ->
$route.current.params.hub = null
promise = currentHubLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$rootScope.$apply()
expect proposal
.toContain 'No Hub ID'
describe "SelectHubLoader should respond to requests", ->
selectHubLoader = undefined
returnHubsList = [
id: 1
group_name: "Hacker Dojo"
,
id: 321
group_name: "Hacker Doggies"
,
id: 676
group_name: "Hacker Dummies"
]
beforeEach inject (_$httpBackend_, _$rootScope_, SelectHubLoader) ->
$httpBackend = _$httpBackend_
$rootScope = _$rootScope_
selectHubLoader = SelectHubLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load the current hub", ->
$httpBackend.expectGET '/hubs?hub_filter=ha'
.respond returnHubsList
hub_filter = 'ha'
promise = selectHubLoader(hub_filter)
hub = undefined
promise.then (data) ->
hub = data
$httpBackend.flush()
expect hub instanceof Object
.toBeTruthy()
expect hub
.toEqual jasmine.objectContaining returnHubsList
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/hubs?hub_filter=ha'
.respond 500
hub_filter = 'ha'
promise = selectHubLoader hub_filter
proposal = undefined
promise.then (fruits) ->
console.log 'fruits: ', fruits
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
it "should reject a missing hub filter", ->
hub_filter = null
promise = selectHubLoader hub_filter
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$rootScope.$apply()
expect proposal
.toContain 'No Hub ID'
describe "ProposalLoader should load a proposal", ->
proposalLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 23
beforeEach inject (_$httpBackend_, ProposalLoader) ->
$httpBackend = _$httpBackend_
proposalLoader = ProposalLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should have proposalLoader defined", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
expect proposalLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
promise = proposalLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals.$resolved
.toEqual true
it "should return a proposal via promise", ->
$httpBackend.expectGET '/proposals/23'
.respond returnedProposalResponse
promise = proposalLoader()
proposal = undefined
promise.then (data) ->
proposal = data
$httpBackend.flush()
expect proposal instanceof Object
.toBeTruthy()
expect proposal
.toEqual jasmine.objectContaining returnedProposalResponse
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/23'
.respond 500
promise = proposalLoader()
proposal = undefined
promise.then (fruits) ->
proposal = fruits
, (reason) ->
proposal = reason
$httpBackend.flush()
expect proposal
.toContain 'Unable'
describe "MultiProposalLoader should load three proposals", ->
multiProposalLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
hub: 1
filter: 'active'
user: 42
beforeEach inject (_$httpBackend_, MultiProposalLoader) ->
$httpBackend = _$httpBackend_
multiProposalLoader = MultiProposalLoader
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load list of proposals", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
expect multiProposalLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
promise = multiProposalLoader()
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an array via promise", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
promise = multiProposalLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Array
.toBeTruthy()
expect proposals
.toEqual [ "id":23,"statement 23":"Hacker Dojo", "id":24,"statement24":"Hacker Dojo", "id":25,"statement 25":"Hacker Dojo" ]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals?filter=active&hub=1&user=42'
.respond 500
promise = multiProposalLoader()
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
describe "RelatedProposalsLoader should load three proposals", ->
RelatedProposalsLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 15
# hub: 1
# filter: 'active'
# user: 42
beforeEach inject (_$httpBackend_, _RelatedProposalsLoader_) ->
$httpBackend = _$httpBackend_
RelatedProposalsLoader = _RelatedProposalsLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should load list of RELATED proposals", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
expect RelatedProposalsLoader()
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
promise = RelatedProposalsLoader()
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an Object of Related Proposals via promise", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond returnedProposalList
promise = RelatedProposalsLoader()
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Object
.toBeTruthy()
expect proposals[0]
.toEqual returnedProposalList[0]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/15/related_proposals?related_sort_by='
.respond 500
promise = RelatedProposalsLoader()
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
describe "RelatedVoteInTreeLoader should load three proposals", ->
RelatedVoteInTreeLoader = undefined
beforeEach module ($provide) ->
-> $provide.value '$route',
current:
params:
proposalId: 15
beforeEach inject (_$httpBackend_, _RelatedVoteInTreeLoader_) ->
$httpBackend = _$httpBackend_
RelatedVoteInTreeLoader = _RelatedVoteInTreeLoader_
afterEach ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it "should see Related VOTE IN TREE proposals defined", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
expect RelatedVoteInTreeLoader clicked_proposal
.toBeDefined()
$httpBackend.flush()
it "should return a promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
expect promise.then instanceof Object
.toBeTruthy()
$httpBackend.flush()
it "should return an Object of Related VOTE IN TREE via promise", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond returnedProposalList
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (data) ->
proposals = data
$httpBackend.flush()
expect proposals instanceof Object
.toBeTruthy()
expect proposals[0]
.toEqual returnedProposalList[0]
it "should reject the promise and respond with error", ->
$httpBackend.expectGET '/proposals/17/related_vote_in_tree'
.respond 500
promise = RelatedVoteInTreeLoader clicked_proposal
proposals = undefined
promise.then (fruits) ->
proposals = fruits
, (reason) ->
proposals = reason
$httpBackend.flush()
expect proposals
.toContain 'Unable'
|
[
{
"context": ".$scope.cfg.workers ?= []\n name = \"myslave\" + (self.$scope.cfg.workers.length + 1).toString(",
"end": 3883,
"score": 0.9962283968925476,
"start": 3876,
"tag": "USERNAME",
"value": "myslave"
},
{
"context": ".$scope.cfg.workers.push\n ... | buildbot_travis/ui/src/app/configEditors/bbtravis_config.controller.coffee | levitte/buildbot_travis | 1 | class ProjectsConfig extends Controller
self = null
constructor: ($scope, config, $state) ->
self = this
self.$scope = $scope
$scope.title = "Watched Projects"
$scope.project_remove = (project) ->
_.remove $scope.cfg.projects, (i) -> i == project
$scope.shows = {}
$scope.toggle_show = (i) ->
$scope.shows[i] ?= false
$scope.shows[i] = !$scope.shows[i]
$scope.new_project = ->
$scope.cfg.projects ?= []
$scope.shows[$scope.cfg.projects.length] = true
$scope.cfg.projects.push
vcs_type: _.keys(config.plugins.buildbot_travis.supported_vcs)[0]
$scope.is_shown = (i) ->
return $scope.shows[i]
$scope.allTags = (query) ->
ret = []
for p in $scope.cfg.projects
if p.tags?
for tag in p.tags
if tag.indexOf(query) == 0 and ret.indexOf(tag) < 0
ret.push(tag)
return ret
$scope.allStages = (query) ->
ret = []
for s in $scope.cfg.stages
if s.indexOf(query) == 0 and ret.indexOf(s) < 0
ret.push(s)
return ret
$scope.allBranches = (query) ->
ret = []
for p in $scope.cfg.projects
if p.branches?
for b in p.branches
if b.indexOf(query) == 0 and ret.indexOf(b) < 0
ret.push(b)
return ret
class EnvConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Default Environment Variables"
@$scope.new_env = {}
@$scope.env_remove = (key) ->
delete $scope.cfg.env[key]
@$scope.env_add = ->
self.$scope.cfg.env ?= {}
self.$scope.cfg.env[self.$scope.new_env.key] = self.$scope.new_env.value
$scope.new_env = {}
class DeploymentConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Deployment Environments"
@$scope.new_stage = ""
@$scope.stage_remove = (stage) ->
if self.$scope.cfg.stages.indexOf(stage) != -1
self.$scope.cfg.stages.splice(self.$scope.cfg.stages.indexOf(stage), 1)
@$scope.stage_add = (stage) ->
if stage
self.$scope.cfg.stages ?= []
self.$scope.cfg.stages.push(stage)
stage = ""
class NotImportantFilesConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Not Important Files"
@$scope.important_file_remove = (file) ->
_.remove self.$scope.cfg.workers, (i) -> i == file
@$scope.important_file_add = ->
if self.$scope.new_file
self.$scope.cfg.workers ?= []
self.$scope.cfg.not_important_files.push(self.$scope.new_file)
self.$scope.new_file = ""
class WorkerConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Workers"
@$scope.new_worker = type: "Worker"
@$scope.shows = {}
@$scope.toggle_show = (i) ->
self.$scope.shows[i] ?= false
self.$scope.shows[i] = !self.$scope.shows[i]
@$scope.is_shown = (i) ->
return self.$scope.shows[i]
@$scope.worker_remove = (worker) ->
_.remove self.$scope.cfg.workers, (i) -> i == worker
@$scope.worker_add = ->
if self.$scope.new_worker.type
self.$scope.cfg.workers ?= []
name = "myslave" + (self.$scope.cfg.workers.length + 1).toString()
id = _.random(2 ** 32)
self.$scope.shows[name] = true
self.$scope.cfg.workers.push
name: name
type: self.$scope.new_worker.type
number: 1
id: id
self.$scope.toggle_show(id)
DEFAULT_CUSTOM_AUTHCODE = """
from buildbot.plugins import *
auth = util.UserPasswordAuth({"homer": "doh!"})
"""
DEFAULT_CUSTOM_AUTHZCODE = """
from buildbot.plugins import *
from buildbot_travis.configurator import TravisEndpointMatcher
allowRules=[
util.StopBuildEndpointMatcher(role="admins"),
util.ForceBuildEndpointMatcher(role="admins"),
util.RebuildBuildEndpointMatcher(role="admins"),
TravisEndpointMatcher(role="admins")
]
roleMatchers=[
util.RolesFromEmails(admins=["my@email.com"])
]
"""
class AuthConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Authentication and Authorization"
@$scope.auth = {}
@$scope.$watch "cfg", (cfg) ->
if cfg
cfg.auth ?= {type: "None"}
self.$scope.auth = cfg.auth
@$scope.$watch "auth.type", (type) ->
if type == "Custom" and not self.$scope.auth.customcode
self.$scope.auth.customcode = DEFAULT_CUSTOM_AUTHCODE
@$scope.$watch "auth.authztype", (type) ->
if type == "Groups" and not self.$scope.auth.groups
self.$scope.auth.groups = []
if type == "Emails" and not self.$scope.auth.emails
self.$scope.auth.emails = []
if type == "Custom" and not self.$scope.auth.customauthzcode
self.$scope.auth.customauthzcode = DEFAULT_CUSTOM_AUTHZCODE
@$scope.isOAuth = ->
return self.$scope.auth.type in [ "Google", "GitLab", "GitHub", "Bitbucket"]
@$scope.getOAuthDoc = (type) ->
return {
Google: "https://developers.google.com/accounts/docs/OAuth2"
GitLab: "http://docs.gitlab.com/ce/api/oauth2.html"
GitHub: "https://developer.github.com/v3/oauth/"
Bitbucket: "https://confluence.atlassian.com/bitbucket/oauth-on-bitbucket-cloud-238027431.html"
}[type]
| 200592 | class ProjectsConfig extends Controller
self = null
constructor: ($scope, config, $state) ->
self = this
self.$scope = $scope
$scope.title = "Watched Projects"
$scope.project_remove = (project) ->
_.remove $scope.cfg.projects, (i) -> i == project
$scope.shows = {}
$scope.toggle_show = (i) ->
$scope.shows[i] ?= false
$scope.shows[i] = !$scope.shows[i]
$scope.new_project = ->
$scope.cfg.projects ?= []
$scope.shows[$scope.cfg.projects.length] = true
$scope.cfg.projects.push
vcs_type: _.keys(config.plugins.buildbot_travis.supported_vcs)[0]
$scope.is_shown = (i) ->
return $scope.shows[i]
$scope.allTags = (query) ->
ret = []
for p in $scope.cfg.projects
if p.tags?
for tag in p.tags
if tag.indexOf(query) == 0 and ret.indexOf(tag) < 0
ret.push(tag)
return ret
$scope.allStages = (query) ->
ret = []
for s in $scope.cfg.stages
if s.indexOf(query) == 0 and ret.indexOf(s) < 0
ret.push(s)
return ret
$scope.allBranches = (query) ->
ret = []
for p in $scope.cfg.projects
if p.branches?
for b in p.branches
if b.indexOf(query) == 0 and ret.indexOf(b) < 0
ret.push(b)
return ret
class EnvConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Default Environment Variables"
@$scope.new_env = {}
@$scope.env_remove = (key) ->
delete $scope.cfg.env[key]
@$scope.env_add = ->
self.$scope.cfg.env ?= {}
self.$scope.cfg.env[self.$scope.new_env.key] = self.$scope.new_env.value
$scope.new_env = {}
class DeploymentConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Deployment Environments"
@$scope.new_stage = ""
@$scope.stage_remove = (stage) ->
if self.$scope.cfg.stages.indexOf(stage) != -1
self.$scope.cfg.stages.splice(self.$scope.cfg.stages.indexOf(stage), 1)
@$scope.stage_add = (stage) ->
if stage
self.$scope.cfg.stages ?= []
self.$scope.cfg.stages.push(stage)
stage = ""
class NotImportantFilesConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Not Important Files"
@$scope.important_file_remove = (file) ->
_.remove self.$scope.cfg.workers, (i) -> i == file
@$scope.important_file_add = ->
if self.$scope.new_file
self.$scope.cfg.workers ?= []
self.$scope.cfg.not_important_files.push(self.$scope.new_file)
self.$scope.new_file = ""
class WorkerConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Workers"
@$scope.new_worker = type: "Worker"
@$scope.shows = {}
@$scope.toggle_show = (i) ->
self.$scope.shows[i] ?= false
self.$scope.shows[i] = !self.$scope.shows[i]
@$scope.is_shown = (i) ->
return self.$scope.shows[i]
@$scope.worker_remove = (worker) ->
_.remove self.$scope.cfg.workers, (i) -> i == worker
@$scope.worker_add = ->
if self.$scope.new_worker.type
self.$scope.cfg.workers ?= []
name = "myslave" + (self.$scope.cfg.workers.length + 1).toString()
id = _.random(2 ** 32)
self.$scope.shows[name] = true
self.$scope.cfg.workers.push
name: <NAME>
type: self.$scope.new_worker.type
number: 1
id: id
self.$scope.toggle_show(id)
DEFAULT_CUSTOM_AUTHCODE = """
from buildbot.plugins import *
auth = util.UserPasswordAuth({"homer": "<PASSWORD>!"})
"""
DEFAULT_CUSTOM_AUTHZCODE = """
from buildbot.plugins import *
from buildbot_travis.configurator import TravisEndpointMatcher
allowRules=[
util.StopBuildEndpointMatcher(role="admins"),
util.ForceBuildEndpointMatcher(role="admins"),
util.RebuildBuildEndpointMatcher(role="admins"),
TravisEndpointMatcher(role="admins")
]
roleMatchers=[
util.RolesFromEmails(admins=["<EMAIL>"])
]
"""
class AuthConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Authentication and Authorization"
@$scope.auth = {}
@$scope.$watch "cfg", (cfg) ->
if cfg
cfg.auth ?= {type: "None"}
self.$scope.auth = cfg.auth
@$scope.$watch "auth.type", (type) ->
if type == "Custom" and not self.$scope.auth.customcode
self.$scope.auth.customcode = DEFAULT_CUSTOM_AUTHCODE
@$scope.$watch "auth.authztype", (type) ->
if type == "Groups" and not self.$scope.auth.groups
self.$scope.auth.groups = []
if type == "Emails" and not self.$scope.auth.emails
self.$scope.auth.emails = []
if type == "Custom" and not self.$scope.auth.customauthzcode
self.$scope.auth.customauthzcode = DEFAULT_CUSTOM_AUTHZCODE
@$scope.isOAuth = ->
return self.$scope.auth.type in [ "Google", "GitLab", "GitHub", "Bitbucket"]
@$scope.getOAuthDoc = (type) ->
return {
Google: "https://developers.google.com/accounts/docs/OAuth2"
GitLab: "http://docs.gitlab.com/ce/api/oauth2.html"
GitHub: "https://developer.github.com/v3/oauth/"
Bitbucket: "https://confluence.atlassian.com/bitbucket/oauth-on-bitbucket-cloud-238027431.html"
}[type]
| true | class ProjectsConfig extends Controller
self = null
constructor: ($scope, config, $state) ->
self = this
self.$scope = $scope
$scope.title = "Watched Projects"
$scope.project_remove = (project) ->
_.remove $scope.cfg.projects, (i) -> i == project
$scope.shows = {}
$scope.toggle_show = (i) ->
$scope.shows[i] ?= false
$scope.shows[i] = !$scope.shows[i]
$scope.new_project = ->
$scope.cfg.projects ?= []
$scope.shows[$scope.cfg.projects.length] = true
$scope.cfg.projects.push
vcs_type: _.keys(config.plugins.buildbot_travis.supported_vcs)[0]
$scope.is_shown = (i) ->
return $scope.shows[i]
$scope.allTags = (query) ->
ret = []
for p in $scope.cfg.projects
if p.tags?
for tag in p.tags
if tag.indexOf(query) == 0 and ret.indexOf(tag) < 0
ret.push(tag)
return ret
$scope.allStages = (query) ->
ret = []
for s in $scope.cfg.stages
if s.indexOf(query) == 0 and ret.indexOf(s) < 0
ret.push(s)
return ret
$scope.allBranches = (query) ->
ret = []
for p in $scope.cfg.projects
if p.branches?
for b in p.branches
if b.indexOf(query) == 0 and ret.indexOf(b) < 0
ret.push(b)
return ret
class EnvConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Default Environment Variables"
@$scope.new_env = {}
@$scope.env_remove = (key) ->
delete $scope.cfg.env[key]
@$scope.env_add = ->
self.$scope.cfg.env ?= {}
self.$scope.cfg.env[self.$scope.new_env.key] = self.$scope.new_env.value
$scope.new_env = {}
class DeploymentConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Deployment Environments"
@$scope.new_stage = ""
@$scope.stage_remove = (stage) ->
if self.$scope.cfg.stages.indexOf(stage) != -1
self.$scope.cfg.stages.splice(self.$scope.cfg.stages.indexOf(stage), 1)
@$scope.stage_add = (stage) ->
if stage
self.$scope.cfg.stages ?= []
self.$scope.cfg.stages.push(stage)
stage = ""
class NotImportantFilesConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Not Important Files"
@$scope.important_file_remove = (file) ->
_.remove self.$scope.cfg.workers, (i) -> i == file
@$scope.important_file_add = ->
if self.$scope.new_file
self.$scope.cfg.workers ?= []
self.$scope.cfg.not_important_files.push(self.$scope.new_file)
self.$scope.new_file = ""
class WorkerConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Workers"
@$scope.new_worker = type: "Worker"
@$scope.shows = {}
@$scope.toggle_show = (i) ->
self.$scope.shows[i] ?= false
self.$scope.shows[i] = !self.$scope.shows[i]
@$scope.is_shown = (i) ->
return self.$scope.shows[i]
@$scope.worker_remove = (worker) ->
_.remove self.$scope.cfg.workers, (i) -> i == worker
@$scope.worker_add = ->
if self.$scope.new_worker.type
self.$scope.cfg.workers ?= []
name = "myslave" + (self.$scope.cfg.workers.length + 1).toString()
id = _.random(2 ** 32)
self.$scope.shows[name] = true
self.$scope.cfg.workers.push
name: PI:NAME:<NAME>END_PI
type: self.$scope.new_worker.type
number: 1
id: id
self.$scope.toggle_show(id)
DEFAULT_CUSTOM_AUTHCODE = """
from buildbot.plugins import *
auth = util.UserPasswordAuth({"homer": "PI:PASSWORD:<PASSWORD>END_PI!"})
"""
DEFAULT_CUSTOM_AUTHZCODE = """
from buildbot.plugins import *
from buildbot_travis.configurator import TravisEndpointMatcher
allowRules=[
util.StopBuildEndpointMatcher(role="admins"),
util.ForceBuildEndpointMatcher(role="admins"),
util.RebuildBuildEndpointMatcher(role="admins"),
TravisEndpointMatcher(role="admins")
]
roleMatchers=[
util.RolesFromEmails(admins=["PI:EMAIL:<EMAIL>END_PI"])
]
"""
class AuthConfig extends Controller
self = null
constructor: (@$scope, config, $state) ->
self = this
@$scope.title = "Authentication and Authorization"
@$scope.auth = {}
@$scope.$watch "cfg", (cfg) ->
if cfg
cfg.auth ?= {type: "None"}
self.$scope.auth = cfg.auth
@$scope.$watch "auth.type", (type) ->
if type == "Custom" and not self.$scope.auth.customcode
self.$scope.auth.customcode = DEFAULT_CUSTOM_AUTHCODE
@$scope.$watch "auth.authztype", (type) ->
if type == "Groups" and not self.$scope.auth.groups
self.$scope.auth.groups = []
if type == "Emails" and not self.$scope.auth.emails
self.$scope.auth.emails = []
if type == "Custom" and not self.$scope.auth.customauthzcode
self.$scope.auth.customauthzcode = DEFAULT_CUSTOM_AUTHZCODE
@$scope.isOAuth = ->
return self.$scope.auth.type in [ "Google", "GitLab", "GitHub", "Bitbucket"]
@$scope.getOAuthDoc = (type) ->
return {
Google: "https://developers.google.com/accounts/docs/OAuth2"
GitLab: "http://docs.gitlab.com/ce/api/oauth2.html"
GitHub: "https://developer.github.com/v3/oauth/"
Bitbucket: "https://confluence.atlassian.com/bitbucket/oauth-on-bitbucket-cloud-238027431.html"
}[type]
|
[
{
"context": " id = '' + new Date().getTime()\n name = \"name-#{id}\"\n email = \"#{id}@domain\"\n ident = ",
"end": 3478,
"score": 0.6618633270263672,
"start": 3474,
"tag": "USERNAME",
"value": "name"
},
{
"context": "e\"\n repo.identify new Actor('ro... | test/repo.test.coffee | weemonger/gift | 0 | should = require 'should'
sinon = require 'sinon'
fs = require 'fs-extra'
fixtures = require './fixtures'
git = require '../src'
Actor = require '../src/actor'
Commit = require '../src/commit'
Tree = require '../src/tree'
Diff = require '../src/diff'
Tag = require '../src/tag'
Status = require '../src/status'
{Ref, Head} = require '../src/ref'
{exec} = require 'child_process'
describe "Repo", ->
describe "#add", ->
repo = null
git_dir = __dirname + "/fixtures/junk_add"
status = null
file = null
# given a fresh new repo
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, '0755', (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
done()
after (done) ->
fs.remove git_dir, done
describe "with only a file", ->
file = 'foo.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "with no file and all option", ->
file = 'bar.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add [], A:true, (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "#sync", ->
describe "when passed curried arguments", ->
repo = fixtures.branched
remote = branch = ""
before ->
sinon.stub repo, "git", (command, opts, args, callback) ->
if command is "pull"
remote = args[0]
branch = args[1]
callback? null
sinon.stub repo, "status", (callback) ->
callback? null, clean: no
after ->
repo.git.restore()
repo.status.restore()
it "passes through the correct parameters when nothing is omitted", (done) ->
repo.sync "github", "my-branch", ->
remote.should.eql "github"
branch.should.eql "my-branch"
done()
it "passes through the correct parameters when remote_name is omitted", (done) ->
repo.sync "my-branch", ->
remote.should.eql "origin"
branch.should.eql "my-branch"
done()
it "passes through the correct parameters when remote_name and branch are omitted", (done) ->
repo.sync ->
remote.should.eql "origin"
branch.should.eql "master"
done()
describe "#identify", ->
describe "when asked to set the identity's name and email", ->
repo = fixtures.branched
id = '' + new Date().getTime()
name = "name-#{id}"
email = "#{id}@domain"
ident = null
before (done) ->
actor = new Actor(name, email)
repo.identify actor, (err) ->
done err if err
repo.identity (err, _actor) ->
ident = _actor
done err
after (done) ->
exec "git checkout -- #{ repo.path }", done
it "has correctly set them", ->
ident.name.should.eql name
ident.email.should.eql email
describe "#commits", ->
describe "with a single commit", ->
repo = null
commit = null
git_dir = __dirname + "/fixtures/junk_commit"
# given a fresh new repo
before (done) ->
fs.remove git_dir, (err) ->
return done err if err?
fs.mkdir git_dir, '0755', (err) ->
return done err if err?
git.init git_dir, (err) ->
return done err if err?
repo = git(git_dir)
fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
repo.identify new Actor('root', 'root@domain.net'), (err) ->
return done err if err?
repo.add "#{git_dir}/foo.txt", (err) ->
return done err if err?
repo.commit 'message with spaces',
author: 'Someone <someone@somewhere.com>'
, (err) ->
return done err if err?
repo.commits (err, _commits) ->
commit = _commits[0]
done err
after (done) ->
fs.remove git_dir, done
it "has right message", (done) ->
commit.message.should.eql 'message with spaces'
commit.author.name.should.eql 'Someone'
commit.author.email.should.eql 'someone@somewhere.com'
done()
it "has a tree", (done) ->
commit.tree().should.be.an.instanceof Tree
commit.tree().contents (err, child) ->
return done err if err
child.length.should.eql 1
child[0].name.should.eql 'foo.txt'
done()
describe "with only a callback", ->
repo = fixtures.branched
commits = null
before (done) ->
repo.commits (err, _commits) ->
commits = _commits
done err
it "passes an Array", ->
commits.should.be.an.instanceof Array
it "is a list of commits", ->
commits[0].id.should.eql "913318e66e9beed3e89e9c402c1d6585ef3f7e6f"
commits[0].repo.should.eql repo
commits[0].author.name.should.eql "sentientwaffle"
commits[0].committer.name.should.eql "sentientwaffle"
commits[0].authored_date.should.be.an.instanceof Date
commits[0].committed_date.should.be.an.instanceof Date
commits[0].parents().should.be.an.instanceof Array
commits[0].message.should.eql "add a sub dir"
describe "specify a branch", ->
repo = fixtures.branched
commits = null
before (done) ->
repo.commits "something", (err, _commits) ->
commits = _commits
done err
# The first commit ...
it "is the latest commit", ->
commits[0].message.should.eql "2"
it "has a parent commit", ->
commits[0].parents().should.have.lengthOf 1
commits[0].parents()[0].id.should.eql commits[1].id
describe "specify a tag", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "tag-1", (err, _commits) ->
commits = _commits
done err
it "is the latest commit on the tag", ->
commits[0].message.should.containEql "commit 5"
describe "limit the number of commits", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "master", 2, (err, _commits) ->
commits = _commits
done err
it "returns 2 commits", ->
commits.should.have.lengthOf 2
describe "skip commits", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "master", 1, 2, (err, _commits) ->
commits = _commits
done err
it "returns 2 commits", ->
commits[0].message.should.containEql "commit 4"
describe "with or without gpg signature", ->
repo = fixtures.gpgsigned
commits = null
before (done) ->
repo.commits "master", (err, _commits) ->
commits = _commits
done err
it "has no gpgsig", ->
commits[0].gpgsig.should.not.be.ok
it "has gpgsig", ->
commits[1].gpgsig.should.be.ok
it "contains the correct signature", ->
commits[1].gpgsig.should.equal """
-----BEGIN#{" "}PGP#{" "}SIGNATURE-----
#{" "}Version:#{" "}GnuPG#{" "}v2.0.22#{" "}(GNU/Linux)
#{" "}
#{" "}iQEcBAABAgAGBQJTQw8qAAoJEL0/h9tqDFPiP3UH/RwxUS90+6DEkThcKMmV9H4K
#{" "}dr+D0H0z2ViMq3AHSmCydv5dWr3bupl2XyaLWWuRCxAJ78xuf98qVRIBfT/FKGeP
#{" "}fz+GtXkv3naCD12Ay6YiwfxSQhxFiJtRwP5rla2i7hlV3BLFPYCWTtL8OLF4CoRm
#{" "}7aF5EuDr1x7emEDyu1rf5E59ttSIySuIw0J1mTjrPCkC6lsowzTJS/vaCxZ3e7fN
#{" "}iZE6VEWWY/iOxd8foJH/VZ3cfNKjfi8+Fh8t7o9ztjYTQAOZUJTn2CHB7Wkyr0Ar
#{" "}HNM3v26gPFpb7UkHw0Cq2HWNV/Z7cbQc/BQ4HmrmuBPB6SWNOaBN751BbQKnPcA=
#{" "}=IusH
#{" "}-----END#{" "}PGP#{" "}SIGNATURE-----"""
describe "#tree", ->
repo = fixtures.branched
describe "master", ->
it "is a Tree", ->
repo.tree().should.be.an.instanceof Tree
it "checks out branch:master", (done) ->
repo.tree().blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla"
data.should.not.containEql "Bla2"
done err
describe "specific branch", ->
it "is a Tree", ->
repo.tree("something").should.be.an.instanceof Tree
it "checks out branch:something", (done) ->
repo.tree("something").blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla2"
done err
describe "#diff", ->
repo = fixtures.branched
describe "between 2 branches", ->
diffs = null
before (done) ->
repo.diff "something", "master", (err, _diffs) ->
diffs = _diffs
done err
it "is passes an Array of Diffs", ->
diffs.should.be.an.instanceof Array
diffs[0].should.be.an.instanceof Diff
# The first diff...
it "modifies the README.md file", ->
diffs[0].a_path.should.eql "README.md"
diffs[0].b_path.should.eql "README.md"
# The second diff...
it "creates some/hi.txt", ->
diffs[1].new_file.should.be.true
diffs[1].b_path.should.eql "some/hi.txt"
describe "#remotes", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remotes (err, _remotes) ->
remotes = _remotes
done err
it "is an Array of Refs", ->
remotes.should.be.an.instanceof Array
remotes[0].should.be.an.instanceof Ref
it "contains the correct Refs", ->
remotes[0].commit.id.should.eql "bdd3996d38d885e18e5c5960df1c2c06e34d673f"
remotes[0].name.should.eql "origin/HEAD"
remotes[1].commit.id.should.eql "bdd3996d38d885e18e5c5960df1c2c06e34d673f"
remotes[1].name.should.eql "origin/master"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remotes (err, remotes) ->
remotes.should.eql []
describe "#remote_list", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remote_list (err, _remotes) ->
remotes = _remotes
done err
it "is a list of remotes", ->
remotes.should.have.lengthOf 1
remotes[0].should.eql "origin"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remote_list (err, remotes) ->
remotes.should.eql []
describe "#tags", ->
describe "a repo with tags", ->
repo = fixtures.tagged
tags = null
before (done) ->
repo.tags (err, _tags) ->
tags = _tags
done err
it "is an Array of Tags", ->
tags.should.be.an.instanceof Array
tags[0].should.be.an.instanceof Tag
it "is the correct tag", ->
tags[0].name.should.eql "tag-1"
describe "a repo without tags", ->
repo = fixtures.branched
it "is an empty array", (done) ->
repo.tags (err, tags) ->
tags.should.eql []
done err
describe "#create_tag", ->
repo = null
git_dir = __dirname + "/fixtures/junk_create_tag"
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, 0o755, (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git(git_dir)
repo.identify new Actor('name', 'em@il'), ->
fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
repo.add "#{git_dir}/foo.txt", (err) ->
return done err if err
repo.commit "initial commit", {all: true}, done
after (done) ->
fs.remove git_dir, done
it "creates a tag", (done) ->
repo.create_tag "foo", done
describe "#delete_tag", ->
describe "deleting a tag that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_tag "nonexistant-tag", (err) ->
should.exist err
done()
describe "#branches", ->
repo = fixtures.branched
branches = null
before (done) ->
repo.branches (err, _branches) ->
branches = _branches
done err
it "is an Array of Heads", ->
branches.should.be.an.instanceof Array
branches[0].should.be.an.instanceof Head
it "has the correct branches", ->
branches[0].name.should.eql "master"
branches[1].name.should.eql "something"
describe "#branch", ->
describe "when a branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch "something", (err, b) ->
branch = b
done err
it "is a Head", ->
branch.should.be.an.instanceof Head
it "has the correct name", ->
branch.name.should.eql "something"
describe "when no branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch (err, b) ->
branch = b
done err
it "has the correct name", ->
branch.name.should.eql "master"
describe "an invalid branch", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.branch "nonexistant-branch", (err, b) ->
should.exist err
should.not.exist b
done()
describe "current branch is (no branch)", ->
repo = fixtures.noBranch
it "passes an error", (done) ->
repo.branch (err, b) ->
should.exist err
should.not.exist b
done()
describe "#delete_branch", ->
describe "a branch that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_branch "nonexistant-branch", (err) ->
should.exist err
done()
describe "#clean", ->
repo = null
git_dir = __dirname + "/fixtures/junk_clean"
status = null
file = "bla.txt"
dir = 'blah'
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/clean", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
fs.mkdir "#{git_dir}/#{dir}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "clean with no args shouldn't do anything", ->
beforeEach (done) ->
repo.clean ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the untracked file alone", ->
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
fs.existsSync("#{git_dir}/#{dir}").should.be.true
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "clean --force", ->
beforeEach (done) ->
repo.clean {force: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "should remove the file but not the directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.true
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
# git does not clean untracked files in untracked directories
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
describe "clean -df", ->
beforeEach (done) ->
repo.clean {force: true, d: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file and directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.false
fs.existsSync("#{git_dir}/iamuntracked").should.be.false
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.false
describe "#reset", ->
repo = null
git_dir = __dirname + "/fixtures/junk_reset"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "reset without specific treeish (defaults to HEAD)", ->
describe "reset (--mixed)", ->
beforeEach (done) ->
repo.reset ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index, leaves it in working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "reset --soft", ->
beforeEach (done) ->
repo.reset {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the added file in the index", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "reset --hard", ->
beforeEach (done) ->
repo.reset {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index and working tree", ->
status.files.should.not.have.a.property file
describe "reset to specific treeish", ->
describe "reset (--mixed) HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.false
status.files['rawr.txt'].tracked.should.be.false
status.files['rawr.txt'].should.not.have.a.property 'type'
describe "reset --soft HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the index and working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'AM'
describe "reset --hard HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, all changes get discarded completely", ->
status.files.should.not.have.a.property file
status.files.should.not.have.a.property 'rawr.txt'
describe "#checkoutFile", ->
repo = null
git_dir = __dirname + "/fixtures/junk_checkoutFile"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "passing no explicit files", ->
beforeEach (done) ->
repo.checkoutFile ->
repo.status (err, _status) ->
status = _status
done err
it "discards changes in the working tree for all files", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
describe "passing an explicit file", ->
beforeEach (done) ->
repo.checkoutFile 'rawr.txt', ->
repo.status (err, _status) ->
status = _status
done err
it "discard changes to the specified file", ->
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
| 142424 | should = require 'should'
sinon = require 'sinon'
fs = require 'fs-extra'
fixtures = require './fixtures'
git = require '../src'
Actor = require '../src/actor'
Commit = require '../src/commit'
Tree = require '../src/tree'
Diff = require '../src/diff'
Tag = require '../src/tag'
Status = require '../src/status'
{Ref, Head} = require '../src/ref'
{exec} = require 'child_process'
describe "Repo", ->
describe "#add", ->
repo = null
git_dir = __dirname + "/fixtures/junk_add"
status = null
file = null
# given a fresh new repo
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, '0755', (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
done()
after (done) ->
fs.remove git_dir, done
describe "with only a file", ->
file = 'foo.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "with no file and all option", ->
file = 'bar.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add [], A:true, (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
  describe "#sync", ->
    describe "when passed curried arguments", ->
      repo = fixtures.branched
      remote = branch = ""
      # Stub `repo.git` so no real `git pull` runs; we only capture the
      # remote/branch arguments that #sync forwards. `repo.status` is stubbed
      # to report a clean tree so #sync skips any stash handling.
      before ->
        sinon.stub repo, "git", (command, opts, args, callback) ->
          if command is "pull"
            remote = args[0]
            branch = args[1]
          callback? null
        sinon.stub repo, "status", (callback) ->
          callback? null, clean: no
      after ->
        repo.git.restore()
        repo.status.restore()
      it "passes through the correct parameters when nothing is omitted", (done) ->
        repo.sync "github", "my-branch", ->
          remote.should.eql "github"
          branch.should.eql "my-branch"
          done()
      # an omitted remote_name should default to "origin"
      it "passes through the correct parameters when remote_name is omitted", (done) ->
        repo.sync "my-branch", ->
          remote.should.eql "origin"
          branch.should.eql "my-branch"
          done()
      # omitting both should default to "origin"/"master"
      it "passes through the correct parameters when remote_name and branch are omitted", (done) ->
        repo.sync ->
          remote.should.eql "origin"
          branch.should.eql "master"
          done()
describe "#identify", ->
describe "when asked to set the identity's name and email", ->
repo = fixtures.branched
id = '' + new Date().getTime()
name = "name-#{id}"
email = "#{id}@domain"
ident = null
before (done) ->
actor = new Actor(name, email)
repo.identify actor, (err) ->
done err if err
repo.identity (err, _actor) ->
ident = _actor
done err
after (done) ->
exec "git checkout -- #{ repo.path }", done
it "has correctly set them", ->
ident.name.should.eql name
ident.email.should.eql email
  describe "#commits", ->
    describe "with a single commit", ->
      repo = null
      commit = null
      git_dir = __dirname + "/fixtures/junk_commit"
      # given a fresh new repo
      before (done) ->
        fs.remove git_dir, (err) ->
          return done err if err?
          fs.mkdir git_dir, '0755', (err) ->
            return done err if err?
            git.init git_dir, (err) ->
              return done err if err?
              repo = git(git_dir)
              fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
              repo.identify new Actor('root', '<EMAIL>'), (err) ->
                return done err if err?
                repo.add "#{git_dir}/foo.txt", (err) ->
                  return done err if err?
                  # commit with an explicit author override so the author
                  # fields can be asserted independently of the identity above
                  repo.commit 'message with spaces',
                    author: '<NAME> <<EMAIL>>'
                  , (err) ->
                    return done err if err?
                    repo.commits (err, _commits) ->
                      commit = _commits[0]
                      done err
      after (done) ->
        fs.remove git_dir, done
      it "has right message", (done) ->
        commit.message.should.eql 'message with spaces'
        commit.author.name.should.eql '<NAME>'
        commit.author.email.should.eql '<EMAIL>'
        done()
      it "has a tree", (done) ->
        commit.tree().should.be.an.instanceof Tree
        commit.tree().contents (err, child) ->
          return done err if err
          child.length.should.eql 1
          child[0].name.should.eql 'foo.txt'
          done()
    describe "with only a callback", ->
      repo = fixtures.branched
      commits = null
      before (done) ->
        repo.commits (err, _commits) ->
          commits = _commits
          done err
      it "passes an Array", ->
        commits.should.be.an.instanceof Array
      it "is a list of commits", ->
        commits[0].id.should.eql "913318e66e9beed3e89e9c402c1d658<PASSWORD>"
        commits[0].repo.should.eql repo
        commits[0].author.name.should.eql "sentientwaffle"
        commits[0].committer.name.should.eql "sentientwaffle"
        commits[0].authored_date.should.be.an.instanceof Date
        commits[0].committed_date.should.be.an.instanceof Date
        commits[0].parents().should.be.an.instanceof Array
        commits[0].message.should.eql "add a sub dir"
    describe "specify a branch", ->
      repo = fixtures.branched
      commits = null
      before (done) ->
        repo.commits "something", (err, _commits) ->
          commits = _commits
          done err
      # The first commit ...
      it "is the latest commit", ->
        commits[0].message.should.eql "2"
      it "has a parent commit", ->
        commits[0].parents().should.have.lengthOf 1
        commits[0].parents()[0].id.should.eql commits[1].id
    describe "specify a tag", ->
      repo = fixtures.tagged
      commits = null
      before (done) ->
        repo.commits "tag-1", (err, _commits) ->
          commits = _commits
          done err
      it "is the latest commit on the tag", ->
        commits[0].message.should.containEql "commit 5"
    describe "limit the number of commits", ->
      repo = fixtures.tagged
      commits = null
      before (done) ->
        # second positional argument is the max number of commits to return
        repo.commits "master", 2, (err, _commits) ->
          commits = _commits
          done err
      it "returns 2 commits", ->
        commits.should.have.lengthOf 2
    describe "skip commits", ->
      repo = fixtures.tagged
      commits = null
      before (done) ->
        # limit 1, skip 2: expect only the third-newest commit
        repo.commits "master", 1, 2, (err, _commits) ->
          commits = _commits
          done err
      it "returns 2 commits", ->
        commits[0].message.should.containEql "commit 4"
    describe "with or without gpg signature", ->
      repo = fixtures.gpgsigned
      commits = null
      before (done) ->
        repo.commits "master", (err, _commits) ->
          commits = _commits
          done err
      it "has no gpgsig", ->
        commits[0].gpgsig.should.not.be.ok
      it "has gpgsig", ->
        commits[1].gpgsig.should.be.ok
      # NOTE(review): the `<KEY>`/`<PASSWORD>` tokens in this file look like
      # redaction placeholders for the original fixture data — confirm against
      # the upstream fixture before relying on these exact literals.
      it "contains the correct signature", ->
        commits[1].gpgsig.should.equal """
          -----BEGIN#{" "}PGP#{" "}SIGNATURE-----
          #{" "}Version:#{" "}GnuPG#{" "}v2.0.22#{" "}(GNU/Linux)
          #{" "}
          #{" "}<KEY>c<KEY>
          #{" "}<KEY>
          #{" "}<KEY>
          #{" "}<KEY>
          #{" "}i<KEY>/i<KEY>
          #{" "}HNM3v26gPFpb7UkHw0Cq2HWNV/Z7cbQc/BQ4HmrmuBPB6SWNOaBN751BbQ<KEY>
          #{" "}=IusH
          #{" "}-----END#{" "}PGP#{" "}SIGNATURE-----"""
describe "#tree", ->
repo = fixtures.branched
describe "master", ->
it "is a Tree", ->
repo.tree().should.be.an.instanceof Tree
it "checks out branch:master", (done) ->
repo.tree().blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla"
data.should.not.containEql "Bla2"
done err
describe "specific branch", ->
it "is a Tree", ->
repo.tree("something").should.be.an.instanceof Tree
it "checks out branch:something", (done) ->
repo.tree("something").blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla2"
done err
describe "#diff", ->
repo = fixtures.branched
describe "between 2 branches", ->
diffs = null
before (done) ->
repo.diff "something", "master", (err, _diffs) ->
diffs = _diffs
done err
it "is passes an Array of Diffs", ->
diffs.should.be.an.instanceof Array
diffs[0].should.be.an.instanceof Diff
# The first diff...
it "modifies the README.md file", ->
diffs[0].a_path.should.eql "README.md"
diffs[0].b_path.should.eql "README.md"
# The second diff...
it "creates some/hi.txt", ->
diffs[1].new_file.should.be.true
diffs[1].b_path.should.eql "some/hi.txt"
describe "#remotes", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remotes (err, _remotes) ->
remotes = _remotes
done err
it "is an Array of Refs", ->
remotes.should.be.an.instanceof Array
remotes[0].should.be.an.instanceof Ref
it "contains the correct Refs", ->
remotes[0].commit.id.should.eql "bdd3996d38d885e18e5c5960df1c2c06e34d673f"
remotes[0].name.should.eql "origin/HEAD"
remotes[1].commit.id.should.eql "<PASSWORD>c<PASSWORD>"
remotes[1].name.should.eql "origin/master"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remotes (err, remotes) ->
remotes.should.eql []
describe "#remote_list", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remote_list (err, _remotes) ->
remotes = _remotes
done err
it "is a list of remotes", ->
remotes.should.have.lengthOf 1
remotes[0].should.eql "origin"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remote_list (err, remotes) ->
remotes.should.eql []
describe "#tags", ->
describe "a repo with tags", ->
repo = fixtures.tagged
tags = null
before (done) ->
repo.tags (err, _tags) ->
tags = _tags
done err
it "is an Array of Tags", ->
tags.should.be.an.instanceof Array
tags[0].should.be.an.instanceof Tag
it "is the correct tag", ->
tags[0].name.should.eql "tag-1"
describe "a repo without tags", ->
repo = fixtures.branched
it "is an empty array", (done) ->
repo.tags (err, tags) ->
tags.should.eql []
done err
describe "#create_tag", ->
repo = null
git_dir = __dirname + "/fixtures/junk_create_tag"
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, 0o755, (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git(git_dir)
repo.identify new Actor('name', '<NAME>@il'), ->
fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
repo.add "#{git_dir}/foo.txt", (err) ->
return done err if err
repo.commit "initial commit", {all: true}, done
after (done) ->
fs.remove git_dir, done
it "creates a tag", (done) ->
repo.create_tag "foo", done
describe "#delete_tag", ->
describe "deleting a tag that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_tag "nonexistant-tag", (err) ->
should.exist err
done()
describe "#branches", ->
repo = fixtures.branched
branches = null
before (done) ->
repo.branches (err, _branches) ->
branches = _branches
done err
it "is an Array of Heads", ->
branches.should.be.an.instanceof Array
branches[0].should.be.an.instanceof Head
it "has the correct branches", ->
branches[0].name.should.eql "master"
branches[1].name.should.eql "something"
describe "#branch", ->
describe "when a branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch "something", (err, b) ->
branch = b
done err
it "is a Head", ->
branch.should.be.an.instanceof Head
it "has the correct name", ->
branch.name.should.eql "something"
describe "when no branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch (err, b) ->
branch = b
done err
it "has the correct name", ->
branch.name.should.eql "master"
describe "an invalid branch", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.branch "nonexistant-branch", (err, b) ->
should.exist err
should.not.exist b
done()
describe "current branch is (no branch)", ->
repo = fixtures.noBranch
it "passes an error", (done) ->
repo.branch (err, b) ->
should.exist err
should.not.exist b
done()
describe "#delete_branch", ->
describe "a branch that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_branch "nonexistant-branch", (err) ->
should.exist err
done()
describe "#clean", ->
repo = null
git_dir = __dirname + "/fixtures/junk_clean"
status = null
file = "bla.txt"
dir = 'blah'
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/clean", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
fs.mkdir "#{git_dir}/#{dir}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "clean with no args shouldn't do anything", ->
beforeEach (done) ->
repo.clean ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the untracked file alone", ->
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
fs.existsSync("#{git_dir}/#{dir}").should.be.true
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "clean --force", ->
beforeEach (done) ->
repo.clean {force: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "should remove the file but not the directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.true
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
# git does not clean untracked files in untracked directories
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
describe "clean -df", ->
beforeEach (done) ->
repo.clean {force: true, d: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file and directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.false
fs.existsSync("#{git_dir}/iamuntracked").should.be.false
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.false
describe "#reset", ->
repo = null
git_dir = __dirname + "/fixtures/junk_reset"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "reset without specific treeish (defaults to HEAD)", ->
describe "reset (--mixed)", ->
beforeEach (done) ->
repo.reset ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index, leaves it in working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "reset --soft", ->
beforeEach (done) ->
repo.reset {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the added file in the index", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "reset --hard", ->
beforeEach (done) ->
repo.reset {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index and working tree", ->
status.files.should.not.have.a.property file
describe "reset to specific treeish", ->
describe "reset (--mixed) HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.false
status.files['rawr.txt'].tracked.should.be.false
status.files['rawr.txt'].should.not.have.a.property 'type'
describe "reset --soft HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the index and working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'AM'
describe "reset --hard HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, all changes get discarded completely", ->
status.files.should.not.have.a.property file
status.files.should.not.have.a.property 'rawr.txt'
describe "#checkoutFile", ->
repo = null
git_dir = __dirname + "/fixtures/junk_checkoutFile"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "passing no explicit files", ->
beforeEach (done) ->
repo.checkoutFile ->
repo.status (err, _status) ->
status = _status
done err
it "discards changes in the working tree for all files", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
describe "passing an explicit file", ->
beforeEach (done) ->
repo.checkoutFile 'rawr.txt', ->
repo.status (err, _status) ->
status = _status
done err
it "discard changes to the specified file", ->
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
should = require 'should'
sinon = require 'sinon'
fs = require 'fs-extra'
fixtures = require './fixtures'
git = require '../src'
Actor = require '../src/actor'
Commit = require '../src/commit'
Tree = require '../src/tree'
Diff = require '../src/diff'
Tag = require '../src/tag'
Status = require '../src/status'
{Ref, Head} = require '../src/ref'
{exec} = require 'child_process'
describe "Repo", ->
describe "#add", ->
repo = null
git_dir = __dirname + "/fixtures/junk_add"
status = null
file = null
# given a fresh new repo
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, '0755', (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
done()
after (done) ->
fs.remove git_dir, done
describe "with only a file", ->
file = 'foo.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "with no file and all option", ->
file = 'bar.txt'
# given a new file
before (done) ->
fs.writeFile "#{git_dir}/#{file}", "cheese", (err) ->
return done err if err?
repo.add [], A:true, (err) ->
return done err if err?
repo.status (err, _status) ->
status = _status
done err
it "was added", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "#sync", ->
describe "when passed curried arguments", ->
repo = fixtures.branched
remote = branch = ""
before ->
sinon.stub repo, "git", (command, opts, args, callback) ->
if command is "pull"
remote = args[0]
branch = args[1]
callback? null
sinon.stub repo, "status", (callback) ->
callback? null, clean: no
after ->
repo.git.restore()
repo.status.restore()
it "passes through the correct parameters when nothing is omitted", (done) ->
repo.sync "github", "my-branch", ->
remote.should.eql "github"
branch.should.eql "my-branch"
done()
it "passes through the correct parameters when remote_name is omitted", (done) ->
repo.sync "my-branch", ->
remote.should.eql "origin"
branch.should.eql "my-branch"
done()
it "passes through the correct parameters when remote_name and branch are omitted", (done) ->
repo.sync ->
remote.should.eql "origin"
branch.should.eql "master"
done()
describe "#identify", ->
describe "when asked to set the identity's name and email", ->
repo = fixtures.branched
id = '' + new Date().getTime()
name = "name-#{id}"
email = "#{id}@domain"
ident = null
before (done) ->
actor = new Actor(name, email)
repo.identify actor, (err) ->
done err if err
repo.identity (err, _actor) ->
ident = _actor
done err
after (done) ->
exec "git checkout -- #{ repo.path }", done
it "has correctly set them", ->
ident.name.should.eql name
ident.email.should.eql email
describe "#commits", ->
describe "with a single commit", ->
repo = null
commit = null
git_dir = __dirname + "/fixtures/junk_commit"
# given a fresh new repo
before (done) ->
fs.remove git_dir, (err) ->
return done err if err?
fs.mkdir git_dir, '0755', (err) ->
return done err if err?
git.init git_dir, (err) ->
return done err if err?
repo = git(git_dir)
fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
repo.identify new Actor('root', 'PI:EMAIL:<EMAIL>END_PI'), (err) ->
return done err if err?
repo.add "#{git_dir}/foo.txt", (err) ->
return done err if err?
repo.commit 'message with spaces',
author: 'PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>'
, (err) ->
return done err if err?
repo.commits (err, _commits) ->
commit = _commits[0]
done err
after (done) ->
fs.remove git_dir, done
it "has right message", (done) ->
commit.message.should.eql 'message with spaces'
commit.author.name.should.eql 'PI:NAME:<NAME>END_PI'
commit.author.email.should.eql 'PI:EMAIL:<EMAIL>END_PI'
done()
it "has a tree", (done) ->
commit.tree().should.be.an.instanceof Tree
commit.tree().contents (err, child) ->
return done err if err
child.length.should.eql 1
child[0].name.should.eql 'foo.txt'
done()
describe "with only a callback", ->
repo = fixtures.branched
commits = null
before (done) ->
repo.commits (err, _commits) ->
commits = _commits
done err
it "passes an Array", ->
commits.should.be.an.instanceof Array
it "is a list of commits", ->
commits[0].id.should.eql "913318e66e9beed3e89e9c402c1d658PI:PASSWORD:<PASSWORD>END_PI"
commits[0].repo.should.eql repo
commits[0].author.name.should.eql "sentientwaffle"
commits[0].committer.name.should.eql "sentientwaffle"
commits[0].authored_date.should.be.an.instanceof Date
commits[0].committed_date.should.be.an.instanceof Date
commits[0].parents().should.be.an.instanceof Array
commits[0].message.should.eql "add a sub dir"
describe "specify a branch", ->
repo = fixtures.branched
commits = null
before (done) ->
repo.commits "something", (err, _commits) ->
commits = _commits
done err
# The first commit ...
it "is the latest commit", ->
commits[0].message.should.eql "2"
it "has a parent commit", ->
commits[0].parents().should.have.lengthOf 1
commits[0].parents()[0].id.should.eql commits[1].id
describe "specify a tag", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "tag-1", (err, _commits) ->
commits = _commits
done err
it "is the latest commit on the tag", ->
commits[0].message.should.containEql "commit 5"
describe "limit the number of commits", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "master", 2, (err, _commits) ->
commits = _commits
done err
it "returns 2 commits", ->
commits.should.have.lengthOf 2
describe "skip commits", ->
repo = fixtures.tagged
commits = null
before (done) ->
repo.commits "master", 1, 2, (err, _commits) ->
commits = _commits
done err
it "returns 2 commits", ->
commits[0].message.should.containEql "commit 4"
describe "with or without gpg signature", ->
repo = fixtures.gpgsigned
commits = null
before (done) ->
repo.commits "master", (err, _commits) ->
commits = _commits
done err
it "has no gpgsig", ->
commits[0].gpgsig.should.not.be.ok
it "has gpgsig", ->
commits[1].gpgsig.should.be.ok
it "contains the correct signature", ->
commits[1].gpgsig.should.equal """
-----BEGIN#{" "}PGP#{" "}SIGNATURE-----
#{" "}Version:#{" "}GnuPG#{" "}v2.0.22#{" "}(GNU/Linux)
#{" "}
#{" "}PI:KEY:<KEY>END_PIcPI:KEY:<KEY>END_PI
#{" "}PI:KEY:<KEY>END_PI
#{" "}PI:KEY:<KEY>END_PI
#{" "}PI:KEY:<KEY>END_PI
#{" "}iPI:KEY:<KEY>END_PI/iPI:KEY:<KEY>END_PI
#{" "}HNM3v26gPFpb7UkHw0Cq2HWNV/Z7cbQc/BQ4HmrmuBPB6SWNOaBN751BbQPI:KEY:<KEY>END_PI
#{" "}=IusH
#{" "}-----END#{" "}PGP#{" "}SIGNATURE-----"""
describe "#tree", ->
repo = fixtures.branched
describe "master", ->
it "is a Tree", ->
repo.tree().should.be.an.instanceof Tree
it "checks out branch:master", (done) ->
repo.tree().blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla"
data.should.not.containEql "Bla2"
done err
describe "specific branch", ->
it "is a Tree", ->
repo.tree("something").should.be.an.instanceof Tree
it "checks out branch:something", (done) ->
repo.tree("something").blobs (err, blobs) ->
blobs[0].data (err, data) ->
data.should.containEql "Bla2"
done err
describe "#diff", ->
repo = fixtures.branched
describe "between 2 branches", ->
diffs = null
before (done) ->
repo.diff "something", "master", (err, _diffs) ->
diffs = _diffs
done err
it "is passes an Array of Diffs", ->
diffs.should.be.an.instanceof Array
diffs[0].should.be.an.instanceof Diff
# The first diff...
it "modifies the README.md file", ->
diffs[0].a_path.should.eql "README.md"
diffs[0].b_path.should.eql "README.md"
# The second diff...
it "creates some/hi.txt", ->
diffs[1].new_file.should.be.true
diffs[1].b_path.should.eql "some/hi.txt"
describe "#remotes", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remotes (err, _remotes) ->
remotes = _remotes
done err
it "is an Array of Refs", ->
remotes.should.be.an.instanceof Array
remotes[0].should.be.an.instanceof Ref
it "contains the correct Refs", ->
remotes[0].commit.id.should.eql "bdd3996d38d885e18e5c5960df1c2c06e34d673f"
remotes[0].name.should.eql "origin/HEAD"
remotes[1].commit.id.should.eql "PI:PASSWORD:<PASSWORD>END_PIcPI:PASSWORD:<PASSWORD>END_PI"
remotes[1].name.should.eql "origin/master"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remotes (err, remotes) ->
remotes.should.eql []
describe "#remote_list", ->
describe "in a repository with remotes", ->
repo = fixtures.remotes
remotes = null
before (done) ->
repo.remote_list (err, _remotes) ->
remotes = _remotes
done err
it "is a list of remotes", ->
remotes.should.have.lengthOf 1
remotes[0].should.eql "origin"
describe "when there are no remotes", ->
repo = fixtures.branched
it "is an empty Array", ->
repo.remote_list (err, remotes) ->
remotes.should.eql []
describe "#tags", ->
describe "a repo with tags", ->
repo = fixtures.tagged
tags = null
before (done) ->
repo.tags (err, _tags) ->
tags = _tags
done err
it "is an Array of Tags", ->
tags.should.be.an.instanceof Array
tags[0].should.be.an.instanceof Tag
it "is the correct tag", ->
tags[0].name.should.eql "tag-1"
describe "a repo without tags", ->
repo = fixtures.branched
it "is an empty array", (done) ->
repo.tags (err, tags) ->
tags.should.eql []
done err
describe "#create_tag", ->
repo = null
git_dir = __dirname + "/fixtures/junk_create_tag"
before (done) ->
fs.remove git_dir, (err) ->
return done err if err
fs.mkdir git_dir, 0o755, (err) ->
return done err if err
git.init git_dir, (err) ->
return done err if err
repo = git(git_dir)
repo.identify new Actor('name', 'PI:NAME:<NAME>END_PI@il'), ->
fs.writeFileSync "#{git_dir}/foo.txt", "cheese"
repo.add "#{git_dir}/foo.txt", (err) ->
return done err if err
repo.commit "initial commit", {all: true}, done
after (done) ->
fs.remove git_dir, done
it "creates a tag", (done) ->
repo.create_tag "foo", done
describe "#delete_tag", ->
describe "deleting a tag that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_tag "nonexistant-tag", (err) ->
should.exist err
done()
describe "#branches", ->
repo = fixtures.branched
branches = null
before (done) ->
repo.branches (err, _branches) ->
branches = _branches
done err
it "is an Array of Heads", ->
branches.should.be.an.instanceof Array
branches[0].should.be.an.instanceof Head
it "has the correct branches", ->
branches[0].name.should.eql "master"
branches[1].name.should.eql "something"
describe "#branch", ->
describe "when a branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch "something", (err, b) ->
branch = b
done err
it "is a Head", ->
branch.should.be.an.instanceof Head
it "has the correct name", ->
branch.name.should.eql "something"
describe "when no branch name is given", ->
repo = fixtures.branched
branch = null
before (done) ->
repo.branch (err, b) ->
branch = b
done err
it "has the correct name", ->
branch.name.should.eql "master"
describe "an invalid branch", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.branch "nonexistant-branch", (err, b) ->
should.exist err
should.not.exist b
done()
describe "current branch is (no branch)", ->
repo = fixtures.noBranch
it "passes an error", (done) ->
repo.branch (err, b) ->
should.exist err
should.not.exist b
done()
describe "#delete_branch", ->
describe "a branch that does not exist", ->
repo = fixtures.branched
it "passes an error", (done) ->
repo.delete_branch "nonexistant-branch", (err) ->
should.exist err
done()
describe "#clean", ->
repo = null
git_dir = __dirname + "/fixtures/junk_clean"
status = null
file = "bla.txt"
dir = 'blah'
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/clean", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
fs.mkdir "#{git_dir}/#{dir}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "clean with no args shouldn't do anything", ->
beforeEach (done) ->
repo.clean ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the untracked file alone", ->
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
fs.existsSync("#{git_dir}/#{dir}").should.be.true
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "clean --force", ->
beforeEach (done) ->
repo.clean {force: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "should remove the file but not the directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.true
fs.existsSync("#{git_dir}/iamuntracked").should.be.true
# git does not clean untracked files in untracked directories
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.true
describe "clean -df", ->
beforeEach (done) ->
repo.clean {force: true, d: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file and directory", ->
status.files.should.not.have.a.property file
fs.existsSync("#{git_dir}/#{dir}").should.be.false
fs.existsSync("#{git_dir}/iamuntracked").should.be.false
fs.existsSync("#{git_dir}/iamuntracked/untracked.txt").should.be.false
describe "#reset", ->
repo = null
git_dir = __dirname + "/fixtures/junk_reset"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
return done err if err
git.init git_dir, (err) ->
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "reset without specific treeish (defaults to HEAD)", ->
describe "reset (--mixed)", ->
beforeEach (done) ->
repo.reset ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index, leaves it in working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
describe "reset --soft", ->
beforeEach (done) ->
repo.reset {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "leaves the added file in the index", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
describe "reset --hard", ->
beforeEach (done) ->
repo.reset {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "removes the file from index and working tree", ->
status.files.should.not.have.a.property file
describe "reset to specific treeish", ->
describe "reset (--mixed) HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.false
status.files[file].tracked.should.be.false
status.files[file].should.not.have.a.property 'type'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.false
status.files['rawr.txt'].tracked.should.be.false
status.files['rawr.txt'].should.not.have.a.property 'type'
describe "reset --soft HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {soft: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, changes stay in the index and working tree", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'AM'
describe "reset --hard HEAD~1", ->
beforeEach (done) ->
repo.reset 'HEAD~1', {hard: true}, ->
repo.status (err, _status) ->
status = _status
done err
it "resets to HEAD~1, all changes get discarded completely", ->
status.files.should.not.have.a.property file
status.files.should.not.have.a.property 'rawr.txt'
describe "#checkoutFile", ->
repo = null
git_dir = __dirname + "/fixtures/junk_checkoutFile"
status = null
file = "bla.txt"
# given a fresh new repo
beforeEach (done) ->
status = null
fs.remove git_dir, (err) ->
return done err if err
fs.copy "#{__dirname}/fixtures/reset", "#{git_dir}", (err) ->
return done err if err
fs.rename "#{git_dir}/git.git", "#{git_dir}/.git", (err) ->
git.init git_dir, (err) ->
return done err if err
repo = git git_dir
fs.writeFile "#{git_dir}/#{file}", "hello", (err) ->
return done err if err?
repo.add "#{git_dir}/#{file}", (err) ->
done err
after (done) ->
fs.remove git_dir, (err) ->
done err
describe "passing no explicit files", ->
beforeEach (done) ->
repo.checkoutFile ->
repo.status (err, _status) ->
status = _status
done err
it "discards changes in the working tree for all files", ->
status.files.should.have.a.property file
status.files[file].staged.should.be.true
status.files[file].tracked.should.be.true
status.files[file].type.should.eql 'A'
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
describe "passing an explicit file", ->
beforeEach (done) ->
repo.checkoutFile 'rawr.txt', ->
repo.status (err, _status) ->
status = _status
done err
it "discard changes to the specified file", ->
status.files.should.have.a.property 'rawr.txt'
status.files['rawr.txt'].staged.should.be.true
status.files['rawr.txt'].tracked.should.be.true
status.files['rawr.txt'].type.should.eql 'M'
|
[
{
"context": " regEx: SimpleSchema.RegEx.Email\n label: 'Votre courriel'\n subject:\n type: String\n label:",
"end": 259,
"score": 0.9990129470825195,
"start": 245,
"tag": "NAME",
"value": "Votre courriel"
}
] | contact-form.coffee | JustinRvt/Meteor-French-Contact-Form | 0 | global.Schema = global.Schema or {}
Schema.myFrenchContactForm = new SimpleSchema(
name:
type: String
label: 'Votre nom'
max: 64
email:
type: String
regEx: SimpleSchema.RegEx.Email
label: 'Votre courriel'
subject:
type: String
label: 'Sujet'
max: 256
message:
type: String
label: 'Message'
max: 1024)
| 75968 | global.Schema = global.Schema or {}
Schema.myFrenchContactForm = new SimpleSchema(
name:
type: String
label: 'Votre nom'
max: 64
email:
type: String
regEx: SimpleSchema.RegEx.Email
label: '<NAME>'
subject:
type: String
label: 'Sujet'
max: 256
message:
type: String
label: 'Message'
max: 1024)
| true | global.Schema = global.Schema or {}
Schema.myFrenchContactForm = new SimpleSchema(
name:
type: String
label: 'Votre nom'
max: 64
email:
type: String
regEx: SimpleSchema.RegEx.Email
label: 'PI:NAME:<NAME>END_PI'
subject:
type: String
label: 'Sujet'
max: 256
message:
type: String
label: 'Message'
max: 1024)
|
[
{
"context": "'name': 'Test Ruby'\n'type': 'tree-sitter'\n'scopeName': 'test.rb'\n'pa",
"end": 18,
"score": 0.9265958070755005,
"start": 9,
"tag": "NAME",
"value": "Test Ruby"
}
] | spec/fixtures/packages/package-with-rb-filetype/grammars/rb.cson | davidbertsch/atom | 62,188 | 'name': 'Test Ruby'
'type': 'tree-sitter'
'scopeName': 'test.rb'
'parser': 'tree-sitter-ruby'
'firstLineRegex': '^\\#!.*(?:\\s|\\/)(?:testruby)(?:$|\\s)'
'fileTypes': [
'rb'
]
| 70846 | 'name': '<NAME>'
'type': 'tree-sitter'
'scopeName': 'test.rb'
'parser': 'tree-sitter-ruby'
'firstLineRegex': '^\\#!.*(?:\\s|\\/)(?:testruby)(?:$|\\s)'
'fileTypes': [
'rb'
]
| true | 'name': 'PI:NAME:<NAME>END_PI'
'type': 'tree-sitter'
'scopeName': 'test.rb'
'parser': 'tree-sitter-ruby'
'firstLineRegex': '^\\#!.*(?:\\s|\\/)(?:testruby)(?:$|\\s)'
'fileTypes': [
'rb'
]
|
[
{
"context": "###\n Bookmarklet IDE\n (c) 2015 Daniel Davison & Contributors\n\n Bookmarklet IDE for Chrome\n###\n",
"end": 47,
"score": 0.9998506307601929,
"start": 33,
"tag": "NAME",
"value": "Daniel Davison"
}
] | src/coffee/options.coffee | ddavison/chrome-bookmarklet-ide | 15 | ###
Bookmarklet IDE
(c) 2015 Daniel Davison & Contributors
Bookmarklet IDE for Chrome
###
# Load default settings (and set them if they are not set already
window.Settings.get({
project_dir: 'Bookmarklets',
indentUnit: 2,
smartIndent: true,
tabSize: 2,
indentWithTabs: false,
lineNumbers: true,
theme: 'solarized'
}, (items) ->
document.querySelector('#ide-options-txt-projectdir').value = items.project_dir
document.querySelector('#ide-options-theme').value = items.theme
document.querySelector('#ide-options-txt-indentsize').value = items.tabSize
document.querySelector('#ide-options-chk-usetabs').checked = 'checked' if items.indentWithTabs
document.querySelector('#ide-options-chk-showlinenumbers').checked = 'checked' if items.lineNumbers
)
# Save
document.querySelector('#ide-options-btn-save').addEventListener('click', ->
project_dir = document.querySelector('#ide-options-txt-projectdir').value
theme = document.querySelector('#ide-options-theme').value
indentUnit = document.querySelector('#ide-options-txt-indentsize').value
tabSize = document.querySelector('#ide-options-txt-indentsize').value
indentWithTabs = document.querySelector('#ide-options-chk-usetabs').checked
lineNumbers = document.querySelector('#ide-options-chk-showlinenumbers').checked
window.Settings.save({
project_dir: project_dir,
indentUnit: indentUnit,
tabSize: tabSize,
indentWithTabs: indentWithTabs,
lineNumbers: lineNumbers
theme: theme
}, ->
document.getElementById('ide-options-status').innerHTML = 'Saved.'
)
)
| 11686 | ###
Bookmarklet IDE
(c) 2015 <NAME> & Contributors
Bookmarklet IDE for Chrome
###
# Load default settings (and set them if they are not set already
window.Settings.get({
project_dir: 'Bookmarklets',
indentUnit: 2,
smartIndent: true,
tabSize: 2,
indentWithTabs: false,
lineNumbers: true,
theme: 'solarized'
}, (items) ->
document.querySelector('#ide-options-txt-projectdir').value = items.project_dir
document.querySelector('#ide-options-theme').value = items.theme
document.querySelector('#ide-options-txt-indentsize').value = items.tabSize
document.querySelector('#ide-options-chk-usetabs').checked = 'checked' if items.indentWithTabs
document.querySelector('#ide-options-chk-showlinenumbers').checked = 'checked' if items.lineNumbers
)
# Save
document.querySelector('#ide-options-btn-save').addEventListener('click', ->
project_dir = document.querySelector('#ide-options-txt-projectdir').value
theme = document.querySelector('#ide-options-theme').value
indentUnit = document.querySelector('#ide-options-txt-indentsize').value
tabSize = document.querySelector('#ide-options-txt-indentsize').value
indentWithTabs = document.querySelector('#ide-options-chk-usetabs').checked
lineNumbers = document.querySelector('#ide-options-chk-showlinenumbers').checked
window.Settings.save({
project_dir: project_dir,
indentUnit: indentUnit,
tabSize: tabSize,
indentWithTabs: indentWithTabs,
lineNumbers: lineNumbers
theme: theme
}, ->
document.getElementById('ide-options-status').innerHTML = 'Saved.'
)
)
| true | ###
Bookmarklet IDE
(c) 2015 PI:NAME:<NAME>END_PI & Contributors
Bookmarklet IDE for Chrome
###
# Load default settings (and set them if they are not set already
window.Settings.get({
project_dir: 'Bookmarklets',
indentUnit: 2,
smartIndent: true,
tabSize: 2,
indentWithTabs: false,
lineNumbers: true,
theme: 'solarized'
}, (items) ->
document.querySelector('#ide-options-txt-projectdir').value = items.project_dir
document.querySelector('#ide-options-theme').value = items.theme
document.querySelector('#ide-options-txt-indentsize').value = items.tabSize
document.querySelector('#ide-options-chk-usetabs').checked = 'checked' if items.indentWithTabs
document.querySelector('#ide-options-chk-showlinenumbers').checked = 'checked' if items.lineNumbers
)
# Save
document.querySelector('#ide-options-btn-save').addEventListener('click', ->
project_dir = document.querySelector('#ide-options-txt-projectdir').value
theme = document.querySelector('#ide-options-theme').value
indentUnit = document.querySelector('#ide-options-txt-indentsize').value
tabSize = document.querySelector('#ide-options-txt-indentsize').value
indentWithTabs = document.querySelector('#ide-options-chk-usetabs').checked
lineNumbers = document.querySelector('#ide-options-chk-showlinenumbers').checked
window.Settings.save({
project_dir: project_dir,
indentUnit: indentUnit,
tabSize: tabSize,
indentWithTabs: indentWithTabs,
lineNumbers: lineNumbers
theme: theme
}, ->
document.getElementById('ide-options-status').innerHTML = 'Saved.'
)
)
|
[
{
"context": "e: form.username.value,\n password: form.password.value,\n api: form.api.value\n }, null,",
"end": 2557,
"score": 0.8390699625015259,
"start": 2543,
"tag": "PASSWORD",
"value": "password.value"
},
{
"context": "pw = mk('input', {type: 'passw... | ui/js/coffee/account.coffee | isabella232/incubator-warble-server | 1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
signup = (form) ->
err = null
if form.name.value.length < 2
err = "Please enter your full name"
else if form.screenname.value.length < 1
err = "Please enter a screen name"
else if form.email.value.length < 6 or not form.email.value.match(/^[^\r\n\s @]+@[^\r\n\s @]+$/)
err = "Please enter a valid email address"
else if form.password.value.length < 1 or form.password.value != form.password2.value
err = "Please enter your password and make sure it matches the re-type"
if err
document.getElementById('signupmsg').innerHTML = "<h2>Error: " + err + "</h2>"
return false
else
document.getElementById('signupmsg').innerHTML = "Creating account, hang on..!"
post('user-signup',
{
action: 'create',
name: form.name.value,
password: form.password.value,
screenname: form.screenname.value,
email: form.email.value,
code: form.code.value
}
, null, validateSignup)
return false
validateSignup = (json, state) ->
if json.created
document.getElementById('signupmsg').innerHTML = "<span style='color: #060;'>Account created! Please check your inbox for verification instructions.</span>"
else
document.getElementById('signupmsg').innerHTML = "<h2 style='font-size: 2rem; color: #830;'>Error: " + json.message + "</h2>"
login = (form) ->
if form.username.value.length > 5 and form.password.value.length > 0
cog(document.getElementById('loginmsg'))
post('account', {
username: form.username.value,
password: form.password.value,
api: form.api.value
}, null, validateLogin)
return false
validateLogin = (json, state) ->
if json.loginRequired
document.getElementById('loginmsg').innerHTML = json.error
else
if json.apiversion and json.apiversion >= 3
if document.referrer and document.referrer.match(/https:\/\/(?:www)?\.snoot\.io\/dashboard/i)
location.href = document.referrer
else
location.href = "/dashboard.html?page=default"
else
location.href = "/api2.html?page=default"
doResetPass = () ->
rtoken = get('rtoken').value
newpass = get('newpass').value
post('account',{ remail: remail, rtoken: rtoken, newpass: newpass } , null, pwReset)
return false
remail = ""
pwReset = () ->
get('resetform').innerHTML = "Assuming you entered the right token, your password has now been reset!. <a href='login.html'>Log in</a>."
getResetToken = (json, state) ->
form = get('resetform')
form.innerHTML = ""
p = mk('p', {}, "A reset token has been sent to your email address. Please enter the reset token and your new preferred password below:")
app(form, p)
token = mk('input', {type: 'text', placeholder: 'Reset token', autocomplete: 'off', name: 'rtoken', id: 'rtoken'})
newpw = mk('input', {type: 'password', placeholder: 'New passord', autocomplete: 'off', name: 'newpass', id: 'newpass'})
app(form, token)
app(form, mk('br'))
app(form, newpw)
app(form, mk('br'))
btn = mk('input', { type: 'button', onclick: 'doResetPass()', value: 'Reset your password'})
form.setAttribute("onsubmit", "return doResetPass();")
app(form, btn)
resetpw = () ->
email = get('email').value
remail = email
post('account',{ reset: email } , null, getResetToken)
return false
| 192801 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
signup = (form) ->
err = null
if form.name.value.length < 2
err = "Please enter your full name"
else if form.screenname.value.length < 1
err = "Please enter a screen name"
else if form.email.value.length < 6 or not form.email.value.match(/^[^\r\n\s @]+@[^\r\n\s @]+$/)
err = "Please enter a valid email address"
else if form.password.value.length < 1 or form.password.value != form.password2.value
err = "Please enter your password and make sure it matches the re-type"
if err
document.getElementById('signupmsg').innerHTML = "<h2>Error: " + err + "</h2>"
return false
else
document.getElementById('signupmsg').innerHTML = "Creating account, hang on..!"
post('user-signup',
{
action: 'create',
name: form.name.value,
password: form.password.value,
screenname: form.screenname.value,
email: form.email.value,
code: form.code.value
}
, null, validateSignup)
return false
validateSignup = (json, state) ->
if json.created
document.getElementById('signupmsg').innerHTML = "<span style='color: #060;'>Account created! Please check your inbox for verification instructions.</span>"
else
document.getElementById('signupmsg').innerHTML = "<h2 style='font-size: 2rem; color: #830;'>Error: " + json.message + "</h2>"
login = (form) ->
if form.username.value.length > 5 and form.password.value.length > 0
cog(document.getElementById('loginmsg'))
post('account', {
username: form.username.value,
password: form.<PASSWORD>,
api: form.api.value
}, null, validateLogin)
return false
validateLogin = (json, state) ->
if json.loginRequired
document.getElementById('loginmsg').innerHTML = json.error
else
if json.apiversion and json.apiversion >= 3
if document.referrer and document.referrer.match(/https:\/\/(?:www)?\.snoot\.io\/dashboard/i)
location.href = document.referrer
else
location.href = "/dashboard.html?page=default"
else
location.href = "/api2.html?page=default"
doResetPass = () ->
rtoken = get('rtoken').value
newpass = get('newpass').value
post('account',{ remail: remail, rtoken: rtoken, newpass: newpass } , null, pwReset)
return false
remail = ""
pwReset = () ->
get('resetform').innerHTML = "Assuming you entered the right token, your password has now been reset!. <a href='login.html'>Log in</a>."
getResetToken = (json, state) ->
form = get('resetform')
form.innerHTML = ""
p = mk('p', {}, "A reset token has been sent to your email address. Please enter the reset token and your new preferred password below:")
app(form, p)
token = mk('input', {type: 'text', placeholder: 'Reset token', autocomplete: 'off', name: 'rtoken', id: 'rtoken'})
newpw = mk('input', {type: 'password', placeholder: '<PASSWORD> passord', autocomplete: 'off', name: 'newpass', id: 'newpass'})
app(form, token)
app(form, mk('br'))
app(form, newpw)
app(form, mk('br'))
btn = mk('input', { type: 'button', onclick: 'doResetPass()', value: 'Reset your password'})
form.setAttribute("onsubmit", "return doResetPass();")
app(form, btn)
resetpw = () ->
email = get('email').value
remail = email
post('account',{ reset: email } , null, getResetToken)
return false
| true | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
signup = (form) ->
err = null
if form.name.value.length < 2
err = "Please enter your full name"
else if form.screenname.value.length < 1
err = "Please enter a screen name"
else if form.email.value.length < 6 or not form.email.value.match(/^[^\r\n\s @]+@[^\r\n\s @]+$/)
err = "Please enter a valid email address"
else if form.password.value.length < 1 or form.password.value != form.password2.value
err = "Please enter your password and make sure it matches the re-type"
if err
document.getElementById('signupmsg').innerHTML = "<h2>Error: " + err + "</h2>"
return false
else
document.getElementById('signupmsg').innerHTML = "Creating account, hang on..!"
post('user-signup',
{
action: 'create',
name: form.name.value,
password: form.password.value,
screenname: form.screenname.value,
email: form.email.value,
code: form.code.value
}
, null, validateSignup)
return false
validateSignup = (json, state) ->
if json.created
document.getElementById('signupmsg').innerHTML = "<span style='color: #060;'>Account created! Please check your inbox for verification instructions.</span>"
else
document.getElementById('signupmsg').innerHTML = "<h2 style='font-size: 2rem; color: #830;'>Error: " + json.message + "</h2>"
login = (form) ->
if form.username.value.length > 5 and form.password.value.length > 0
cog(document.getElementById('loginmsg'))
post('account', {
username: form.username.value,
password: form.PI:PASSWORD:<PASSWORD>END_PI,
api: form.api.value
}, null, validateLogin)
return false
validateLogin = (json, state) ->
if json.loginRequired
document.getElementById('loginmsg').innerHTML = json.error
else
if json.apiversion and json.apiversion >= 3
if document.referrer and document.referrer.match(/https:\/\/(?:www)?\.snoot\.io\/dashboard/i)
location.href = document.referrer
else
location.href = "/dashboard.html?page=default"
else
location.href = "/api2.html?page=default"
doResetPass = () ->
rtoken = get('rtoken').value
newpass = get('newpass').value
post('account',{ remail: remail, rtoken: rtoken, newpass: newpass } , null, pwReset)
return false
remail = ""
pwReset = () ->
get('resetform').innerHTML = "Assuming you entered the right token, your password has now been reset!. <a href='login.html'>Log in</a>."
getResetToken = (json, state) ->
form = get('resetform')
form.innerHTML = ""
p = mk('p', {}, "A reset token has been sent to your email address. Please enter the reset token and your new preferred password below:")
app(form, p)
token = mk('input', {type: 'text', placeholder: 'Reset token', autocomplete: 'off', name: 'rtoken', id: 'rtoken'})
newpw = mk('input', {type: 'password', placeholder: 'PI:PASSWORD:<PASSWORD>END_PI passord', autocomplete: 'off', name: 'newpass', id: 'newpass'})
app(form, token)
app(form, mk('br'))
app(form, newpw)
app(form, mk('br'))
btn = mk('input', { type: 'button', onclick: 'doResetPass()', value: 'Reset your password'})
form.setAttribute("onsubmit", "return doResetPass();")
app(form, btn)
resetpw = () ->
email = get('email').value
remail = email
post('account',{ reset: email } , null, getResetToken)
return false
|
[
{
"context": "itialize: (container, application) ->\n\n key = 'common:main'\n \n container.register key, container.looku",
"end": 118,
"score": 0.9986782073974609,
"start": 107,
"tag": "KEY",
"value": "common:main"
}
] | app/initializers/common_controller.coffee | tobobo/thyme-frontend | 0 | Ember.Application.initializer
name: 'injectCommon'
initialize: (container, application) ->
key = 'common:main'
container.register key, container.lookup('controller:common'),
instantiate: false
for type in ['controller', 'route']
for name in ['common']
container.typeInjection type, name, key
`export default undefined`
| 121981 | Ember.Application.initializer
name: 'injectCommon'
initialize: (container, application) ->
key = '<KEY>'
container.register key, container.lookup('controller:common'),
instantiate: false
for type in ['controller', 'route']
for name in ['common']
container.typeInjection type, name, key
`export default undefined`
| true | Ember.Application.initializer
name: 'injectCommon'
initialize: (container, application) ->
key = 'PI:KEY:<KEY>END_PI'
container.register key, container.lookup('controller:common'),
instantiate: false
for type in ['controller', 'route']
for name in ['common']
container.typeInjection type, name, key
`export default undefined`
|
[
{
"context": "yBaer\"\n\t@type:\"ModifierEnvyBaer\"\n\n\t@modifierName:\"Envybaer\"\n\t@description:\"Whenever this minion damages an e",
"end": 327,
"score": 0.9945217967033386,
"start": 319,
"tag": "NAME",
"value": "Envybaer"
}
] | app/sdk/modifiers/modifierEnvyBaer.coffee | willroberts/duelyst | 5 | ModifierDealDamageWatch = require './modifierDealDamageWatch'
RandomTeleportAction = require 'app/sdk/actions/randomTeleportAction'
CONFIG = require 'app/common/config'
_ = require 'underscore'
class ModifierEnvyBaer extends ModifierDealDamageWatch
type:"ModifierEnvyBaer"
@type:"ModifierEnvyBaer"
@modifierName:"Envybaer"
@description:"Whenever this minion damages an enemy, teleport that enemy to a random corner"
maxStacks: 1
onDealDamage: (action) ->
if action.getTarget().getOwnerId() isnt @getCard().getOwnerId()
randomTeleportAction = new RandomTeleportAction(@getGameSession())
randomTeleportAction.setOwnerId(@getCard().getOwnerId())
randomTeleportAction.setSource(action.getTarget())
randomTeleportAction.setTeleportPattern(CONFIG.PATTERN_CORNERS)
randomTeleportAction.setFXResource(_.union(randomTeleportAction.getFXResource(), @getFXResource()))
@getGameSession().executeAction(randomTeleportAction)
module.exports = ModifierEnvyBaer
| 175388 | ModifierDealDamageWatch = require './modifierDealDamageWatch'
RandomTeleportAction = require 'app/sdk/actions/randomTeleportAction'
CONFIG = require 'app/common/config'
_ = require 'underscore'
class ModifierEnvyBaer extends ModifierDealDamageWatch
type:"ModifierEnvyBaer"
@type:"ModifierEnvyBaer"
@modifierName:"<NAME>"
@description:"Whenever this minion damages an enemy, teleport that enemy to a random corner"
maxStacks: 1
onDealDamage: (action) ->
if action.getTarget().getOwnerId() isnt @getCard().getOwnerId()
randomTeleportAction = new RandomTeleportAction(@getGameSession())
randomTeleportAction.setOwnerId(@getCard().getOwnerId())
randomTeleportAction.setSource(action.getTarget())
randomTeleportAction.setTeleportPattern(CONFIG.PATTERN_CORNERS)
randomTeleportAction.setFXResource(_.union(randomTeleportAction.getFXResource(), @getFXResource()))
@getGameSession().executeAction(randomTeleportAction)
module.exports = ModifierEnvyBaer
| true | ModifierDealDamageWatch = require './modifierDealDamageWatch'
RandomTeleportAction = require 'app/sdk/actions/randomTeleportAction'
CONFIG = require 'app/common/config'
_ = require 'underscore'
class ModifierEnvyBaer extends ModifierDealDamageWatch
type:"ModifierEnvyBaer"
@type:"ModifierEnvyBaer"
@modifierName:"PI:NAME:<NAME>END_PI"
@description:"Whenever this minion damages an enemy, teleport that enemy to a random corner"
maxStacks: 1
onDealDamage: (action) ->
if action.getTarget().getOwnerId() isnt @getCard().getOwnerId()
randomTeleportAction = new RandomTeleportAction(@getGameSession())
randomTeleportAction.setOwnerId(@getCard().getOwnerId())
randomTeleportAction.setSource(action.getTarget())
randomTeleportAction.setTeleportPattern(CONFIG.PATTERN_CORNERS)
randomTeleportAction.setFXResource(_.union(randomTeleportAction.getFXResource(), @getFXResource()))
@getGameSession().executeAction(randomTeleportAction)
module.exports = ModifierEnvyBaer
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9988133311271667,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "\"http\")\n http.Server(->\n ).listen common.PORT, \"127.0.0.1\"\nelse if process.argv[2] is... | test/simple/test-cluster-master-error.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
cluster = require("cluster")
# Cluster setup
if cluster.isWorker
http = require("http")
http.Server(->
).listen common.PORT, "127.0.0.1"
else if process.argv[2] is "cluster"
totalWorkers = 2
# Send PID to testcase process
forkNum = 0
cluster.on "fork", forkEvent = (worker) ->
# Send PID
process.send
cmd: "worker"
workerPID: worker.process.pid
# Stop listening when done
cluster.removeListener "fork", forkEvent if ++forkNum is totalWorkers
return
# Throw accidently error when all workers are listening
listeningNum = 0
cluster.on "listening", listeningEvent = ->
# When all workers are listening
if ++listeningNum is totalWorkers
# Stop listening
cluster.removeListener "listening", listeningEvent
# throw accidently error
process.nextTick ->
console.error "about to throw"
throw new Error("accidently error")return
return
# Startup a basic cluster
cluster.fork()
cluster.fork()
else
# This is the testcase
fork = require("child_process").fork
isAlive = (pid) ->
try
#this will throw an error if the process is dead
process.kill pid, 0
return true
catch e
return false
return
existMaster = false
existWorker = false
# List all workers
workers = []
# Spawn a cluster process
master = fork(process.argv[1], ["cluster"],
silent: true
)
# Handle messages from the cluster
master.on "message", (data) ->
# Add worker pid to list and progress tracker
workers.push data.workerPID if data.cmd is "worker"
return
# When cluster is dead
master.on "exit", (code) ->
# Check that the cluster died accidently
# Give the workers time to shut down
checkWorkers = ->
# When master is dead all workers should be dead to
alive = false
workers.forEach (pid) ->
alive = true if isAlive(pid)
return
# If a worker was alive this did not act as expected
existWorker = not alive
return
existMaster = !!code
setTimeout checkWorkers, 200
return
process.once "exit", ->
m = "The master did not die after an error was throwed"
assert.ok existMaster, m
m = "The workers did not die after an error in the master"
assert.ok existWorker, m
return
| 557 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
cluster = require("cluster")
# Cluster setup
if cluster.isWorker
http = require("http")
http.Server(->
).listen common.PORT, "127.0.0.1"
else if process.argv[2] is "cluster"
totalWorkers = 2
# Send PID to testcase process
forkNum = 0
cluster.on "fork", forkEvent = (worker) ->
# Send PID
process.send
cmd: "worker"
workerPID: worker.process.pid
# Stop listening when done
cluster.removeListener "fork", forkEvent if ++forkNum is totalWorkers
return
# Throw accidently error when all workers are listening
listeningNum = 0
cluster.on "listening", listeningEvent = ->
# When all workers are listening
if ++listeningNum is totalWorkers
# Stop listening
cluster.removeListener "listening", listeningEvent
# throw accidently error
process.nextTick ->
console.error "about to throw"
throw new Error("accidently error")return
return
# Startup a basic cluster
cluster.fork()
cluster.fork()
else
# This is the testcase
fork = require("child_process").fork
isAlive = (pid) ->
try
#this will throw an error if the process is dead
process.kill pid, 0
return true
catch e
return false
return
existMaster = false
existWorker = false
# List all workers
workers = []
# Spawn a cluster process
master = fork(process.argv[1], ["cluster"],
silent: true
)
# Handle messages from the cluster
master.on "message", (data) ->
# Add worker pid to list and progress tracker
workers.push data.workerPID if data.cmd is "worker"
return
# When cluster is dead
master.on "exit", (code) ->
# Check that the cluster died accidently
# Give the workers time to shut down
checkWorkers = ->
# When master is dead all workers should be dead to
alive = false
workers.forEach (pid) ->
alive = true if isAlive(pid)
return
# If a worker was alive this did not act as expected
existWorker = not alive
return
existMaster = !!code
setTimeout checkWorkers, 200
return
process.once "exit", ->
m = "The master did not die after an error was throwed"
assert.ok existMaster, m
m = "The workers did not die after an error in the master"
assert.ok existWorker, m
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
cluster = require("cluster")
# Cluster setup
if cluster.isWorker
http = require("http")
http.Server(->
).listen common.PORT, "127.0.0.1"
else if process.argv[2] is "cluster"
totalWorkers = 2
# Send PID to testcase process
forkNum = 0
cluster.on "fork", forkEvent = (worker) ->
# Send PID
process.send
cmd: "worker"
workerPID: worker.process.pid
# Stop listening when done
cluster.removeListener "fork", forkEvent if ++forkNum is totalWorkers
return
# Throw accidently error when all workers are listening
listeningNum = 0
cluster.on "listening", listeningEvent = ->
# When all workers are listening
if ++listeningNum is totalWorkers
# Stop listening
cluster.removeListener "listening", listeningEvent
# throw accidently error
process.nextTick ->
console.error "about to throw"
throw new Error("accidently error")return
return
# Startup a basic cluster
cluster.fork()
cluster.fork()
else
# This is the testcase
fork = require("child_process").fork
isAlive = (pid) ->
try
#this will throw an error if the process is dead
process.kill pid, 0
return true
catch e
return false
return
existMaster = false
existWorker = false
# List all workers
workers = []
# Spawn a cluster process
master = fork(process.argv[1], ["cluster"],
silent: true
)
# Handle messages from the cluster
master.on "message", (data) ->
# Add worker pid to list and progress tracker
workers.push data.workerPID if data.cmd is "worker"
return
# When cluster is dead
master.on "exit", (code) ->
# Check that the cluster died accidently
# Give the workers time to shut down
checkWorkers = ->
# When master is dead all workers should be dead to
alive = false
workers.forEach (pid) ->
alive = true if isAlive(pid)
return
# If a worker was alive this did not act as expected
existWorker = not alive
return
existMaster = !!code
setTimeout checkWorkers, 200
return
process.once "exit", ->
m = "The master did not die after an error was throwed"
assert.ok existMaster, m
m = "The workers did not die after an error in the master"
assert.ok existWorker, m
return
|
[
{
"context": "n \"{your answer} who?\", then \"lol\"\n#\n# Author:\n# Tim Kinnane\n#\n\nmodule.exports = (robot) ->\n require '../../l",
"end": 466,
"score": 0.9998786449432373,
"start": 455,
"tag": "NAME",
"value": "Tim Kinnane"
}
] | integration/scripts/knock-knock-direct-noreply.coffee | PropertyUX/nubot-playbook | 0 | # Description:
# Tell Hubot a knock knock joke - it is guaranteed to laugh
# Uses inline declarations, single branch path with prompt (very compact)
#
# Dependencies:
# hubot-playbook
#
# Configuration:
# Playbook direct scene responds to a single user and room
# sendReplies: false by default - Hubot will send to room not reply to user
#
# Commands:
# knock - it will say "Who's there", then "{your answer} who?", then "lol"
#
# Author:
# Tim Kinnane
#
module.exports = (robot) ->
require '../../lib'
.use robot
.sceneHear /knock/, scope: 'direct', (res) ->
res.dialogue.addPath "Who's there?", [ /.*/, (res) ->
res.dialogue.addPath "#{ res.match[0] } who?", [ /.*/, "lol" ]
]
| 47644 | # Description:
# Tell Hubot a knock knock joke - it is guaranteed to laugh
# Uses inline declarations, single branch path with prompt (very compact)
#
# Dependencies:
# hubot-playbook
#
# Configuration:
# Playbook direct scene responds to a single user and room
# sendReplies: false by default - Hubot will send to room not reply to user
#
# Commands:
# knock - it will say "Who's there", then "{your answer} who?", then "lol"
#
# Author:
# <NAME>
#
module.exports = (robot) ->
require '../../lib'
.use robot
.sceneHear /knock/, scope: 'direct', (res) ->
res.dialogue.addPath "Who's there?", [ /.*/, (res) ->
res.dialogue.addPath "#{ res.match[0] } who?", [ /.*/, "lol" ]
]
| true | # Description:
# Tell Hubot a knock knock joke - it is guaranteed to laugh
# Uses inline declarations, single branch path with prompt (very compact)
#
# Dependencies:
# hubot-playbook
#
# Configuration:
# Playbook direct scene responds to a single user and room
# sendReplies: false by default - Hubot will send to room not reply to user
#
# Commands:
# knock - it will say "Who's there", then "{your answer} who?", then "lol"
#
# Author:
# PI:NAME:<NAME>END_PI
#
module.exports = (robot) ->
require '../../lib'
.use robot
.sceneHear /knock/, scope: 'direct', (res) ->
res.dialogue.addPath "Who's there?", [ /.*/, (res) ->
res.dialogue.addPath "#{ res.match[0] } who?", [ /.*/, "lol" ]
]
|
[
{
"context": "@getAvatarUrlFromUsername = (username) ->\n\tkey = \"avatar_random_#{username}\"\n\trandom = Session?.keys[key] or 0\n\tif not usernam",
"end": 76,
"score": 0.9982385635375977,
"start": 50,
"tag": "KEY",
"value": "avatar_random_#{username}\""
}
] | packages/rocketchat-ui/lib/getAvatarUrlFromUsername.coffee | Cosecha/rocket-chat-stable | 2 | @getAvatarUrlFromUsername = (username) ->
key = "avatar_random_#{username}"
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl().replace /\/$/, ''
else
path = __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '';
"#{path}/avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
| 19502 | @getAvatarUrlFromUsername = (username) ->
key = "<KEY>
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl().replace /\/$/, ''
else
path = __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '';
"#{path}/avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
| true | @getAvatarUrlFromUsername = (username) ->
key = "PI:KEY:<KEY>END_PI
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl().replace /\/$/, ''
else
path = __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '';
"#{path}/avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
|
[
{
"context": "#\n# * $Id$\n# *\n# * Copyright 2014 Valentyn Kolesnikov\n# *\n# * Licensed under the Apache License, Versio",
"end": 53,
"score": 0.9995880722999573,
"start": 34,
"tag": "NAME",
"value": "Valentyn Kolesnikov"
},
{
"context": "0_5\"\n \"-text\": \"тысяч,миллионов... | src/main/resources/com/github/moneytostr/moneytostr.coffee | runbins/thirdparty-moneytostr-russian | 40 | #
# * $Id$
# *
# * Copyright 2014 Valentyn Kolesnikov
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
#
currencyList = CurrencyList:
language:
"-value": "UKR"
UKR:
item: [
{
"-value": "0"
"-text": "нуль"
}
{
"-value": "1000_10"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "1000_1"
"-text": "тисяча,мільйон,мільярд,трильйон"
}
{
"-value": "1000_234"
"-text": "тисячі,мільйона,мільярда,трильйона"
}
{
"-value": "1000_5"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "10_19"
"-text": "десять,одинадцять,дванадцять,тринадцять,чотирнадцять,п’ятнадцять,шiстнадцять,сiмнадцять,вiсiмнадцять,дев'ятнадцять"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "дві,два,два,дві"
}
{
"-value": "3_9"
"-text": "три,чотири,п’ять,шість,сім,вісім,дев’ять"
}
{
"-value": "100_900"
"-text": "сто ,двісті ,триста ,чотириста ,п’ятсот ,шістсот ,сімсот ,вісімсот ,дев’ятсот "
}
{
"-value": "20_90"
"-text": "двадцять ,тридцять ,сорок ,п’ятдесят ,шістдесят ,сімдесят ,вісімдесят ,дев’яносто "
}
{
"-value": "pdv"
"-text": "в т.ч. ПДВ "
}
{
"-value": "pdv_value"
"-text": "20"
}
]
RUS:
item: [
{
"-value": "0"
"-text": "ноль"
}
{
"-value": "1000_10"
"-text": "тысяч,миллионов,миллиардов,триллионов"
}
{
"-value": "1000_1"
"-text": "тысяча,миллион,миллиард,триллион"
}
{
"-value": "1000_234"
"-text": "тысячи,миллиона,миллиарда,триллиона"
}
{
"-value": "1000_5"
"-text": "тысяч,миллионов,миллиардов,триллионов"
}
{
"-value": "10_19"
"-text": "десять,одиннадцать,двенадцать,тринадцать,четырнадцать,пятнадцать,шестнадцать,семнадцать,восемнадцать,девятнадцать"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "две,два,два,две"
}
{
"-value": "3_9"
"-text": "три,четыре,пять,шесть,семь,восемь,девять"
}
{
"-value": "100_900"
"-text": "сто ,двести ,триста ,четыреста ,пятьсот ,шестьсот ,семьсот ,восемьсот ,девятьсот "
}
{
"-value": "20_90"
"-text": "двадцать ,тридцать ,сорок ,пятьдесят ,шестьдесят ,семьдесят ,восемьдесят ,девяносто "
}
{
"-value": "pdv"
"-text": "в т.ч. НДС "
}
{
"-value": "pdv_value"
"-text": "18"
}
]
ENG:
item: [
{
"-value": "0"
"-text": "zero"
}
{
"-value": "1000_10"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_1"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_234"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_5"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "10_19"
"-text": "ten,eleven,twelve,thirteen,fourteen,fifteen,sixteen,seventeen,eighteen,nineteen"
}
{
"-value": "1"
"-text": "one,one,one,one"
}
{
"-value": "2"
"-text": "two,two,two,two"
}
{
"-value": "3_9"
"-text": "three,four,five,six,seven,eight,nine"
}
{
"-value": "100_900"
"-text": "one hundred ,two hundred ,three hundred ,four hundred ,five hundred ,six hundred ,seven hundred ,eight hundred ,nine hundred "
}
{
"-value": "20_90"
"-text": "twenty-,thirty-,forty-,fifty-,sixty-,seventy-,eighty-,ninety-"
}
{
"-value": "pdv"
"-text": "including VAT "
}
{
"-value": "pdv_value"
"-text": "10"
}
]
RUR: [
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "RUS"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рубля"
"-RubFiveUnit": "рублей"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "UKR"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рублі"
"-RubFiveUnit": "рублів"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "ENG"
"-RubOneUnit": "ruble"
"-RubTwoUnit": "rubles"
"-RubFiveUnit": "rubles"
"-RubSex": "M"
"-RubShortUnit": "RUR."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
UAH: [
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "RUS"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривни"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "UKR"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривні"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "ENG"
"-RubOneUnit": "hryvnia"
"-RubTwoUnit": "hryvnias"
"-RubFiveUnit": "hryvnias"
"-RubSex": "M"
"-RubShortUnit": "UAH."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
USD: [
{
"-CurrID": "840"
"-CurrName": "Долари США"
"-language": "RUS"
"-RubOneUnit": "доллар"
"-RubTwoUnit": "доллара"
"-RubFiveUnit": "долларов"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центов"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "Долари США"
"-language": "UKR"
"-RubOneUnit": "долар"
"-RubTwoUnit": "долара"
"-RubFiveUnit": "доларів"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центів"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "Долари США"
"-language": "ENG"
"-RubOneUnit": "dollar"
"-RubTwoUnit": "dollars"
"-RubFiveUnit": "dollars"
"-RubSex": "M"
"-RubShortUnit": "USD."
"-KopOneUnit": "cent"
"-KopTwoUnit": "cents"
"-KopFiveUnit": "cents"
"-KopSex": "M"
}
]
PER10: [
{
"-CurrID": "556"
"-CurrName": "Вiдсотки з десятими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятая процента"
"-KopTwoUnit": "десятых процента"
"-KopFiveUnit": "десятых процента"
"-KopSex": "F"
}
{
"-CurrID": "556"
"-CurrName": "Вiдсотки з десятими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десята відсотка"
"-KopTwoUnit": "десятих відсотка"
"-KopFiveUnit": "десятих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "560"
"-CurrName": "Вiдсотки з десятими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "tenth of one percent"
"-KopTwoUnit": "tenth of one percent"
"-KopFiveUnit": "tenth of one percent"
"-KopSex": "F"
}
]
PER100: [
{
"-CurrID": "557"
"-CurrName": "Вiдсотки з сотими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "сотая процента"
"-KopTwoUnit": "сотых процента"
"-KopFiveUnit": "сотых процента"
"-KopSex": "F"
}
{
"-CurrID": "557"
"-CurrName": "Вiдсотки з сотими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "сота відсотка"
"-KopTwoUnit": "сотих відсотка"
"-KopFiveUnit": "сотих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "561"
"-CurrName": "Вiдсотки з сотими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "hundred percent"
"-KopTwoUnit": "hundredth of percent"
"-KopFiveUnit": "hundredth of percent"
"-KopSex": "F"
}
]
PER1000: [
{
"-CurrID": "558"
"-CurrName": "Вiдсотки з тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "тысячная процента"
"-KopTwoUnit": "тысячных процента"
"-KopFiveUnit": "тысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "558"
"-CurrName": "Вiдсотки з тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "тисячна відсотка"
"-KopTwoUnit": "тисячних відсотка"
"-KopFiveUnit": "тисячних відсотка"
"-KopSex": "F"
}
{
"-CurrID": "562"
"-CurrName": "Вiдсотки з тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "thousandth of percent"
"-KopTwoUnit": "thousandths of percent"
"-KopFiveUnit": "thousandths of percent"
"-KopSex": "F"
}
]
PER10000: [
{
"-CurrID": "559"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятитысячная процента"
"-KopTwoUnit": "десятитысячные процента"
"-KopFiveUnit": "десятитысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "559"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десятитисячна відсотка"
"-KopTwoUnit": "десятитисячних відсотка"
"-KopFiveUnit": "десятитисячних відсотка"
"-KopSex": "M"
}
{
"-CurrID": "563"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "ten percent"
"-KopTwoUnit": "ten-percent"
"-KopFiveUnit": "ten-percent"
"-KopSex": "F"
}
]
###*
Converts numbers to symbols.
@author Valentyn Kolesnikov
@version $Revision$ $Date$
###
###*
Currency.
###
Currency = (->
Currency = ->
Currency.UAH = "UAH"
Currency.RUR = "RUR"
Currency.USD = "USD"
Currency.PER10 = "PER10"
Currency.PER100 = "PER100"
Currency.PER1000 = "PER1000"
Currency.PER10000 = "PER10000"
Currency
)()
###*
Language.
###
Language = (->
Language = ->
Language.RUS = "RUS"
Language.UKR = "UKR"
Language.ENG = "ENG"
Language
)()
###*
Pennies.
###
Pennies = (->
Pennies = ->
Pennies.NUMBER = "NUMBER"
Pennies.TEXT = "TEXT"
Pennies
)()
StringBuilder = (->
StringBuilder = ->
@_buffer = []
return
StringBuilder::append = (text) ->
@_buffer[@_buffer.length] = text
this
StringBuilder::insert = (index, text) ->
@_buffer.splice index, 0, text
this
StringBuilder::length = ->
@toString().length
StringBuilder::deleteCharAt = (index) ->
str = @toString()
@_buffer = []
@append str.substring(0, index)
this
StringBuilder::toString = ->
@_buffer.join ""
StringBuilder
)()
MoneyToStr = (->
MoneyToStr = (currency, language, pennies) ->
@currency = currency
@language = language
@pennies = pennies
languageElement = language
items = currencyList["CurrencyList"][languageElement]["item"]
@messages = {}
for index of items
languageItem = items[index]
@messages[languageItem["-value"]] = languageItem["-text"].split(",") if languageItem["-text"]
currencyItem = currencyList["CurrencyList"][currency]
theISOElement = null
for index of currencyItem
if currencyItem[index]["-language"] is language
theISOElement = currencyItem[index]
break
throw new Error("Currency not found " + currency) unless theISOElement?
@rubOneUnit = theISOElement["-RubOneUnit"]
@rubTwoUnit = theISOElement["-RubTwoUnit"]
@rubFiveUnit = theISOElement["-RubFiveUnit"]
@kopOneUnit = theISOElement["-KopOneUnit"]
@kopTwoUnit = theISOElement["-KopTwoUnit"]
@kopFiveUnit = theISOElement["-KopFiveUnit"]
@rubSex = theISOElement["-RubSex"]
@kopSex = theISOElement["-KopSex"]
@rubShortUnit = theISOElement["-RubShortUnit"]
return
MoneyToStr.NUM0 = 0
MoneyToStr.NUM1 = 1
MoneyToStr.NUM2 = 2
MoneyToStr.NUM3 = 3
MoneyToStr.NUM4 = 4
MoneyToStr.NUM5 = 5
MoneyToStr.NUM6 = 6
MoneyToStr.NUM7 = 7
MoneyToStr.NUM8 = 8
MoneyToStr.NUM9 = 9
MoneyToStr.NUM10 = 10
MoneyToStr.NUM11 = 11
MoneyToStr.NUM14 = 14
MoneyToStr.NUM100 = 100
MoneyToStr.NUM1000 = 1000
MoneyToStr.NUM10000 = 10000
MoneyToStr.INDEX_0 = 0
MoneyToStr.INDEX_1 = 1
MoneyToStr.INDEX_2 = 2
MoneyToStr.INDEX_3 = 3
MoneyToStr.percentToStr = (amount, lang) ->
throw new Error("amount is null") unless amount?
throw new Error("lang is null") unless lang?
intPart = parseInt(amount)
fractPart = 0
result = undefined
if amount is parseInt(amount)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(amount, 0)
else if (amount * MoneyToStr.NUM10).toFixed(4) is parseInt(amount * MoneyToStr.NUM10)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM100).toFixed(4) is parseInt(amount * MoneyToStr.NUM100)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM100)
result = new MoneyToStr(Currency.PER100, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM1000).toFixed(4) is parseInt(amount * MoneyToStr.NUM1000)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM1000)
result = new MoneyToStr(Currency.PER1000, lang, Pennies.TEXT).convert(intPart, fractPart)
else
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10000)
result = new MoneyToStr(Currency.PER10000, lang, Pennies.TEXT).convert(intPart, fractPart)
result
###*
Converts double value to the text description.
@param theMoney
the amount of money in format major.minor
@return the string description of money value
###
MoneyToStr::convertValue = (theMoney) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
intPart = parseInt(theMoney)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM100)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM1000) if @currency is Currency.PER1000
@convert intPart, fractPart
###*
Converts number to currency. Usage: var moneyToStr = new MoneyToStr(Currency.UAH, Language.UKR, Pennies.NUMBER);
var result = moneyToStr.convert(123, 0); Expected: result = сто двадцять три гривні 00 копійок
@param theMoney
the amount of money major currency
@param theKopeiki
the amount of money minor currency
@return the string description of money value
###
MoneyToStr::convert = (theMoney, theKopeiki) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
throw new Error("theKopeiki is null") if typeof theKopeiki is 'undefined' or not theKopeiki?
money2str = new StringBuilder()
triadNum = 0
theTriad = 0
intPart = theMoney
money2str.append @messages["0"][0] + " " if intPart is 0
loop
theTriad = parseInt(intPart % MoneyToStr.NUM1000)
money2str.insert 0, @triad2Word(theTriad, triadNum, @rubSex)
if triadNum is 0
range10 = parseInt((theTriad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
range = parseInt(theTriad % MoneyToStr.NUM10)
if range10 is MoneyToStr.NUM1
money2str.append @rubFiveUnit
else
switch range
when MoneyToStr.NUM1
money2str.append @rubOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @rubTwoUnit
else
money2str.append @rubFiveUnit
intPart = parseInt(intPart / MoneyToStr.NUM1000)
triadNum++
break unless intPart > 0
if @pennies is Pennies.TEXT
money2str.append((if @language is Language.ENG then " and " else " ")).append (if theKopeiki is 0 then @messages["0"][0] + " " else @triad2Word(theKopeiki, 0, @kopSex))
else
money2str.append " " + ((if theKopeiki < 10 then "0" + theKopeiki else theKopeiki)) + " "
if theKopeiki >= MoneyToStr.NUM11 and theKopeiki <= MoneyToStr.NUM14
money2str.append @kopFiveUnit
else
switch parseInt(theKopeiki % MoneyToStr.NUM10)
when MoneyToStr.NUM1
money2str.append @kopOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @kopTwoUnit
else
money2str.append @kopFiveUnit
money2str.toString().trim()
MoneyToStr::triad2Word = (triad, triadNum, sex) ->
triadWord = new StringBuilder()
return "" if triad is 0
range = @check1(triad, triadWord)
if @language is Language.ENG and triadWord.length() > 0 and triad % MoneyToStr.NUM10 is 0
triadWord.deleteCharAt triadWord.length() - 1
triadWord.append " "
range10 = range
range = parseInt(triad % MoneyToStr.NUM10)
@check2 triadNum, sex, triadWord, triad, range10
switch triadNum
when MoneyToStr.NUM0, MoneyToStr.NUM1, MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
if range10 is MoneyToStr.NUM1
triadWord.append @messages["1000_10"][triadNum - 1] + " "
else
switch range
when MoneyToStr.NUM1
triadWord.append @messages["1000_1"][triadNum - 1] + " "
break
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
triadWord.append @messages["1000_234"][triadNum - 1] + " "
break
else
triadWord.append @messages["1000_5"][triadNum - 1] + " "
else
triadWord.append "??? "
triadWord.toString()
###*
@param triadNum the triad num
@param sex the sex
@param triadWord the triad word
@param triad the triad
@param range10 the range 10
###
MoneyToStr::check2 = (triadNum, sex, triadWord, triad, range10) ->
range = parseInt(triad % MoneyToStr.NUM10)
if range10 is 1
triadWord.append @messages["10_19"][range] + " "
else
switch range
when MoneyToStr.NUM1
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["1"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["1"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["1"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["1"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM2
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["2"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["2"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["2"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["2"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM3, MoneyToStr.NUM4, MoneyToStr.NUM5, MoneyToStr.NUM6, MoneyToStr.NUM7, MoneyToStr.NUM8, MoneyToStr.NUM9
triadWord.append [
""
""
""
].concat(@messages["3_9"])[range] + " "
else
return
###*
@param triad the triad
@param triadWord the triad word
@return the range
###
MoneyToStr::check1 = (triad, triadWord) ->
range = parseInt(triad / MoneyToStr.NUM100)
triadWord.append [""].concat(@messages["100_900"])[range]
range = parseInt((triad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
triadWord.append [
""
""
].concat(@messages["20_90"])[range]
range
MoneyToStr::getMessages = ->
@messages
MoneyToStr::getRubShortUnit = ->
@rubShortUnit
MoneyToStr
)()
module.exports =
MoneyToStr: MoneyToStr
Currency: Currency
Language: Language
Pennies: Pennies
| 90558 | #
# * $Id$
# *
# * Copyright 2014 <NAME>
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
#
currencyList = CurrencyList:
language:
"-value": "UKR"
UKR:
item: [
{
"-value": "0"
"-text": "нуль"
}
{
"-value": "1000_10"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "1000_1"
"-text": "тисяча,мільйон,мільярд,трильйон"
}
{
"-value": "1000_234"
"-text": "тисячі,мільйона,мільярда,трильйона"
}
{
"-value": "1000_5"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "10_19"
"-text": "десять,одинадцять,дванадцять,тринадцять,чотирнадцять,п’ятнадцять,шiстнадцять,сiмнадцять,вiсiмнадцять,дев'ятнадцять"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "дві,два,два,дві"
}
{
"-value": "3_9"
"-text": "три,чотири,п’ять,шість,сім,вісім,дев’ять"
}
{
"-value": "100_900"
"-text": "сто ,двісті ,триста ,чотириста ,п’ятсот ,шістсот ,сімсот ,вісімсот ,дев’ятсот "
}
{
"-value": "20_90"
"-text": "двадцять ,тридцять ,сорок ,п’ятдесят ,шістдесят ,сімдесят ,вісімдесят ,дев’яносто "
}
{
"-value": "pdv"
"-text": "в т.ч. ПДВ "
}
{
"-value": "pdv_value"
"-text": "20"
}
]
RUS:
item: [
{
"-value": "0"
"-text": "ноль"
}
{
"-value": "1000_10"
"-text": "тысяч,миллионов,миллиардов,триллионов"
}
{
"-value": "1000_1"
"-text": "тысяча,миллион,миллиард,триллион"
}
{
"-value": "1000_234"
"-text": "тысячи,миллиона,миллиарда,триллиона"
}
{
"-value": "1000_5"
"-text": "тысяч,миллионов,миллиардов,<NAME>"
}
{
"-value": "10_19"
"-text": "десять,одиннадцать,двенадцать,тринадцать,четырнадцать,пятнадцать,шестнадцать,семнадцать,восемнадцать,девятнадцать"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "две,два,два,две"
}
{
"-value": "3_9"
"-text": "три,четыре,пять,шесть,семь,восемь,девять"
}
{
"-value": "100_900"
"-text": "сто ,двести ,триста ,четыреста ,пятьсот ,шестьсот ,семьсот ,восемьсот ,девятьсот "
}
{
"-value": "20_90"
"-text": "двадцать ,тридцать ,сорок ,пятьдесят ,шестьдесят ,семьдесят ,восемьдесят ,девяносто "
}
{
"-value": "pdv"
"-text": "в т.ч. НДС "
}
{
"-value": "pdv_value"
"-text": "18"
}
]
ENG:
item: [
{
"-value": "0"
"-text": "zero"
}
{
"-value": "1000_10"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_1"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_234"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_5"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "10_19"
"-text": "ten,eleven,twelve,thirteen,fourteen,fifteen,sixteen,seventeen,eighteen,nineteen"
}
{
"-value": "1"
"-text": "one,one,one,one"
}
{
"-value": "2"
"-text": "two,two,two,two"
}
{
"-value": "3_9"
"-text": "three,four,five,six,seven,eight,nine"
}
{
"-value": "100_900"
"-text": "one hundred ,two hundred ,three hundred ,four hundred ,five hundred ,six hundred ,seven hundred ,eight hundred ,nine hundred "
}
{
"-value": "20_90"
"-text": "twenty-,thirty-,forty-,fifty-,sixty-,seventy-,eighty-,ninety-"
}
{
"-value": "pdv"
"-text": "including VAT "
}
{
"-value": "pdv_value"
"-text": "10"
}
]
RUR: [
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "RUS"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рубля"
"-RubFiveUnit": "рублей"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "UKR"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рублі"
"-RubFiveUnit": "рублів"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "ENG"
"-RubOneUnit": "ruble"
"-RubTwoUnit": "rubles"
"-RubFiveUnit": "rubles"
"-RubSex": "M"
"-RubShortUnit": "RUR."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
UAH: [
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "RUS"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривни"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "UKR"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривні"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "<NAME>краинскі гривні"
"-language": "ENG"
"-RubOneUnit": "hryvnia"
"-RubTwoUnit": "hryvnias"
"-RubFiveUnit": "hryvnias"
"-RubSex": "M"
"-RubShortUnit": "UAH."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
USD: [
{
"-CurrID": "840"
"-CurrName": "<NAME>"
"-language": "RUS"
"-RubOneUnit": "доллар"
"-RubTwoUnit": "доллара"
"-RubFiveUnit": "долларов"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центов"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "<NAME>"
"-language": "UKR"
"-RubOneUnit": "долар"
"-RubTwoUnit": "долара"
"-RubFiveUnit": "доларів"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центів"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "<NAME>"
"-language": "ENG"
"-RubOneUnit": "dollar"
"-RubTwoUnit": "dollars"
"-RubFiveUnit": "dollars"
"-RubSex": "M"
"-RubShortUnit": "USD."
"-KopOneUnit": "cent"
"-KopTwoUnit": "cents"
"-KopFiveUnit": "cents"
"-KopSex": "M"
}
]
PER10: [
{
"-CurrID": "556"
"-CurrName": "<NAME> з десятими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятая процента"
"-KopTwoUnit": "десятых процента"
"-KopFiveUnit": "десятых процента"
"-KopSex": "F"
}
{
"-CurrID": "556"
"-CurrName": "<NAME> з десятими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десята відсотка"
"-KopTwoUnit": "десятих відсотка"
"-KopFiveUnit": "десятих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "560"
"-CurrName": "<NAME> з десятими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "tenth of one percent"
"-KopTwoUnit": "tenth of one percent"
"-KopFiveUnit": "tenth of one percent"
"-KopSex": "F"
}
]
PER100: [
{
"-CurrID": "557"
"-CurrName": "Вiдсотки з сотими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "сотая процента"
"-KopTwoUnit": "сотых процента"
"-KopFiveUnit": "сотых процента"
"-KopSex": "F"
}
{
"-CurrID": "557"
"-CurrName": "<NAME>iдсотки з сотими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "сота відсотка"
"-KopTwoUnit": "сотих відсотка"
"-KopFiveUnit": "сотих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "561"
"-CurrName": "<NAME>дсотки з сотими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "hundred percent"
"-KopTwoUnit": "hundredth of percent"
"-KopFiveUnit": "hundredth of percent"
"-KopSex": "F"
}
]
PER1000: [
{
"-CurrID": "558"
"-CurrName": "<NAME>дсотки з тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "тысячная процента"
"-KopTwoUnit": "тысячных процента"
"-KopFiveUnit": "тысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "558"
"-CurrName": "<NAME> з тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "тисячна відсотка"
"-KopTwoUnit": "тисячних відсотка"
"-KopFiveUnit": "тисячних відсотка"
"-KopSex": "F"
}
{
"-CurrID": "562"
"-CurrName": "<NAME> з тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "thousandth of percent"
"-KopTwoUnit": "thousandths of percent"
"-KopFiveUnit": "thousandths of percent"
"-KopSex": "F"
}
]
PER10000: [
{
"-CurrID": "559"
"-CurrName": "<NAME> з десяти тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятитысячная процента"
"-KopTwoUnit": "десятитысячные процента"
"-KopFiveUnit": "десятитысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "559"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десятитисячна відсотка"
"-KopTwoUnit": "десятитисячних відсотка"
"-KopFiveUnit": "десятитисячних відсотка"
"-KopSex": "M"
}
{
"-CurrID": "563"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "ten percent"
"-KopTwoUnit": "ten-percent"
"-KopFiveUnit": "ten-percent"
"-KopSex": "F"
}
]
###*
Converts numbers to symbols.
@author <NAME>
@version $Revision$ $Date$
###
###*
Currency.
###
Currency = (->
Currency = ->
Currency.UAH = "UAH"
Currency.RUR = "RUR"
Currency.USD = "USD"
Currency.PER10 = "PER10"
Currency.PER100 = "PER100"
Currency.PER1000 = "PER1000"
Currency.PER10000 = "PER10000"
Currency
)()
###*
Language.
###
Language = (->
Language = ->
Language.RUS = "RUS"
Language.UKR = "UKR"
Language.ENG = "ENG"
Language
)()
###*
Pennies.
###
Pennies = (->
Pennies = ->
Pennies.NUMBER = "NUMBER"
Pennies.TEXT = "TEXT"
Pennies
)()
StringBuilder = (->
StringBuilder = ->
@_buffer = []
return
StringBuilder::append = (text) ->
@_buffer[@_buffer.length] = text
this
StringBuilder::insert = (index, text) ->
@_buffer.splice index, 0, text
this
StringBuilder::length = ->
@toString().length
StringBuilder::deleteCharAt = (index) ->
str = @toString()
@_buffer = []
@append str.substring(0, index)
this
StringBuilder::toString = ->
@_buffer.join ""
StringBuilder
)()
MoneyToStr = (->
MoneyToStr = (currency, language, pennies) ->
@currency = currency
@language = language
@pennies = pennies
languageElement = language
items = currencyList["CurrencyList"][languageElement]["item"]
@messages = {}
for index of items
languageItem = items[index]
@messages[languageItem["-value"]] = languageItem["-text"].split(",") if languageItem["-text"]
currencyItem = currencyList["CurrencyList"][currency]
theISOElement = null
for index of currencyItem
if currencyItem[index]["-language"] is language
theISOElement = currencyItem[index]
break
throw new Error("Currency not found " + currency) unless theISOElement?
@rubOneUnit = theISOElement["-RubOneUnit"]
@rubTwoUnit = theISOElement["-RubTwoUnit"]
@rubFiveUnit = theISOElement["-RubFiveUnit"]
@kopOneUnit = theISOElement["-KopOneUnit"]
@kopTwoUnit = theISOElement["-KopTwoUnit"]
@kopFiveUnit = theISOElement["-KopFiveUnit"]
@rubSex = theISOElement["-RubSex"]
@kopSex = theISOElement["-KopSex"]
@rubShortUnit = theISOElement["-RubShortUnit"]
return
MoneyToStr.NUM0 = 0
MoneyToStr.NUM1 = 1
MoneyToStr.NUM2 = 2
MoneyToStr.NUM3 = 3
MoneyToStr.NUM4 = 4
MoneyToStr.NUM5 = 5
MoneyToStr.NUM6 = 6
MoneyToStr.NUM7 = 7
MoneyToStr.NUM8 = 8
MoneyToStr.NUM9 = 9
MoneyToStr.NUM10 = 10
MoneyToStr.NUM11 = 11
MoneyToStr.NUM14 = 14
MoneyToStr.NUM100 = 100
MoneyToStr.NUM1000 = 1000
MoneyToStr.NUM10000 = 10000
MoneyToStr.INDEX_0 = 0
MoneyToStr.INDEX_1 = 1
MoneyToStr.INDEX_2 = 2
MoneyToStr.INDEX_3 = 3
MoneyToStr.percentToStr = (amount, lang) ->
throw new Error("amount is null") unless amount?
throw new Error("lang is null") unless lang?
intPart = parseInt(amount)
fractPart = 0
result = undefined
if amount is parseInt(amount)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(amount, 0)
else if (amount * MoneyToStr.NUM10).toFixed(4) is parseInt(amount * MoneyToStr.NUM10)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM100).toFixed(4) is parseInt(amount * MoneyToStr.NUM100)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM100)
result = new MoneyToStr(Currency.PER100, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM1000).toFixed(4) is parseInt(amount * MoneyToStr.NUM1000)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM1000)
result = new MoneyToStr(Currency.PER1000, lang, Pennies.TEXT).convert(intPart, fractPart)
else
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10000)
result = new MoneyToStr(Currency.PER10000, lang, Pennies.TEXT).convert(intPart, fractPart)
result
###*
Converts double value to the text description.
@param theMoney
the amount of money in format major.minor
@return the string description of money value
###
MoneyToStr::convertValue = (theMoney) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
intPart = parseInt(theMoney)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM100)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM1000) if @currency is Currency.PER1000
@convert intPart, fractPart
###*
Converts number to currency. Usage: var moneyToStr = new MoneyToStr(Currency.UAH, Language.UKR, Pennies.NUMBER);
var result = moneyToStr.convert(123, 0); Expected: result = сто двадцять три гривні 00 копійок
@param theMoney
the amount of money major currency
@param theKopeiki
the amount of money minor currency
@return the string description of money value
###
MoneyToStr::convert = (theMoney, theKopeiki) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
throw new Error("theKopeiki is null") if typeof theKopeiki is 'undefined' or not theKopeiki?
money2str = new StringBuilder()
triadNum = 0
theTriad = 0
intPart = theMoney
money2str.append @messages["0"][0] + " " if intPart is 0
loop
theTriad = parseInt(intPart % MoneyToStr.NUM1000)
money2str.insert 0, @triad2Word(theTriad, triadNum, @rubSex)
if triadNum is 0
range10 = parseInt((theTriad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
range = parseInt(theTriad % MoneyToStr.NUM10)
if range10 is MoneyToStr.NUM1
money2str.append @rubFiveUnit
else
switch range
when MoneyToStr.NUM1
money2str.append @rubOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @rubTwoUnit
else
money2str.append @rubFiveUnit
intPart = parseInt(intPart / MoneyToStr.NUM1000)
triadNum++
break unless intPart > 0
if @pennies is Pennies.TEXT
money2str.append((if @language is Language.ENG then " and " else " ")).append (if theKopeiki is 0 then @messages["0"][0] + " " else @triad2Word(theKopeiki, 0, @kopSex))
else
money2str.append " " + ((if theKopeiki < 10 then "0" + theKopeiki else theKopeiki)) + " "
if theKopeiki >= MoneyToStr.NUM11 and theKopeiki <= MoneyToStr.NUM14
money2str.append @kopFiveUnit
else
switch parseInt(theKopeiki % MoneyToStr.NUM10)
when MoneyToStr.NUM1
money2str.append @kopOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @kopTwoUnit
else
money2str.append @kopFiveUnit
money2str.toString().trim()
MoneyToStr::triad2Word = (triad, triadNum, sex) ->
triadWord = new StringBuilder()
return "" if triad is 0
range = @check1(triad, triadWord)
if @language is Language.ENG and triadWord.length() > 0 and triad % MoneyToStr.NUM10 is 0
triadWord.deleteCharAt triadWord.length() - 1
triadWord.append " "
range10 = range
range = parseInt(triad % MoneyToStr.NUM10)
@check2 triadNum, sex, triadWord, triad, range10
switch triadNum
when MoneyToStr.NUM0, MoneyToStr.NUM1, MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
if range10 is MoneyToStr.NUM1
triadWord.append @messages["1000_10"][triadNum - 1] + " "
else
switch range
when MoneyToStr.NUM1
triadWord.append @messages["1000_1"][triadNum - 1] + " "
break
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
triadWord.append @messages["1000_234"][triadNum - 1] + " "
break
else
triadWord.append @messages["1000_5"][triadNum - 1] + " "
else
triadWord.append "??? "
triadWord.toString()
###*
@param triadNum the triad num
@param sex the sex
@param triadWord the triad word
@param triad the triad
@param range10 the range 10
###
MoneyToStr::check2 = (triadNum, sex, triadWord, triad, range10) ->
range = parseInt(triad % MoneyToStr.NUM10)
if range10 is 1
triadWord.append @messages["10_19"][range] + " "
else
switch range
when MoneyToStr.NUM1
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["1"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["1"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["1"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["1"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM2
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["2"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["2"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["2"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["2"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM3, MoneyToStr.NUM4, MoneyToStr.NUM5, MoneyToStr.NUM6, MoneyToStr.NUM7, MoneyToStr.NUM8, MoneyToStr.NUM9
triadWord.append [
""
""
""
].concat(@messages["3_9"])[range] + " "
else
return
###*
@param triad the triad
@param triadWord the triad word
@return the range
###
MoneyToStr::check1 = (triad, triadWord) ->
range = parseInt(triad / MoneyToStr.NUM100)
triadWord.append [""].concat(@messages["100_900"])[range]
range = parseInt((triad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
triadWord.append [
""
""
].concat(@messages["20_90"])[range]
range
MoneyToStr::getMessages = ->
@messages
MoneyToStr::getRubShortUnit = ->
@rubShortUnit
MoneyToStr
)()
module.exports =
MoneyToStr: MoneyToStr
Currency: Currency
Language: Language
Pennies: Pennies
| true | #
# * $Id$
# *
# * Copyright 2014 PI:NAME:<NAME>END_PI
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
#
currencyList = CurrencyList:
language:
"-value": "UKR"
UKR:
item: [
{
"-value": "0"
"-text": "нуль"
}
{
"-value": "1000_10"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "1000_1"
"-text": "тисяча,мільйон,мільярд,трильйон"
}
{
"-value": "1000_234"
"-text": "тисячі,мільйона,мільярда,трильйона"
}
{
"-value": "1000_5"
"-text": "тисяч,мільйонів,мільярдів,трильйонів"
}
{
"-value": "10_19"
"-text": "десять,одинадцять,дванадцять,тринадцять,чотирнадцять,п’ятнадцять,шiстнадцять,сiмнадцять,вiсiмнадцять,дев'ятнадцять"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "дві,два,два,дві"
}
{
"-value": "3_9"
"-text": "три,чотири,п’ять,шість,сім,вісім,дев’ять"
}
{
"-value": "100_900"
"-text": "сто ,двісті ,триста ,чотириста ,п’ятсот ,шістсот ,сімсот ,вісімсот ,дев’ятсот "
}
{
"-value": "20_90"
"-text": "двадцять ,тридцять ,сорок ,п’ятдесят ,шістдесят ,сімдесят ,вісімдесят ,дев’яносто "
}
{
"-value": "pdv"
"-text": "в т.ч. ПДВ "
}
{
"-value": "pdv_value"
"-text": "20"
}
]
RUS:
item: [
{
"-value": "0"
"-text": "ноль"
}
{
"-value": "1000_10"
"-text": "тысяч,миллионов,миллиардов,триллионов"
}
{
"-value": "1000_1"
"-text": "тысяча,миллион,миллиард,триллион"
}
{
"-value": "1000_234"
"-text": "тысячи,миллиона,миллиарда,триллиона"
}
{
"-value": "1000_5"
"-text": "тысяч,миллионов,миллиардов,PI:NAME:<NAME>END_PI"
}
{
"-value": "10_19"
"-text": "десять,одиннадцать,двенадцать,тринадцать,четырнадцать,пятнадцать,шестнадцать,семнадцать,восемнадцать,девятнадцать"
}
{
"-value": "1"
"-text": "одна,один,один,одна"
}
{
"-value": "2"
"-text": "две,два,два,две"
}
{
"-value": "3_9"
"-text": "три,четыре,пять,шесть,семь,восемь,девять"
}
{
"-value": "100_900"
"-text": "сто ,двести ,триста ,четыреста ,пятьсот ,шестьсот ,семьсот ,восемьсот ,девятьсот "
}
{
"-value": "20_90"
"-text": "двадцать ,тридцать ,сорок ,пятьдесят ,шестьдесят ,семьдесят ,восемьдесят ,девяносто "
}
{
"-value": "pdv"
"-text": "в т.ч. НДС "
}
{
"-value": "pdv_value"
"-text": "18"
}
]
ENG:
item: [
{
"-value": "0"
"-text": "zero"
}
{
"-value": "1000_10"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_1"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_234"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "1000_5"
"-text": "thousand,million,billion,trillion"
}
{
"-value": "10_19"
"-text": "ten,eleven,twelve,thirteen,fourteen,fifteen,sixteen,seventeen,eighteen,nineteen"
}
{
"-value": "1"
"-text": "one,one,one,one"
}
{
"-value": "2"
"-text": "two,two,two,two"
}
{
"-value": "3_9"
"-text": "three,four,five,six,seven,eight,nine"
}
{
"-value": "100_900"
"-text": "one hundred ,two hundred ,three hundred ,four hundred ,five hundred ,six hundred ,seven hundred ,eight hundred ,nine hundred "
}
{
"-value": "20_90"
"-text": "twenty-,thirty-,forty-,fifty-,sixty-,seventy-,eighty-,ninety-"
}
{
"-value": "pdv"
"-text": "including VAT "
}
{
"-value": "pdv_value"
"-text": "10"
}
]
RUR: [
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "RUS"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рубля"
"-RubFiveUnit": "рублей"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "UKR"
"-RubOneUnit": "рубль"
"-RubTwoUnit": "рублі"
"-RubFiveUnit": "рублів"
"-RubSex": "M"
"-RubShortUnit": "руб."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "810"
"-CurrName": "Российские рубли"
"-language": "ENG"
"-RubOneUnit": "ruble"
"-RubTwoUnit": "rubles"
"-RubFiveUnit": "rubles"
"-RubSex": "M"
"-RubShortUnit": "RUR."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
UAH: [
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "RUS"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривни"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копейка"
"-KopTwoUnit": "копейки"
"-KopFiveUnit": "копеек"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "Украинскі гривні"
"-language": "UKR"
"-RubOneUnit": "гривня"
"-RubTwoUnit": "гривні"
"-RubFiveUnit": "гривень"
"-RubSex": "F"
"-RubShortUnit": "грн."
"-KopOneUnit": "копійка"
"-KopTwoUnit": "копійки"
"-KopFiveUnit": "копійок"
"-KopSex": "F"
}
{
"-CurrID": "980"
"-CurrName": "PI:NAME:<NAME>END_PIкраинскі гривні"
"-language": "ENG"
"-RubOneUnit": "hryvnia"
"-RubTwoUnit": "hryvnias"
"-RubFiveUnit": "hryvnias"
"-RubSex": "M"
"-RubShortUnit": "UAH."
"-KopOneUnit": "kopeck"
"-KopTwoUnit": "kopecks"
"-KopFiveUnit": "kopecks"
"-KopSex": "M"
}
]
USD: [
{
"-CurrID": "840"
"-CurrName": "PI:NAME:<NAME>END_PI"
"-language": "RUS"
"-RubOneUnit": "доллар"
"-RubTwoUnit": "доллара"
"-RubFiveUnit": "долларов"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центов"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "PI:NAME:<NAME>END_PI"
"-language": "UKR"
"-RubOneUnit": "долар"
"-RubTwoUnit": "долара"
"-RubFiveUnit": "доларів"
"-RubSex": "M"
"-RubShortUnit": "дол."
"-KopOneUnit": "цент"
"-KopTwoUnit": "цента"
"-KopFiveUnit": "центів"
"-KopSex": "M"
}
{
"-CurrID": "840"
"-CurrName": "PI:NAME:<NAME>END_PI"
"-language": "ENG"
"-RubOneUnit": "dollar"
"-RubTwoUnit": "dollars"
"-RubFiveUnit": "dollars"
"-RubSex": "M"
"-RubShortUnit": "USD."
"-KopOneUnit": "cent"
"-KopTwoUnit": "cents"
"-KopFiveUnit": "cents"
"-KopSex": "M"
}
]
PER10: [
{
"-CurrID": "556"
"-CurrName": "PI:NAME:<NAME>END_PI з десятими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятая процента"
"-KopTwoUnit": "десятых процента"
"-KopFiveUnit": "десятых процента"
"-KopSex": "F"
}
{
"-CurrID": "556"
"-CurrName": "PI:NAME:<NAME>END_PI з десятими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десята відсотка"
"-KopTwoUnit": "десятих відсотка"
"-KopFiveUnit": "десятих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "560"
"-CurrName": "PI:NAME:<NAME>END_PI з десятими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "tenth of one percent"
"-KopTwoUnit": "tenth of one percent"
"-KopFiveUnit": "tenth of one percent"
"-KopSex": "F"
}
]
PER100: [
{
"-CurrID": "557"
"-CurrName": "Вiдсотки з сотими частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "сотая процента"
"-KopTwoUnit": "сотых процента"
"-KopFiveUnit": "сотых процента"
"-KopSex": "F"
}
{
"-CurrID": "557"
"-CurrName": "PI:NAME:<NAME>END_PIiдсотки з сотими частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "сота відсотка"
"-KopTwoUnit": "сотих відсотка"
"-KopFiveUnit": "сотих відсотка"
"-KopSex": "F"
}
{
"-CurrID": "561"
"-CurrName": "PI:NAME:<NAME>END_PIдсотки з сотими частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "hundred percent"
"-KopTwoUnit": "hundredth of percent"
"-KopFiveUnit": "hundredth of percent"
"-KopSex": "F"
}
]
PER1000: [
{
"-CurrID": "558"
"-CurrName": "PI:NAME:<NAME>END_PIдсотки з тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "тысячная процента"
"-KopTwoUnit": "тысячных процента"
"-KopFiveUnit": "тысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "558"
"-CurrName": "PI:NAME:<NAME>END_PI з тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "тисячна відсотка"
"-KopTwoUnit": "тисячних відсотка"
"-KopFiveUnit": "тисячних відсотка"
"-KopSex": "F"
}
{
"-CurrID": "562"
"-CurrName": "PI:NAME:<NAME>END_PI з тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "thousandth of percent"
"-KopTwoUnit": "thousandths of percent"
"-KopFiveUnit": "thousandths of percent"
"-KopSex": "F"
}
]
PER10000: [
{
"-CurrID": "559"
"-CurrName": "PI:NAME:<NAME>END_PI з десяти тисячними частинами"
"-language": "RUS"
"-RubOneUnit": "целая,"
"-RubTwoUnit": "целых,"
"-RubFiveUnit": "целых,"
"-RubSex": "F"
"-KopOneUnit": "десятитысячная процента"
"-KopTwoUnit": "десятитысячные процента"
"-KopFiveUnit": "десятитысячных процента"
"-KopSex": "F"
}
{
"-CurrID": "559"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "UKR"
"-RubOneUnit": "ціла,"
"-RubTwoUnit": "цілих,"
"-RubFiveUnit": "цілих,"
"-RubSex": "F"
"-KopOneUnit": "десятитисячна відсотка"
"-KopTwoUnit": "десятитисячних відсотка"
"-KopFiveUnit": "десятитисячних відсотка"
"-KopSex": "M"
}
{
"-CurrID": "563"
"-CurrName": "Вiдсотки з десяти тисячними частинами"
"-language": "ENG"
"-RubOneUnit": ","
"-RubTwoUnit": "integers,"
"-RubFiveUnit": "integers,"
"-RubSex": "F"
"-KopOneUnit": "ten percent"
"-KopTwoUnit": "ten-percent"
"-KopFiveUnit": "ten-percent"
"-KopSex": "F"
}
]
###*
Converts numbers to symbols.
@author PI:NAME:<NAME>END_PI
@version $Revision$ $Date$
###
###*
Currency.
###
Currency = (->
Currency = ->
Currency.UAH = "UAH"
Currency.RUR = "RUR"
Currency.USD = "USD"
Currency.PER10 = "PER10"
Currency.PER100 = "PER100"
Currency.PER1000 = "PER1000"
Currency.PER10000 = "PER10000"
Currency
)()
###*
Language.
###
Language = (->
Language = ->
Language.RUS = "RUS"
Language.UKR = "UKR"
Language.ENG = "ENG"
Language
)()
###*
Pennies.
###
Pennies = (->
Pennies = ->
Pennies.NUMBER = "NUMBER"
Pennies.TEXT = "TEXT"
Pennies
)()
StringBuilder = (->
StringBuilder = ->
@_buffer = []
return
StringBuilder::append = (text) ->
@_buffer[@_buffer.length] = text
this
StringBuilder::insert = (index, text) ->
@_buffer.splice index, 0, text
this
StringBuilder::length = ->
@toString().length
StringBuilder::deleteCharAt = (index) ->
str = @toString()
@_buffer = []
@append str.substring(0, index)
this
StringBuilder::toString = ->
@_buffer.join ""
StringBuilder
)()
MoneyToStr = (->
MoneyToStr = (currency, language, pennies) ->
@currency = currency
@language = language
@pennies = pennies
languageElement = language
items = currencyList["CurrencyList"][languageElement]["item"]
@messages = {}
for index of items
languageItem = items[index]
@messages[languageItem["-value"]] = languageItem["-text"].split(",") if languageItem["-text"]
currencyItem = currencyList["CurrencyList"][currency]
theISOElement = null
for index of currencyItem
if currencyItem[index]["-language"] is language
theISOElement = currencyItem[index]
break
throw new Error("Currency not found " + currency) unless theISOElement?
@rubOneUnit = theISOElement["-RubOneUnit"]
@rubTwoUnit = theISOElement["-RubTwoUnit"]
@rubFiveUnit = theISOElement["-RubFiveUnit"]
@kopOneUnit = theISOElement["-KopOneUnit"]
@kopTwoUnit = theISOElement["-KopTwoUnit"]
@kopFiveUnit = theISOElement["-KopFiveUnit"]
@rubSex = theISOElement["-RubSex"]
@kopSex = theISOElement["-KopSex"]
@rubShortUnit = theISOElement["-RubShortUnit"]
return
MoneyToStr.NUM0 = 0
MoneyToStr.NUM1 = 1
MoneyToStr.NUM2 = 2
MoneyToStr.NUM3 = 3
MoneyToStr.NUM4 = 4
MoneyToStr.NUM5 = 5
MoneyToStr.NUM6 = 6
MoneyToStr.NUM7 = 7
MoneyToStr.NUM8 = 8
MoneyToStr.NUM9 = 9
MoneyToStr.NUM10 = 10
MoneyToStr.NUM11 = 11
MoneyToStr.NUM14 = 14
MoneyToStr.NUM100 = 100
MoneyToStr.NUM1000 = 1000
MoneyToStr.NUM10000 = 10000
MoneyToStr.INDEX_0 = 0
MoneyToStr.INDEX_1 = 1
MoneyToStr.INDEX_2 = 2
MoneyToStr.INDEX_3 = 3
MoneyToStr.percentToStr = (amount, lang) ->
throw new Error("amount is null") unless amount?
throw new Error("lang is null") unless lang?
intPart = parseInt(amount)
fractPart = 0
result = undefined
if amount is parseInt(amount)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(amount, 0)
else if (amount * MoneyToStr.NUM10).toFixed(4) is parseInt(amount * MoneyToStr.NUM10)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10)
result = new MoneyToStr(Currency.PER10, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM100).toFixed(4) is parseInt(amount * MoneyToStr.NUM100)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM100)
result = new MoneyToStr(Currency.PER100, lang, Pennies.TEXT).convert(intPart, fractPart)
else if (amount * MoneyToStr.NUM1000).toFixed(4) is parseInt(amount * MoneyToStr.NUM1000)
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM1000)
result = new MoneyToStr(Currency.PER1000, lang, Pennies.TEXT).convert(intPart, fractPart)
else
fractPart = Math.round((amount - intPart) * MoneyToStr.NUM10000)
result = new MoneyToStr(Currency.PER10000, lang, Pennies.TEXT).convert(intPart, fractPart)
result
###*
Converts double value to the text description.
@param theMoney
the amount of money in format major.minor
@return the string description of money value
###
MoneyToStr::convertValue = (theMoney) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
intPart = parseInt(theMoney)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM100)
fractPart = Math.round((theMoney - intPart) * MoneyToStr.NUM1000) if @currency is Currency.PER1000
@convert intPart, fractPart
###*
Converts number to currency. Usage: var moneyToStr = new MoneyToStr(Currency.UAH, Language.UKR, Pennies.NUMBER);
var result = moneyToStr.convert(123, 0); Expected: result = сто двадцять три гривні 00 копійок
@param theMoney
the amount of money major currency
@param theKopeiki
the amount of money minor currency
@return the string description of money value
###
MoneyToStr::convert = (theMoney, theKopeiki) ->
throw new Error("theMoney is null") if typeof theMoney is 'undefined' or not theMoney?
throw new Error("theKopeiki is null") if typeof theKopeiki is 'undefined' or not theKopeiki?
money2str = new StringBuilder()
triadNum = 0
theTriad = 0
intPart = theMoney
money2str.append @messages["0"][0] + " " if intPart is 0
loop
theTriad = parseInt(intPart % MoneyToStr.NUM1000)
money2str.insert 0, @triad2Word(theTriad, triadNum, @rubSex)
if triadNum is 0
range10 = parseInt((theTriad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
range = parseInt(theTriad % MoneyToStr.NUM10)
if range10 is MoneyToStr.NUM1
money2str.append @rubFiveUnit
else
switch range
when MoneyToStr.NUM1
money2str.append @rubOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @rubTwoUnit
else
money2str.append @rubFiveUnit
intPart = parseInt(intPart / MoneyToStr.NUM1000)
triadNum++
break unless intPart > 0
if @pennies is Pennies.TEXT
money2str.append((if @language is Language.ENG then " and " else " ")).append (if theKopeiki is 0 then @messages["0"][0] + " " else @triad2Word(theKopeiki, 0, @kopSex))
else
money2str.append " " + ((if theKopeiki < 10 then "0" + theKopeiki else theKopeiki)) + " "
if theKopeiki >= MoneyToStr.NUM11 and theKopeiki <= MoneyToStr.NUM14
money2str.append @kopFiveUnit
else
switch parseInt(theKopeiki % MoneyToStr.NUM10)
when MoneyToStr.NUM1
money2str.append @kopOneUnit
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
money2str.append @kopTwoUnit
else
money2str.append @kopFiveUnit
money2str.toString().trim()
MoneyToStr::triad2Word = (triad, triadNum, sex) ->
triadWord = new StringBuilder()
return "" if triad is 0
range = @check1(triad, triadWord)
if @language is Language.ENG and triadWord.length() > 0 and triad % MoneyToStr.NUM10 is 0
triadWord.deleteCharAt triadWord.length() - 1
triadWord.append " "
range10 = range
range = parseInt(triad % MoneyToStr.NUM10)
@check2 triadNum, sex, triadWord, triad, range10
switch triadNum
when MoneyToStr.NUM0, MoneyToStr.NUM1, MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
if range10 is MoneyToStr.NUM1
triadWord.append @messages["1000_10"][triadNum - 1] + " "
else
switch range
when MoneyToStr.NUM1
triadWord.append @messages["1000_1"][triadNum - 1] + " "
break
when MoneyToStr.NUM2, MoneyToStr.NUM3, MoneyToStr.NUM4
triadWord.append @messages["1000_234"][triadNum - 1] + " "
break
else
triadWord.append @messages["1000_5"][triadNum - 1] + " "
else
triadWord.append "??? "
triadWord.toString()
###*
@param triadNum the triad num
@param sex the sex
@param triadWord the triad word
@param triad the triad
@param range10 the range 10
###
MoneyToStr::check2 = (triadNum, sex, triadWord, triad, range10) ->
range = parseInt(triad % MoneyToStr.NUM10)
if range10 is 1
triadWord.append @messages["10_19"][range] + " "
else
switch range
when MoneyToStr.NUM1
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["1"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["1"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["1"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["1"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM2
if triadNum is MoneyToStr.NUM1
triadWord.append @messages["2"][MoneyToStr.INDEX_0] + " "
else if triadNum is MoneyToStr.NUM2 or triadNum is MoneyToStr.NUM3 or triadNum is MoneyToStr.NUM4
triadWord.append @messages["2"][MoneyToStr.INDEX_1] + " "
else if "M" is sex
triadWord.append @messages["2"][MoneyToStr.INDEX_2] + " "
else triadWord.append @messages["2"][MoneyToStr.INDEX_3] + " " if "F" is sex
when MoneyToStr.NUM3, MoneyToStr.NUM4, MoneyToStr.NUM5, MoneyToStr.NUM6, MoneyToStr.NUM7, MoneyToStr.NUM8, MoneyToStr.NUM9
triadWord.append [
""
""
""
].concat(@messages["3_9"])[range] + " "
else
return
###*
@param triad the triad
@param triadWord the triad word
@return the range
###
MoneyToStr::check1 = (triad, triadWord) ->
range = parseInt(triad / MoneyToStr.NUM100)
triadWord.append [""].concat(@messages["100_900"])[range]
range = parseInt((triad % MoneyToStr.NUM100) / MoneyToStr.NUM10)
triadWord.append [
""
""
].concat(@messages["20_90"])[range]
range
MoneyToStr::getMessages = ->
@messages
MoneyToStr::getRubShortUnit = ->
@rubShortUnit
MoneyToStr
)()
module.exports =
MoneyToStr: MoneyToStr
Currency: Currency
Language: Language
Pennies: Pennies
|
[
{
"context": "to#\",\n \"body\": \"<a id=\\\"$1\\\" href=\\\"mailto:${2:joe@example.com}?subject=${3:feedback}\\\">${4:email me}</a>$0\"\n ,",
"end": 11695,
"score": 0.9997244477272034,
"start": 11680,
"tag": "EMAIL",
"value": "joe@example.com"
},
{
"context": ".\",\n \"body\"... | snippets/html-id-class-snippets.cson | Seldonite/html-id-class-snippets | 3 | ".text.html":
"Anchor#":
"prefix": "a#",
"body": "<a id=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Anchor.":
"prefix": "a.",
"body": "<a class=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Abbreviation#":
"prefix": "abbr#",
"body": "<abbr id=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Abbreviation.":
"prefix": "abbr.",
"body": "<abbr class=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Area#":
"prefix": "area#",
"body": "<area id=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Area.":
"prefix": "area.",
"body": "<area class=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Article#":
"prefix": "article#",
"body": "<article id=\"$1\">\n\t$2\n</article>"
,
"Aside#":
"prefix": "aside#",
"body": "<aside id=\"$1\">\n\t$2\n</aside>"
,
"Aside.":
"prefix": "aside.",
"body": "<aside class=\"$1\">\n\t$2\n</aside>"
,
"Audio#":
"prefix": "audio#",
"body": "<audio id=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Audio.":
"prefix": "audio.",
"body": "<audio class=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Bold#":
"prefix": "b#",
"body": "<b id=\"$1\">$2</b>$0"
,
"Bold.":
"prefix": "b.",
"body": "<b class=\"$1\">$2</b>$0"
,
"Base#":
"prefix": "base#",
"body": "<base id=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Base.":
"prefix": "base.",
"body": "<base class=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Bi-Directional Isolation#":
"prefix": "bdi#",
"body": "<bdi id=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Isolation.":
"prefix": "bdi.",
"body": "<bdi class=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Override#":
"prefix": "bdo#",
"body": "<bdo id=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Bi-Directional Override.":
"prefix": "bdo.",
"body": "<bdo class=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Blockquote#":
"prefix": "blockquote#",
"body": "<blockquote id=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Blockquote.":
"prefix": "blockquote.",
"body": "<blockquote class=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Line Breaker#":
"prefix": "br#",
"body": "<br id=\"$1\">$0"
,
"Line Breaker.":
"prefix": "br.",
"body": "<br class=\"$1\">$0"
,
"Button#":
"prefix": "button#",
"body": "<button id=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Button.":
"prefix": "button.",
"body": "<button class=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Caption#":
"prefix": "caption#",
"body": "<caption id=\"$1\">$2</caption>$0"
,
"Caption.":
"prefix": "caption.",
"body": "<caption class=\"$1\">$2</caption>$0"
,
"Citation#":
"prefix": "cite#",
"body": "<cite id=\"$1\">$2</cite>$0"
,
"Citation.":
"prefix": "cite.",
"body": "<cite class=\"$1\">$2</cite>$0"
,
"Code#":
"prefix": "code#",
"body": "<code id=\"$1\">$2</code>$0"
,
"Code.":
"prefix": "code.",
"body": "<code class=\"$1\">$2</code>$0"
,
"Column#":
"prefix": "col#",
"body": "<col id=\"$1\">$2</col>$0"
,
"Column.":
"prefix": "col.",
"body": "<col class=\"$1\">$2</col>$0"
,
"Column Group#":
"prefix": "colgroup#",
"body": "<colgroup id=\"$1\">$2</colgroup>$0"
,
"Column Group.":
"prefix": "colgroup.",
"body": "<colgroup class=\"$1\">$2</colgroup>$0"
,
"Content#":
"prefix": "content#",
"body": "<content id=\"$1\" select=\"$2\">$3</content>$0"
,
"Content.":
"prefix": "content.",
"body": "<content class=\"$1\" select=\"$2\">$3</content>$0"
,
"Data#":
"prefix": "data#",
"body": "<data id=\"$1\" value=\"$2\">$3</data>$0"
,
"Data.":
"prefix": "data.",
"body": "<data class=\"$1\" value=\"$2\">$3</data>$0"
,
"Description#":
"prefix": "dd#",
"body": "<dd id=\"$1\">$2</dd>$0"
,
"Description.":
"prefix": "dd.",
"body": "<dd class=\"$1\">$2</dd>$0"
,
"Deleted Text#":
"prefix": "del#",
"body": "<del id=\"$1\">$2</del>$0"
,
"Deleted Text.":
"prefix": "del.",
"body": "<del class=\"$1\">$2</del>$0"
,
"Details#":
"prefix": "details#",
"body": "<details id=\"$1\" ${2:open}>$3</details>$0"
,
"Details.":
"prefix": "details.",
"body": "<details class=\"$1\" ${2:open}>$3</details>$0"
,
"Definition#":
"prefix": "dfn#",
"body": "<dfn id=\"$1\">$2</dfn>$0"
,
"Definition.":
"prefix": "dfn.",
"body": "<dfn class=\"$1\">$2</dfn>$0"
,
"Definition Term#":
"prefix": "dt#",
"body": "<dt id=\"$1\">$2</dt>$0"
,
"Definition Term.":
"prefix": "dt.",
"body": "<dt class=\"$1\">$2</dt>$0"
,
"Div#":
"prefix": "div#",
"body": "<div id=\"$1\">\n\t$2\n</div>$0"
,
"Div,":
"prefix": "div.",
"body": "<div class=\"$1\">\n\t$2\n</div>$0"
,
"Emphasis#":
"prefix": "em#",
"body": "<em id=\"$1\">$2</em>$0"
,
"Embed#":
"prefix": "embed#",
"body": "<embed id=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Embed.":
"prefix": "embed.",
"body": "<embed class=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Fieldset#":
"prefix": "fieldset#",
"body": "<fieldset id=\"$1\">$2</fieldset>$0"
,
"Fieldset.":
"prefix": "fieldset.",
"body": "<fieldset class=\"$1\">$2</fieldset>$0"
,
"Figure Caption#":
"prefix": "figcaption#",
"body": "<figcaption id=\"$1\">$2</figcaption>$0"
,
"Figure Caption.":
"prefix": "figcaption#",
"body": "<figcaption class=\"$1\">$2</figcaption>$0"
,
"Figure#":
"prefix": "figure#",
"body": "<figure id=\"$1\">$2</figure>$0"
,
"Figure.":
"prefix": "figure.",
"body": "<figure class=\"$1\">$2</figure>$0"
,
"Footer#":
"prefix": "footer#",
"body": "<footer id=\"$1\">$2</footer>$0"
,
"Footer.":
"prefix": "footer.",
"body": "<footer class=\"$1\">$2</footer>$0"
,
"Form#":
"prefix": "form#",
"body": "<form id=\"${1:form_id}\" action=\"${2:index.html}\" method=\"${3:post}\">\n\t$4\n</form>"
,
"Form.":
"prefix": "form.",
"body": "<form id=\"${1:form_id}\" class=\"$2\" action=\"${3:index.html}\" method=\"${4:post}\">\n\t$5\n</form>"
,
"Heading 1#":
"prefix": "h1#",
"body": "<h1 id=\"$1\">$2</h1>$0"
,
"Heading 1.":
"prefix": "h1.",
"body": "<h1 class=\"$1\">$2</h1>$0"
,
"Heading 2#":
"prefix": "h2#",
"body": "<h2 id=\"$1\">$2</h2>$0"
,
"Heading 2.":
"prefix": "h2.",
"body": "<h2 class=\"$1\">$2</h2>$0"
,
"Heading 3#":
"prefix": "h3#",
"body": "<h3 id=\"$1\">$2</h3>$0"
,
"Heading 3.":
"prefix": "h3.",
"body": "<h3 class=\"$1\">$2</h3>$0"
,
"Heading 4#":
"prefix": "h4#",
"body": "<h4 id=\"$1\">$2</h4>$0"
,
"Heading 4.":
"prefix": "h4.",
"body": "<h4 class=\"$1\">$2</h4>$0"
,
"Heading 5#":
"prefix": "h5#",
"body": "<h5 id=\"$1\">$2</h5>$0"
,
"Heading 5.":
"prefix": "h5.",
"body": "<h5 class=\"$1\">$2</h5>$0"
,
"Heading 6#":
"prefix": "h6#",
"body": "<h6 id=\"$1\">$2</h6>$0"
,
"Heading 6.":
"prefix": "h6.",
"body": "<h6 class=\"$1\">$2</h6>$0"
,
"Head#":
"prefix": "head#",
"body": "<head id=\"$1\">\n\t$2\n</head>"
,
"Head.":
"prefix": "head.",
"body": "<head class=\"$1\">\n\t$2\n</head>"
,
"Header#":
"prefix": "header#",
"body": "<header id=\"$1\">\n\t$2\n</header>"
,
"Header.":
"prefix": "header.",
"body": "<header class=\"$1\">\n\t$2\n</header>"
,
"Horizontal Rule#":
"prefix": "hr#",
"body": "<hr id=\"$1\">$0"
,
"Horizontal Rule.":
"prefix": "hr.",
"body": "<hr class=\"$1\">$0"
,
"Italic#":
"prefix": "i#",
"body": "<i id=\"$1\">$2</i>$0"
,
"Italic.":
"prefix": "i.",
"body": "<i class=\"$1\">$2</i>$0"
,
"Inline Frame#":
"prefix": "iframe#",
"body": "<iframe id=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Inline Frame.":
"prefix": "iframe.",
"body": "<iframe class=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Input#":
"prefix": "input#",
"body": "<input id=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Input.":
"prefix": "input.",
"body": "<input class=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Image#":
"prefix": "img#",
"body": "<img id=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Image.":
"prefix": "img.",
"body": "<img class=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Inserted Text#":
"prefix": "ins#",
"body": "<ins id=\"$1\">$2</ins>$0"
,
"Inserted Text.":
"prefix": "ins.",
"body": "<ins class=\"$1\">$2</ins>$0"
,
"Keyboard Input#":
"prefix": "kbd#",
"body": "<kbd id=\"$1\">$2</kbd>$0"
,
"Keyboard Input.":
"prefix": "kbd.",
"body": "<kbd class=\"$1\">$2</kbd>$0"
,
"Keygen#":
"prefix": "keygen#",
"body": "<keygen id=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Keygen.":
"prefix": "keygen.",
"body": "<keygen class=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Label#":
"prefix": "label#",
"body": "<label id=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Label.":
"prefix": "label.",
"body": "<label class=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Legend#":
"prefix": "legend#",
"body": "<legend id=\"$1\">$2</legend>$0"
,
"Legend.":
"prefix": "legend.",
"body": "<legend class=\"$1\">$2</legend>$0"
,
"List Item#":
"prefix": "li#",
"body": "<li id=\"$1\">$2</li>$0"
,
"List Item.":
"prefix": "li.",
"body": "<li class=\"$1\">$2</li>$0"
,
"Link#":
"prefix": "link#",
"body": "<link id=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Link.":
"prefix": "link.",
"body": "<link class=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Main#":
"prefix": "main#",
"body": "<main id=\"$1\">\n\t$2\n</main>"
,
"Main.":
"prefix": "main.",
"body": "<main class=\"$1\">\n\t$2\n</main>"
,
"Map#":
"prefix": "map#",
"body": "<map id=\"$1\">\n\t$2\n</map>"
,
"Map.":
"prefix": "map.",
"body": "<map class=\"$1\">\n\t$2\n</map>"
,
"Mark#":
"prefix": "mark#",
"body": "<mark id=\"$1\">$2</mark>$0"
,
"Mark.":
"prefix": "mark.",
"body": "<mark class=\"$1\">$2</mark>$0"
,
"Menu#":
"prefix": "menu#",
"body": "<menu id=\"$1\">\n\t$2\n</menu>"
,
"Menu.":
"prefix": "menu.",
"body": "<menu class=\"$1\">\n\t$2\n</menu>"
,
"Menu Item#":
"prefix": "menuitem#",
"body": "<menuitem id=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Menu Item,":
"prefix": "menuitem.",
"body": "<menuitem class=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Meter#":
"prefix": "meter#",
"body": "<meter id=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Meter.":
"prefix": "meter.",
"body": "<meter class=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Mail Anchor#":
"prefix": "mailto#",
"body": "<a id=\"$1\" href=\"mailto:${2:joe@example.com}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Mail Anchor.":
"prefix": "mailto.",
"body": "<a class\"$1\" href=\"mailto:${2:joe@example.com}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Meta#":
"prefix": "meta#",
"body": "<meta id=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Meta.":
"prefix": "meta.",
"body": "<meta class=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Navigation#":
"prefix": "nav#",
"body": "<nav id=\"$1\">\n\t$2\n</nav>"
,
"Navigation.":
"prefix": "nav.",
"body": "<nav class=\"$1\">\n\t$2\n</nav>"
,
"Noscript#":
"prefix": "noscript#",
"body": "<noscript id=\"$1\">\n\t$2\n</noscript>"
,
"Noscript.":
"prefix": "noscript.",
"body": "<noscript class=\"$1\">\n\t$2\n</noscript>"
,
"Object#":
"prefix": "object#",
"body": "<object id=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Object.":
"prefix": "object.",
"body": "<object class=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Ordered List#":
"prefix": "ol#",
"body": "<ol id=\"$1\">\n\t$2\n</ol>"
,
"Ordered List.":
"prefix": "ol.",
"body": "<ol class=\"$1\">\n\t$2\n</ol>"
,
"Option Group#":
"prefix": "optgroup#",
"body": "<optgroup id=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option Group.":
"prefix": "optgroup.",
"body": "<optgroup class=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option#":
"prefix": "opt#",
"body": "<option id=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Option.":
"prefix": "opt.",
"body": "<option class=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Output#":
"prefix": "output#",
"body": "<output id=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Output.":
"prefix": "output.",
"body": "<output class=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Paragraph#":
"prefix": "p#",
"body": "<p id=\"$1\">\n\t$2\n</p>"
,
"Paragraph.":
"prefix": "p.",
"body": "<p class=\"$1\">\n\t$2\n</p>"
,
"Section#":
"prefix": "section#",
"body": "<section id=\"$1\">\n\t$2\n</section>"
,
"Section.":
"prefix": "section.",
"body": "<section class=\"$1\">\n\t$2\n</section>"
,
"Small#":
"prefix": "small#",
"body": "<small id=\"$1\">$2</small>$0"
,
"Small.":
"prefix": "small.",
"body": "<small class=\"$1\">$2</small>$0"
,
"Span#":
"prefix": "span#",
"body": "<span id=\"$1\">$2</span>$0"
,
"Span.":
"prefix": "span.",
"body": "<span class=\"$1\">$2</span>$0"
,
"Table#":
"prefix": "table#",
"body": "<table id=\"$1\">\n\t$2\n</table>"
,
"Table.":
"prefix": "table.",
"body": "<table class=\"$1\">\n\t$2\n</table>"
,
"Table Cell#":
"prefix": "td#",
"body": "<td id=\"$1\">\n\t$2\n</td>"
,
"Table Cell.":
"prefix": "td.",
"body": "<td class=\"$1\">\n\t$2\n</td>"
,
"Table Header Cell#":
"prefix": "th#",
"body": "<th id=\"$1\">\n\t$2\n</th>"
,
"Table Header Cell.":
"prefix": "th.",
"body": "<th class=\"$1\">\n\t$2\n</th>"
,
"Table Row#":
"prefix": "tr#",
"body": "<tr id=\"$1\">\n\t$2\n</tr>"
,
"Table Row.":
"prefix": "tr.",
"body": "<tr class=\"$1\">\n\t$2\n</tr>"
,
"Unordered List#":
"prefix": "ul#",
"body": "<ul id=\"$1\">\n\t$2\n</ul>"
,
"Unordered List.":
"prefix": "ul.",
"body": "<ul class=\"$1\">\n\t$2\n</ul>"
,
| 41455 | ".text.html":
"Anchor#":
"prefix": "a#",
"body": "<a id=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Anchor.":
"prefix": "a.",
"body": "<a class=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Abbreviation#":
"prefix": "abbr#",
"body": "<abbr id=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Abbreviation.":
"prefix": "abbr.",
"body": "<abbr class=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Area#":
"prefix": "area#",
"body": "<area id=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Area.":
"prefix": "area.",
"body": "<area class=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Article#":
"prefix": "article#",
"body": "<article id=\"$1\">\n\t$2\n</article>"
,
"Aside#":
"prefix": "aside#",
"body": "<aside id=\"$1\">\n\t$2\n</aside>"
,
"Aside.":
"prefix": "aside.",
"body": "<aside class=\"$1\">\n\t$2\n</aside>"
,
"Audio#":
"prefix": "audio#",
"body": "<audio id=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Audio.":
"prefix": "audio.",
"body": "<audio class=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Bold#":
"prefix": "b#",
"body": "<b id=\"$1\">$2</b>$0"
,
"Bold.":
"prefix": "b.",
"body": "<b class=\"$1\">$2</b>$0"
,
"Base#":
"prefix": "base#",
"body": "<base id=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Base.":
"prefix": "base.",
"body": "<base class=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Bi-Directional Isolation#":
"prefix": "bdi#",
"body": "<bdi id=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Isolation.":
"prefix": "bdi.",
"body": "<bdi class=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Override#":
"prefix": "bdo#",
"body": "<bdo id=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Bi-Directional Override.":
"prefix": "bdo.",
"body": "<bdo class=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Blockquote#":
"prefix": "blockquote#",
"body": "<blockquote id=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Blockquote.":
"prefix": "blockquote.",
"body": "<blockquote class=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Line Breaker#":
"prefix": "br#",
"body": "<br id=\"$1\">$0"
,
"Line Breaker.":
"prefix": "br.",
"body": "<br class=\"$1\">$0"
,
"Button#":
"prefix": "button#",
"body": "<button id=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Button.":
"prefix": "button.",
"body": "<button class=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Caption#":
"prefix": "caption#",
"body": "<caption id=\"$1\">$2</caption>$0"
,
"Caption.":
"prefix": "caption.",
"body": "<caption class=\"$1\">$2</caption>$0"
,
"Citation#":
"prefix": "cite#",
"body": "<cite id=\"$1\">$2</cite>$0"
,
"Citation.":
"prefix": "cite.",
"body": "<cite class=\"$1\">$2</cite>$0"
,
"Code#":
"prefix": "code#",
"body": "<code id=\"$1\">$2</code>$0"
,
"Code.":
"prefix": "code.",
"body": "<code class=\"$1\">$2</code>$0"
,
"Column#":
"prefix": "col#",
"body": "<col id=\"$1\">$2</col>$0"
,
"Column.":
"prefix": "col.",
"body": "<col class=\"$1\">$2</col>$0"
,
"Column Group#":
"prefix": "colgroup#",
"body": "<colgroup id=\"$1\">$2</colgroup>$0"
,
"Column Group.":
"prefix": "colgroup.",
"body": "<colgroup class=\"$1\">$2</colgroup>$0"
,
"Content#":
"prefix": "content#",
"body": "<content id=\"$1\" select=\"$2\">$3</content>$0"
,
"Content.":
"prefix": "content.",
"body": "<content class=\"$1\" select=\"$2\">$3</content>$0"
,
"Data#":
"prefix": "data#",
"body": "<data id=\"$1\" value=\"$2\">$3</data>$0"
,
"Data.":
"prefix": "data.",
"body": "<data class=\"$1\" value=\"$2\">$3</data>$0"
,
"Description#":
"prefix": "dd#",
"body": "<dd id=\"$1\">$2</dd>$0"
,
"Description.":
"prefix": "dd.",
"body": "<dd class=\"$1\">$2</dd>$0"
,
"Deleted Text#":
"prefix": "del#",
"body": "<del id=\"$1\">$2</del>$0"
,
"Deleted Text.":
"prefix": "del.",
"body": "<del class=\"$1\">$2</del>$0"
,
"Details#":
"prefix": "details#",
"body": "<details id=\"$1\" ${2:open}>$3</details>$0"
,
"Details.":
"prefix": "details.",
"body": "<details class=\"$1\" ${2:open}>$3</details>$0"
,
"Definition#":
"prefix": "dfn#",
"body": "<dfn id=\"$1\">$2</dfn>$0"
,
"Definition.":
"prefix": "dfn.",
"body": "<dfn class=\"$1\">$2</dfn>$0"
,
"Definition Term#":
"prefix": "dt#",
"body": "<dt id=\"$1\">$2</dt>$0"
,
"Definition Term.":
"prefix": "dt.",
"body": "<dt class=\"$1\">$2</dt>$0"
,
"Div#":
"prefix": "div#",
"body": "<div id=\"$1\">\n\t$2\n</div>$0"
,
"Div,":
"prefix": "div.",
"body": "<div class=\"$1\">\n\t$2\n</div>$0"
,
"Emphasis#":
"prefix": "em#",
"body": "<em id=\"$1\">$2</em>$0"
,
"Embed#":
"prefix": "embed#",
"body": "<embed id=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Embed.":
"prefix": "embed.",
"body": "<embed class=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Fieldset#":
"prefix": "fieldset#",
"body": "<fieldset id=\"$1\">$2</fieldset>$0"
,
"Fieldset.":
"prefix": "fieldset.",
"body": "<fieldset class=\"$1\">$2</fieldset>$0"
,
"Figure Caption#":
"prefix": "figcaption#",
"body": "<figcaption id=\"$1\">$2</figcaption>$0"
,
"Figure Caption.":
"prefix": "figcaption#",
"body": "<figcaption class=\"$1\">$2</figcaption>$0"
,
"Figure#":
"prefix": "figure#",
"body": "<figure id=\"$1\">$2</figure>$0"
,
"Figure.":
"prefix": "figure.",
"body": "<figure class=\"$1\">$2</figure>$0"
,
"Footer#":
"prefix": "footer#",
"body": "<footer id=\"$1\">$2</footer>$0"
,
"Footer.":
"prefix": "footer.",
"body": "<footer class=\"$1\">$2</footer>$0"
,
"Form#":
"prefix": "form#",
"body": "<form id=\"${1:form_id}\" action=\"${2:index.html}\" method=\"${3:post}\">\n\t$4\n</form>"
,
"Form.":
"prefix": "form.",
"body": "<form id=\"${1:form_id}\" class=\"$2\" action=\"${3:index.html}\" method=\"${4:post}\">\n\t$5\n</form>"
,
"Heading 1#":
"prefix": "h1#",
"body": "<h1 id=\"$1\">$2</h1>$0"
,
"Heading 1.":
"prefix": "h1.",
"body": "<h1 class=\"$1\">$2</h1>$0"
,
"Heading 2#":
"prefix": "h2#",
"body": "<h2 id=\"$1\">$2</h2>$0"
,
"Heading 2.":
"prefix": "h2.",
"body": "<h2 class=\"$1\">$2</h2>$0"
,
"Heading 3#":
"prefix": "h3#",
"body": "<h3 id=\"$1\">$2</h3>$0"
,
"Heading 3.":
"prefix": "h3.",
"body": "<h3 class=\"$1\">$2</h3>$0"
,
"Heading 4#":
"prefix": "h4#",
"body": "<h4 id=\"$1\">$2</h4>$0"
,
"Heading 4.":
"prefix": "h4.",
"body": "<h4 class=\"$1\">$2</h4>$0"
,
"Heading 5#":
"prefix": "h5#",
"body": "<h5 id=\"$1\">$2</h5>$0"
,
"Heading 5.":
"prefix": "h5.",
"body": "<h5 class=\"$1\">$2</h5>$0"
,
"Heading 6#":
"prefix": "h6#",
"body": "<h6 id=\"$1\">$2</h6>$0"
,
"Heading 6.":
"prefix": "h6.",
"body": "<h6 class=\"$1\">$2</h6>$0"
,
"Head#":
"prefix": "head#",
"body": "<head id=\"$1\">\n\t$2\n</head>"
,
"Head.":
"prefix": "head.",
"body": "<head class=\"$1\">\n\t$2\n</head>"
,
"Header#":
"prefix": "header#",
"body": "<header id=\"$1\">\n\t$2\n</header>"
,
"Header.":
"prefix": "header.",
"body": "<header class=\"$1\">\n\t$2\n</header>"
,
"Horizontal Rule#":
"prefix": "hr#",
"body": "<hr id=\"$1\">$0"
,
"Horizontal Rule.":
"prefix": "hr.",
"body": "<hr class=\"$1\">$0"
,
"Italic#":
"prefix": "i#",
"body": "<i id=\"$1\">$2</i>$0"
,
"Italic.":
"prefix": "i.",
"body": "<i class=\"$1\">$2</i>$0"
,
"Inline Frame#":
"prefix": "iframe#",
"body": "<iframe id=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Inline Frame.":
"prefix": "iframe.",
"body": "<iframe class=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Input#":
"prefix": "input#",
"body": "<input id=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Input.":
"prefix": "input.",
"body": "<input class=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Image#":
"prefix": "img#",
"body": "<img id=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Image.":
"prefix": "img.",
"body": "<img class=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Inserted Text#":
"prefix": "ins#",
"body": "<ins id=\"$1\">$2</ins>$0"
,
"Inserted Text.":
"prefix": "ins.",
"body": "<ins class=\"$1\">$2</ins>$0"
,
"Keyboard Input#":
"prefix": "kbd#",
"body": "<kbd id=\"$1\">$2</kbd>$0"
,
"Keyboard Input.":
"prefix": "kbd.",
"body": "<kbd class=\"$1\">$2</kbd>$0"
,
"Keygen#":
"prefix": "keygen#",
"body": "<keygen id=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Keygen.":
"prefix": "keygen.",
"body": "<keygen class=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Label#":
"prefix": "label#",
"body": "<label id=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Label.":
"prefix": "label.",
"body": "<label class=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Legend#":
"prefix": "legend#",
"body": "<legend id=\"$1\">$2</legend>$0"
,
"Legend.":
"prefix": "legend.",
"body": "<legend class=\"$1\">$2</legend>$0"
,
"List Item#":
"prefix": "li#",
"body": "<li id=\"$1\">$2</li>$0"
,
"List Item.":
"prefix": "li.",
"body": "<li class=\"$1\">$2</li>$0"
,
"Link#":
"prefix": "link#",
"body": "<link id=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Link.":
"prefix": "link.",
"body": "<link class=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Main#":
"prefix": "main#",
"body": "<main id=\"$1\">\n\t$2\n</main>"
,
"Main.":
"prefix": "main.",
"body": "<main class=\"$1\">\n\t$2\n</main>"
,
"Map#":
"prefix": "map#",
"body": "<map id=\"$1\">\n\t$2\n</map>"
,
"Map.":
"prefix": "map.",
"body": "<map class=\"$1\">\n\t$2\n</map>"
,
"Mark#":
"prefix": "mark#",
"body": "<mark id=\"$1\">$2</mark>$0"
,
"Mark.":
"prefix": "mark.",
"body": "<mark class=\"$1\">$2</mark>$0"
,
"Menu#":
"prefix": "menu#",
"body": "<menu id=\"$1\">\n\t$2\n</menu>"
,
"Menu.":
"prefix": "menu.",
"body": "<menu class=\"$1\">\n\t$2\n</menu>"
,
"Menu Item#":
"prefix": "menuitem#",
"body": "<menuitem id=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Menu Item,":
"prefix": "menuitem.",
"body": "<menuitem class=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Meter#":
"prefix": "meter#",
"body": "<meter id=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Meter.":
"prefix": "meter.",
"body": "<meter class=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Mail Anchor#":
"prefix": "mailto#",
"body": "<a id=\"$1\" href=\"mailto:${2:<EMAIL>}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Mail Anchor.":
"prefix": "mailto.",
"body": "<a class\"$1\" href=\"mailto:${2:<EMAIL>}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Meta#":
"prefix": "meta#",
"body": "<meta id=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Meta.":
"prefix": "meta.",
"body": "<meta class=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Navigation#":
"prefix": "nav#",
"body": "<nav id=\"$1\">\n\t$2\n</nav>"
,
"Navigation.":
"prefix": "nav.",
"body": "<nav class=\"$1\">\n\t$2\n</nav>"
,
"Noscript#":
"prefix": "noscript#",
"body": "<noscript id=\"$1\">\n\t$2\n</noscript>"
,
"Noscript.":
"prefix": "noscript.",
"body": "<noscript class=\"$1\">\n\t$2\n</noscript>"
,
"Object#":
"prefix": "object#",
"body": "<object id=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Object.":
"prefix": "object.",
"body": "<object class=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Ordered List#":
"prefix": "ol#",
"body": "<ol id=\"$1\">\n\t$2\n</ol>"
,
"Ordered List.":
"prefix": "ol.",
"body": "<ol class=\"$1\">\n\t$2\n</ol>"
,
"Option Group#":
"prefix": "optgroup#",
"body": "<optgroup id=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option Group.":
"prefix": "optgroup.",
"body": "<optgroup class=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option#":
"prefix": "opt#",
"body": "<option id=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Option.":
"prefix": "opt.",
"body": "<option class=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Output#":
"prefix": "output#",
"body": "<output id=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Output.":
"prefix": "output.",
"body": "<output class=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Paragraph#":
"prefix": "p#",
"body": "<p id=\"$1\">\n\t$2\n</p>"
,
"Paragraph.":
"prefix": "p.",
"body": "<p class=\"$1\">\n\t$2\n</p>"
,
"Section#":
"prefix": "section#",
"body": "<section id=\"$1\">\n\t$2\n</section>"
,
"Section.":
"prefix": "section.",
"body": "<section class=\"$1\">\n\t$2\n</section>"
,
"Small#":
"prefix": "small#",
"body": "<small id=\"$1\">$2</small>$0"
,
"Small.":
"prefix": "small.",
"body": "<small class=\"$1\">$2</small>$0"
,
"Span#":
"prefix": "span#",
"body": "<span id=\"$1\">$2</span>$0"
,
"Span.":
"prefix": "span.",
"body": "<span class=\"$1\">$2</span>$0"
,
"Table#":
"prefix": "table#",
"body": "<table id=\"$1\">\n\t$2\n</table>"
,
"Table.":
"prefix": "table.",
"body": "<table class=\"$1\">\n\t$2\n</table>"
,
"Table Cell#":
"prefix": "td#",
"body": "<td id=\"$1\">\n\t$2\n</td>"
,
"Table Cell.":
"prefix": "td.",
"body": "<td class=\"$1\">\n\t$2\n</td>"
,
"Table Header Cell#":
"prefix": "th#",
"body": "<th id=\"$1\">\n\t$2\n</th>"
,
"Table Header Cell.":
"prefix": "th.",
"body": "<th class=\"$1\">\n\t$2\n</th>"
,
"Table Row#":
"prefix": "tr#",
"body": "<tr id=\"$1\">\n\t$2\n</tr>"
,
"Table Row.":
"prefix": "tr.",
"body": "<tr class=\"$1\">\n\t$2\n</tr>"
,
"Unordered List#":
"prefix": "ul#",
"body": "<ul id=\"$1\">\n\t$2\n</ul>"
,
"Unordered List.":
"prefix": "ul.",
"body": "<ul class=\"$1\">\n\t$2\n</ul>"
,
| true | ".text.html":
"Anchor#":
"prefix": "a#",
"body": "<a id=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Anchor.":
"prefix": "a.",
"body": "<a class=\"$1\" href=\"${2:#}\">$3</a>$0"
,
"Abbreviation#":
"prefix": "abbr#",
"body": "<abbr id=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Abbreviation.":
"prefix": "abbr.",
"body": "<abbr class=\"$1\" title=\"$2\">$3</abbr>$0"
,
"Area#":
"prefix": "area#",
"body": "<area id=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Area.":
"prefix": "area.",
"body": "<area class=\"$1\" ${2:shape=\"${3:default}\"} coords=\"$4\" ${5:href=\"${6:#}\"} />$0"
,
"Article#":
"prefix": "article#",
"body": "<article id=\"$1\">\n\t$2\n</article>"
,
"Aside#":
"prefix": "aside#",
"body": "<aside id=\"$1\">\n\t$2\n</aside>"
,
"Aside.":
"prefix": "aside.",
"body": "<aside class=\"$1\">\n\t$2\n</aside>"
,
"Audio#":
"prefix": "audio#",
"body": "<audio id=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Audio.":
"prefix": "audio.",
"body": "<audio class=\"$1\" src=\"$2\">\n\t$3\n</audio>"
,
"Bold#":
"prefix": "b#",
"body": "<b id=\"$1\">$2</b>$0"
,
"Bold.":
"prefix": "b.",
"body": "<b class=\"$1\">$2</b>$0"
,
"Base#":
"prefix": "base#",
"body": "<base id=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Base.":
"prefix": "base.",
"body": "<base class=\"$1\" href=\"${2:#}\" target=\"${3:_blank}\" />$0"
,
"Bi-Directional Isolation#":
"prefix": "bdi#",
"body": "<bdi id=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Isolation.":
"prefix": "bdi.",
"body": "<bdi class=\"$1\" dir=\"${2:auto}\">$3</bdi>$0"
,
"Bi-Directional Override#":
"prefix": "bdo#",
"body": "<bdo id=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Bi-Directional Override.":
"prefix": "bdo.",
"body": "<bdo class=\"$1\" dir=\"${2:auto}\">$3</bdo>$0"
,
"Blockquote#":
"prefix": "blockquote#",
"body": "<blockquote id=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Blockquote.":
"prefix": "blockquote.",
"body": "<blockquote class=\"$1\" cite=\"${2:http://}\">\n\t$3\n</blockquote>"
,
"Line Breaker#":
"prefix": "br#",
"body": "<br id=\"$1\">$0"
,
"Line Breaker.":
"prefix": "br.",
"body": "<br class=\"$1\">$0"
,
"Button#":
"prefix": "button#",
"body": "<button id=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Button.":
"prefix": "button.",
"body": "<button class=\"$1\" name=\"${2:button}\">$3</button>$0"
,
"Caption#":
"prefix": "caption#",
"body": "<caption id=\"$1\">$2</caption>$0"
,
"Caption.":
"prefix": "caption.",
"body": "<caption class=\"$1\">$2</caption>$0"
,
"Citation#":
"prefix": "cite#",
"body": "<cite id=\"$1\">$2</cite>$0"
,
"Citation.":
"prefix": "cite.",
"body": "<cite class=\"$1\">$2</cite>$0"
,
"Code#":
"prefix": "code#",
"body": "<code id=\"$1\">$2</code>$0"
,
"Code.":
"prefix": "code.",
"body": "<code class=\"$1\">$2</code>$0"
,
"Column#":
"prefix": "col#",
"body": "<col id=\"$1\">$2</col>$0"
,
"Column.":
"prefix": "col.",
"body": "<col class=\"$1\">$2</col>$0"
,
"Column Group#":
"prefix": "colgroup#",
"body": "<colgroup id=\"$1\">$2</colgroup>$0"
,
"Column Group.":
"prefix": "colgroup.",
"body": "<colgroup class=\"$1\">$2</colgroup>$0"
,
"Content#":
"prefix": "content#",
"body": "<content id=\"$1\" select=\"$2\">$3</content>$0"
,
"Content.":
"prefix": "content.",
"body": "<content class=\"$1\" select=\"$2\">$3</content>$0"
,
"Data#":
"prefix": "data#",
"body": "<data id=\"$1\" value=\"$2\">$3</data>$0"
,
"Data.":
"prefix": "data.",
"body": "<data class=\"$1\" value=\"$2\">$3</data>$0"
,
"Description#":
"prefix": "dd#",
"body": "<dd id=\"$1\">$2</dd>$0"
,
"Description.":
"prefix": "dd.",
"body": "<dd class=\"$1\">$2</dd>$0"
,
"Deleted Text#":
"prefix": "del#",
"body": "<del id=\"$1\">$2</del>$0"
,
"Deleted Text.":
"prefix": "del.",
"body": "<del class=\"$1\">$2</del>$0"
,
"Details#":
"prefix": "details#",
"body": "<details id=\"$1\" ${2:open}>$3</details>$0"
,
"Details.":
"prefix": "details.",
"body": "<details class=\"$1\" ${2:open}>$3</details>$0"
,
"Definition#":
"prefix": "dfn#",
"body": "<dfn id=\"$1\">$2</dfn>$0"
,
"Definition.":
"prefix": "dfn.",
"body": "<dfn class=\"$1\">$2</dfn>$0"
,
"Definition Term#":
"prefix": "dt#",
"body": "<dt id=\"$1\">$2</dt>$0"
,
"Definition Term.":
"prefix": "dt.",
"body": "<dt class=\"$1\">$2</dt>$0"
,
"Div#":
"prefix": "div#",
"body": "<div id=\"$1\">\n\t$2\n</div>$0"
,
"Div,":
"prefix": "div.",
"body": "<div class=\"$1\">\n\t$2\n</div>$0"
,
"Emphasis#":
"prefix": "em#",
"body": "<em id=\"$1\">$2</em>$0"
,
"Embed#":
"prefix": "embed#",
"body": "<embed id=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Embed.":
"prefix": "embed.",
"body": "<embed class=\"$1\" type=\"${2:video/quicktime}\" src=\"${3:#}\" width=\"${4:300}\" height=\"${5:300}\">$0"
,
"Fieldset#":
"prefix": "fieldset#",
"body": "<fieldset id=\"$1\">$2</fieldset>$0"
,
"Fieldset.":
"prefix": "fieldset.",
"body": "<fieldset class=\"$1\">$2</fieldset>$0"
,
"Figure Caption#":
"prefix": "figcaption#",
"body": "<figcaption id=\"$1\">$2</figcaption>$0"
,
"Figure Caption.":
"prefix": "figcaption#",
"body": "<figcaption class=\"$1\">$2</figcaption>$0"
,
"Figure#":
"prefix": "figure#",
"body": "<figure id=\"$1\">$2</figure>$0"
,
"Figure.":
"prefix": "figure.",
"body": "<figure class=\"$1\">$2</figure>$0"
,
"Footer#":
"prefix": "footer#",
"body": "<footer id=\"$1\">$2</footer>$0"
,
"Footer.":
"prefix": "footer.",
"body": "<footer class=\"$1\">$2</footer>$0"
,
"Form#":
"prefix": "form#",
"body": "<form id=\"${1:form_id}\" action=\"${2:index.html}\" method=\"${3:post}\">\n\t$4\n</form>"
,
"Form.":
"prefix": "form.",
"body": "<form id=\"${1:form_id}\" class=\"$2\" action=\"${3:index.html}\" method=\"${4:post}\">\n\t$5\n</form>"
,
"Heading 1#":
"prefix": "h1#",
"body": "<h1 id=\"$1\">$2</h1>$0"
,
"Heading 1.":
"prefix": "h1.",
"body": "<h1 class=\"$1\">$2</h1>$0"
,
"Heading 2#":
"prefix": "h2#",
"body": "<h2 id=\"$1\">$2</h2>$0"
,
"Heading 2.":
"prefix": "h2.",
"body": "<h2 class=\"$1\">$2</h2>$0"
,
"Heading 3#":
"prefix": "h3#",
"body": "<h3 id=\"$1\">$2</h3>$0"
,
"Heading 3.":
"prefix": "h3.",
"body": "<h3 class=\"$1\">$2</h3>$0"
,
"Heading 4#":
"prefix": "h4#",
"body": "<h4 id=\"$1\">$2</h4>$0"
,
"Heading 4.":
"prefix": "h4.",
"body": "<h4 class=\"$1\">$2</h4>$0"
,
"Heading 5#":
"prefix": "h5#",
"body": "<h5 id=\"$1\">$2</h5>$0"
,
"Heading 5.":
"prefix": "h5.",
"body": "<h5 class=\"$1\">$2</h5>$0"
,
"Heading 6#":
"prefix": "h6#",
"body": "<h6 id=\"$1\">$2</h6>$0"
,
"Heading 6.":
"prefix": "h6.",
"body": "<h6 class=\"$1\">$2</h6>$0"
,
"Head#":
"prefix": "head#",
"body": "<head id=\"$1\">\n\t$2\n</head>"
,
"Head.":
"prefix": "head.",
"body": "<head class=\"$1\">\n\t$2\n</head>"
,
"Header#":
"prefix": "header#",
"body": "<header id=\"$1\">\n\t$2\n</header>"
,
"Header.":
"prefix": "header.",
"body": "<header class=\"$1\">\n\t$2\n</header>"
,
"Horizontal Rule#":
"prefix": "hr#",
"body": "<hr id=\"$1\">$0"
,
"Horizontal Rule.":
"prefix": "hr.",
"body": "<hr class=\"$1\">$0"
,
"Italic#":
"prefix": "i#",
"body": "<i id=\"$1\">$2</i>$0"
,
"Italic.":
"prefix": "i.",
"body": "<i class=\"$1\">$2</i>$0"
,
"Inline Frame#":
"prefix": "iframe#",
"body": "<iframe id=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Inline Frame.":
"prefix": "iframe.",
"body": "<iframe class=\"$1\" src=\"$2\" width=\"$3\" height=\"$4\">$5</iframe>$0"
,
"Input#":
"prefix": "input#",
"body": "<input id=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Input.":
"prefix": "input.",
"body": "<input class=\"$1\" type=\"${2:button}\" name=\"${3:some_name}\" value=\"$4\">$0"
,
"Image#":
"prefix": "img#",
"body": "<img id=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Image.":
"prefix": "img.",
"body": "<img class=\"$1\" src=\"$2\" alt=\"$3\" />$0"
,
"Inserted Text#":
"prefix": "ins#",
"body": "<ins id=\"$1\">$2</ins>$0"
,
"Inserted Text.":
"prefix": "ins.",
"body": "<ins class=\"$1\">$2</ins>$0"
,
"Keyboard Input#":
"prefix": "kbd#",
"body": "<kbd id=\"$1\">$2</kbd>$0"
,
"Keyboard Input.":
"prefix": "kbd.",
"body": "<kbd class=\"$1\">$2</kbd>$0"
,
"Keygen#":
"prefix": "keygen#",
"body": "<keygen id=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Keygen.":
"prefix": "keygen.",
"body": "<keygen class=\"$1\" name=\"${2:name}\" challenge=\"${3:string}\" keytype=\"${4:RSA}\" keyparams=\"${5:medium}\">$0"
,
"Label#":
"prefix": "label#",
"body": "<label id=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Label.":
"prefix": "label.",
"body": "<label class=\"$1\" ${2:for=\"$3\"}></label>$0"
,
"Legend#":
"prefix": "legend#",
"body": "<legend id=\"$1\">$2</legend>$0"
,
"Legend.":
"prefix": "legend.",
"body": "<legend class=\"$1\">$2</legend>$0"
,
"List Item#":
"prefix": "li#",
"body": "<li id=\"$1\">$2</li>$0"
,
"List Item.":
"prefix": "li.",
"body": "<li class=\"$1\">$2</li>$0"
,
"Link#":
"prefix": "link#",
"body": "<link id=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Link.":
"prefix": "link.",
"body": "<link class=\"$1\" rel=\"${2:stylesheet}\" href=\"${3:/css/master.css}\" media=\"${4:screen}\" title=\"${5:no title}\" charset=\"${6:utf-8}\">$0"
,
"Main#":
"prefix": "main#",
"body": "<main id=\"$1\">\n\t$2\n</main>"
,
"Main.":
"prefix": "main.",
"body": "<main class=\"$1\">\n\t$2\n</main>"
,
"Map#":
"prefix": "map#",
"body": "<map id=\"$1\">\n\t$2\n</map>"
,
"Map.":
"prefix": "map.",
"body": "<map class=\"$1\">\n\t$2\n</map>"
,
"Mark#":
"prefix": "mark#",
"body": "<mark id=\"$1\">$2</mark>$0"
,
"Mark.":
"prefix": "mark.",
"body": "<mark class=\"$1\">$2</mark>$0"
,
"Menu#":
"prefix": "menu#",
"body": "<menu id=\"$1\">\n\t$2\n</menu>"
,
"Menu.":
"prefix": "menu.",
"body": "<menu class=\"$1\">\n\t$2\n</menu>"
,
"Menu Item#":
"prefix": "menuitem#",
"body": "<menuitem id=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Menu Item,":
"prefix": "menuitem.",
"body": "<menuitem class=\"$1\" type=\"${2:command}\" label=\"${3:Save}\">$0"
,
"Meter#":
"prefix": "meter#",
"body": "<meter id=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Meter.":
"prefix": "meter.",
"body": "<meter class=\"$1\" min=\"${2:200}\" max=\"${3:500}\" value=\"${4:350}\">$0"
,
"Mail Anchor#":
"prefix": "mailto#",
"body": "<a id=\"$1\" href=\"mailto:${2:PI:EMAIL:<EMAIL>END_PI}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Mail Anchor.":
"prefix": "mailto.",
"body": "<a class\"$1\" href=\"mailto:${2:PI:EMAIL:<EMAIL>END_PI}?subject=${3:feedback}\">${4:email me}</a>$0"
,
"Meta#":
"prefix": "meta#",
"body": "<meta id=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Meta.":
"prefix": "meta.",
"body": "<meta class=\"$1\" name=\"${2:name}\" content=\"${3:content}\">$0"
,
"Navigation#":
"prefix": "nav#",
"body": "<nav id=\"$1\">\n\t$2\n</nav>"
,
"Navigation.":
"prefix": "nav.",
"body": "<nav class=\"$1\">\n\t$2\n</nav>"
,
"Noscript#":
"prefix": "noscript#",
"body": "<noscript id=\"$1\">\n\t$2\n</noscript>"
,
"Noscript.":
"prefix": "noscript.",
"body": "<noscript class=\"$1\">\n\t$2\n</noscript>"
,
"Object#":
"prefix": "object#",
"body": "<object id=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Object.":
"prefix": "object.",
"body": "<object class=\"$1\" data=\"${2:http://}\" type=\"${3:mimetype}\">$4</object>$0"
,
"Ordered List#":
"prefix": "ol#",
"body": "<ol id=\"$1\">\n\t$2\n</ol>"
,
"Ordered List.":
"prefix": "ol.",
"body": "<ol class=\"$1\">\n\t$2\n</ol>"
,
"Option Group#":
"prefix": "optgroup#",
"body": "<optgroup id=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option Group.":
"prefix": "optgroup.",
"body": "<optgroup class=\"$1\" label=\"${2:Group 1}\">\n\t$3\n</optgroup>"
,
"Option#":
"prefix": "opt#",
"body": "<option id=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Option.":
"prefix": "opt.",
"body": "<option class=\"$1\"${2: value=\"${3:option}\"}>${4:option}</option>$0"
,
"Output#":
"prefix": "output#",
"body": "<output id=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Output.":
"prefix": "output.",
"body": "<output class=\"$1\" name=\"${2:result}\">$3</output>$0"
,
"Paragraph#":
"prefix": "p#",
"body": "<p id=\"$1\">\n\t$2\n</p>"
,
"Paragraph.":
"prefix": "p.",
"body": "<p class=\"$1\">\n\t$2\n</p>"
,
"Section#":
"prefix": "section#",
"body": "<section id=\"$1\">\n\t$2\n</section>"
,
"Section.":
"prefix": "section.",
"body": "<section class=\"$1\">\n\t$2\n</section>"
,
"Small#":
"prefix": "small#",
"body": "<small id=\"$1\">$2</small>$0"
,
"Small.":
"prefix": "small.",
"body": "<small class=\"$1\">$2</small>$0"
,
"Span#":
"prefix": "span#",
"body": "<span id=\"$1\">$2</span>$0"
,
"Span.":
"prefix": "span.",
"body": "<span class=\"$1\">$2</span>$0"
,
"Table#":
"prefix": "table#",
"body": "<table id=\"$1\">\n\t$2\n</table>"
,
"Table.":
"prefix": "table.",
"body": "<table class=\"$1\">\n\t$2\n</table>"
,
"Table Cell#":
"prefix": "td#",
"body": "<td id=\"$1\">\n\t$2\n</td>"
,
"Table Cell.":
"prefix": "td.",
"body": "<td class=\"$1\">\n\t$2\n</td>"
,
"Table Header Cell#":
"prefix": "th#",
"body": "<th id=\"$1\">\n\t$2\n</th>"
,
"Table Header Cell.":
"prefix": "th.",
"body": "<th class=\"$1\">\n\t$2\n</th>"
,
"Table Row#":
"prefix": "tr#",
"body": "<tr id=\"$1\">\n\t$2\n</tr>"
,
"Table Row.":
"prefix": "tr.",
"body": "<tr class=\"$1\">\n\t$2\n</tr>"
,
"Unordered List#":
"prefix": "ul#",
"body": "<ul id=\"$1\">\n\t$2\n</ul>"
,
"Unordered List.":
"prefix": "ul.",
"body": "<ul class=\"$1\">\n\t$2\n</ul>"
,
|
[
{
"context": "ters.Sepia extends App.TrackFilters.Base\n name: 'Sepia'\n filterClass: WAGNER.SepiaPass\n",
"end": 71,
"score": 0.6139640212059021,
"start": 68,
"tag": "NAME",
"value": "Sep"
}
] | public-source/javascripts/models/trackFilters/sepia.coffee | lekevicius/vijual | 1 | class App.TrackFilters.Sepia extends App.TrackFilters.Base
name: 'Sepia'
filterClass: WAGNER.SepiaPass
| 194663 | class App.TrackFilters.Sepia extends App.TrackFilters.Base
name: '<NAME>ia'
filterClass: WAGNER.SepiaPass
| true | class App.TrackFilters.Sepia extends App.TrackFilters.Base
name: 'PI:NAME:<NAME>END_PIia'
filterClass: WAGNER.SepiaPass
|
[
{
"context": "ecode a password correctly', ->\n password = \"A39CADD77ED72A9C75467D0F5A5C88BFCD75370DD63E3388D3F402AF50C4E5029071B0965C343B99B6D6636A8698562DDB2EE51020D87EA3\"\n result = \"HelloWorld\"\n expect(cisco_d",
"end": 476,
"score": 0.9889700412750244,
"start": 364,
"tag"... | test/test.coffee | kkoscielniak/cisco-password-decoder-cli | 0 | chai = require 'chai'
expect = chai.expect
cisco_decoder = require('../src/index')
describe 'remove_nullbyte', ->
it 'should remove nullbyte', ->
expect(cisco_decoder.remove_nullbyte("HelloWorld\u0000")).to.equal "HelloWorld"
describe 'cisco_password_decoder', ->
task1 = task2 = null
it 'should decode a password correctly', ->
password = "A39CADD77ED72A9C75467D0F5A5C88BFCD75370DD63E3388D3F402AF50C4E5029071B0965C343B99B6D6636A8698562DDB2EE51020D87EA3"
result = "HelloWorld"
expect(cisco_decoder.decrypt_password(password)).to.equal result
| 21571 | chai = require 'chai'
expect = chai.expect
cisco_decoder = require('../src/index')
describe 'remove_nullbyte', ->
it 'should remove nullbyte', ->
expect(cisco_decoder.remove_nullbyte("HelloWorld\u0000")).to.equal "HelloWorld"
describe 'cisco_password_decoder', ->
task1 = task2 = null
it 'should decode a password correctly', ->
password = "<PASSWORD>"
result = "HelloWorld"
expect(cisco_decoder.decrypt_password(password)).to.equal result
| true | chai = require 'chai'
expect = chai.expect
cisco_decoder = require('../src/index')
describe 'remove_nullbyte', ->
it 'should remove nullbyte', ->
expect(cisco_decoder.remove_nullbyte("HelloWorld\u0000")).to.equal "HelloWorld"
describe 'cisco_password_decoder', ->
task1 = task2 = null
it 'should decode a password correctly', ->
password = "PI:PASSWORD:<PASSWORD>END_PI"
result = "HelloWorld"
expect(cisco_decoder.decrypt_password(password)).to.equal result
|
[
{
"context": " url: 'mongodb://localhost/session'\n secret: '1234567890QWERTY'\n maxAge : new Date(Date.now() + 24 * 3600000",
"end": 6204,
"score": 0.9947549104690552,
"start": 6188,
"tag": "KEY",
"value": "1234567890QWERTY"
}
] | src/app.coffee | breeswish/swall-server | 2 | express = require 'express'
path = require 'path'
favicon = require 'serve-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
mongoose = require 'mongoose'
urlparser = require 'url'
https = require 'https'
fs = require 'fs'
compression = require 'compression'
spdy = require 'spdy'
GLOBAL.DEBUG = true
GLOBAL.config = require './config.json'
app = require('express')()
if DEBUG
server = require('http').Server(app)
else
spdyOptions =
key: fs.readFileSync(path.join(__dirname, '../www_swall_me.key'))
cert: fs.readFileSync(path.join(__dirname, '../www_swall_me_bundle.crt'))
server = spdy.createServer(spdyOptions, app)
GLOBAL.io = require('socket.io')(server)
if DEBUG
# server = app.listen 3000, ()->
# host = server.address().address
# port = server.address().port
# console.log('App listening at http://%s:%s', host, port)
server.listen 3000
else
server.listen 443
app_http = require('express')()
app_http.all '*', (req, res)->
res.redirect 'https://swall.me' + req.url
res.end()
if not DEBUG
app_http.listen 80
app.use compression()
routes = require '../build/routes/index'
users = require '../build/routes/users'
GLOBAL.db = mongoose.createConnection 'mongodb://localhost/swall'
usrInfo = mongoose.Schema
username: String
password: String
activities: Array
time: String
msgInfo = mongoose.Schema
color: String
id: Number
time: Number
ip: String
ua: String
msg: String
belongto: String
actInfo = mongoose.Schema
actid: Number
title: String
buttonbox: Array
colors: Array
keywords: Array
User = db.model 'User', usrInfo
Comment = db.model 'Comment', msgInfo
Activity = db.model 'Activity', actInfo
GLOBAL.User = User
GLOBAL.Comment = Comment
GLOBAL.Activity = Activity
GLOBAL.info = {}
# Filter
# GLOBAL.myFilter = (msg, array)->
#Function used to darken.
GLOBAL.colorLuminance = (hex, lum)->
hex = String(hex).replace(/[^0-9a-f]/gi, '')
if (hex.length < 6)
hex = hex[0]+hex[0]+hex[1]+hex[1]+hex[2]+hex[2]
lum = lum || 0
rgb = "#"
i = 0
while i<3
c = parseInt(hex.substr(i*2,2), 16)
c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16)
rgb += ("00"+c).substr(c.length)
++i
return rgb
GLOBAL.calButtonWidth = (id)->
((100 - (info[id].buttonbox.length - 1) * 1.25) / info[id].buttonbox.length) + "%"
GLOBAL.calButtonHeight = (id)->
((100 - (info[id].buttonbox.length - 1) * 5) / info[id].buttonbox.length) + "%"
Activity.find {'actid': 1}, (err, docs)->
if err
return console.log err
else if docs.length == 0
id_1 =
actid: 1
title: '2014同济大学软件学院迎新晚会'
buttonbox: [
{bg: '#f8f8f8', bb: colorLuminance('#f8f8ff', -0.2)}
{bg: '#79bd8f', bb: colorLuminance('#79bd8f', -0.2)}
{bg: '#00b8ff', bb: colorLuminance('#00b8ff', -0.2)}
]
colors: [
'#f8f8f8'
'#79bd8f'
'#00b8ff'
]
keywords: config.keywords
activity1 = Activity id_1
activity1.save (err, activity1)->
if err
return console.log err
Activity.find {}, (err, docs)->
if err
return console.log err
for activity in docs
info['id_' + activity.actid] = activity
info['id_' + activity.actid].buttonwidth = calButtonWidth('id_1')
info['id_' + activity.actid].buttonheight = calButtonHeight('id_1')
info.page = 1
Activity.count (err, count)->
info.total_activity = count
console.log count
checkMsg = (msg, keywords)->
for keyword in keywords
if msg.indexOf(keyword) != -1
return true
return false
GLOBAL.filterKeyword = (msg, keywords)->
# English with punctuation
english = msg.replace /[\u4e00-\u9fff\u3400-\u4dff\uf900-\ufaff0-9\s]/g, ''
english = english.toLowerCase()
# Chinese with punctuation
chinese = msg.replace /[A-Za-z0-9\s]/g, ''
# English without punctuation
engNoPu = english.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
# Chinese without punctuation
chiNoPu = chinese.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
if (
checkMsg(msg, keywords) or
checkMsg(english, keywords) or
checkMsg(chinese, keywords) or
checkMsg(engNoPu, keywords) or
checkMsg(chiNoPu, keywords)
)
true
else
false
io.on 'connect', (socket)->
# Client ask for message
socket.on '/subscribe', (data)->
# add to subscribe pool
socket.join data.id
socket.emit 'sucscribeOk', data.id
socket.on '/unsubscribe', (data)->
if data == 'all'
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
else
# unsubscribe socket, data.id
socket.leave data.id
socket.on 'disconnect', ()->
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
io.on 'disconnect', ()->
console.log 'disconnected.'
# view engine setup
app.set 'views', path.join(__dirname, '../views')
app.set 'view engine', 'ejs'
app.use(logger('dev'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({extended: false}))
app.use(cookieParser())
app.use(express.static(path.join __dirname, 'public'))
session = require('express-session')
MongoStore = require('connect-mongo')(session)
app.use session
store: new MongoStore
url: 'mongodb://localhost/session'
secret: '1234567890QWERTY'
maxAge : new Date(Date.now() + 24 * 3600000) # 1 day
expires : new Date(Date.now() + 24 * 3600000) # 1 day
saveUninitialized: true,
resave: true
app.use '/', routes
app.use '/users', users
# catch 404 and forward to error handler
app.use (req, res, next)->
err = new Error('Not Found')
err.status = 404
res.status 404
res.render '404'
# error handlers
# development error handler
# will print stacktrace
if (app.get('env') == 'development')
app.use (err, req, res, next)->
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
})
# production error handler
# no stacktraces leaked to user
app.use (err, req, res, next)->
res.status(err.status || 500)
res.render('error', {
message: err.message,
error: {}
})
module.exports = app
| 23551 | express = require 'express'
path = require 'path'
favicon = require 'serve-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
mongoose = require 'mongoose'
urlparser = require 'url'
https = require 'https'
fs = require 'fs'
compression = require 'compression'
spdy = require 'spdy'
GLOBAL.DEBUG = true
GLOBAL.config = require './config.json'
app = require('express')()
if DEBUG
server = require('http').Server(app)
else
spdyOptions =
key: fs.readFileSync(path.join(__dirname, '../www_swall_me.key'))
cert: fs.readFileSync(path.join(__dirname, '../www_swall_me_bundle.crt'))
server = spdy.createServer(spdyOptions, app)
GLOBAL.io = require('socket.io')(server)
if DEBUG
# server = app.listen 3000, ()->
# host = server.address().address
# port = server.address().port
# console.log('App listening at http://%s:%s', host, port)
server.listen 3000
else
server.listen 443
app_http = require('express')()
app_http.all '*', (req, res)->
res.redirect 'https://swall.me' + req.url
res.end()
if not DEBUG
app_http.listen 80
app.use compression()
routes = require '../build/routes/index'
users = require '../build/routes/users'
GLOBAL.db = mongoose.createConnection 'mongodb://localhost/swall'
usrInfo = mongoose.Schema
username: String
password: String
activities: Array
time: String
msgInfo = mongoose.Schema
color: String
id: Number
time: Number
ip: String
ua: String
msg: String
belongto: String
actInfo = mongoose.Schema
actid: Number
title: String
buttonbox: Array
colors: Array
keywords: Array
User = db.model 'User', usrInfo
Comment = db.model 'Comment', msgInfo
Activity = db.model 'Activity', actInfo
GLOBAL.User = User
GLOBAL.Comment = Comment
GLOBAL.Activity = Activity
GLOBAL.info = {}
# Filter
# GLOBAL.myFilter = (msg, array)->
#Function used to darken.
GLOBAL.colorLuminance = (hex, lum)->
hex = String(hex).replace(/[^0-9a-f]/gi, '')
if (hex.length < 6)
hex = hex[0]+hex[0]+hex[1]+hex[1]+hex[2]+hex[2]
lum = lum || 0
rgb = "#"
i = 0
while i<3
c = parseInt(hex.substr(i*2,2), 16)
c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16)
rgb += ("00"+c).substr(c.length)
++i
return rgb
GLOBAL.calButtonWidth = (id)->
((100 - (info[id].buttonbox.length - 1) * 1.25) / info[id].buttonbox.length) + "%"
GLOBAL.calButtonHeight = (id)->
((100 - (info[id].buttonbox.length - 1) * 5) / info[id].buttonbox.length) + "%"
Activity.find {'actid': 1}, (err, docs)->
if err
return console.log err
else if docs.length == 0
id_1 =
actid: 1
title: '2014同济大学软件学院迎新晚会'
buttonbox: [
{bg: '#f8f8f8', bb: colorLuminance('#f8f8ff', -0.2)}
{bg: '#79bd8f', bb: colorLuminance('#79bd8f', -0.2)}
{bg: '#00b8ff', bb: colorLuminance('#00b8ff', -0.2)}
]
colors: [
'#f8f8f8'
'#79bd8f'
'#00b8ff'
]
keywords: config.keywords
activity1 = Activity id_1
activity1.save (err, activity1)->
if err
return console.log err
Activity.find {}, (err, docs)->
if err
return console.log err
for activity in docs
info['id_' + activity.actid] = activity
info['id_' + activity.actid].buttonwidth = calButtonWidth('id_1')
info['id_' + activity.actid].buttonheight = calButtonHeight('id_1')
info.page = 1
Activity.count (err, count)->
info.total_activity = count
console.log count
checkMsg = (msg, keywords)->
for keyword in keywords
if msg.indexOf(keyword) != -1
return true
return false
GLOBAL.filterKeyword = (msg, keywords)->
# English with punctuation
english = msg.replace /[\u4e00-\u9fff\u3400-\u4dff\uf900-\ufaff0-9\s]/g, ''
english = english.toLowerCase()
# Chinese with punctuation
chinese = msg.replace /[A-Za-z0-9\s]/g, ''
# English without punctuation
engNoPu = english.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
# Chinese without punctuation
chiNoPu = chinese.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
if (
checkMsg(msg, keywords) or
checkMsg(english, keywords) or
checkMsg(chinese, keywords) or
checkMsg(engNoPu, keywords) or
checkMsg(chiNoPu, keywords)
)
true
else
false
io.on 'connect', (socket)->
# Client ask for message
socket.on '/subscribe', (data)->
# add to subscribe pool
socket.join data.id
socket.emit 'sucscribeOk', data.id
socket.on '/unsubscribe', (data)->
if data == 'all'
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
else
# unsubscribe socket, data.id
socket.leave data.id
socket.on 'disconnect', ()->
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
io.on 'disconnect', ()->
console.log 'disconnected.'
# view engine setup
app.set 'views', path.join(__dirname, '../views')
app.set 'view engine', 'ejs'
app.use(logger('dev'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({extended: false}))
app.use(cookieParser())
app.use(express.static(path.join __dirname, 'public'))
session = require('express-session')
MongoStore = require('connect-mongo')(session)
app.use session
store: new MongoStore
url: 'mongodb://localhost/session'
secret: '<KEY>'
maxAge : new Date(Date.now() + 24 * 3600000) # 1 day
expires : new Date(Date.now() + 24 * 3600000) # 1 day
saveUninitialized: true,
resave: true
app.use '/', routes
app.use '/users', users
# catch 404 and forward to error handler
app.use (req, res, next)->
err = new Error('Not Found')
err.status = 404
res.status 404
res.render '404'
# error handlers
# development error handler
# will print stacktrace
if (app.get('env') == 'development')
app.use (err, req, res, next)->
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
})
# production error handler
# no stacktraces leaked to user
app.use (err, req, res, next)->
res.status(err.status || 500)
res.render('error', {
message: err.message,
error: {}
})
module.exports = app
| true | express = require 'express'
path = require 'path'
favicon = require 'serve-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
mongoose = require 'mongoose'
urlparser = require 'url'
https = require 'https'
fs = require 'fs'
compression = require 'compression'
spdy = require 'spdy'
GLOBAL.DEBUG = true
GLOBAL.config = require './config.json'
app = require('express')()
if DEBUG
server = require('http').Server(app)
else
spdyOptions =
key: fs.readFileSync(path.join(__dirname, '../www_swall_me.key'))
cert: fs.readFileSync(path.join(__dirname, '../www_swall_me_bundle.crt'))
server = spdy.createServer(spdyOptions, app)
GLOBAL.io = require('socket.io')(server)
if DEBUG
# server = app.listen 3000, ()->
# host = server.address().address
# port = server.address().port
# console.log('App listening at http://%s:%s', host, port)
server.listen 3000
else
server.listen 443
app_http = require('express')()
app_http.all '*', (req, res)->
res.redirect 'https://swall.me' + req.url
res.end()
if not DEBUG
app_http.listen 80
app.use compression()
routes = require '../build/routes/index'
users = require '../build/routes/users'
GLOBAL.db = mongoose.createConnection 'mongodb://localhost/swall'
usrInfo = mongoose.Schema
username: String
password: String
activities: Array
time: String
msgInfo = mongoose.Schema
color: String
id: Number
time: Number
ip: String
ua: String
msg: String
belongto: String
actInfo = mongoose.Schema
actid: Number
title: String
buttonbox: Array
colors: Array
keywords: Array
User = db.model 'User', usrInfo
Comment = db.model 'Comment', msgInfo
Activity = db.model 'Activity', actInfo
GLOBAL.User = User
GLOBAL.Comment = Comment
GLOBAL.Activity = Activity
GLOBAL.info = {}
# Filter
# GLOBAL.myFilter = (msg, array)->
#Function used to darken.
GLOBAL.colorLuminance = (hex, lum)->
hex = String(hex).replace(/[^0-9a-f]/gi, '')
if (hex.length < 6)
hex = hex[0]+hex[0]+hex[1]+hex[1]+hex[2]+hex[2]
lum = lum || 0
rgb = "#"
i = 0
while i<3
c = parseInt(hex.substr(i*2,2), 16)
c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16)
rgb += ("00"+c).substr(c.length)
++i
return rgb
GLOBAL.calButtonWidth = (id)->
((100 - (info[id].buttonbox.length - 1) * 1.25) / info[id].buttonbox.length) + "%"
GLOBAL.calButtonHeight = (id)->
((100 - (info[id].buttonbox.length - 1) * 5) / info[id].buttonbox.length) + "%"
Activity.find {'actid': 1}, (err, docs)->
if err
return console.log err
else if docs.length == 0
id_1 =
actid: 1
title: '2014同济大学软件学院迎新晚会'
buttonbox: [
{bg: '#f8f8f8', bb: colorLuminance('#f8f8ff', -0.2)}
{bg: '#79bd8f', bb: colorLuminance('#79bd8f', -0.2)}
{bg: '#00b8ff', bb: colorLuminance('#00b8ff', -0.2)}
]
colors: [
'#f8f8f8'
'#79bd8f'
'#00b8ff'
]
keywords: config.keywords
activity1 = Activity id_1
activity1.save (err, activity1)->
if err
return console.log err
Activity.find {}, (err, docs)->
if err
return console.log err
for activity in docs
info['id_' + activity.actid] = activity
info['id_' + activity.actid].buttonwidth = calButtonWidth('id_1')
info['id_' + activity.actid].buttonheight = calButtonHeight('id_1')
info.page = 1
Activity.count (err, count)->
info.total_activity = count
console.log count
checkMsg = (msg, keywords)->
for keyword in keywords
if msg.indexOf(keyword) != -1
return true
return false
GLOBAL.filterKeyword = (msg, keywords)->
# English with punctuation
english = msg.replace /[\u4e00-\u9fff\u3400-\u4dff\uf900-\ufaff0-9\s]/g, ''
english = english.toLowerCase()
# Chinese with punctuation
chinese = msg.replace /[A-Za-z0-9\s]/g, ''
# English without punctuation
engNoPu = english.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
# Chinese without punctuation
chiNoPu = chinese.replace /[\ |\~\~|\`\`|\!\!|\@\@|\#\#|\$\¥|\%\%|\^\^|\&\—|\*\*|\(\(|\)\)|\-\-|\_\—|\+\+|\=\=|\|\||\\\\|\[\[|\]\]|\{\{|\}\}|\;\;|\:\:|\"\“\”|\'\‘\’|\,\,|\<\《|\.\。|\>\》|\/\、\/|\?\?]/g, ''
if (
checkMsg(msg, keywords) or
checkMsg(english, keywords) or
checkMsg(chinese, keywords) or
checkMsg(engNoPu, keywords) or
checkMsg(chiNoPu, keywords)
)
true
else
false
io.on 'connect', (socket)->
# Client ask for message
socket.on '/subscribe', (data)->
# add to subscribe pool
socket.join data.id
socket.emit 'sucscribeOk', data.id
socket.on '/unsubscribe', (data)->
if data == 'all'
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
else
# unsubscribe socket, data.id
socket.leave data.id
socket.on 'disconnect', ()->
# unsubscribeAll socket
for room in io.sockets.adapter.rooms
socket.leave room
io.on 'disconnect', ()->
console.log 'disconnected.'
# view engine setup
app.set 'views', path.join(__dirname, '../views')
app.set 'view engine', 'ejs'
app.use(logger('dev'))
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({extended: false}))
app.use(cookieParser())
app.use(express.static(path.join __dirname, 'public'))
session = require('express-session')
MongoStore = require('connect-mongo')(session)
app.use session
store: new MongoStore
url: 'mongodb://localhost/session'
secret: 'PI:KEY:<KEY>END_PI'
maxAge : new Date(Date.now() + 24 * 3600000) # 1 day
expires : new Date(Date.now() + 24 * 3600000) # 1 day
saveUninitialized: true,
resave: true
app.use '/', routes
app.use '/users', users
# catch 404 and forward to error handler
app.use (req, res, next)->
err = new Error('Not Found')
err.status = 404
res.status 404
res.render '404'
# error handlers
# development error handler
# will print stacktrace
if (app.get('env') == 'development')
app.use (err, req, res, next)->
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
})
# production error handler
# no stacktraces leaked to user
app.use (err, req, res, next)->
res.status(err.status || 500)
res.render('error', {
message: err.message,
error: {}
})
module.exports = app
|
[
{
"context": "erName: data.prefix + i.trim()\n password: @makeId()\n roles: data.roles\n message: da",
"end": 1065,
"score": 0.9953889846801758,
"start": 1058,
"tag": "PASSWORD",
"value": "@makeId"
},
{
"context": "rName: data.prefix + @makeId()\n ... | Web.App/app/modules/admin/views/users-generator-view.coffee | vip32/eventfeedback | 0 | application = require 'application'
vent = require 'vent'
module.exports = class UsersGeneratorView extends Backbone.Marionette.CompositeView
id: 'users-generator-view'
template: require './templates/users-generator'
itemView: require './users-generator-item-view'
itemViewContainer: '#js-users'
events:
'click #js-generate': 'onGenerate'
'click #js-clear': 'onClear'
'click #js-print': 'onPrintClick'
initialize: (options) ->
@resources = options?.resources
@roles = options?.roles
serializeData: ->
resources: @resources?.toJSON()
roles: @roles?.pluck('name')
itemViewOptions: ->
resources: @resources
onShow: ->
scrollTo(0,0)
onClear: (e) ->
e.preventDefault()
@collection.reset()
onGenerate: (e) ->
e.preventDefault()
@collection.reset()
data = Backbone.Syphon.serialize(@)
# add by names
for i in data.accountnames.split(';')
if i.trim() isnt ""
@collection.add
userName: data.prefix + i.trim()
password: @makeId()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
# add by amount
if data.amount > 0
for i in [1..data.amount]
@collection.add
userName: data.prefix + @makeId()
password: @makeId()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
vent.trigger 'save:users'
onPrintClick: (e) ->
e.preventDefault()
# add a page-break after every 4th element
$('#js-users .list-group-item:nth-child(4n)').css('page-break-after', 'always')
# setup the user list for printing
css = '<link href="www/stylesheets/app.css" rel="stylesheet" type="text/css">'
window.frames["print_frame"].document.body.innerHTML= css + document.getElementById("js-users").innerHTML
window.frames["print_frame"].window.focus()
window.frames["print_frame"].window.print()
makeId: ->
text = ''
possible = 'abcdefghjkmnpqrstuvwxy23456789'
i = 0
while i < 5
text += possible.charAt(Math.floor(Math.random() * possible.length))
i++
text
onSave: ->
vent.trigger 'save:users'
# TODO: button > trigger event for controller to save all 'dirty' models
#onBack: =>
# vent.trigger 'admin:users:edit'
onClose: ->
#vent.off 'navigation:back', @onBack
log 'user-generator view close'
| 191658 | application = require 'application'
vent = require 'vent'
module.exports = class UsersGeneratorView extends Backbone.Marionette.CompositeView
id: 'users-generator-view'
template: require './templates/users-generator'
itemView: require './users-generator-item-view'
itemViewContainer: '#js-users'
events:
'click #js-generate': 'onGenerate'
'click #js-clear': 'onClear'
'click #js-print': 'onPrintClick'
initialize: (options) ->
@resources = options?.resources
@roles = options?.roles
serializeData: ->
resources: @resources?.toJSON()
roles: @roles?.pluck('name')
itemViewOptions: ->
resources: @resources
onShow: ->
scrollTo(0,0)
onClear: (e) ->
e.preventDefault()
@collection.reset()
onGenerate: (e) ->
e.preventDefault()
@collection.reset()
data = Backbone.Syphon.serialize(@)
# add by names
for i in data.accountnames.split(';')
if i.trim() isnt ""
@collection.add
userName: data.prefix + i.trim()
password: <PASSWORD>()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
# add by amount
if data.amount > 0
for i in [1..data.amount]
@collection.add
userName: data.prefix + @makeId()
password: <PASSWORD>()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
vent.trigger 'save:users'
onPrintClick: (e) ->
e.preventDefault()
# add a page-break after every 4th element
$('#js-users .list-group-item:nth-child(4n)').css('page-break-after', 'always')
# setup the user list for printing
css = '<link href="www/stylesheets/app.css" rel="stylesheet" type="text/css">'
window.frames["print_frame"].document.body.innerHTML= css + document.getElementById("js-users").innerHTML
window.frames["print_frame"].window.focus()
window.frames["print_frame"].window.print()
makeId: ->
text = ''
possible = 'abcdefghjkmnpqrstuvwxy23456789'
i = 0
while i < 5
text += possible.charAt(Math.floor(Math.random() * possible.length))
i++
text
onSave: ->
vent.trigger 'save:users'
# TODO: button > trigger event for controller to save all 'dirty' models
#onBack: =>
# vent.trigger 'admin:users:edit'
onClose: ->
#vent.off 'navigation:back', @onBack
log 'user-generator view close'
| true | application = require 'application'
vent = require 'vent'
module.exports = class UsersGeneratorView extends Backbone.Marionette.CompositeView
id: 'users-generator-view'
template: require './templates/users-generator'
itemView: require './users-generator-item-view'
itemViewContainer: '#js-users'
events:
'click #js-generate': 'onGenerate'
'click #js-clear': 'onClear'
'click #js-print': 'onPrintClick'
initialize: (options) ->
@resources = options?.resources
@roles = options?.roles
serializeData: ->
resources: @resources?.toJSON()
roles: @roles?.pluck('name')
itemViewOptions: ->
resources: @resources
onShow: ->
scrollTo(0,0)
onClear: (e) ->
e.preventDefault()
@collection.reset()
onGenerate: (e) ->
e.preventDefault()
@collection.reset()
data = Backbone.Syphon.serialize(@)
# add by names
for i in data.accountnames.split(';')
if i.trim() isnt ""
@collection.add
userName: data.prefix + i.trim()
password: PI:PASSWORD:<PASSWORD>END_PI()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
# add by amount
if data.amount > 0
for i in [1..data.amount]
@collection.add
userName: data.prefix + @makeId()
password: PI:PASSWORD:<PASSWORD>END_PI()
roles: data.roles
message: data.message
activefrom: data.activefrom
activetill: data.activetill
active: true
dirty: true
vent.trigger 'save:users'
onPrintClick: (e) ->
e.preventDefault()
# add a page-break after every 4th element
$('#js-users .list-group-item:nth-child(4n)').css('page-break-after', 'always')
# setup the user list for printing
css = '<link href="www/stylesheets/app.css" rel="stylesheet" type="text/css">'
window.frames["print_frame"].document.body.innerHTML= css + document.getElementById("js-users").innerHTML
window.frames["print_frame"].window.focus()
window.frames["print_frame"].window.print()
makeId: ->
text = ''
possible = 'abcdefghjkmnpqrstuvwxy23456789'
i = 0
while i < 5
text += possible.charAt(Math.floor(Math.random() * possible.length))
i++
text
onSave: ->
vent.trigger 'save:users'
# TODO: button > trigger event for controller to save all 'dirty' models
#onBack: =>
# vent.trigger 'admin:users:edit'
onClose: ->
#vent.off 'navigation:back', @onBack
log 'user-generator view close'
|
[
{
"context": "'path'\n\n\n# modified from cli-scrape\n#\n# (c) 2012 Philip Thrasher & 2015 Richard Smith-Unna\n#\n# cli-scrape may be ",
"end": 229,
"score": 0.9998822212219238,
"start": 214,
"tag": "NAME",
"value": "Philip Thrasher"
},
{
"context": "om cli-scrape\n#\n# (c) 2012 ... | src/bo.coffee | Blahah/beau-selector | 0 | fs = require 'fs'
path = require 'path'
log = require 'verbalize'
optimist = require 'optimist'
jsdom = require 'jsdom'
log = require 'npmlog'
path = require 'path'
# modified from cli-scrape
#
# (c) 2012 Philip Thrasher & 2015 Richard Smith-Unna
#
# cli-scrape may be freely distributed under the MIT license.
# For all details and documentation:
#
# http://pthrasher.github.com/cli-scrape/
# Usage
# -----
#
# `cat file.html | bo 'p:first-child'`
#
# `wget http://whatthecommit.com/ | bo '//p[0]'`
# use strict, yo!
'use strict'
absPath = (relPath) ->
path.resolve __dirname, relPath
# These are the libraries used to do the parsing on the page. If the query is
# an xpath query, XPATH\_LIBS is used. If not, CSS\_LIBS is used instead.
XPATH_LIBS = [absPath('wgxpath.install.js')]
CSS_LIBS = [absPath('qwery.min.js', 'qwery-pseudos.min.js')]
# useXPath
# --------
#
# Determine whether or not the query passed in is an xpath query or a css query
useXPath = (query) ->
query.indexOf('/') is 0
# getArgs
# -------
#
# Wrapper for optimise that allows us to clean up the args as they come in.
getArgs = ->
usage = 'Usage: bo [xpath|css]\n\n' +
'If no attribute is given, innerHTML, text or a plain string\n' +
'will be returned (depending on what is matched)'
argv = optimist
.usage(usage)
.alias('a', 'attribute')
.default('a', undefined)
.alias('o', 'outer')
.describe('o', 'get outer XML of the element')
.alias('l', 'loglevel')
.default('l', 'silent')
.demand(1)
.argv
[ query ] = argv._
query = query.trim()
attribute = argv.attribute?.trim()
outer = argv.outer
loglevel = argv.loglevel.trim()
{
query
attribute
outer
loglevel
}
# fetchHTML
# ---------
#
# This is essentially a simple logging wrapper around request.get
fetchHTML = (url, cb) ->
request.get url, (err, response, body) ->
if err?
log.error 'http', "[#{response.statusCode}] #{err}"
return cb err, response, body
log.verbose 'http', "[#{response.statusCode}] Fetched '#{url}' successfully."
cb err, response, body
# domParse
# --------
#
# This is essentially a simple logging wrapper around jsdom.env
domParse = (html, libs, cb) ->
opts =
html: html
scripts: libs
parsingMode: 'xml'
done: (err, window) ->
if err?
log.error 'parse', "Error processing DOM with libs: [ '#{libs.join '\', \''}' ]. (#{err})"
return cb err, window
log.verbose 'parse', "DOM parse successful with libs: [ '#{libs.join '\', \''}' ]."
cb err, window
jsdom.env opts
# elToString
# ----------
#
# depending on the query given by the user, we will be getting an html element,
# or a plain old string. This should be handled elegantly.
elToString = (el, attribute, outer) ->
ret = ''
if attribute
log.verbose 'elToString', "Fetching #{attribute} from '#{el}'."
ret = el.getAttribute(attribute)
else if outer && el.outerHTML?
log.verbose 'elToString', "Fetching outerHTML from '#{el}'."
ret = el.outerHTML.trim()
else if el.innerHTML?
log.verbose 'elToString', "Fetching innerHTML from '#{el}'."
ret = el.innerHTML.trim()
else if el.textContent?
log.verbose 'elToString', "Fetching textContent from '#{el}'."
ret = el.textContent.trim()
else if Object.prototype.toString.call(el) is '[object String]'
log.verbose 'elToString', "Content already a string: '#{el}'."
ret = el.trim()
# replace newlines with spaces so we get one result per line
ret.replace(/\r?\n|\r/g, ' ')
# executeXPath
# ---------------
#
# Executes the given xpath query against the given window.document object using
# google's wicked fast xpath
executeXPath = (query, window) ->
unless window.wgxpath?
log.error 'xpath', 'xpath selector engine not found!'
return []
window.wgxpath.install()
document = window.document
els = []
log.verbose 'xpath', "Evaluating query '#{query}'."
result = document.evaluate(query, document, null, 7, null)
len = result.snapshotLength
log.verbose 'xpath', "Found #{len} match(es)."
if len > 0
els = (result.snapshotItem(i) for i in [0...len])
els
# executeCSSQuery
# ---------------
#
# Executes the given css query against the given window.document object using
# window.qwery
executeCSSQuery = (query, window) ->
unless window.qwery?
log.error 'css', 'qwery selector engine not found!'
return []
log.verbose 'css', "Evaluating query '#{query}'."
els = window.qwery query
log.verbose 'css', "Found #{els.length} match(es)."
els
# Get the args
{ query, attribute, outer, loglevel } = getArgs()
# Set our default logging level, only log items at this level and higher.
# The default loglevel is err - only show errors
log.level = loglevel
process.stdin.setEncoding 'utf8'
doc = ""
process.stdin.on 'readable', () ->
chunk = process.stdin.read()
if chunk != null
doc += chunk
process.stdin.on 'end', () ->
if err?
# If we hit an error while fetching the page, return 1 so that our
# command can be chained in the shell.
return process.exit 1
# Determin if we're using xpath or not, and set the libs appropriately.
xpath = useXPath query
libs = CSS_LIBS
if xpath
libs = XPATH_LIBS
domParse doc, libs, (err, window) ->
if err?
# If we hit an error while parsing the document, return 1 so that our
# command can be chained in the shell.
return process.exit(1)
if xpath
results = executeXPath(query, window)
else
results = executeCSSQuery(query, window)
if not results or results.length < 1
# We had no results, so go ahead and exit 1 so that our command can be
# chained in the shell.
return process.exit(1)
strings = []
for result in results
strings.push elToString(result, attribute, outer)
# Print the strings to stdout in such a way that they're all
# separated by a newline, but the last line doesn't end in one.
process.stdout.write strings.join '\n'
| 100490 | fs = require 'fs'
path = require 'path'
log = require 'verbalize'
optimist = require 'optimist'
jsdom = require 'jsdom'
log = require 'npmlog'
path = require 'path'
# modified from cli-scrape
#
# (c) 2012 <NAME> & 2015 <NAME>
#
# cli-scrape may be freely distributed under the MIT license.
# For all details and documentation:
#
# http://pthrasher.github.com/cli-scrape/
# Usage
# -----
#
# `cat file.html | bo 'p:first-child'`
#
# `wget http://whatthecommit.com/ | bo '//p[0]'`
# use strict, yo!
'use strict'
absPath = (relPath) ->
path.resolve __dirname, relPath
# These are the libraries used to do the parsing on the page. If the query is
# an xpath query, XPATH\_LIBS is used. If not, CSS\_LIBS is used instead.
XPATH_LIBS = [absPath('wgxpath.install.js')]
CSS_LIBS = [absPath('qwery.min.js', 'qwery-pseudos.min.js')]
# useXPath
# --------
#
# Determine whether or not the query passed in is an xpath query or a css query
useXPath = (query) ->
query.indexOf('/') is 0
# getArgs
# -------
#
# Wrapper for optimise that allows us to clean up the args as they come in.
getArgs = ->
usage = 'Usage: bo [xpath|css]\n\n' +
'If no attribute is given, innerHTML, text or a plain string\n' +
'will be returned (depending on what is matched)'
argv = optimist
.usage(usage)
.alias('a', 'attribute')
.default('a', undefined)
.alias('o', 'outer')
.describe('o', 'get outer XML of the element')
.alias('l', 'loglevel')
.default('l', 'silent')
.demand(1)
.argv
[ query ] = argv._
query = query.trim()
attribute = argv.attribute?.trim()
outer = argv.outer
loglevel = argv.loglevel.trim()
{
query
attribute
outer
loglevel
}
# fetchHTML
# ---------
#
# This is essentially a simple logging wrapper around request.get
fetchHTML = (url, cb) ->
request.get url, (err, response, body) ->
if err?
log.error 'http', "[#{response.statusCode}] #{err}"
return cb err, response, body
log.verbose 'http', "[#{response.statusCode}] Fetched '#{url}' successfully."
cb err, response, body
# domParse
# --------
#
# This is essentially a simple logging wrapper around jsdom.env
domParse = (html, libs, cb) ->
opts =
html: html
scripts: libs
parsingMode: 'xml'
done: (err, window) ->
if err?
log.error 'parse', "Error processing DOM with libs: [ '#{libs.join '\', \''}' ]. (#{err})"
return cb err, window
log.verbose 'parse', "DOM parse successful with libs: [ '#{libs.join '\', \''}' ]."
cb err, window
jsdom.env opts
# elToString
# ----------
#
# depending on the query given by the user, we will be getting an html element,
# or a plain old string. This should be handled elegantly.
elToString = (el, attribute, outer) ->
ret = ''
if attribute
log.verbose 'elToString', "Fetching #{attribute} from '#{el}'."
ret = el.getAttribute(attribute)
else if outer && el.outerHTML?
log.verbose 'elToString', "Fetching outerHTML from '#{el}'."
ret = el.outerHTML.trim()
else if el.innerHTML?
log.verbose 'elToString', "Fetching innerHTML from '#{el}'."
ret = el.innerHTML.trim()
else if el.textContent?
log.verbose 'elToString', "Fetching textContent from '#{el}'."
ret = el.textContent.trim()
else if Object.prototype.toString.call(el) is '[object String]'
log.verbose 'elToString', "Content already a string: '#{el}'."
ret = el.trim()
# replace newlines with spaces so we get one result per line
ret.replace(/\r?\n|\r/g, ' ')
# executeXPath
# ---------------
#
# Executes the given xpath query against the given window.document object using
# google's wicked fast xpath
executeXPath = (query, window) ->
unless window.wgxpath?
log.error 'xpath', 'xpath selector engine not found!'
return []
window.wgxpath.install()
document = window.document
els = []
log.verbose 'xpath', "Evaluating query '#{query}'."
result = document.evaluate(query, document, null, 7, null)
len = result.snapshotLength
log.verbose 'xpath', "Found #{len} match(es)."
if len > 0
els = (result.snapshotItem(i) for i in [0...len])
els
# executeCSSQuery
# ---------------
#
# Executes the given css query against the given window.document object using
# window.qwery
executeCSSQuery = (query, window) ->
unless window.qwery?
log.error 'css', 'qwery selector engine not found!'
return []
log.verbose 'css', "Evaluating query '#{query}'."
els = window.qwery query
log.verbose 'css', "Found #{els.length} match(es)."
els
# Get the args
{ query, attribute, outer, loglevel } = getArgs()
# Set our default logging level, only log items at this level and higher.
# The default loglevel is err - only show errors
log.level = loglevel
process.stdin.setEncoding 'utf8'
doc = ""
process.stdin.on 'readable', () ->
chunk = process.stdin.read()
if chunk != null
doc += chunk
process.stdin.on 'end', () ->
if err?
# If we hit an error while fetching the page, return 1 so that our
# command can be chained in the shell.
return process.exit 1
# Determin if we're using xpath or not, and set the libs appropriately.
xpath = useXPath query
libs = CSS_LIBS
if xpath
libs = XPATH_LIBS
domParse doc, libs, (err, window) ->
if err?
# If we hit an error while parsing the document, return 1 so that our
# command can be chained in the shell.
return process.exit(1)
if xpath
results = executeXPath(query, window)
else
results = executeCSSQuery(query, window)
if not results or results.length < 1
# We had no results, so go ahead and exit 1 so that our command can be
# chained in the shell.
return process.exit(1)
strings = []
for result in results
strings.push elToString(result, attribute, outer)
# Print the strings to stdout in such a way that they're all
# separated by a newline, but the last line doesn't end in one.
process.stdout.write strings.join '\n'
| true | fs = require 'fs'
path = require 'path'
log = require 'verbalize'
optimist = require 'optimist'
jsdom = require 'jsdom'
log = require 'npmlog'
path = require 'path'
# modified from cli-scrape
#
# (c) 2012 PI:NAME:<NAME>END_PI & 2015 PI:NAME:<NAME>END_PI
#
# cli-scrape may be freely distributed under the MIT license.
# For all details and documentation:
#
# http://pthrasher.github.com/cli-scrape/
# Usage
# -----
#
# `cat file.html | bo 'p:first-child'`
#
# `wget http://whatthecommit.com/ | bo '//p[0]'`
# use strict, yo!
'use strict'
absPath = (relPath) ->
path.resolve __dirname, relPath
# These are the libraries used to do the parsing on the page. If the query is
# an xpath query, XPATH\_LIBS is used. If not, CSS\_LIBS is used instead.
XPATH_LIBS = [absPath('wgxpath.install.js')]
CSS_LIBS = [absPath('qwery.min.js', 'qwery-pseudos.min.js')]
# useXPath
# --------
#
# Determine whether or not the query passed in is an xpath query or a css query
useXPath = (query) ->
query.indexOf('/') is 0
# getArgs
# -------
#
# Wrapper for optimise that allows us to clean up the args as they come in.
getArgs = ->
usage = 'Usage: bo [xpath|css]\n\n' +
'If no attribute is given, innerHTML, text or a plain string\n' +
'will be returned (depending on what is matched)'
argv = optimist
.usage(usage)
.alias('a', 'attribute')
.default('a', undefined)
.alias('o', 'outer')
.describe('o', 'get outer XML of the element')
.alias('l', 'loglevel')
.default('l', 'silent')
.demand(1)
.argv
[ query ] = argv._
query = query.trim()
attribute = argv.attribute?.trim()
outer = argv.outer
loglevel = argv.loglevel.trim()
{
query
attribute
outer
loglevel
}
# fetchHTML
# ---------
#
# This is essentially a simple logging wrapper around request.get
fetchHTML = (url, cb) ->
request.get url, (err, response, body) ->
if err?
log.error 'http', "[#{response.statusCode}] #{err}"
return cb err, response, body
log.verbose 'http', "[#{response.statusCode}] Fetched '#{url}' successfully."
cb err, response, body
# domParse
# --------
#
# This is essentially a simple logging wrapper around jsdom.env
domParse = (html, libs, cb) ->
opts =
html: html
scripts: libs
parsingMode: 'xml'
done: (err, window) ->
if err?
log.error 'parse', "Error processing DOM with libs: [ '#{libs.join '\', \''}' ]. (#{err})"
return cb err, window
log.verbose 'parse', "DOM parse successful with libs: [ '#{libs.join '\', \''}' ]."
cb err, window
jsdom.env opts
# elToString
# ----------
#
# depending on the query given by the user, we will be getting an html element,
# or a plain old string. This should be handled elegantly.
elToString = (el, attribute, outer) ->
ret = ''
if attribute
log.verbose 'elToString', "Fetching #{attribute} from '#{el}'."
ret = el.getAttribute(attribute)
else if outer && el.outerHTML?
log.verbose 'elToString', "Fetching outerHTML from '#{el}'."
ret = el.outerHTML.trim()
else if el.innerHTML?
log.verbose 'elToString', "Fetching innerHTML from '#{el}'."
ret = el.innerHTML.trim()
else if el.textContent?
log.verbose 'elToString', "Fetching textContent from '#{el}'."
ret = el.textContent.trim()
else if Object.prototype.toString.call(el) is '[object String]'
log.verbose 'elToString', "Content already a string: '#{el}'."
ret = el.trim()
# replace newlines with spaces so we get one result per line
ret.replace(/\r?\n|\r/g, ' ')
# executeXPath
# ---------------
#
# Executes the given xpath query against the given window.document object using
# google's wicked fast xpath
executeXPath = (query, window) ->
unless window.wgxpath?
log.error 'xpath', 'xpath selector engine not found!'
return []
window.wgxpath.install()
document = window.document
els = []
log.verbose 'xpath', "Evaluating query '#{query}'."
result = document.evaluate(query, document, null, 7, null)
len = result.snapshotLength
log.verbose 'xpath', "Found #{len} match(es)."
if len > 0
els = (result.snapshotItem(i) for i in [0...len])
els
# executeCSSQuery
# ---------------
#
# Executes the given css query against the given window.document object using
# window.qwery
executeCSSQuery = (query, window) ->
unless window.qwery?
log.error 'css', 'qwery selector engine not found!'
return []
log.verbose 'css', "Evaluating query '#{query}'."
els = window.qwery query
log.verbose 'css', "Found #{els.length} match(es)."
els
# Get the args
{ query, attribute, outer, loglevel } = getArgs()
# Set our default logging level, only log items at this level and higher.
# The default loglevel is err - only show errors
log.level = loglevel
process.stdin.setEncoding 'utf8'
doc = ""
process.stdin.on 'readable', () ->
chunk = process.stdin.read()
if chunk != null
doc += chunk
process.stdin.on 'end', () ->
if err?
# If we hit an error while fetching the page, return 1 so that our
# command can be chained in the shell.
return process.exit 1
# Determin if we're using xpath or not, and set the libs appropriately.
xpath = useXPath query
libs = CSS_LIBS
if xpath
libs = XPATH_LIBS
domParse doc, libs, (err, window) ->
if err?
# If we hit an error while parsing the document, return 1 so that our
# command can be chained in the shell.
return process.exit(1)
if xpath
results = executeXPath(query, window)
else
results = executeCSSQuery(query, window)
if not results or results.length < 1
# We had no results, so go ahead and exit 1 so that our command can be
# chained in the shell.
return process.exit(1)
strings = []
for result in results
strings.push elToString(result, attribute, outer)
# Print the strings to stdout in such a way that they're all
# separated by a newline, but the last line doesn't end in one.
process.stdout.write strings.join '\n'
|
[
{
"context": "nt-size: 12.3199996948242px; text-align: center;\">Miriam McCormick</td>\n</tr>\n</tbody>\n</table>\n<p><span style=\"colo",
"end": 6060,
"score": 0.9998807907104492,
"start": 6044,
"tag": "NAME",
"value": "Miriam McCormick"
}
] | minitests/html.coffee | edwinksl/zotero-better-bibtex | 0 | html = """
<p><span>When I first had a « student » tell me that she doesn’t ‹ believe in evolution › I was at a loss of how to respond. To
me, that sounded like someone telling me that she didn’t believe in gravity. It seemed both irrational and wrong.
Experiences like this are common; we think that one’s actual belief can deviate from how one “ought to believe”. The
dominant view among contemporary philosophers is that any belief formed against the evidence is impermissible. On such a
view, which I call “evidentialism,” it is easy to diagnosis what is wrong with my student’s belief. I use the term
“pragmatism” to refer to the view that some non-evidentially based beliefs are permissible. A central aim of this book
is to defend pragmatism. One challenge to the pragmatist view I defend is to show how we can distinguish pernicious
non-evidentially based beliefs from those that are permissible.<span
class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino,
serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing:
normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" /><a
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" name="more"></a><span
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">Most contemporary theorists think that to appeal to the norms of agency in thinking about how to believe is to
make a category mistake. In believing we aim to gain truth or avoid falsehood, so when we believe for reasons that are
opposed to these aims, we can be criticized for violating these norms. While there is some disagreement about the
precise relationship between belief and truth, very few fundamentally question the view that beliefs require their own
separate ethics. The central contention of this book is that they do not; that, instead the ethics of belief and action
are unified. Nonetheless I think that in most cases, we should not deviate from evidentialist principles because
following these principles leads us to truth and knowledge. I argue, however, that the value of truth and knowledge is
instrumental; having true beliefs helps us achieve our goals, flourish, and be excellent human beings. It is thus
possible that some beliefs can help us achieve these goals independently of their truth-value, or of their being
evidentially based.<span class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family:
Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant:
normal; font-weight: normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start;
text-indent: 0px; text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width:
0px; background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype',
Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal;
letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /></p>
<table class="tr-caption-container" style="padding: 8px; margin-bottom: 0.5em; position: relative; border: 1px solid
transparent; box-shadow: rgba(0, 0, 0, 0.0980392) 1px 1px 5px; color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; float: left; margin-right: 1em;
text-align: left; background: #882222;" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td style="text-align: center;"><a style="text-decoration: none; color: #f1c232; clear: left; margin-bottom: 1em;
margin-left: auto; margin-right: auto;"
href="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"><img
style="border: none; position: relative; padding: 0px; box-shadow: rgba(0, 0, 0, 0.0980392) 0px 0px 0px; background:
transparent;" src="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"
border="0" alt="" /></a></td>
</tr>
<tr>
<td class="tr-caption" style="font-size: 12.3199996948242px; text-align: center;">Miriam McCormick</td>
</tr>
</tbody>
</table>
<p><span style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">This book is divided into two main parts, “Doxastic Norms” and “Doxastic Responsibility.” In Part I, I review
and critique a number of defenses of evidentialism before turning to my argument that the norms for belief are
ultimately practical. Those who oppose this pragmatist conception of doxastic norms will point out that, given the
involuntary nature of belief, we cannot believe for practical reasons. This is why a discussion of doxastic norms is
intertwined with the issue of doxastic control and responsibility. Part II focuses on these issues; I argue that beliefs
are products of our agency, something we have an active role in shaping and maintaining. The two parts of the book are
two sides of the same coin. That the norms of agency apply to both belief and action demands that we can make sense of
doxastic agency. And that we can exercise control in the doxastic realm naturally leads to the view that the same norms
guide both action and belief.</span></p>
"""
console.log(LaTeX.text2latex(html))
| 54977 | html = """
<p><span>When I first had a « student » tell me that she doesn’t ‹ believe in evolution › I was at a loss of how to respond. To
me, that sounded like someone telling me that she didn’t believe in gravity. It seemed both irrational and wrong.
Experiences like this are common; we think that one’s actual belief can deviate from how one “ought to believe”. The
dominant view among contemporary philosophers is that any belief formed against the evidence is impermissible. On such a
view, which I call “evidentialism,” it is easy to diagnosis what is wrong with my student’s belief. I use the term
“pragmatism” to refer to the view that some non-evidentially based beliefs are permissible. A central aim of this book
is to defend pragmatism. One challenge to the pragmatist view I defend is to show how we can distinguish pernicious
non-evidentially based beliefs from those that are permissible.<span
class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino,
serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing:
normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" /><a
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" name="more"></a><span
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">Most contemporary theorists think that to appeal to the norms of agency in thinking about how to believe is to
make a category mistake. In believing we aim to gain truth or avoid falsehood, so when we believe for reasons that are
opposed to these aims, we can be criticized for violating these norms. While there is some disagreement about the
precise relationship between belief and truth, very few fundamentally question the view that beliefs require their own
separate ethics. The central contention of this book is that they do not; that, instead the ethics of belief and action
are unified. Nonetheless I think that in most cases, we should not deviate from evidentialist principles because
following these principles leads us to truth and knowledge. I argue, however, that the value of truth and knowledge is
instrumental; having true beliefs helps us achieve our goals, flourish, and be excellent human beings. It is thus
possible that some beliefs can help us achieve these goals independently of their truth-value, or of their being
evidentially based.<span class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family:
Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant:
normal; font-weight: normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start;
text-indent: 0px; text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width:
0px; background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype',
Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal;
letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /></p>
<table class="tr-caption-container" style="padding: 8px; margin-bottom: 0.5em; position: relative; border: 1px solid
transparent; box-shadow: rgba(0, 0, 0, 0.0980392) 1px 1px 5px; color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; float: left; margin-right: 1em;
text-align: left; background: #882222;" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td style="text-align: center;"><a style="text-decoration: none; color: #f1c232; clear: left; margin-bottom: 1em;
margin-left: auto; margin-right: auto;"
href="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"><img
style="border: none; position: relative; padding: 0px; box-shadow: rgba(0, 0, 0, 0.0980392) 0px 0px 0px; background:
transparent;" src="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"
border="0" alt="" /></a></td>
</tr>
<tr>
<td class="tr-caption" style="font-size: 12.3199996948242px; text-align: center;"><NAME></td>
</tr>
</tbody>
</table>
<p><span style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">This book is divided into two main parts, “Doxastic Norms” and “Doxastic Responsibility.” In Part I, I review
and critique a number of defenses of evidentialism before turning to my argument that the norms for belief are
ultimately practical. Those who oppose this pragmatist conception of doxastic norms will point out that, given the
involuntary nature of belief, we cannot believe for practical reasons. This is why a discussion of doxastic norms is
intertwined with the issue of doxastic control and responsibility. Part II focuses on these issues; I argue that beliefs
are products of our agency, something we have an active role in shaping and maintaining. The two parts of the book are
two sides of the same coin. That the norms of agency apply to both belief and action demands that we can make sense of
doxastic agency. And that we can exercise control in the doxastic realm naturally leads to the view that the same norms
guide both action and belief.</span></p>
"""
console.log(LaTeX.text2latex(html))
| true | html = """
<p><span>When I first had a « student » tell me that she doesn’t ‹ believe in evolution › I was at a loss of how to respond. To
me, that sounded like someone telling me that she didn’t believe in gravity. It seemed both irrational and wrong.
Experiences like this are common; we think that one’s actual belief can deviate from how one “ought to believe”. The
dominant view among contemporary philosophers is that any belief formed against the evidence is impermissible. On such a
view, which I call “evidentialism,” it is easy to diagnosis what is wrong with my student’s belief. I use the term
“pragmatism” to refer to the view that some non-evidentially based beliefs are permissible. A central aim of this book
is to defend pragmatism. One challenge to the pragmatist view I defend is to show how we can distinguish pernicious
non-evidentially based beliefs from those that are permissible.<span
class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino,
serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing:
normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" /><a
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; background-color: #882222;" name="more"></a><span
style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">Most contemporary theorists think that to appeal to the norms of agency in thinking about how to believe is to
make a category mistake. In believing we aim to gain truth or avoid falsehood, so when we believe for reasons that are
opposed to these aims, we can be criticized for violating these norms. While there is some disagreement about the
precise relationship between belief and truth, very few fundamentally question the view that beliefs require their own
separate ethics. The central contention of this book is that they do not; that, instead the ethics of belief and action
are unified. Nonetheless I think that in most cases, we should not deviate from evidentialist principles because
following these principles leads us to truth and knowledge. I argue, however, that the value of truth and knowledge is
instrumental; having true beliefs helps us achieve our goals, flourish, and be excellent human beings. It is thus
possible that some beliefs can help us achieve these goals independently of their truth-value, or of their being
evidentially based.<span class="Apple-converted-space"> </span></span><br style="color: #ffffff; font-family:
Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant:
normal; font-weight: normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start;
text-indent: 0px; text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width:
0px; background-color: #882222;" /><br style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype',
Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal;
letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px;
text-transform: none; white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px;
background-color: #882222;" /></p>
<table class="tr-caption-container" style="padding: 8px; margin-bottom: 0.5em; position: relative; border: 1px solid
transparent; box-shadow: rgba(0, 0, 0, 0.0980392) 1px 1px 5px; color: #ffffff; font-family: Georgia, Utopia, 'Palatino
Linotype', Palatino, serif; font-size: 15.3999996185303px; font-style: normal; font-variant: normal; font-weight:
normal; letter-spacing: normal; line-height: 21.5599994659424px; orphans: auto; text-indent: 0px; text-transform: none;
white-space: normal; widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; float: left; margin-right: 1em;
text-align: left; background: #882222;" border="0" cellspacing="0" cellpadding="0">
<tbody>
<tr>
<td style="text-align: center;"><a style="text-decoration: none; color: #f1c232; clear: left; margin-bottom: 1em;
margin-left: auto; margin-right: auto;"
href="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"><img
style="border: none; position: relative; padding: 0px; box-shadow: rgba(0, 0, 0, 0.0980392) 0px 0px 0px; background:
transparent;" src="http://2.bp.blogspot.com/-4ApvglmDwR0/VVxtSFeNtPI/AAAAAAAAAxM/vfr4sDGYbTY/s1600/mccormick_miriam.jpg"
border="0" alt="" /></a></td>
</tr>
<tr>
<td class="tr-caption" style="font-size: 12.3199996948242px; text-align: center;">PI:NAME:<NAME>END_PI</td>
</tr>
</tbody>
</table>
<p><span style="color: #ffffff; font-family: Georgia, Utopia, 'Palatino Linotype', Palatino, serif; font-size:
15.3999996185303px; font-style: normal; font-variant: normal; font-weight: normal; letter-spacing: normal; line-height:
21.5599994659424px; orphans: auto; text-align: start; text-indent: 0px; text-transform: none; white-space: normal;
widows: 1; word-spacing: 0px; -webkit-text-stroke-width: 0px; display: inline !important; float: none; background-color:
#882222;">This book is divided into two main parts, “Doxastic Norms” and “Doxastic Responsibility.” In Part I, I review
and critique a number of defenses of evidentialism before turning to my argument that the norms for belief are
ultimately practical. Those who oppose this pragmatist conception of doxastic norms will point out that, given the
involuntary nature of belief, we cannot believe for practical reasons. This is why a discussion of doxastic norms is
intertwined with the issue of doxastic control and responsibility. Part II focuses on these issues; I argue that beliefs
are products of our agency, something we have an active role in shaping and maintaining. The two parts of the book are
two sides of the same coin. That the norms of agency apply to both belief and action demands that we can make sense of
doxastic agency. And that we can exercise control in the doxastic realm naturally leads to the view that the same norms
guide both action and belief.</span></p>
"""
console.log(LaTeX.text2latex(html))
|
[
{
"context": "\n\nspotifyApi = new SpotifyWebApi ->\n clientId : '0f39154551684e12b45ba72e18d830b6',\n clientSecret : '57cb935d95f14c1ab427ff62eb37f",
"end": 236,
"score": 0.9981421232223511,
"start": 204,
"tag": "KEY",
"value": "0f39154551684e12b45ba72e18d830b6"
},
{
"context": "9... | lib/spotify.coffee | jac32/AtomicSpotify | 0 | SpotifyWebApi = require 'spotify-web-api-node'
control = require './control'
SpotifyView = require './spotify-view'
{CompositeDisposable} = require 'atom'
spotifyApi = new SpotifyWebApi ->
clientId : '0f39154551684e12b45ba72e18d830b6',
clientSecret : '57cb935d95f14c1ab427ff62eb37f148',
redirectUri : 'NA'
module.exports = Spotify =
subscriptions: null
myPackageView: null
leftPanel: null
#leftPanel = atom.workspace.addLeftPanel(item: spotifyView.getElement(), visible: true) kendrick
activate: (state) ->
@spotifyView = new SpotifyView(state.spotifyViewState)
@searchPanel = atom.workspace.addLeftPanel(item: @spotifyView.getElement(), visible: false)
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:current-track': => control.currentTrack()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-pause': => control.playPause()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-next': => control.playNext()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-prev': => control.playPrev()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:track-duration': => control.trackDuration()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:toggle-verbosity': => control.toggleVebosity()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-selection': => playSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:search-selection': => searchSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:increase-volume': => control.incVol()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:decrease-volume': => control.decVol()
setInterval control.checkTrack, 1000
deactivate: ->
@subscriptions.dispose()
@spotifyView.destroy()
playSelection = ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
control.playUri selection
searchSelection = () ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
searchTracks(selection).then (tracks) ->
searchAlbums(selection).then (albums) ->
searchPlayLists(selection).then (playlists) ->
Spotify.spotifyView.setData(tracks, albums, playlists)
unless Spotify.searchPanel.isVisible()
Spotify.searchPanel.show()
track = tracks.tracks.items[0]
#control.playUri track.uri, track.album.uri
searchTracks = (query) ->
spotifyApi.searchTracks(query).then (data) ->
return data.body
, (err) ->
console.log 'Track search failed', err
searchAlbums = (query) ->
spotifyApi.searchAlbums(query).then (data) ->
return data.body
, (err) ->
console.log 'Album search failed', err
searchPlayLists = (query) ->
spotifyApi.searchPlaylists(query).then (data) ->
return data.body
, (err) ->
console.log 'Playlist search failed', err
| 85320 | SpotifyWebApi = require 'spotify-web-api-node'
control = require './control'
SpotifyView = require './spotify-view'
{CompositeDisposable} = require 'atom'
spotifyApi = new SpotifyWebApi ->
clientId : '<KEY>',
clientSecret : '<KEY>',
redirectUri : 'NA'
module.exports = Spotify =
subscriptions: null
myPackageView: null
leftPanel: null
#leftPanel = atom.workspace.addLeftPanel(item: spotifyView.getElement(), visible: true) kendrick
activate: (state) ->
@spotifyView = new SpotifyView(state.spotifyViewState)
@searchPanel = atom.workspace.addLeftPanel(item: @spotifyView.getElement(), visible: false)
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:current-track': => control.currentTrack()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-pause': => control.playPause()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-next': => control.playNext()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-prev': => control.playPrev()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:track-duration': => control.trackDuration()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:toggle-verbosity': => control.toggleVebosity()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-selection': => playSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:search-selection': => searchSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:increase-volume': => control.incVol()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:decrease-volume': => control.decVol()
setInterval control.checkTrack, 1000
deactivate: ->
@subscriptions.dispose()
@spotifyView.destroy()
playSelection = ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
control.playUri selection
searchSelection = () ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
searchTracks(selection).then (tracks) ->
searchAlbums(selection).then (albums) ->
searchPlayLists(selection).then (playlists) ->
Spotify.spotifyView.setData(tracks, albums, playlists)
unless Spotify.searchPanel.isVisible()
Spotify.searchPanel.show()
track = tracks.tracks.items[0]
#control.playUri track.uri, track.album.uri
searchTracks = (query) ->
spotifyApi.searchTracks(query).then (data) ->
return data.body
, (err) ->
console.log 'Track search failed', err
searchAlbums = (query) ->
spotifyApi.searchAlbums(query).then (data) ->
return data.body
, (err) ->
console.log 'Album search failed', err
searchPlayLists = (query) ->
spotifyApi.searchPlaylists(query).then (data) ->
return data.body
, (err) ->
console.log 'Playlist search failed', err
| true | SpotifyWebApi = require 'spotify-web-api-node'
control = require './control'
SpotifyView = require './spotify-view'
{CompositeDisposable} = require 'atom'
spotifyApi = new SpotifyWebApi ->
clientId : 'PI:KEY:<KEY>END_PI',
clientSecret : 'PI:KEY:<KEY>END_PI',
redirectUri : 'NA'
module.exports = Spotify =
subscriptions: null
myPackageView: null
leftPanel: null
#leftPanel = atom.workspace.addLeftPanel(item: spotifyView.getElement(), visible: true) kendrick
activate: (state) ->
@spotifyView = new SpotifyView(state.spotifyViewState)
@searchPanel = atom.workspace.addLeftPanel(item: @spotifyView.getElement(), visible: false)
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:current-track': => control.currentTrack()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-pause': => control.playPause()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-next': => control.playNext()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-prev': => control.playPrev()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:track-duration': => control.trackDuration()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:toggle-verbosity': => control.toggleVebosity()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:play-selection': => playSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:search-selection': => searchSelection()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:increase-volume': => control.incVol()
@subscriptions.add atom.commands.add 'atom-workspace', 'spotify:decrease-volume': => control.decVol()
setInterval control.checkTrack, 1000
deactivate: ->
@subscriptions.dispose()
@spotifyView.destroy()
playSelection = ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
control.playUri selection
searchSelection = () ->
if editor = atom.workspace.getActiveTextEditor()
selection = editor.getSelectedText()
searchTracks(selection).then (tracks) ->
searchAlbums(selection).then (albums) ->
searchPlayLists(selection).then (playlists) ->
Spotify.spotifyView.setData(tracks, albums, playlists)
unless Spotify.searchPanel.isVisible()
Spotify.searchPanel.show()
track = tracks.tracks.items[0]
#control.playUri track.uri, track.album.uri
searchTracks = (query) ->
spotifyApi.searchTracks(query).then (data) ->
return data.body
, (err) ->
console.log 'Track search failed', err
searchAlbums = (query) ->
spotifyApi.searchAlbums(query).then (data) ->
return data.body
, (err) ->
console.log 'Album search failed', err
searchPlayLists = (query) ->
spotifyApi.searchPlaylists(query).then (data) ->
return data.body
, (err) ->
console.log 'Playlist search failed', err
|
[
{
"context": " {for step, i in @props.steps\n step._key ?= Math.random()\n <MediaCard key={step._key} class",
"end": 3580,
"score": 0.6857658624649048,
"start": 3576,
"tag": "KEY",
"value": "Math"
},
{
"context": "step, i in @props.steps\n step._key ?= Math.rando... | app/lib/tutorial.cjsx | alexbfree/Panoptes-Front-End | 0 | React = require 'react'
Dialog = require 'modal-form/dialog'
StepThrough = require '../components/step-through'
MediaCard = require '../components/media-card'
{Markdown} = (require 'markdownz').default
apiClient = require 'panoptes-client/lib/api-client'
completedThisSession = {}
window?.tutorialsCompletedThisSession = completedThisSession
module.exports = React.createClass
displayName: 'Tutorial'
statics:
find: ({workflow}) ->
# Prefer fetching the tutorial for the workflow, if a workflow is given.
if workflow?
apiClient.type('tutorials').get workflow_id: workflow.id
.then (tutorials) ->
# Backwards compatibility for null kind values. We assume these are standard tutorials.
onlyStandardTutorials = tutorials.filter (tutorial) ->
tutorial.kind in ['tutorial', null]
onlyStandardTutorials[0]
else
Promise.resolve()
startIfNecessary: ({workflow, user, preferences}) ->
@find({workflow}).then (tutorial) =>
if tutorial?
@checkIfCompleted(tutorial, user, preferences).then (completed) =>
unless completed
@start tutorial, user
checkIfCompleted: (tutorial, user, preferences) ->
if user?
window.prefs = preferences
Promise.resolve preferences?.preferences?.tutorials_completed_at?[tutorial.id]?
else
Promise.resolve completedThisSession[tutorial.id]?
start: (tutorial, user) ->
TutorialComponent = this
if tutorial.steps.length isnt 0
awaitTutorialMedia = tutorial.get 'attached_images'
.catch ->
# Checking for attached images throws if there are none.
[]
.then (mediaResources) ->
mediaByID = {}
for mediaResource in mediaResources
mediaByID[mediaResource.id] = mediaResource
mediaByID
awaitTutorialMedia.then (mediaByID) =>
Dialog.alert(<TutorialComponent steps={tutorial.steps} media={mediaByID} />, {
className: 'tutorial-dialog',
required: true,
closeButton: true
})
.catch =>
null # We don't really care if the user canceled or completed the tutorial.
.then =>
@markComplete tutorial, user
markComplete: (tutorial, user) ->
now = new Date().toISOString()
completedThisSession[tutorial.id] = now
if user?
tutorial.get('project').then (project) ->
user.get('project_preferences', project_id: project.id).then ([projectPreferences]) ->
projectPreferences ?= apiClient.type('project_preferences').create
links:
project: project.id
preferences: {}
# Build this manually. Having an index (even as a strings) keys creates an array.
projectPreferences.preferences ?= {}
projectPreferences.preferences.tutorials_completed_at ?= {}
projectPreferences.update "preferences.tutorials_completed_at.#{tutorial.id}": now
projectPreferences.save()
propTypes:
steps: React.PropTypes.arrayOf React.PropTypes.shape
media: React.PropTypes.string
content: React.PropTypes.string
getDefaultProps: ->
steps: []
media: {}
render: ->
isIE = 'ActiveXObject' of window
if isIE
tutorialStyle = height: '85vh'
<StepThrough ref="stepThrough" className="tutorial-steps" style={tutorialStyle}>
{for step, i in @props.steps
step._key ?= Math.random()
<MediaCard key={step._key} className="tutorial-step" src={@props.media[step.media]?.src}>
<Markdown>{step.content}</Markdown>
<hr />
<p style={textAlign: 'center'}>
{if i is @props.steps.length - 1
<button type="submit" className="major-button">Let’s go!</button>
else
<button type="button" className="standard-button" onClick={@handleNextClick}>Continue</button>}
</p>
</MediaCard>}
</StepThrough>
handleNextClick: ->
@refs.stepThrough.goNext()
| 48846 | React = require 'react'
Dialog = require 'modal-form/dialog'
StepThrough = require '../components/step-through'
MediaCard = require '../components/media-card'
{Markdown} = (require 'markdownz').default
apiClient = require 'panoptes-client/lib/api-client'
completedThisSession = {}
window?.tutorialsCompletedThisSession = completedThisSession
module.exports = React.createClass
displayName: 'Tutorial'
statics:
find: ({workflow}) ->
# Prefer fetching the tutorial for the workflow, if a workflow is given.
if workflow?
apiClient.type('tutorials').get workflow_id: workflow.id
.then (tutorials) ->
# Backwards compatibility for null kind values. We assume these are standard tutorials.
onlyStandardTutorials = tutorials.filter (tutorial) ->
tutorial.kind in ['tutorial', null]
onlyStandardTutorials[0]
else
Promise.resolve()
startIfNecessary: ({workflow, user, preferences}) ->
@find({workflow}).then (tutorial) =>
if tutorial?
@checkIfCompleted(tutorial, user, preferences).then (completed) =>
unless completed
@start tutorial, user
checkIfCompleted: (tutorial, user, preferences) ->
if user?
window.prefs = preferences
Promise.resolve preferences?.preferences?.tutorials_completed_at?[tutorial.id]?
else
Promise.resolve completedThisSession[tutorial.id]?
start: (tutorial, user) ->
TutorialComponent = this
if tutorial.steps.length isnt 0
awaitTutorialMedia = tutorial.get 'attached_images'
.catch ->
# Checking for attached images throws if there are none.
[]
.then (mediaResources) ->
mediaByID = {}
for mediaResource in mediaResources
mediaByID[mediaResource.id] = mediaResource
mediaByID
awaitTutorialMedia.then (mediaByID) =>
Dialog.alert(<TutorialComponent steps={tutorial.steps} media={mediaByID} />, {
className: 'tutorial-dialog',
required: true,
closeButton: true
})
.catch =>
null # We don't really care if the user canceled or completed the tutorial.
.then =>
@markComplete tutorial, user
markComplete: (tutorial, user) ->
now = new Date().toISOString()
completedThisSession[tutorial.id] = now
if user?
tutorial.get('project').then (project) ->
user.get('project_preferences', project_id: project.id).then ([projectPreferences]) ->
projectPreferences ?= apiClient.type('project_preferences').create
links:
project: project.id
preferences: {}
# Build this manually. Having an index (even as a strings) keys creates an array.
projectPreferences.preferences ?= {}
projectPreferences.preferences.tutorials_completed_at ?= {}
projectPreferences.update "preferences.tutorials_completed_at.#{tutorial.id}": now
projectPreferences.save()
propTypes:
steps: React.PropTypes.arrayOf React.PropTypes.shape
media: React.PropTypes.string
content: React.PropTypes.string
getDefaultProps: ->
steps: []
media: {}
render: ->
isIE = 'ActiveXObject' of window
if isIE
tutorialStyle = height: '85vh'
<StepThrough ref="stepThrough" className="tutorial-steps" style={tutorialStyle}>
{for step, i in @props.steps
step._key ?= <KEY>.<KEY>()
<MediaCard key={step._key} className="tutorial-step" src={@props.media[step.media]?.src}>
<Markdown>{step.content}</Markdown>
<hr />
<p style={textAlign: 'center'}>
{if i is @props.steps.length - 1
<button type="submit" className="major-button">Let’s go!</button>
else
<button type="button" className="standard-button" onClick={@handleNextClick}>Continue</button>}
</p>
</MediaCard>}
</StepThrough>
handleNextClick: ->
@refs.stepThrough.goNext()
| true | React = require 'react'
Dialog = require 'modal-form/dialog'
StepThrough = require '../components/step-through'
MediaCard = require '../components/media-card'
{Markdown} = (require 'markdownz').default
apiClient = require 'panoptes-client/lib/api-client'
completedThisSession = {}
window?.tutorialsCompletedThisSession = completedThisSession
module.exports = React.createClass
displayName: 'Tutorial'
statics:
find: ({workflow}) ->
# Prefer fetching the tutorial for the workflow, if a workflow is given.
if workflow?
apiClient.type('tutorials').get workflow_id: workflow.id
.then (tutorials) ->
# Backwards compatibility for null kind values. We assume these are standard tutorials.
onlyStandardTutorials = tutorials.filter (tutorial) ->
tutorial.kind in ['tutorial', null]
onlyStandardTutorials[0]
else
Promise.resolve()
startIfNecessary: ({workflow, user, preferences}) ->
@find({workflow}).then (tutorial) =>
if tutorial?
@checkIfCompleted(tutorial, user, preferences).then (completed) =>
unless completed
@start tutorial, user
checkIfCompleted: (tutorial, user, preferences) ->
if user?
window.prefs = preferences
Promise.resolve preferences?.preferences?.tutorials_completed_at?[tutorial.id]?
else
Promise.resolve completedThisSession[tutorial.id]?
start: (tutorial, user) ->
TutorialComponent = this
if tutorial.steps.length isnt 0
awaitTutorialMedia = tutorial.get 'attached_images'
.catch ->
# Checking for attached images throws if there are none.
[]
.then (mediaResources) ->
mediaByID = {}
for mediaResource in mediaResources
mediaByID[mediaResource.id] = mediaResource
mediaByID
awaitTutorialMedia.then (mediaByID) =>
Dialog.alert(<TutorialComponent steps={tutorial.steps} media={mediaByID} />, {
className: 'tutorial-dialog',
required: true,
closeButton: true
})
.catch =>
null # We don't really care if the user canceled or completed the tutorial.
.then =>
@markComplete tutorial, user
markComplete: (tutorial, user) ->
now = new Date().toISOString()
completedThisSession[tutorial.id] = now
if user?
tutorial.get('project').then (project) ->
user.get('project_preferences', project_id: project.id).then ([projectPreferences]) ->
projectPreferences ?= apiClient.type('project_preferences').create
links:
project: project.id
preferences: {}
# Build this manually. Having an index (even as a strings) keys creates an array.
projectPreferences.preferences ?= {}
projectPreferences.preferences.tutorials_completed_at ?= {}
projectPreferences.update "preferences.tutorials_completed_at.#{tutorial.id}": now
projectPreferences.save()
propTypes:
steps: React.PropTypes.arrayOf React.PropTypes.shape
media: React.PropTypes.string
content: React.PropTypes.string
getDefaultProps: ->
steps: []
media: {}
render: ->
isIE = 'ActiveXObject' of window
if isIE
tutorialStyle = height: '85vh'
<StepThrough ref="stepThrough" className="tutorial-steps" style={tutorialStyle}>
{for step, i in @props.steps
step._key ?= PI:KEY:<KEY>END_PI.PI:KEY:<KEY>END_PI()
<MediaCard key={step._key} className="tutorial-step" src={@props.media[step.media]?.src}>
<Markdown>{step.content}</Markdown>
<hr />
<p style={textAlign: 'center'}>
{if i is @props.steps.length - 1
<button type="submit" className="major-button">Let’s go!</button>
else
<button type="button" className="standard-button" onClick={@handleNextClick}>Continue</button>}
</p>
</MediaCard>}
</StepThrough>
handleNextClick: ->
@refs.stepThrough.goNext()
|
[
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>\nAll rights reserved.\n\nRedistri",
"end": 42,
"score": 0.9998423457145691,
"start": 24,
"tag": "NAME",
"value": "Alexander Cherniuk"
},
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmai... | library/membrane/events.coffee | ts33kr/granite | 6 | ###
Copyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
asciify = require "asciify"
connect = require "connect"
logger = require "winston"
uuid = require "node-uuid"
events = require "eventemitter2"
assert = require "assert"
colors = require "colors"
crypto = require "crypto"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{EOL} = require "os"
{format} = require "util"
{STATUS_CODES} = require "http"
{Barebones} = require "./skeleton"
{remote, external} = require "./remote"
{coffee} = require "./runtime"
# This is an internal abstract base class that is not intended for
# being used directly. The class is being used by the implementation
# of framework systems to segregate the implementation of the visual
# core from the convenience API targeted to be used by developers.
# Please refer to the `Screenplay` class for actual implementation.
# This can also contain non-developer internals of the visual core.
module.exports.EventsToolkit = class EventsToolkit extends Barebones
    # This is a marker that indicates to some internal subsystems
    # that this class has to be considered abstract and therefore
    # can not be treated as a complete class implementation. This
    # mainly is used to exclude or account for abstract classes.
    # Once inherited from, the inheritee is not abstract anymore.
    @abstract yes
    # Use this static method to mark up the remote/external methods
    # that need to be automatically called, once everything is set
    # on the client side and before the entrypoint gets executed. It
    # is a good idea to place generic or setup code in the autocalls.
    # Refer to `inlineAutocalls` method for params interpretation.
    @autocall: (xparameters, xmethod) ->
        assert identify = @identify().underline
        # Arguments may arrive in either order; locate them by type.
        method = _.find arguments, _.isFunction
        parameters = _.find arguments, _.isObject
        notFunction = "no function is passed in"
        message = "Autocall sequence invoke in %s"
        assert _.isFunction(method), notFunction
        # Wrap a plain function as an external so it carries remote metadata.
        isRemote = _.isObject try method?.remote
        method = external method unless isRemote
        method.remote.autocall = parameters or {}
        source = try method.remote.source or null
        assert _.isString(source), "cant compile"
        logger.silly message.grey, identify or 0
        return method # return the implementation
    # The awaiting directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @awaiting: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Awaiting %s event for method in %s"
        # Arguments may arrive in either order; locate them by type.
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The generated client-side snippet subscribes the method via `on`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The onetimer directive is a lot like `awaiting`, except the
    # implementation will be called only once, exactly when the
    # specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds an event using `once` with the context.
    @onetimer: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Onetimer %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # Same as `awaiting`, but the generated snippet binds with `once`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.once(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The exclusive directive is a lot like `awaiting`, except it
    # removes all the event listeners that could have been bound
    # to the event. And only once that has been done, it binds the
    # supplied listener to the event, which will make it the only
    # listener of that event at the point of a method invocation.
    @exclusive: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Exclusive %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The snippet first drops all existing listeners, then subscribes.
        return auto (symbol, key, context) -> _.once ->
            k = "#{symbol}.removeAllListeners(%s)"
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            binder = format t, JSON.stringify event
            killer = format k, JSON.stringify event
            "(#{killer}; #{binder})".toString()
    # The synchronize directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the $root main service context
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @synchronize: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Rooting %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        assert select = "$root".toString().toLowerCase()
        # Bind on the $root context while keeping `this` as the local symbol.
        return auto (symbol, key, context) -> _.once ->
            b = "#{symbol}.#{key}.bind(#{symbol})"
            t = "#{select}.on(%s, #{b})" # bind FN
            return format t, JSON.stringify event
| 203970 | ###
Copyright (c) 2013, <NAME> <<EMAIL>>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
asciify = require "asciify"
connect = require "connect"
logger = require "winston"
uuid = require "node-uuid"
events = require "eventemitter2"
assert = require "assert"
colors = require "colors"
crypto = require "crypto"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{EOL} = require "os"
{format} = require "util"
{STATUS_CODES} = require "http"
{Barebones} = require "./skeleton"
{remote, external} = require "./remote"
{coffee} = require "./runtime"
# This is an internal abstract base class that is not intended for
# being used directly. The class is being used by the implementation
# of framework systems to segregate the implementation of the visual
# core from the convenience API targeted to be used by developers.
# Please refer to the `Screenplay` class for actual implementation.
# This can also contain non-developer internals of the visual core.
module.exports.EventsToolkit = class EventsToolkit extends Barebones
    # This is a marker that indicates to some internal subsystems
    # that this class has to be considered abstract and therefore
    # can not be treated as a complete class implementation. This
    # mainly is used to exclude or account for abstract classes.
    # Once inherited from, the inheritee is not abstract anymore.
    @abstract yes
    # Use this static method to mark up the remote/external methods
    # that need to be automatically called, once everything is set
    # on the client side and before the entrypoint gets executed. It
    # is a good idea to place generic or setup code in the autocalls.
    # Refer to `inlineAutocalls` method for params interpretation.
    @autocall: (xparameters, xmethod) ->
        assert identify = @identify().underline
        # Arguments may arrive in either order; locate them by type.
        method = _.find arguments, _.isFunction
        parameters = _.find arguments, _.isObject
        notFunction = "no function is passed in"
        message = "Autocall sequence invoke in %s"
        assert _.isFunction(method), notFunction
        # Wrap a plain function as an external so it carries remote metadata.
        isRemote = _.isObject try method?.remote
        method = external method unless isRemote
        method.remote.autocall = parameters or {}
        source = try method.remote.source or null
        assert _.isString(source), "cant compile"
        logger.silly message.grey, identify or 0
        return method # return the implementation
    # The awaiting directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @awaiting: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Awaiting %s event for method in %s"
        # Arguments may arrive in either order; locate them by type.
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The generated client-side snippet subscribes the method via `on`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The onetimer directive is a lot like `awaiting`, except the
    # implementation will be called only once, exactly when the
    # specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds an event using `once` with the context.
    @onetimer: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Onetimer %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # Same as `awaiting`, but the generated snippet binds with `once`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.once(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The exclusive directive is a lot like `awaiting`, except it
    # removes all the event listeners that could have been bound
    # to the event. And only once that has been done, it binds the
    # supplied listener to the event, which will make it the only
    # listener of that event at the point of a method invocation.
    @exclusive: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Exclusive %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The snippet first drops all existing listeners, then subscribes.
        return auto (symbol, key, context) -> _.once ->
            k = "#{symbol}.removeAllListeners(%s)"
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            binder = format t, JSON.stringify event
            killer = format k, JSON.stringify event
            "(#{killer}; #{binder})".toString()
    # The synchronize directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the $root main service context
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @synchronize: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Rooting %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        assert select = "$root".toString().toLowerCase()
        # Bind on the $root context while keeping `this` as the local symbol.
        return auto (symbol, key, context) -> _.once ->
            b = "#{symbol}.#{key}.bind(#{symbol})"
            t = "#{select}.on(%s, #{b})" # bind FN
            return format t, JSON.stringify event
| true | ###
Copyright (c) 2013, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
asciify = require "asciify"
connect = require "connect"
logger = require "winston"
uuid = require "node-uuid"
events = require "eventemitter2"
assert = require "assert"
colors = require "colors"
crypto = require "crypto"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{EOL} = require "os"
{format} = require "util"
{STATUS_CODES} = require "http"
{Barebones} = require "./skeleton"
{remote, external} = require "./remote"
{coffee} = require "./runtime"
# This is an internal abstract base class that is not intended for
# being used directly. The class is being used by the implementation
# of framework systems to segregate the implementation of the visual
# core from the convenience API targeted to be used by developers.
# Please refer to the `Screenplay` class for actual implementation.
# This can also contain non-developer internals of the visual core.
module.exports.EventsToolkit = class EventsToolkit extends Barebones
    # This is a marker that indicates to some internal subsystems
    # that this class has to be considered abstract and therefore
    # can not be treated as a complete class implementation. This
    # mainly is used to exclude or account for abstract classes.
    # Once inherited from, the inheritee is not abstract anymore.
    @abstract yes
    # Use this static method to mark up the remote/external methods
    # that need to be automatically called, once everything is set
    # on the client side and before the entrypoint gets executed. It
    # is a good idea to place generic or setup code in the autocalls.
    # Refer to `inlineAutocalls` method for params interpretation.
    @autocall: (xparameters, xmethod) ->
        assert identify = @identify().underline
        # Arguments may arrive in either order; locate them by type.
        method = _.find arguments, _.isFunction
        parameters = _.find arguments, _.isObject
        notFunction = "no function is passed in"
        message = "Autocall sequence invoke in %s"
        assert _.isFunction(method), notFunction
        # Wrap a plain function as an external so it carries remote metadata.
        isRemote = _.isObject try method?.remote
        method = external method unless isRemote
        method.remote.autocall = parameters or {}
        source = try method.remote.source or null
        assert _.isString(source), "cant compile"
        logger.silly message.grey, identify or 0
        return method # return the implementation
    # The awaiting directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @awaiting: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Awaiting %s event for method in %s"
        # Arguments may arrive in either order; locate them by type.
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The generated client-side snippet subscribes the method via `on`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The onetimer directive is a lot like `awaiting`, except the
    # implementation will be called only once, exactly when the
    # specified signal is emitted on the current context (service)
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds an event using `once` with the context.
    @onetimer: (xevent, xmethod) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Onetimer %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # Same as `awaiting`, but the generated snippet binds with `once`.
        return auto (symbol, key, context) -> _.once ->
            t = "#{symbol}.once(%s, #{symbol}.#{key})"
            return format t, JSON.stringify event
    # The exclusive directive is a lot like `awaiting`, except it
    # removes all the event listeners that could have been bound
    # to the event. And only once that has been done, it binds the
    # supplied listener to the event, which will make it the only
    # listener of that event at the point of a method invocation.
    @exclusive: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Exclusive %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        # The snippet first drops all existing listeners, then subscribes.
        return auto (symbol, key, context) -> _.once ->
            k = "#{symbol}.removeAllListeners(%s)"
            t = "#{symbol}.on(%s, #{symbol}.#{key})"
            binder = format t, JSON.stringify event
            killer = format k, JSON.stringify event
            "(#{killer}; #{binder})".toString()
    # The synchronize directive is a lot like `autocall`, except the
    # implementation will not be invoked immediately, but rather when
    # the specified signal is emitted on the $root main service context
    # object. Effectively, it is the same as creating the autocall
    # that explicitly binds the event using `on` with the context.
    @synchronize: (event, method) ->
        assert identify = this.identify().underline
        invalidMethod = "found an invalid function"
        invalidEvent = "found invalid event supplied"
        message = "Rooting %s event for method in %s"
        event = _.find(arguments, _.isString) or null
        method = _.find(arguments, _.isFunction) or 0
        assert not _.isEmpty(event or 0), invalidEvent
        assert _.isFunction(method or 0), invalidMethod
        assert method = @autocall new Object(), method
        assert _.isObject method.remote.autocall or 0
        assert (try method.remote.meta.event = event)
        logger.silly message.grey, event.bold, identify
        auto = (fn) -> method.remote.auto = fn; method
        assert select = "$root".toString().toLowerCase()
        # Bind on the $root context while keeping `this` as the local symbol.
        return auto (symbol, key, context) -> _.once ->
            b = "#{symbol}.#{key}.bind(#{symbol})"
            t = "#{select}.on(%s, #{b})" # bind FN
            return format t, JSON.stringify event
|
[
{
"context": "\n # use redis stash\n key = \"duration-#{session}\"\n @redis.incrby key, Math.roun",
"end": 6576,
"score": 0.8546648025512695,
"start": 6565,
"tag": "KEY",
"value": "duration-#{"
},
{
"context": " redis stash\n key = \"durati... | src/streammachine/analytics/index.coffee | firebrandv2/FirebrandNetwork.ga | 342 | _ = require "underscore"
URL = require "url"
winston = require "winston"
tz = require "timezone"
nconf = require "nconf"
elasticsearch = require "@elastic/elasticsearch"
BatchedQueue = require "../util/batched_queue"
IdxWriter = require "./idx_writer"
ESTemplates = require "./es_templates"
debug = require("debug")("sm:analytics")
# This module is responsible for:
# * Listen for session_start and listen interactions
# * Watch for sessions that are no longer active. Finalize them, attaching
# stats and duration, and throwing out sessions that did not meet minimum
# requirements
# * Answer questions about current number of listeners at any given time
# * Produce old-style w3c output for listener stats
module.exports = class Analytics
    constructor: (@opts,cb) ->
        # Wire up the Elasticsearch client, the batched index writer, and
        # (when Redis is configured) the periodic session sweeper.
        # `cb` fires after index templates load: cb(err) or cb(null, this).
        @_uri = URL.parse @opts.config.es_uri
        @log = @opts.log
        # Sessions idle longer than this many seconds get finalized.
        @_timeout_sec = Number(@opts.config.finalize_secs)
        if @opts.redis
            @redis = @opts.redis.client
        es_uri = @opts.config.es_uri
        @idx_prefix = @opts.config.es_prefix
        @log.debug "Connecting to Elasticsearch at #{es_uri} with prefix of #{@idx_prefix}"
        debug "Connecting to ES at #{es_uri}, prefix #{@idx_prefix}"
        apiVersion = '1.7'
        if (typeof @opts.config.es_api_version != 'undefined')
            apiVersion = @opts.config.es_api_version.toString()
        @es = new elasticsearch.Client
            node: es_uri
            apiVersion: apiVersion
            requestTimeout: @opts.config.request_timeout || 30000
        # Writes are buffered and flushed by size/latency into the writer.
        @idx_batch = new BatchedQueue
            batch: @opts.config.index_batch
            latency: @opts.config.index_latency
        @idx_writer = new IdxWriter @es, @log.child(submodule:"idx_writer")
        @idx_writer.on "error", (err) =>
            @log.error err
        @idx_batch.pipe(@idx_writer)
        # track open sessions
        @sessions = {}
        @local = tz(require "timezone/zones")(nconf.get("timezone")||"UTC")
        # -- Load our Templates -- #
        @_loadTemplates (err) =>
            if err
                console.error err
                cb? err
            else
                # do something...
                debug "Hitting cb after loading templates"
                cb? null, @
        # -- are there any sessions that should be finalized? -- #
        # when was our last finalized session?
        #last_session = @influx.query "SELECT max(time) from sessions", (err,res) =>
        #    console.log "last session is ", err, res
        # what sessions have we seen since then?
        # -- Redis Session Sweep -- #
        if @redis
            @log.info "Analytics setting up Redis session sweeper"
            # Every 5s, finalize sessions whose timeout score has elapsed.
            setInterval =>
                # look for sessions that should be written (score less than now)
                @redis.zrangebyscore "session-timeouts", 0, Math.floor( Number(new Date) / 1000), (err,sessions) =>
                    return @log.error "Error fetching sessions to finalize: #{err}" if err
                    # Drain the list one session at a time, recursively.
                    _sFunc = =>
                        if s = sessions.shift()
                            @_triggerSession s
                            _sFunc()
                    _sFunc()
            , 5*1000
#----------
    _loadTemplates: (cb) ->
        # Push every ES index template from ESTemplates, namespaced by our
        # index prefix. Calls cb(null) when all succeed, or a single
        # aggregated Error once every putTemplate has settled.
        errors = []
        debug "Loading #{Object.keys(ESTemplates).length} ES templates"
        # _loaded runs only after every putTemplate callback has fired.
        _loaded = _.after Object.keys(ESTemplates).length, =>
            if errors.length > 0
                debug "Failed to load one or more ES templates: #{errors.join(" | ")}"
                @log.info errors
                cb new Error "Failed to load index templates: #{ errors.join(" | ") }"
            else
                debug "ES templates loaded successfully."
                cb null
        for t,obj of ESTemplates
            debug "Loading ES mapping for #{@idx_prefix}-#{t}"
            @log.info "Loading Elasticsearch mappings for #{@idx_prefix}-#{t}"
            tmplt = _.extend {}, obj, template:"#{@idx_prefix}-#{t}-*"
            @log.info tmplt
            @es.indices.putTemplate name:"#{@idx_prefix}-#{t}-template", body:tmplt, (err) =>
                errors.push err if err
                _loaded()
#----------
#----------
    _log: (obj,cb) ->
        # Index one analytics event ("session_start" or "listen") into the
        # daily listens index via the batched queue.
        # NOTE(review): session_id below is assigned but never used.
        session_id = null
        if !obj.client?.session_id
            cb? new Error "Object does not contain a session ID"
            return false
        # write one index per day of data
        index_date = tz(obj.time,"%F")
        time = new Date( obj.time )
        # clean up IPv4 IP addresses stuck in IPv6
        if obj.client?.ip
            obj.client.ip = obj.client.ip.replace /^::ffff:/, ""
        # NOTE(review): for obj.type values other than the two handled
        # below, cb is never invoked -- confirm callers never pass others.
        @_indicesForTimeRange "listens", time, (err,idx) =>
            switch obj.type
                when "session_start"
                    @idx_batch.write index:idx[0], body:
                        time: new Date(obj.time)
                        session_id: obj.client.session_id
                        stream: obj.stream_group || obj.stream
                        client: obj.client
                        type: "start"
                    cb? null
                    # -- start tracking the session -- #
                when "listen"
                    # do we know of other duration for this session?
                    @_getStashedDurationFor obj.client.session_id, obj.duration, (err,dur) =>
                        @idx_batch.write index:idx[0], body:
                            session_id: obj.client.session_id
                            time: new Date(obj.time)
                            kbytes: obj.kbytes
                            duration: obj.duration
                            session_duration: dur
                            stream: obj.stream
                            client: obj.client
                            offsetSeconds: obj.offsetSeconds
                            contentTime: obj.contentTime
                            type: "listen"
                        cb? null
                        # -- update our timer -- #
                        @_updateSessionTimerFor obj.client.session_id, (err) =>
#----------
    # Given a session id and duration, add the given duration to any
    # existing cached duration and return the accumulated number via
    # cb(err, totalDuration). Uses Redis when available (with a 5-minute
    # TTL so abandoned sessions expire), otherwise the in-memory stash.
    _getStashedDurationFor: (session,duration,cb) ->
        if @redis
            # use redis stash
            key = "duration-#{session}"
            @redis.incrby key, Math.round(duration), (err,res) =>
                cb err, res
            # set a TTL on our key, so that it doesn't stay indefinitely
            @redis.pexpire key, 5*60*1000, (err) =>
                @log.error "Failed to set Redis TTL for #{key}: #{err}" if err
        else
            # use memory stash
            s = @_ensureMemorySession session
            s.duration += duration
            cb null, s.duration
#----------
    _updateSessionTimerFor: (session,cb) ->
        # Push back the finalization deadline for a session that just saw
        # activity. Redis mode stores the deadline as a sorted-set score
        # (swept by the interval in the constructor); memory mode resets a
        # per-session setTimeout.
        if @_timeout_sec <= 0
            # timeouts are disabled
            return cb null
        if @redis
            # this will set the score, or update it if the session is
            # already in the set
            timeout_at = (Number(new Date) / 1000) + @_timeout_sec
            @redis.zadd "session-timeouts", timeout_at, session, (err) =>
                cb err
        else
            s = @_ensureMemorySession session
            clearTimeout s.timeout if s.timeout
            s.timeout = setTimeout =>
                @_triggerSession session
            , @_timeout_sec * 1000
            cb null
#----------
_scrubSessionFor: (session,cb) ->
if @redis
@redis.zrem "session-timeouts", session, (err) =>
return cb err if err
@redis.del "duration-#{session}", (err) =>
cb err
else
s = @_ensureMemorySession session
clearTimeout s.timeout if s.timeout
delete @sessions[session]
cb null
#----------
_ensureMemorySession: (session) ->
@sessions[ session ] ||=
duration:0, last_seen_at:Number(new Date()), timeout:null
#----------
    _triggerSession: (session) ->
        # Finalize a timed-out session: clear its cached state, assemble
        # the session document, and (if valid) persist it. Errors are
        # logged and abort the remaining steps.
        @_scrubSessionFor session, (err) =>
            return @log.error "Error cleaning session cache: #{err}" if err
            @_finalizeSession session, (err,obj) =>
                return @log.error "Error assembling session: #{err}" if err
                # obj is false when the session was invalid / had no listens.
                if obj
                    @_storeSession obj, (err) =>
                        @log.error "Error writing session: #{err}" if err
#----------
    _finalizeSession: (id,cb) ->
        # Assemble the finalized session document for `id` and hand it to
        # cb(null, session). Yields cb(null, false) when the session is
        # invalid (no start event or no listen events); cb(err) on failure.
        @log.silly "Finalizing session for #{ id }"
        # This is a little ugly. We need to take several steps:
        # 1) Have we ever finalized this session id?
        # 2) Look up the session_start for the session_id
        # 3) Compute the session's sent kbytes, sent duration, and elapsed duration
        # 4) Write a session object
        session = {}
        # -- Get Started -- #
        @_selectPreviousSession id, (err,ts) =>
            if err
                @log.error err
                return cb? err
            @_selectSessionStart id, (err,start) =>
                if err
                    @log.error err
                    return cb err
                if !start
                    @log.debug "Attempt to finalize invalid session. No start event for #{id}."
                    return cb null, false
                # Only count listens after the previous finalized session.
                @_selectListenTotals id, ts, (err,totals) =>
                    if err
                        @log.error err
                        return cb? err
                    if !totals
                        # Session did not have any recorded listen events. Toss it.
                        return cb null, false
                    # -- build session -- #
                    session =
                        session_id: id
                        output: start.output
                        stream: start.stream
                        time: totals.last_listen
                        start_time: ts || start.time
                        client: start.client
                        kbytes: totals.kbytes
                        duration: totals.duration
                        connected: ( Number(totals.last_listen) - Number(ts||start.time) ) / 1000
                    cb null, session
#----------
_storeSession: (session,cb) ->
# write one index per day of data
index_date = tz(session.time,"%F")
@es.index index:"#{@idx_prefix}-sessions-#{index_date}", type: '_doc', body:session, (err) =>
cb err
#----------
_selectSessionStart: (id,cb) ->
# -- Look up user information from session_start -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "start"
}
]
sort:
time:{order:"desc"}
size: 1
# session start is allowed to be anywhere in the last 72 hours
@_indicesForTimeRange "listens", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying session start for #{id}: #{err}" if err
if res.body.hits && res.body.hits.total.value > 0
cb null, _.extend {}, res.body.hits.hits[0]._source, time:new Date(res.body.hits.hits[0]._source.time)
#----------
_selectPreviousSession: (id,cb) ->
# -- Have we ever finalized this session id? -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "session"
}
]
sort:
time:{order:"desc"}
size:1
@_indicesForTimeRange "sessions", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying for old session #{id}: #{err}" if err
if !res.body.hits || res.body.hits.total.value == 0
cb null, null
else
cb null, new Date(res.body.hits.hits[0]._source.time)
#----------
_selectListenTotals: (id,ts,cb) ->
# -- Query total duration and kbytes sent -- #
filter =
if ts
"and":
filters:[
{ range:{ time:{ gt:ts } } },
{ term:{session_id:id} },
{ term:{type:"listen"}}
]
else
term:{session_id:id}
body =
query:
constant_score:
filter:filter
aggs:
duration:
sum:{ field:"duration" }
kbytes:
sum:{ field:"kbytes" }
last_listen:
max:{ field:"time" }
@_indicesForTimeRange "listens", new Date(), ts||"-72 hours", (err,indices) =>
@es.search index:indices, body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying listens to finalize session #{id}: #{err}" if err
if res.body.hits.total.value > 0
cb null,
requests: res.body.hits.total.value
duration: res.body.aggregations.duration.value
kbytes: res.body.aggregations.kbytes.value
last_listen: new Date(res.body.aggregations.last_listen.value)
else
cb null, null
#----------
_indicesForTimeRange: (idx,start,end,cb) ->
if _.isFunction(end)
cb = end
end = null
start = @local(start)
if _.isString(end) && end[0] == "-"
end = @local(start,end)
indices = []
if end
end = @local(end)
s = start
while true
s = @local(s,"-1 day")
break if s < end
indices.push "#{@idx_prefix}-#{idx}-#{ @local(s,"%F") }"
indices.unshift "#{@idx_prefix}-#{idx}-#{ @local(start,"%F") }"
cb null, _.uniq(indices)
#----------
countListeners: (cb) ->
# -- Query recent listeners -- #
body =
query:
constant_score:
filter:
range:
time:
gt:"now-15m"
size:0
aggs:
listeners_by_minute:
date_histogram:
field: "time"
interval: "minute"
aggs:
duration:
sum:{ field:"duration" }
sessions:
cardinality:{ field:"session_id" }
streams:
terms:{ field:"stream", size:5 }
@_indicesForTimeRange "listens", new Date(), "-15 minutes", (err,indices) =>
@es.search index:indices, type:"listen", body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Failed to query listeners: #{err}" if err
times = []
for obj in res.aggregations.listeners_by_minute.buckets
streams = {}
for sobj in obj.streams.buckets
streams[ sobj.key ] = sobj.doc_count
times.unshift
time: @local(new Date(obj.key),"%F %T%^z")
requests: obj.doc_count
avg_listeners: Math.round( obj.duration.value / 60 )
sessions: obj.sessions.value
requests_by_stream: streams
cb null, times
#----------
class @LogTransport extends winston.Transport
name: "analytics"
constructor: (@a) ->
super level:"interaction"
log: (level,msg,meta,cb) ->
if level == "interaction"
@a._log meta
cb?()
#----------
| 156538 | _ = require "underscore"
URL = require "url"
winston = require "winston"
tz = require "timezone"
nconf = require "nconf"
elasticsearch = require "@elastic/elasticsearch"
BatchedQueue = require "../util/batched_queue"
IdxWriter = require "./idx_writer"
ESTemplates = require "./es_templates"
debug = require("debug")("sm:analytics")
# This module is responsible for:
# * Listen for session_start and listen interactions
# * Watch for sessions that are no longer active. Finalize them, attaching
# stats and duration, and throwing out sessions that did not meet minimum
# requirements
# * Answer questions about current number of listeners at any given time
# * Produce old-style w3c output for listener stats
module.exports = class Analytics
constructor: (@opts,cb) ->
@_uri = URL.parse @opts.config.es_uri
@log = @opts.log
@_timeout_sec = Number(@opts.config.finalize_secs)
if @opts.redis
@redis = @opts.redis.client
es_uri = @opts.config.es_uri
@idx_prefix = @opts.config.es_prefix
@log.debug "Connecting to Elasticsearch at #{es_uri} with prefix of #{@idx_prefix}"
debug "Connecting to ES at #{es_uri}, prefix #{@idx_prefix}"
apiVersion = '1.7'
if (typeof @opts.config.es_api_version != 'undefined')
apiVersion = @opts.config.es_api_version.toString()
@es = new elasticsearch.Client
node: es_uri
apiVersion: apiVersion
requestTimeout: @opts.config.request_timeout || 30000
@idx_batch = new BatchedQueue
batch: @opts.config.index_batch
latency: @opts.config.index_latency
@idx_writer = new IdxWriter @es, @log.child(submodule:"idx_writer")
@idx_writer.on "error", (err) =>
@log.error err
@idx_batch.pipe(@idx_writer)
# track open sessions
@sessions = {}
@local = tz(require "timezone/zones")(nconf.get("timezone")||"UTC")
# -- Load our Templates -- #
@_loadTemplates (err) =>
if err
console.error err
cb? err
else
# do something...
debug "Hitting cb after loading templates"
cb? null, @
# -- are there any sessions that should be finalized? -- #
# when was our last finalized session?
#last_session = @influx.query "SELECT max(time) from sessions", (err,res) =>
# console.log "last session is ", err, res
# what sessions have we seen since then?
# -- Redis Session Sweep -- #
if @redis
@log.info "Analytics setting up Redis session sweeper"
setInterval =>
# look for sessions that should be written (score less than now)
@redis.zrangebyscore "session-timeouts", 0, Math.floor( Number(new Date) / 1000), (err,sessions) =>
return @log.error "Error fetching sessions to finalize: #{err}" if err
_sFunc = =>
if s = sessions.shift()
@_triggerSession s
_sFunc()
_sFunc()
, 5*1000
#----------
_loadTemplates: (cb) ->
errors = []
debug "Loading #{Object.keys(ESTemplates).length} ES templates"
_loaded = _.after Object.keys(ESTemplates).length, =>
if errors.length > 0
debug "Failed to load one or more ES templates: #{errors.join(" | ")}"
@log.info errors
cb new Error "Failed to load index templates: #{ errors.join(" | ") }"
else
debug "ES templates loaded successfully."
cb null
for t,obj of ESTemplates
debug "Loading ES mapping for #{@idx_prefix}-#{t}"
@log.info "Loading Elasticsearch mappings for #{@idx_prefix}-#{t}"
tmplt = _.extend {}, obj, template:"#{@idx_prefix}-#{t}-*"
@log.info tmplt
@es.indices.putTemplate name:"#{@idx_prefix}-#{t}-template", body:tmplt, (err) =>
errors.push err if err
_loaded()
#----------
#----------
_log: (obj,cb) ->
session_id = null
if !obj.client?.session_id
cb? new Error "Object does not contain a session ID"
return false
# write one index per day of data
index_date = tz(obj.time,"%F")
time = new Date( obj.time )
# clean up IPv4 IP addresses stuck in IPv6
if obj.client?.ip
obj.client.ip = obj.client.ip.replace /^::ffff:/, ""
@_indicesForTimeRange "listens", time, (err,idx) =>
switch obj.type
when "session_start"
@idx_batch.write index:idx[0], body:
time: new Date(obj.time)
session_id: obj.client.session_id
stream: obj.stream_group || obj.stream
client: obj.client
type: "start"
cb? null
# -- start tracking the session -- #
when "listen"
# do we know of other duration for this session?
@_getStashedDurationFor obj.client.session_id, obj.duration, (err,dur) =>
@idx_batch.write index:idx[0], body:
session_id: obj.client.session_id
time: new Date(obj.time)
kbytes: obj.kbytes
duration: obj.duration
session_duration: dur
stream: obj.stream
client: obj.client
offsetSeconds: obj.offsetSeconds
contentTime: obj.contentTime
type: "listen"
cb? null
# -- update our timer -- #
@_updateSessionTimerFor obj.client.session_id, (err) =>
#----------
# Given a session id and duration, add the given duration to any
# existing cached duration and return the accumulated number
_getStashedDurationFor: (session,duration,cb) ->
if @redis
# use redis stash
key = "<KEY>session<KEY>}"
@redis.incrby key, Math.round(duration), (err,res) =>
cb err, res
# set a TTL on our key, so that it doesn't stay indefinitely
@redis.pexpire key, 5*60*1000, (err) =>
@log.error "Failed to set Redis TTL for #{key}: #{err}" if err
else
# use memory stash
s = @_ensureMemorySession session
s.duration += duration
cb null, s.duration
#----------
_updateSessionTimerFor: (session,cb) ->
if @_timeout_sec <= 0
# timeouts are disabled
return cb null
if @redis
# this will set the score, or update it if the session is
# already in the set
timeout_at = (Number(new Date) / 1000) + @_timeout_sec
@redis.zadd "session-timeouts", timeout_at, session, (err) =>
cb err
else
s = @_ensureMemorySession session
clearTimeout s.timeout if s.timeout
s.timeout = setTimeout =>
@_triggerSession session
, @_timeout_sec * 1000
cb null
#----------
_scrubSessionFor: (session,cb) ->
if @redis
@redis.zrem "session-timeouts", session, (err) =>
return cb err if err
@redis.del "duration-#{session}", (err) =>
cb err
else
s = @_ensureMemorySession session
clearTimeout s.timeout if s.timeout
delete @sessions[session]
cb null
#----------
_ensureMemorySession: (session) ->
@sessions[ session ] ||=
duration:0, last_seen_at:Number(new Date()), timeout:null
#----------
_triggerSession: (session) ->
@_scrubSessionFor session, (err) =>
return @log.error "Error cleaning session cache: #{err}" if err
@_finalizeSession session, (err,obj) =>
return @log.error "Error assembling session: #{err}" if err
if obj
@_storeSession obj, (err) =>
@log.error "Error writing session: #{err}" if err
#----------
_finalizeSession: (id,cb) ->
@log.silly "Finalizing session for #{ id }"
# This is a little ugly. We need to take several steps:
# 1) Have we ever finalized this session id?
# 2) Look up the session_start for the session_id
# 3) Compute the session's sent kbytes, sent duration, and elapsed duration
# 4) Write a session object
session = {}
# -- Get Started -- #
@_selectPreviousSession id, (err,ts) =>
if err
@log.error err
return cb? err
@_selectSessionStart id, (err,start) =>
if err
@log.error err
return cb err
if !start
@log.debug "Attempt to finalize invalid session. No start event for #{id}."
return cb null, false
@_selectListenTotals id, ts, (err,totals) =>
if err
@log.error err
return cb? err
if !totals
# Session did not have any recorded listen events. Toss it.
return cb null, false
# -- build session -- #
session =
session_id: id
output: start.output
stream: start.stream
time: totals.last_listen
start_time: ts || start.time
client: start.client
kbytes: totals.kbytes
duration: totals.duration
connected: ( Number(totals.last_listen) - Number(ts||start.time) ) / 1000
cb null, session
#----------
_storeSession: (session,cb) ->
# write one index per day of data
index_date = tz(session.time,"%F")
@es.index index:"#{@idx_prefix}-sessions-#{index_date}", type: '_doc', body:session, (err) =>
cb err
#----------
_selectSessionStart: (id,cb) ->
# -- Look up user information from session_start -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "start"
}
]
sort:
time:{order:"desc"}
size: 1
# session start is allowed to be anywhere in the last 72 hours
@_indicesForTimeRange "listens", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying session start for #{id}: #{err}" if err
if res.body.hits && res.body.hits.total.value > 0
cb null, _.extend {}, res.body.hits.hits[0]._source, time:new Date(res.body.hits.hits[0]._source.time)
#----------
_selectPreviousSession: (id,cb) ->
# -- Have we ever finalized this session id? -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "session"
}
]
sort:
time:{order:"desc"}
size:1
@_indicesForTimeRange "sessions", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying for old session #{id}: #{err}" if err
if !res.body.hits || res.body.hits.total.value == 0
cb null, null
else
cb null, new Date(res.body.hits.hits[0]._source.time)
#----------
_selectListenTotals: (id,ts,cb) ->
# -- Query total duration and kbytes sent -- #
filter =
if ts
"and":
filters:[
{ range:{ time:{ gt:ts } } },
{ term:{session_id:id} },
{ term:{type:"listen"}}
]
else
term:{session_id:id}
body =
query:
constant_score:
filter:filter
aggs:
duration:
sum:{ field:"duration" }
kbytes:
sum:{ field:"kbytes" }
last_listen:
max:{ field:"time" }
@_indicesForTimeRange "listens", new Date(), ts||"-72 hours", (err,indices) =>
@es.search index:indices, body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying listens to finalize session #{id}: #{err}" if err
if res.body.hits.total.value > 0
cb null,
requests: res.body.hits.total.value
duration: res.body.aggregations.duration.value
kbytes: res.body.aggregations.kbytes.value
last_listen: new Date(res.body.aggregations.last_listen.value)
else
cb null, null
#----------
_indicesForTimeRange: (idx,start,end,cb) ->
if _.isFunction(end)
cb = end
end = null
start = @local(start)
if _.isString(end) && end[0] == "-"
end = @local(start,end)
indices = []
if end
end = @local(end)
s = start
while true
s = @local(s,"-1 day")
break if s < end
indices.push "#{@idx_prefix}-#{idx}-#{ @local(s,"%F") }"
indices.unshift "#{@idx_prefix}-#{idx}-#{ @local(start,"%F") }"
cb null, _.uniq(indices)
#----------
countListeners: (cb) ->
# -- Query recent listeners -- #
body =
query:
constant_score:
filter:
range:
time:
gt:"now-15m"
size:0
aggs:
listeners_by_minute:
date_histogram:
field: "time"
interval: "minute"
aggs:
duration:
sum:{ field:"duration" }
sessions:
cardinality:{ field:"session_id" }
streams:
terms:{ field:"stream", size:5 }
@_indicesForTimeRange "listens", new Date(), "-15 minutes", (err,indices) =>
@es.search index:indices, type:"listen", body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Failed to query listeners: #{err}" if err
times = []
for obj in res.aggregations.listeners_by_minute.buckets
streams = {}
for sobj in obj.streams.buckets
streams[ sobj.key ] = sobj.doc_count
times.unshift
time: @local(new Date(obj.key),"%F %T%^z")
requests: obj.doc_count
avg_listeners: Math.round( obj.duration.value / 60 )
sessions: obj.sessions.value
requests_by_stream: streams
cb null, times
#----------
class @LogTransport extends winston.Transport
name: "analytics"
constructor: (@a) ->
super level:"interaction"
log: (level,msg,meta,cb) ->
if level == "interaction"
@a._log meta
cb?()
#----------
| true | _ = require "underscore"
URL = require "url"
winston = require "winston"
tz = require "timezone"
nconf = require "nconf"
elasticsearch = require "@elastic/elasticsearch"
BatchedQueue = require "../util/batched_queue"
IdxWriter = require "./idx_writer"
ESTemplates = require "./es_templates"
debug = require("debug")("sm:analytics")
# This module is responsible for:
# * Listen for session_start and listen interactions
# * Watch for sessions that are no longer active. Finalize them, attaching
# stats and duration, and throwing out sessions that did not meet minimum
# requirements
# * Answer questions about current number of listeners at any given time
# * Produce old-style w3c output for listener stats
module.exports = class Analytics
constructor: (@opts,cb) ->
@_uri = URL.parse @opts.config.es_uri
@log = @opts.log
@_timeout_sec = Number(@opts.config.finalize_secs)
if @opts.redis
@redis = @opts.redis.client
es_uri = @opts.config.es_uri
@idx_prefix = @opts.config.es_prefix
@log.debug "Connecting to Elasticsearch at #{es_uri} with prefix of #{@idx_prefix}"
debug "Connecting to ES at #{es_uri}, prefix #{@idx_prefix}"
apiVersion = '1.7'
if (typeof @opts.config.es_api_version != 'undefined')
apiVersion = @opts.config.es_api_version.toString()
@es = new elasticsearch.Client
node: es_uri
apiVersion: apiVersion
requestTimeout: @opts.config.request_timeout || 30000
@idx_batch = new BatchedQueue
batch: @opts.config.index_batch
latency: @opts.config.index_latency
@idx_writer = new IdxWriter @es, @log.child(submodule:"idx_writer")
@idx_writer.on "error", (err) =>
@log.error err
@idx_batch.pipe(@idx_writer)
# track open sessions
@sessions = {}
@local = tz(require "timezone/zones")(nconf.get("timezone")||"UTC")
# -- Load our Templates -- #
@_loadTemplates (err) =>
if err
console.error err
cb? err
else
# do something...
debug "Hitting cb after loading templates"
cb? null, @
# -- are there any sessions that should be finalized? -- #
# when was our last finalized session?
#last_session = @influx.query "SELECT max(time) from sessions", (err,res) =>
# console.log "last session is ", err, res
# what sessions have we seen since then?
# -- Redis Session Sweep -- #
if @redis
@log.info "Analytics setting up Redis session sweeper"
setInterval =>
# look for sessions that should be written (score less than now)
@redis.zrangebyscore "session-timeouts", 0, Math.floor( Number(new Date) / 1000), (err,sessions) =>
return @log.error "Error fetching sessions to finalize: #{err}" if err
_sFunc = =>
if s = sessions.shift()
@_triggerSession s
_sFunc()
_sFunc()
, 5*1000
#----------
_loadTemplates: (cb) ->
errors = []
debug "Loading #{Object.keys(ESTemplates).length} ES templates"
_loaded = _.after Object.keys(ESTemplates).length, =>
if errors.length > 0
debug "Failed to load one or more ES templates: #{errors.join(" | ")}"
@log.info errors
cb new Error "Failed to load index templates: #{ errors.join(" | ") }"
else
debug "ES templates loaded successfully."
cb null
for t,obj of ESTemplates
debug "Loading ES mapping for #{@idx_prefix}-#{t}"
@log.info "Loading Elasticsearch mappings for #{@idx_prefix}-#{t}"
tmplt = _.extend {}, obj, template:"#{@idx_prefix}-#{t}-*"
@log.info tmplt
@es.indices.putTemplate name:"#{@idx_prefix}-#{t}-template", body:tmplt, (err) =>
errors.push err if err
_loaded()
#----------
#----------
_log: (obj,cb) ->
session_id = null
if !obj.client?.session_id
cb? new Error "Object does not contain a session ID"
return false
# write one index per day of data
index_date = tz(obj.time,"%F")
time = new Date( obj.time )
# clean up IPv4 IP addresses stuck in IPv6
if obj.client?.ip
obj.client.ip = obj.client.ip.replace /^::ffff:/, ""
@_indicesForTimeRange "listens", time, (err,idx) =>
switch obj.type
when "session_start"
@idx_batch.write index:idx[0], body:
time: new Date(obj.time)
session_id: obj.client.session_id
stream: obj.stream_group || obj.stream
client: obj.client
type: "start"
cb? null
# -- start tracking the session -- #
when "listen"
# do we know of other duration for this session?
@_getStashedDurationFor obj.client.session_id, obj.duration, (err,dur) =>
@idx_batch.write index:idx[0], body:
session_id: obj.client.session_id
time: new Date(obj.time)
kbytes: obj.kbytes
duration: obj.duration
session_duration: dur
stream: obj.stream
client: obj.client
offsetSeconds: obj.offsetSeconds
contentTime: obj.contentTime
type: "listen"
cb? null
# -- update our timer -- #
@_updateSessionTimerFor obj.client.session_id, (err) =>
#----------
# Given a session id and duration, add the given duration to any
# existing cached duration and return the accumulated number
_getStashedDurationFor: (session,duration,cb) ->
if @redis
# use redis stash
key = "PI:KEY:<KEY>END_PIsessionPI:KEY:<KEY>END_PI}"
@redis.incrby key, Math.round(duration), (err,res) =>
cb err, res
# set a TTL on our key, so that it doesn't stay indefinitely
@redis.pexpire key, 5*60*1000, (err) =>
@log.error "Failed to set Redis TTL for #{key}: #{err}" if err
else
# use memory stash
s = @_ensureMemorySession session
s.duration += duration
cb null, s.duration
#----------
_updateSessionTimerFor: (session,cb) ->
if @_timeout_sec <= 0
# timeouts are disabled
return cb null
if @redis
# this will set the score, or update it if the session is
# already in the set
timeout_at = (Number(new Date) / 1000) + @_timeout_sec
@redis.zadd "session-timeouts", timeout_at, session, (err) =>
cb err
else
s = @_ensureMemorySession session
clearTimeout s.timeout if s.timeout
s.timeout = setTimeout =>
@_triggerSession session
, @_timeout_sec * 1000
cb null
#----------
_scrubSessionFor: (session,cb) ->
if @redis
@redis.zrem "session-timeouts", session, (err) =>
return cb err if err
@redis.del "duration-#{session}", (err) =>
cb err
else
s = @_ensureMemorySession session
clearTimeout s.timeout if s.timeout
delete @sessions[session]
cb null
#----------
_ensureMemorySession: (session) ->
@sessions[ session ] ||=
duration:0, last_seen_at:Number(new Date()), timeout:null
#----------
_triggerSession: (session) ->
@_scrubSessionFor session, (err) =>
return @log.error "Error cleaning session cache: #{err}" if err
@_finalizeSession session, (err,obj) =>
return @log.error "Error assembling session: #{err}" if err
if obj
@_storeSession obj, (err) =>
@log.error "Error writing session: #{err}" if err
#----------
_finalizeSession: (id,cb) ->
@log.silly "Finalizing session for #{ id }"
# This is a little ugly. We need to take several steps:
# 1) Have we ever finalized this session id?
# 2) Look up the session_start for the session_id
# 3) Compute the session's sent kbytes, sent duration, and elapsed duration
# 4) Write a session object
session = {}
# -- Get Started -- #
@_selectPreviousSession id, (err,ts) =>
if err
@log.error err
return cb? err
@_selectSessionStart id, (err,start) =>
if err
@log.error err
return cb err
if !start
@log.debug "Attempt to finalize invalid session. No start event for #{id}."
return cb null, false
@_selectListenTotals id, ts, (err,totals) =>
if err
@log.error err
return cb? err
if !totals
# Session did not have any recorded listen events. Toss it.
return cb null, false
# -- build session -- #
session =
session_id: id
output: start.output
stream: start.stream
time: totals.last_listen
start_time: ts || start.time
client: start.client
kbytes: totals.kbytes
duration: totals.duration
connected: ( Number(totals.last_listen) - Number(ts||start.time) ) / 1000
cb null, session
#----------
_storeSession: (session,cb) ->
# write one index per day of data
index_date = tz(session.time,"%F")
@es.index index:"#{@idx_prefix}-sessions-#{index_date}", type: '_doc', body:session, (err) =>
cb err
#----------
_selectSessionStart: (id,cb) ->
# -- Look up user information from session_start -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "start"
}
]
sort:
time:{order:"desc"}
size: 1
# session start is allowed to be anywhere in the last 72 hours
@_indicesForTimeRange "listens", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying session start for #{id}: #{err}" if err
if res.body.hits && res.body.hits.total.value > 0
cb null, _.extend {}, res.body.hits.hits[0]._source, time:new Date(res.body.hits.hits[0]._source.time)
#----------
_selectPreviousSession: (id,cb) ->
# -- Have we ever finalized this session id? -- #
body =
query:
bool:
must: [
{
match:
"session_id": id
},
{
match:
"type": "session"
}
]
sort:
time:{order:"desc"}
size:1
@_indicesForTimeRange "sessions", new Date(), "-72 hours", (err,indices) =>
@es.search body:body, index:indices, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying for old session #{id}: #{err}" if err
if !res.body.hits || res.body.hits.total.value == 0
cb null, null
else
cb null, new Date(res.body.hits.hits[0]._source.time)
#----------
_selectListenTotals: (id,ts,cb) ->
# -- Query total duration and kbytes sent -- #
filter =
if ts
"and":
filters:[
{ range:{ time:{ gt:ts } } },
{ term:{session_id:id} },
{ term:{type:"listen"}}
]
else
term:{session_id:id}
body =
query:
constant_score:
filter:filter
aggs:
duration:
sum:{ field:"duration" }
kbytes:
sum:{ field:"kbytes" }
last_listen:
max:{ field:"time" }
@_indicesForTimeRange "listens", new Date(), ts||"-72 hours", (err,indices) =>
@es.search index:indices, body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Error querying listens to finalize session #{id}: #{err}" if err
if res.body.hits.total.value > 0
cb null,
requests: res.body.hits.total.value
duration: res.body.aggregations.duration.value
kbytes: res.body.aggregations.kbytes.value
last_listen: new Date(res.body.aggregations.last_listen.value)
else
cb null, null
#----------
_indicesForTimeRange: (idx,start,end,cb) ->
if _.isFunction(end)
cb = end
end = null
start = @local(start)
if _.isString(end) && end[0] == "-"
end = @local(start,end)
indices = []
if end
end = @local(end)
s = start
while true
s = @local(s,"-1 day")
break if s < end
indices.push "#{@idx_prefix}-#{idx}-#{ @local(s,"%F") }"
indices.unshift "#{@idx_prefix}-#{idx}-#{ @local(start,"%F") }"
cb null, _.uniq(indices)
#----------
countListeners: (cb) ->
# -- Query recent listeners -- #
body =
query:
constant_score:
filter:
range:
time:
gt:"now-15m"
size:0
aggs:
listeners_by_minute:
date_histogram:
field: "time"
interval: "minute"
aggs:
duration:
sum:{ field:"duration" }
sessions:
cardinality:{ field:"session_id" }
streams:
terms:{ field:"stream", size:5 }
@_indicesForTimeRange "listens", new Date(), "-15 minutes", (err,indices) =>
@es.search index:indices, type:"listen", body:body, ignoreUnavailable:true, (err,res) =>
return cb new Error "Failed to query listeners: #{err}" if err
times = []
for obj in res.aggregations.listeners_by_minute.buckets
streams = {}
for sobj in obj.streams.buckets
streams[ sobj.key ] = sobj.doc_count
times.unshift
time: @local(new Date(obj.key),"%F %T%^z")
requests: obj.doc_count
avg_listeners: Math.round( obj.duration.value / 60 )
sessions: obj.sessions.value
requests_by_stream: streams
cb null, times
#----------
class @LogTransport extends winston.Transport
name: "analytics"
constructor: (@a) ->
super level:"interaction"
log: (level,msg,meta,cb) ->
if level == "interaction"
@a._log meta
cb?()
#----------
|
[
{
"context": "# @author mrdoob / http://mrdoob.com/\r\n# @author alteredq / http:/",
"end": 16,
"score": 0.9996324181556702,
"start": 10,
"tag": "USERNAME",
"value": "mrdoob"
},
{
"context": "# @author mrdoob / http://mrdoob.com/\r\n# @author alteredq / http://alteredqualia.com/\r\n#... | source/javascripts/new_src/loaders/json.coffee | andrew-aladev/three.js | 0 | # @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author aladjev.andrew@gmail.com
#= require new_src/loaders/loader
#= require new_src/core/geometry
#= require new_src/core/vector_3
#= require new_src/core/face_3
#= require new_src/core/face_4
#= require new_src/core/uv
class JSONLoader extends THREE.Loader
constrcutor: (showStatus) ->
super showStatus
load: (url, callback, texturePath) ->
worker = undefined
scope = this
texturePath = (if texturePath then texturePath else @extractUrlBase(url))
@onLoadStart()
@loadAjaxJSON this, url, callback, texturePath
loadAjaxJSON: (context, url, callback, texturePath, callbackProgress) ->
xhr = new XMLHttpRequest()
length = 0
xhr.onreadystatechange = ->
if xhr.readyState is xhr.DONE
if xhr.status is 200 or xhr.status is 0
if xhr.responseText
json = JSON.parse(xhr.responseText)
context.createModel json, callback, texturePath
else
console.warn "THREE.JSONLoader: [" + url + "] seems to be unreachable or file there is empty"
# in context of more complex asset initialization
# do not block on single failed file
# maybe should go even one more level up
context.onLoadComplete()
else
console.error "THREE.JSONLoader: Couldn't load [" + url + "] [" + xhr.status + "]"
else if xhr.readyState is xhr.LOADING
if callbackProgress
length = xhr.getResponseHeader("Content-Length") if length is 0
callbackProgress
total: length
loaded: xhr.responseText.length
else length = xhr.getResponseHeader("Content-Length") if xhr.readyState is xhr.HEADERS_RECEIVED
xhr.open "GET", url, true
xhr.overrideMimeType "text/plain; charset=x-user-defined" if xhr.overrideMimeType
xhr.setRequestHeader "Content-Type", "text/plain"
xhr.send null
createModel: (json, callback, texturePath) ->
callback new JSONModel(json, texturePath, this)
class JSONModel extends THREE.Geometry
constructor: (json, texturePath, jsonLoader) ->
@json = json
@texturePath = texturePath
@jsonLoader = jsonLoader
if @json.scale isnt undefined
@scale = 1.0 / @json.scale
else
@scale = 1.0
super()
@jsonLoader.initMaterials this, @json.materials, @texturePath
@parseModel()
@parseSkin()
@parseMorphing()
@computeCentroids()
@computeFaceNormals()
@computeTangents() if @jsonLoader.hasNormals this
parseModel: ->
isBitSet = (value, position) ->
value & (1 << position)
faces = @json.faces
vertices = @json.vertices
normals = @json.normals
colors = @json.colors
nUvLayers = 0
# disregard empty arrays
uvs_length = @json.uvs.length
for i in [0...uvs_length]
nUvLayers++ if @json.uvs[i].length
for i in [0...nUvLayers]
@faceUvs[i] = []
@faceVertexUvs[i] = []
offset = 0
zLength = vertices.length
while offset < zLength
vertex = new THREE.Vector3()
vertex.x = vertices[offset++] * @scale
vertex.y = vertices[offset++] * @scale
vertex.z = vertices[offset++] * @scale
@vertices.push vertex
offset = 0
zLength = faces.length
while offset < zLength
type = faces[offset++]
isQuad = isBitSet type, 0
hasMaterial = isBitSet type, 1
hasFaceUv = isBitSet type, 2
hasFaceVertexUv = isBitSet type, 3
hasFaceNormal = isBitSet type, 4
hasFaceVertexNormal = isBitSet type, 5
hasFaceColor = isBitSet type, 6
hasFaceVertexColor = isBitSet type, 7
# console.log(
# "type: ", type
# "bits: ", isQuad
# hasMaterial
# hasFaceUv
# hasFaceVertexUv
# hasFaceNormal
# hasFaceVertexNormal
# hasFaceColor
# hasFaceVertexColor
# )
if isQuad
face = new THREE.Face4()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
face.d = faces[offset++]
nVertices = 4
else
face = new THREE.Face3()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
nVertices = 3
if hasMaterial
materialIndex = faces[offset++]
face.materialIndex = materialIndex
# to get face <=> uv index correspondence
fi = @faces.length
if hasFaceUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
@faceUvs[i][fi] = new THREE.UV u, v
if hasFaceVertexUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvs = []
for j in [0...nVertices]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
uvs[j] = new THREE.UV u, v
@faceVertexUvs[i][fi] = uvs
if hasFaceNormal
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.normal = normal
if hasFaceVertexNormal
for i in [0...nVertices]
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.vertexNormals.push normal
if hasFaceColor
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.color = color
if hasFaceVertexColor
for i in [0...nVertices]
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.vertexColors.push color
@faces.push face
parseSkin: ->
if @json.skinWeights
length = @json.skinWeights.length
for i in [0...length] by 2
x = @json.skinWeights[i]
y = @json.skinWeights[i + 1]
z = 0
w = 0
@skinWeights.push new THREE.Vector4(x, y, z, w)
if @json.skinIndices
length = @json.skinIndices.length
for i in [0...length] by 2
a = @json.skinIndices[i]
b = @json.skinIndices[i + 1]
c = 0
d = 0
@skinIndices.push new THREE.Vector4(a, b, c, d)
@bones = @json.bones
@animation = @json.animation
parseMorphing: ->
if @json.morphTargets isnt undefined
length = @json.morphTargets.length
for i in [0...length]
@morphTargets[i] = {}
@morphTargets[i].name = @json.morphTargets[i].name
@morphTargets[i].vertices = []
dstVertices = @morphTargets[i].vertices
srcVertices = @json.morphTargets[i].vertices
v = 0
vLength = srcVertices.length
for v in [0...vLength] by 3
vertex = new THREE.Vector3()
vertex.x = srcVertices[v] * @scale
vertex.y = srcVertices[v + 1] * @scale
vertex.z = srcVertices[v + 2] * @scale
dstVertices.push vertex
if @json.morphColors isnt undefined
length = @json.morphColors.length
for i in [0...length]
@morphColors[i] = {}
@morphColors[i].name = @json.morphColors[i].name
@morphColors[i].colors = []
dstColors = @morphColors[i].colors
srcColors = @json.morphColors[i].colors
cLength = srcColors.length
for c in [0...cLength] by 3
color = new THREE.Color 0xffaa00
color.setRGB srcColors[c], srcColors[c + 1], srcColors[c + 2]
dstColors.push color
namespace "THREE", (exports) ->
exports.JSONLoader = JSONLoader
exports.JSONModel = JSONModel | 110009 | # @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author <EMAIL>
#= require new_src/loaders/loader
#= require new_src/core/geometry
#= require new_src/core/vector_3
#= require new_src/core/face_3
#= require new_src/core/face_4
#= require new_src/core/uv
class JSONLoader extends THREE.Loader
constrcutor: (showStatus) ->
super showStatus
load: (url, callback, texturePath) ->
worker = undefined
scope = this
texturePath = (if texturePath then texturePath else @extractUrlBase(url))
@onLoadStart()
@loadAjaxJSON this, url, callback, texturePath
loadAjaxJSON: (context, url, callback, texturePath, callbackProgress) ->
xhr = new XMLHttpRequest()
length = 0
xhr.onreadystatechange = ->
if xhr.readyState is xhr.DONE
if xhr.status is 200 or xhr.status is 0
if xhr.responseText
json = JSON.parse(xhr.responseText)
context.createModel json, callback, texturePath
else
console.warn "THREE.JSONLoader: [" + url + "] seems to be unreachable or file there is empty"
# in context of more complex asset initialization
# do not block on single failed file
# maybe should go even one more level up
context.onLoadComplete()
else
console.error "THREE.JSONLoader: Couldn't load [" + url + "] [" + xhr.status + "]"
else if xhr.readyState is xhr.LOADING
if callbackProgress
length = xhr.getResponseHeader("Content-Length") if length is 0
callbackProgress
total: length
loaded: xhr.responseText.length
else length = xhr.getResponseHeader("Content-Length") if xhr.readyState is xhr.HEADERS_RECEIVED
xhr.open "GET", url, true
xhr.overrideMimeType "text/plain; charset=x-user-defined" if xhr.overrideMimeType
xhr.setRequestHeader "Content-Type", "text/plain"
xhr.send null
createModel: (json, callback, texturePath) ->
callback new JSONModel(json, texturePath, this)
class JSONModel extends THREE.Geometry
constructor: (json, texturePath, jsonLoader) ->
@json = json
@texturePath = texturePath
@jsonLoader = jsonLoader
if @json.scale isnt undefined
@scale = 1.0 / @json.scale
else
@scale = 1.0
super()
@jsonLoader.initMaterials this, @json.materials, @texturePath
@parseModel()
@parseSkin()
@parseMorphing()
@computeCentroids()
@computeFaceNormals()
@computeTangents() if @jsonLoader.hasNormals this
parseModel: ->
isBitSet = (value, position) ->
value & (1 << position)
faces = @json.faces
vertices = @json.vertices
normals = @json.normals
colors = @json.colors
nUvLayers = 0
# disregard empty arrays
uvs_length = @json.uvs.length
for i in [0...uvs_length]
nUvLayers++ if @json.uvs[i].length
for i in [0...nUvLayers]
@faceUvs[i] = []
@faceVertexUvs[i] = []
offset = 0
zLength = vertices.length
while offset < zLength
vertex = new THREE.Vector3()
vertex.x = vertices[offset++] * @scale
vertex.y = vertices[offset++] * @scale
vertex.z = vertices[offset++] * @scale
@vertices.push vertex
offset = 0
zLength = faces.length
while offset < zLength
type = faces[offset++]
isQuad = isBitSet type, 0
hasMaterial = isBitSet type, 1
hasFaceUv = isBitSet type, 2
hasFaceVertexUv = isBitSet type, 3
hasFaceNormal = isBitSet type, 4
hasFaceVertexNormal = isBitSet type, 5
hasFaceColor = isBitSet type, 6
hasFaceVertexColor = isBitSet type, 7
# console.log(
# "type: ", type
# "bits: ", isQuad
# hasMaterial
# hasFaceUv
# hasFaceVertexUv
# hasFaceNormal
# hasFaceVertexNormal
# hasFaceColor
# hasFaceVertexColor
# )
if isQuad
face = new THREE.Face4()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
face.d = faces[offset++]
nVertices = 4
else
face = new THREE.Face3()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
nVertices = 3
if hasMaterial
materialIndex = faces[offset++]
face.materialIndex = materialIndex
# to get face <=> uv index correspondence
fi = @faces.length
if hasFaceUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
@faceUvs[i][fi] = new THREE.UV u, v
if hasFaceVertexUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvs = []
for j in [0...nVertices]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
uvs[j] = new THREE.UV u, v
@faceVertexUvs[i][fi] = uvs
if hasFaceNormal
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.normal = normal
if hasFaceVertexNormal
for i in [0...nVertices]
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.vertexNormals.push normal
if hasFaceColor
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.color = color
if hasFaceVertexColor
for i in [0...nVertices]
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.vertexColors.push color
@faces.push face
parseSkin: ->
if @json.skinWeights
length = @json.skinWeights.length
for i in [0...length] by 2
x = @json.skinWeights[i]
y = @json.skinWeights[i + 1]
z = 0
w = 0
@skinWeights.push new THREE.Vector4(x, y, z, w)
if @json.skinIndices
length = @json.skinIndices.length
for i in [0...length] by 2
a = @json.skinIndices[i]
b = @json.skinIndices[i + 1]
c = 0
d = 0
@skinIndices.push new THREE.Vector4(a, b, c, d)
@bones = @json.bones
@animation = @json.animation
parseMorphing: ->
if @json.morphTargets isnt undefined
length = @json.morphTargets.length
for i in [0...length]
@morphTargets[i] = {}
@morphTargets[i].name = @json.morphTargets[i].name
@morphTargets[i].vertices = []
dstVertices = @morphTargets[i].vertices
srcVertices = @json.morphTargets[i].vertices
v = 0
vLength = srcVertices.length
for v in [0...vLength] by 3
vertex = new THREE.Vector3()
vertex.x = srcVertices[v] * @scale
vertex.y = srcVertices[v + 1] * @scale
vertex.z = srcVertices[v + 2] * @scale
dstVertices.push vertex
if @json.morphColors isnt undefined
length = @json.morphColors.length
for i in [0...length]
@morphColors[i] = {}
@morphColors[i].name = @json.morphColors[i].name
@morphColors[i].colors = []
dstColors = @morphColors[i].colors
srcColors = @json.morphColors[i].colors
cLength = srcColors.length
for c in [0...cLength] by 3
color = new THREE.Color 0xffaa00
color.setRGB srcColors[c], srcColors[c + 1], srcColors[c + 2]
dstColors.push color
namespace "THREE", (exports) ->
exports.JSONLoader = JSONLoader
exports.JSONModel = JSONModel | true | # @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author PI:EMAIL:<EMAIL>END_PI
#= require new_src/loaders/loader
#= require new_src/core/geometry
#= require new_src/core/vector_3
#= require new_src/core/face_3
#= require new_src/core/face_4
#= require new_src/core/uv
class JSONLoader extends THREE.Loader
constrcutor: (showStatus) ->
super showStatus
load: (url, callback, texturePath) ->
worker = undefined
scope = this
texturePath = (if texturePath then texturePath else @extractUrlBase(url))
@onLoadStart()
@loadAjaxJSON this, url, callback, texturePath
loadAjaxJSON: (context, url, callback, texturePath, callbackProgress) ->
xhr = new XMLHttpRequest()
length = 0
xhr.onreadystatechange = ->
if xhr.readyState is xhr.DONE
if xhr.status is 200 or xhr.status is 0
if xhr.responseText
json = JSON.parse(xhr.responseText)
context.createModel json, callback, texturePath
else
console.warn "THREE.JSONLoader: [" + url + "] seems to be unreachable or file there is empty"
# in context of more complex asset initialization
# do not block on single failed file
# maybe should go even one more level up
context.onLoadComplete()
else
console.error "THREE.JSONLoader: Couldn't load [" + url + "] [" + xhr.status + "]"
else if xhr.readyState is xhr.LOADING
if callbackProgress
length = xhr.getResponseHeader("Content-Length") if length is 0
callbackProgress
total: length
loaded: xhr.responseText.length
else length = xhr.getResponseHeader("Content-Length") if xhr.readyState is xhr.HEADERS_RECEIVED
xhr.open "GET", url, true
xhr.overrideMimeType "text/plain; charset=x-user-defined" if xhr.overrideMimeType
xhr.setRequestHeader "Content-Type", "text/plain"
xhr.send null
createModel: (json, callback, texturePath) ->
callback new JSONModel(json, texturePath, this)
class JSONModel extends THREE.Geometry
constructor: (json, texturePath, jsonLoader) ->
@json = json
@texturePath = texturePath
@jsonLoader = jsonLoader
if @json.scale isnt undefined
@scale = 1.0 / @json.scale
else
@scale = 1.0
super()
@jsonLoader.initMaterials this, @json.materials, @texturePath
@parseModel()
@parseSkin()
@parseMorphing()
@computeCentroids()
@computeFaceNormals()
@computeTangents() if @jsonLoader.hasNormals this
parseModel: ->
isBitSet = (value, position) ->
value & (1 << position)
faces = @json.faces
vertices = @json.vertices
normals = @json.normals
colors = @json.colors
nUvLayers = 0
# disregard empty arrays
uvs_length = @json.uvs.length
for i in [0...uvs_length]
nUvLayers++ if @json.uvs[i].length
for i in [0...nUvLayers]
@faceUvs[i] = []
@faceVertexUvs[i] = []
offset = 0
zLength = vertices.length
while offset < zLength
vertex = new THREE.Vector3()
vertex.x = vertices[offset++] * @scale
vertex.y = vertices[offset++] * @scale
vertex.z = vertices[offset++] * @scale
@vertices.push vertex
offset = 0
zLength = faces.length
while offset < zLength
type = faces[offset++]
isQuad = isBitSet type, 0
hasMaterial = isBitSet type, 1
hasFaceUv = isBitSet type, 2
hasFaceVertexUv = isBitSet type, 3
hasFaceNormal = isBitSet type, 4
hasFaceVertexNormal = isBitSet type, 5
hasFaceColor = isBitSet type, 6
hasFaceVertexColor = isBitSet type, 7
# console.log(
# "type: ", type
# "bits: ", isQuad
# hasMaterial
# hasFaceUv
# hasFaceVertexUv
# hasFaceNormal
# hasFaceVertexNormal
# hasFaceColor
# hasFaceVertexColor
# )
if isQuad
face = new THREE.Face4()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
face.d = faces[offset++]
nVertices = 4
else
face = new THREE.Face3()
face.a = faces[offset++]
face.b = faces[offset++]
face.c = faces[offset++]
nVertices = 3
if hasMaterial
materialIndex = faces[offset++]
face.materialIndex = materialIndex
# to get face <=> uv index correspondence
fi = @faces.length
if hasFaceUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
@faceUvs[i][fi] = new THREE.UV u, v
if hasFaceVertexUv
for i in [0...nUvLayers]
uvLayer = @json.uvs[i]
uvs = []
for j in [0...nVertices]
uvIndex = faces[offset++]
u = uvLayer[uvIndex * 2]
v = uvLayer[uvIndex * 2 + 1]
uvs[j] = new THREE.UV u, v
@faceVertexUvs[i][fi] = uvs
if hasFaceNormal
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.normal = normal
if hasFaceVertexNormal
for i in [0...nVertices]
normalIndex = faces[offset++] * 3
normal = new THREE.Vector3()
normal.x = normals[normalIndex++]
normal.y = normals[normalIndex++]
normal.z = normals[normalIndex]
face.vertexNormals.push normal
if hasFaceColor
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.color = color
if hasFaceVertexColor
for i in [0...nVertices]
colorIndex = faces[offset++]
color = new THREE.Color colors[colorIndex]
face.vertexColors.push color
@faces.push face
parseSkin: ->
if @json.skinWeights
length = @json.skinWeights.length
for i in [0...length] by 2
x = @json.skinWeights[i]
y = @json.skinWeights[i + 1]
z = 0
w = 0
@skinWeights.push new THREE.Vector4(x, y, z, w)
if @json.skinIndices
length = @json.skinIndices.length
for i in [0...length] by 2
a = @json.skinIndices[i]
b = @json.skinIndices[i + 1]
c = 0
d = 0
@skinIndices.push new THREE.Vector4(a, b, c, d)
@bones = @json.bones
@animation = @json.animation
parseMorphing: ->
if @json.morphTargets isnt undefined
length = @json.morphTargets.length
for i in [0...length]
@morphTargets[i] = {}
@morphTargets[i].name = @json.morphTargets[i].name
@morphTargets[i].vertices = []
dstVertices = @morphTargets[i].vertices
srcVertices = @json.morphTargets[i].vertices
v = 0
vLength = srcVertices.length
for v in [0...vLength] by 3
vertex = new THREE.Vector3()
vertex.x = srcVertices[v] * @scale
vertex.y = srcVertices[v + 1] * @scale
vertex.z = srcVertices[v + 2] * @scale
dstVertices.push vertex
if @json.morphColors isnt undefined
length = @json.morphColors.length
for i in [0...length]
@morphColors[i] = {}
@morphColors[i].name = @json.morphColors[i].name
@morphColors[i].colors = []
dstColors = @morphColors[i].colors
srcColors = @json.morphColors[i].colors
cLength = srcColors.length
for c in [0...cLength] by 3
color = new THREE.Color 0xffaa00
color.setRGB srcColors[c], srcColors[c + 1], srcColors[c + 2]
dstColors.push color
namespace "THREE", (exports) ->
exports.JSONLoader = JSONLoader
exports.JSONModel = JSONModel |
[
{
"context": " hobbyList.add new Hobby(id: 0, name: 'abc', @facade), new Hobby(id: 100, name: 'xyz', @faca",
"end": 6858,
"score": 0.8912148475646973,
"start": 6855,
"tag": "NAME",
"value": "abc"
},
{
"context": "facade)\n\n hobbyList.add {id: 0, name: 'abc'}, {... | spec/lib/base-list.coffee | CureApp/base-domain | 37 |
Facade = require '../base-domain'
{ GeneralFactory, BaseList, Entity, ValueObject,
BaseSyncRepository, BaseAsyncRepository } = Facade
{ MemoryResource } = require '../others'
describe 'BaseList', ->
beforeEach ->
@facade = require('../create-facade').create()
class Hobby extends Entity
@properties:
name: @TYPES.STRING
class NonEntity extends ValueObject
@properties:
name: @TYPES.STRING
class HobbyRepository extends BaseSyncRepository
@modelName: 'hobby'
client: new MemoryResource()
class Diary extends Entity
@properties:
name: @TYPES.STRING
class DiaryRepository extends BaseAsyncRepository
@modelName: 'diary'
client: new MemoryResource()
@facade.addClass 'hobby', Hobby
@facade.addClass 'non-entity', NonEntity
@facade.addClass 'hobby-repository', HobbyRepository
@facade.addClass 'diary', Diary
@facade.addClass 'diary-repository', DiaryRepository
@hobbyRepo = @facade.createRepository('hobby')
@hobbies = (for name, i in ['keyboard', 'jogging', 'cycling']
hobby = @facade.createModel 'hobby', id: 3 - i, name: name
@hobbyRepo.save hobby
)
describe 'constructor', ->
it 'sorts model when sort function is defined', ->
hobbyIds = (hobby.id for hobby in @hobbies)
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
sort: (a, b) -> a.id - b.id
hobbyList = new HobbyList(items: @hobbies, @facade)
hobbyIdsSorted = (hobby.id for hobby in hobbyList.items)
assert.deepEqual hobbyIdsSorted, [1, 2, 3]
describe 'ids', ->
beforeEach ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
class NonEntityList extends BaseList
@itemModelName: 'non-entity'
@facade.addClass 'hobby-list', HobbyList
@facade.addClass 'non-entity-list', NonEntityList
it 'get array when the item is Entity', ->
hobbyList = @facade.createModel('hobby-list')
assert hobbyList.ids instanceof Array
it 'get undefined when the item is not Entity', ->
nonEntityList = @facade.createModel('non-entity-list')
assert nonEntityList.ids is undefined
it 'get array of ids when the item is Entity', ->
hobbyList = @facade.createModel('hobby-list', @hobbies)
assert.deepEqual hobbyList.ids, [3, 2, 1]
describe 'first', ->
it 'returns first value of the items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.first() is @hobbies[0]
describe 'last', ->
it 'returns last value of the items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.last() is @hobbies[2]
describe 'getByIndex', ->
it 'returns items at the given index', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.getByIndex(0) is @hobbies[0]
assert hobbyList.getByIndex(1) is @hobbies[1]
assert hobbyList.getByIndex(2) is @hobbies[2]
describe 'getItem', ->
it 'returns items at the given index', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.getItem(0) is @hobbies[0]
assert hobbyList.getItem(1) is @hobbies[1]
assert hobbyList.getItem(2) is @hobbies[2]
assert.throws(=> hobbyList.getItem(3))
describe 'toArray', ->
it 'returns deeply-equal array to items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert.deepEqual hobbyList.toArray(), hobbyList.items
describe 'forEach', ->
it 'executes function for each item', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
str = ''
hobbyList.forEach (item) ->
str += item.name + '|'
assert str is 'keyboard|jogging|cycling|'
describe 'map', ->
it 'executes function for each item and returns the results', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
ids = hobbyList.map (item) -> item.id
assert.deepEqual ids, [3, 2, 1]
describe 'filter', ->
it 'filter items with given function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
filtered = hobbyList.filter (item) -> item.id is 3
assert.deepEqual filtered, [ @hobbies[0] ]
describe 'some', ->
it 'checks if some items match the condition in function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.some (item) -> item.id is 1
assert not hobbyList.some (item) -> item.id is 4
describe 'every', ->
it 'checks if every items match the condition in function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.every (item) -> item.id?
assert not hobbyList.every (item) -> item.id is 1
describe 'add', ->
it 'appends models', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
Hobby = @facade.getModel 'hobby'
hobbyList.add new Hobby(id: 0, name: 'abc', @facade), new Hobby(id: 100, name: 'xyz', @facade)
assert hobbyList.first().name is 'keyboard'
assert hobbyList.last().name is 'xyz'
it 'appends plain objects', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
hobbyList.add {id: 0, name: 'abc'}, {id: 100, name: 'xyz'}
assert hobbyList.length is 5
assert hobbyList.first().name is 'keyboard'
assert hobbyList.last().name is 'xyz'
describe 'clear', ->
it 'clears all models', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
hobbyList.clear()
assert hobbyList.length is 0
assert hobbyList.ids.length is 0
hobbyList.clear()
assert hobbyList.length is 0
assert hobbyList.ids.length is 0
describe 'setIds', ->
beforeEach ->
@facade.createRepository('diary').save(id: 'abc', name: 'xxx')
it 'can load data by ids synchronously from BaseSyncRepository', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
list = new HobbyList(null, @facade)
list.setIds(['1', '3'])
list.include()
assert list.length is 2
describe 'remove', ->
it 'removes an item by index', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
hobbyList.remove(1)
assert hobbyList.length is 2
assert hobbyList.ids.length is 2
assert.deepEqual hobbyList.ids, [3, 1]
describe '$remove', ->
it 'removes an item by index and create a new list', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
newList = hobbyList.$remove(1)
assert newList.length is 2
assert newList.ids.length is 2
assert.deepEqual newList.ids, [3, 1]
assert hobbyList.length is 3
assert hobbyList.ids.length is 3
| 61514 |
Facade = require '../base-domain'
{ GeneralFactory, BaseList, Entity, ValueObject,
BaseSyncRepository, BaseAsyncRepository } = Facade
{ MemoryResource } = require '../others'
describe 'BaseList', ->
beforeEach ->
@facade = require('../create-facade').create()
class Hobby extends Entity
@properties:
name: @TYPES.STRING
class NonEntity extends ValueObject
@properties:
name: @TYPES.STRING
class HobbyRepository extends BaseSyncRepository
@modelName: 'hobby'
client: new MemoryResource()
class Diary extends Entity
@properties:
name: @TYPES.STRING
class DiaryRepository extends BaseAsyncRepository
@modelName: 'diary'
client: new MemoryResource()
@facade.addClass 'hobby', Hobby
@facade.addClass 'non-entity', NonEntity
@facade.addClass 'hobby-repository', HobbyRepository
@facade.addClass 'diary', Diary
@facade.addClass 'diary-repository', DiaryRepository
@hobbyRepo = @facade.createRepository('hobby')
@hobbies = (for name, i in ['keyboard', 'jogging', 'cycling']
hobby = @facade.createModel 'hobby', id: 3 - i, name: name
@hobbyRepo.save hobby
)
describe 'constructor', ->
it 'sorts model when sort function is defined', ->
hobbyIds = (hobby.id for hobby in @hobbies)
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
sort: (a, b) -> a.id - b.id
hobbyList = new HobbyList(items: @hobbies, @facade)
hobbyIdsSorted = (hobby.id for hobby in hobbyList.items)
assert.deepEqual hobbyIdsSorted, [1, 2, 3]
describe 'ids', ->
beforeEach ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
class NonEntityList extends BaseList
@itemModelName: 'non-entity'
@facade.addClass 'hobby-list', HobbyList
@facade.addClass 'non-entity-list', NonEntityList
it 'get array when the item is Entity', ->
hobbyList = @facade.createModel('hobby-list')
assert hobbyList.ids instanceof Array
it 'get undefined when the item is not Entity', ->
nonEntityList = @facade.createModel('non-entity-list')
assert nonEntityList.ids is undefined
it 'get array of ids when the item is Entity', ->
hobbyList = @facade.createModel('hobby-list', @hobbies)
assert.deepEqual hobbyList.ids, [3, 2, 1]
describe 'first', ->
it 'returns first value of the items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.first() is @hobbies[0]
describe 'last', ->
it 'returns last value of the items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.last() is @hobbies[2]
describe 'getByIndex', ->
it 'returns items at the given index', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.getByIndex(0) is @hobbies[0]
assert hobbyList.getByIndex(1) is @hobbies[1]
assert hobbyList.getByIndex(2) is @hobbies[2]
describe 'getItem', ->
it 'returns items at the given index', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.getItem(0) is @hobbies[0]
assert hobbyList.getItem(1) is @hobbies[1]
assert hobbyList.getItem(2) is @hobbies[2]
assert.throws(=> hobbyList.getItem(3))
describe 'toArray', ->
it 'returns deeply-equal array to items', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert.deepEqual hobbyList.toArray(), hobbyList.items
describe 'forEach', ->
it 'executes function for each item', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
str = ''
hobbyList.forEach (item) ->
str += item.name + '|'
assert str is 'keyboard|jogging|cycling|'
describe 'map', ->
it 'executes function for each item and returns the results', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
ids = hobbyList.map (item) -> item.id
assert.deepEqual ids, [3, 2, 1]
describe 'filter', ->
it 'filter items with given function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
filtered = hobbyList.filter (item) -> item.id is 3
assert.deepEqual filtered, [ @hobbies[0] ]
describe 'some', ->
it 'checks if some items match the condition in function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.some (item) -> item.id is 1
assert not hobbyList.some (item) -> item.id is 4
describe 'every', ->
it 'checks if every items match the condition in function', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.every (item) -> item.id?
assert not hobbyList.every (item) -> item.id is 1
describe 'add', ->
it 'appends models', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
Hobby = @facade.getModel 'hobby'
hobbyList.add new Hobby(id: 0, name: '<NAME>', @facade), new Hobby(id: 100, name: 'xyz', @facade)
assert hobbyList.first().name is 'keyboard'
assert hobbyList.last().name is 'xyz'
it 'appends plain objects', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
hobbyList.add {id: 0, name: '<NAME>'}, {id: 100, name: 'xyz'}
assert hobbyList.length is 5
assert hobbyList.first().name is 'keyboard'
assert hobbyList.last().name is 'xyz'
describe 'clear', ->
it 'clears all models', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
hobbyList.clear()
assert hobbyList.length is 0
assert hobbyList.ids.length is 0
hobbyList.clear()
assert hobbyList.length is 0
assert hobbyList.ids.length is 0
describe 'setIds', ->
beforeEach ->
@facade.createRepository('diary').save(id: 'abc', name: '<NAME>')
it 'can load data by ids synchronously from BaseSyncRepository', ->
class HobbyList extends BaseList
@itemModelName: 'hobby'
@className: 'hobby-list'
@properties:
annualCost: @TYPES.NUMBER
list = new HobbyList(null, @facade)
list.setIds(['1', '3'])
list.include()
assert list.length is 2
describe 'remove', ->
it 'removes an item by index', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
hobbyList.remove(1)
assert hobbyList.length is 2
assert hobbyList.ids.length is 2
assert.deepEqual hobbyList.ids, [3, 1]
describe '$remove', ->
it 'removes an item by index and create a new list', ->
class HobbyList extends BaseList
@className: 'hobby-list'
@itemModelName: 'hobby'
hobbyList = new HobbyList(items: @hobbies, @facade)
assert hobbyList.length is 3
newList = hobbyList.$remove(1)
assert newList.length is 2
assert newList.ids.length is 2
assert.deepEqual newList.ids, [3, 1]
assert hobbyList.length is 3
assert hobbyList.ids.length is 3
| true |
Facade = require '../base-domain'
{ GeneralFactory, BaseList, Entity, ValueObject,
BaseSyncRepository, BaseAsyncRepository } = Facade
{ MemoryResource } = require '../others'
describe 'BaseList', ->
beforeEach ->
@facade = require('../create-facade').create()
class Hobby extends Entity
@properties:
name: @TYPES.STRING
class NonEntity extends ValueObject
@properties:
name: @TYPES.STRING
class HobbyRepository extends BaseSyncRepository
@modelName: 'hobby'
client: new MemoryResource()
class Diary extends Entity
@properties:
name: @TYPES.STRING
class DiaryRepository extends BaseAsyncRepository
@modelName: 'diary'
client: new MemoryResource()
@facade.addClass 'hobby', Hobby
@facade.addClass 'non-entity', NonEntity
@facade.addClass 'hobby-repository', HobbyRepository
@facade.addClass 'diary', Diary
@facade.addClass 'diary-repository', DiaryRepository
@hobbyRepo = @facade.createRepository('hobby')
@hobbies = (for name, i in ['keyboard', 'jogging', 'cycling']
hobby = @facade.createModel 'hobby', id: 3 - i, name: name
@hobbyRepo.save hobby
)
# Specs for BaseList — an ordered collection model that wraps item models.
# Relies on @facade, @hobbies and @hobbyRepo prepared in an enclosing
# beforeEach defined outside this chunk. @hobbies is saved with ids 3, 2, 1
# (descending), which the 'ids' and 'map' expectations below depend on.
describe 'constructor', ->
  it 'sorts model when sort function is defined', ->
    hobbyIds = (hobby.id for hobby in @hobbies)
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
      # ascending by id
      sort: (a, b) -> a.id - b.id
    hobbyList = new HobbyList(items: @hobbies, @facade)
    hobbyIdsSorted = (hobby.id for hobby in hobbyList.items)
    assert.deepEqual hobbyIdsSorted, [1, 2, 3]
describe 'ids', ->
  beforeEach ->
    # 'hobby' is an Entity (repository-backed); 'non-entity' is not,
    # so only the former exposes an ids array.
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    class NonEntityList extends BaseList
      @itemModelName: 'non-entity'
    @facade.addClass 'hobby-list', HobbyList
    @facade.addClass 'non-entity-list', NonEntityList
  it 'get array when the item is Entity', ->
    hobbyList = @facade.createModel('hobby-list')
    assert hobbyList.ids instanceof Array
  it 'get undefined when the item is not Entity', ->
    nonEntityList = @facade.createModel('non-entity-list')
    assert nonEntityList.ids is undefined
  it 'get array of ids when the item is Entity', ->
    hobbyList = @facade.createModel('hobby-list', @hobbies)
    # insertion order is preserved (ids were saved 3, 2, 1)
    assert.deepEqual hobbyList.ids, [3, 2, 1]
describe 'first', ->
  it 'returns first value of the items', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.first() is @hobbies[0]
describe 'last', ->
  it 'returns last value of the items', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.last() is @hobbies[2]
describe 'getByIndex', ->
  it 'returns items at the given index', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.getByIndex(0) is @hobbies[0]
    assert hobbyList.getByIndex(1) is @hobbies[1]
    assert hobbyList.getByIndex(2) is @hobbies[2]
describe 'getItem', ->
  it 'returns items at the given index', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.getItem(0) is @hobbies[0]
    assert hobbyList.getItem(1) is @hobbies[1]
    assert hobbyList.getItem(2) is @hobbies[2]
    # unlike getByIndex, getItem throws on an out-of-range index
    assert.throws(=> hobbyList.getItem(3))
describe 'toArray', ->
  it 'returns deeply-equal array to items', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert.deepEqual hobbyList.toArray(), hobbyList.items
describe 'forEach', ->
  it 'executes function for each item', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    str = ''
    hobbyList.forEach (item) ->
      str += item.name + '|'
    assert str is 'keyboard|jogging|cycling|'
describe 'map', ->
  it 'executes function for each item and returns the results', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    ids = hobbyList.map (item) -> item.id
    assert.deepEqual ids, [3, 2, 1]
describe 'filter', ->
  it 'filter items with given function', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    filtered = hobbyList.filter (item) -> item.id is 3
    assert.deepEqual filtered, [ @hobbies[0] ]
describe 'some', ->
  it 'checks if some items match the condition in function', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.some (item) -> item.id is 1
    assert not hobbyList.some (item) -> item.id is 4
describe 'every', ->
  it 'checks if every items match the condition in function', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.every (item) -> item.id?
    assert not hobbyList.every (item) -> item.id is 1
describe 'add', ->
  it 'appends models', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
      @properties:
        annualCost: @TYPES.NUMBER
    hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
    Hobby = @facade.getModel 'hobby'
    hobbyList.add new Hobby(id: 0, name: 'PI:NAME:<NAME>END_PI', @facade), new Hobby(id: 100, name: 'xyz', @facade)
    # existing items keep their position; added items go to the tail
    assert hobbyList.first().name is 'keyboard'
    assert hobbyList.last().name is 'xyz'
  it 'appends plain objects', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
      @properties:
        annualCost: @TYPES.NUMBER
    hobbyList = new HobbyList(items: @hobbies, annualCost: 2000, @facade)
    # plain objects are accepted and coerced into item models
    hobbyList.add {id: 0, name: 'PI:NAME:<NAME>END_PI'}, {id: 100, name: 'xyz'}
    assert hobbyList.length is 5
    assert hobbyList.first().name is 'keyboard'
    assert hobbyList.last().name is 'xyz'
describe 'clear', ->
  it 'clears all models', ->
    class HobbyList extends BaseList
      @className: 'hobby-list'
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.length is 3
    hobbyList.clear()
    assert hobbyList.length is 0
    assert hobbyList.ids.length is 0
    # clearing an already-empty list is a no-op
    hobbyList.clear()
    assert hobbyList.length is 0
    assert hobbyList.ids.length is 0
describe 'setIds', ->
  beforeEach ->
    @facade.createRepository('diary').save(id: 'abc', name: 'PI:NAME:<NAME>END_PI')
  it 'can load data by ids synchronously from BaseSyncRepository', ->
    class HobbyList extends BaseList
      @itemModelName: 'hobby'
      @className: 'hobby-list'
      @properties:
        annualCost: @TYPES.NUMBER
    list = new HobbyList(null, @facade)
    list.setIds(['1', '3'])
    # include() resolves the ids against the repository
    list.include()
    assert list.length is 2
describe 'remove', ->
  it 'removes an item by index', ->
    class HobbyList extends BaseList
      @className: 'hobby-list'
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.length is 3
    # remove mutates the list in place
    hobbyList.remove(1)
    assert hobbyList.length is 2
    assert hobbyList.ids.length is 2
    assert.deepEqual hobbyList.ids, [3, 1]
describe '$remove', ->
  it 'removes an item by index and create a new list', ->
    class HobbyList extends BaseList
      @className: 'hobby-list'
      @itemModelName: 'hobby'
    hobbyList = new HobbyList(items: @hobbies, @facade)
    assert hobbyList.length is 3
    # $remove is the immutable variant: the original list is untouched
    newList = hobbyList.$remove(1)
    assert newList.length is 2
    assert newList.ids.length is 2
    assert.deepEqual newList.ids, [3, 1]
    assert hobbyList.length is 3
    assert hobbyList.ids.length is 3
|
[
{
"context": "#\n# * Copyright (c) 2003-2005 Tom Wu\n# * All Rights Reserved.\n# *\n# * Permission is he",
"end": 37,
"score": 0.9997537732124329,
"start": 31,
"tag": "NAME",
"value": "Tom Wu"
},
{
"context": "ed\n\n# public\n\n# \"constants\"\n\n# Copyright (c) 2005 Tom Wu\n# All ... | deps/v8/benchmarks/crypto.coffee | lxe/io.coffee | 0 | #
# * Copyright (c) 2003-2005 Tom Wu
# * All Rights Reserved.
# *
# * Permission is hereby granted, free of charge, to any person obtaining
# * a copy of this software and associated documentation files (the
# * "Software"), to deal in the Software without restriction, including
# * without limitation the rights to use, copy, modify, merge, publish,
# * distribute, sublicense, and/or sell copies of the Software, and to
# * permit persons to whom the Software is furnished to do so, subject to
# * the following conditions:
# *
# * The above copyright notice and this permission notice shall be
# * included in all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
# * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
# * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
# *
# * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
# * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
# * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
# * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
# * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# *
# * In addition, the following condition applies:
# *
# * All redistributions must retain an intact copy of this copyright notice
# * and disclaimer.
#
# The code has been adapted for use as a benchmark by Google.
# Basic JavaScript BN library - subset useful for RSA encryption.
# Bits per digit
# JavaScript engine analysis
# (public) Constructor
# Accepts: (number bitLength, number certainty, rng) to generate a probable
# prime; (number bitLength, rng) for a random number; (string, radix);
# or a byte array when b is absent and a is not a string.
# NOTE(review): digits live in @array (not numeric own properties) —
# this port indexes through an explicit backing array everywhere.
BigInteger = (a, b, c) ->
  @array = new Array()
  if a?
    if "number" is typeof a
      @fromNumber a, b, c
    else if not b? and "string" isnt typeof a
      @fromString a, 256
    else
      @fromString a, b
  return
# return new, unset BigInteger
nbi = ->
  new BigInteger(null)
# am: Compute w_j += (x*this_i), propagate carries,
# c is initial carry, returns final carry.
# c < 3*dvalue, x < 2*dvalue, this_i < dvalue
# We need to select the fastest one that works in this environment.
# am1: use a single mult and divide to get the high bits,
# max digit bits should be 26 because
# max internal value = 2*dvalue^2-2*dvalue (< 2^53)
am1 = (i, x, w, j, c, n) ->
  this_array = @array
  w_array = w.array
  while --n >= 0
    v = x * this_array[i++] + w_array[j] + c
    c = Math.floor(v / 0x4000000)
    w_array[j++] = v & 0x3ffffff
  c
# am2 avoids a big mult-and-extract completely.
# Max digit bits should be <= 30 because we do bitwise ops
# on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
am2 = (i, x, w, j, c, n) ->
  this_array = @array
  w_array = w.array
  # split x into 15-bit halves so partial products fit in 32-bit ints
  xl = x & 0x7fff
  xh = x >> 15
  while --n >= 0
    l = this_array[i] & 0x7fff
    h = this_array[i++] >> 15
    m = xh * l + h * xl
    l = xl * l + ((m & 0x7fff) << 15) + w_array[j] + (c & 0x3fffffff)
    c = (l >>> 30) + (m >>> 15) + xh * h + (c >>> 30)
    w_array[j++] = l & 0x3fffffff
  c
# Alternately, set max digit bits to 28 since some
# browsers slow down when dealing with 32-bit numbers.
am3 = (i, x, w, j, c, n) ->
  this_array = @array
  w_array = w.array
  # 14-bit halves for a 28-bit digit size
  xl = x & 0x3fff
  xh = x >> 14
  while --n >= 0
    l = this_array[i] & 0x3fff
    h = this_array[i++] >> 14
    m = xh * l + h * xl
    l = xl * l + ((m & 0x3fff) << 14) + w_array[j] + c
    c = (l >> 28) + (m >> 14) + xh * h
    w_array[j++] = l & 0xfffffff
  c
# This is tailored to VMs with 2-bit tagging. It makes sure
# that all the computations stay within the 29 bits available.
am4 = (i, x, w, j, c, n) ->
  this_array = @array
  w_array = w.array
  # 13-bit halves for a 26-bit digit size
  xl = x & 0x1fff
  xh = x >> 13
  while --n >= 0
    l = this_array[i] & 0x1fff
    h = this_array[i++] >> 13
    m = xh * l + h * xl
    l = xl * l + ((m & 0x1fff) << 13) + w_array[j] + c
    c = (l >> 26) + (m >> 13) + xh * h
    w_array[j++] = l & 0x3ffffff
  c
# am3/28 is best for SM, Rhino, but am4/26 is best for v8.
# Kestrel (Opera 9.5) gets its best result with am4/26.
# IE7 does 9% better with am3/28 than with am4/26.
# Firefox (SM) gets 10% faster with am3/28 than with am4/26.
# Digit conversions
# int2char: digit value -> radix character (BI_RM defined elsewhere)
int2char = (n) ->
  BI_RM.charAt n
# intAt: radix character at s[i] -> digit value, or -1 if not a digit
intAt = (s, i) ->
  c = BI_RC[s.charCodeAt(i)]
  (if (not (c?)) then -1 else c)
# (protected) copy this to r
bnpCopyTo = (r) ->
  this_array = @array
  r_array = r.array
  i = @t - 1
  while i >= 0
    r_array[i] = this_array[i]
    --i
  r.t = @t
  r.s = @s
  return
# (protected) set from integer value x, -DV <= x < DV
bnpFromInt = (x) ->
  this_array = @array
  @t = 1
  # @s is the sign word: 0 for non-negative, -1 for negative
  @s = (if (x < 0) then -1 else 0)
  if x > 0
    this_array[0] = x
  else if x < -1
    this_array[0] = x + DV
  else
    @t = 0
  return
# return bigint initialized to value
nbv = (i) ->
  r = nbi()
  r.fromInt i
  r
# (protected) set from string and radix
# Power-of-two radices are packed directly; anything else goes via fromRadix.
bnpFromString = (s, b) ->
  this_array = @array
  k = undefined
  if b is 16
    k = 4
  else if b is 8
    k = 3
  else if b is 256 # byte array
    k = 8
  else if b is 2
    k = 1
  else if b is 32
    k = 5
  else if b is 4
    k = 2
  else
    @fromRadix s, b
    return
  @t = 0
  @s = 0
  i = s.length
  mi = false
  sh = 0
  # scan from least-significant end, packing k bits per character
  while --i >= 0
    x = (if (k is 8) then s[i] & 0xff else intAt(s, i))
    if x < 0
      mi = true if s.charAt(i) is "-"
      continue
    mi = false
    if sh is 0
      this_array[@t++] = x
    else if sh + k > BI_DB
      # the value straddles a digit boundary
      this_array[@t - 1] |= (x & ((1 << (BI_DB - sh)) - 1)) << sh
      this_array[@t++] = (x >> (BI_DB - sh))
    else
      this_array[@t - 1] |= x << sh
    sh += k
    sh -= BI_DB if sh >= BI_DB
  if k is 8 and (s[0] & 0x80) isnt 0
    # byte arrays are interpreted as two's-complement: high bit means negative
    @s = -1
    this_array[@t - 1] |= ((1 << (BI_DB - sh)) - 1) << sh if sh > 0
  @clamp()
  BigInteger.ZERO.subTo this, this if mi
  return
# (protected) clamp off excess high words
bnpClamp = ->
  this_array = @array
  c = @s & BI_DM
  --@t while @t > 0 and this_array[@t - 1] is c
  return
# (public) return string representation in given radix
# Power-of-two radices are unpacked directly; anything else goes via toRadix.
bnToString = (b) ->
  this_array = @array
  return "-" + @negate().toString(b) if @s < 0
  k = undefined
  if b is 16
    k = 4
  else if b is 8
    k = 3
  else if b is 2
    k = 1
  else if b is 32
    k = 5
  else if b is 4
    k = 2
  else
    return @toRadix(b)
  km = (1 << k) - 1
  d = undefined
  m = false # becomes true once a nonzero digit is emitted (skip leading zeros)
  r = ""
  i = @t
  p = BI_DB - (i * BI_DB) % k
  if i-- > 0
    if p < BI_DB and (d = this_array[i] >> p) > 0
      m = true
      r = int2char(d)
    while i >= 0
      if p < k
        # output character straddles a digit boundary
        d = (this_array[i] & ((1 << p) - 1)) << (k - p)
        d |= this_array[--i] >> (p += BI_DB - k)
      else
        d = (this_array[i] >> (p -= k)) & km
        if p <= 0
          p += BI_DB
          --i
      m = true if d > 0
      r += int2char(d) if m
  (if m then r else "0")
# (public) -this
bnNegate = ->
  r = nbi()
  BigInteger.ZERO.subTo this, r
  r
# (public) |this|
bnAbs = ->
  (if (@s < 0) then @negate() else this)
# (public) return + if this > a, - if this < a, 0 if equal
bnCompareTo = (a) ->
  this_array = @array
  a_array = a.array
  r = @s - a.s
  return r unless r is 0
  i = @t
  r = i - a.t
  return r unless r is 0
  # equal sign and length: compare digits from most significant down
  return r unless (r = this_array[i] - a_array[i]) is 0 while --i >= 0
  0
# returns bit length of the integer x
# Binary-search style: halve the remaining width at each step, accumulating
# the shift amounts of the non-empty halves.
nbits = (x) ->
  r = 1
  for step in [16, 8, 4, 2, 1]
    t = x >>> step
    unless t is 0
      x = t
      r += step
  r
# (public) return the number of bits in "this"
bnBitLength = ->
  this_array = @array
  return 0 if @t <= 0
  # XOR with the sign mask so negative values measure their magnitude bits
  BI_DB * (@t - 1) + nbits(this_array[@t - 1] ^ (@s & BI_DM))
# (protected) r = this << n*DB  (whole-digit left shift)
bnpDLShiftTo = (n, r) ->
  this_array = @array
  r_array = r.array
  i = undefined
  i = @t - 1
  while i >= 0
    r_array[i + n] = this_array[i]
    --i
  i = n - 1
  while i >= 0
    r_array[i] = 0
    --i
  r.t = @t + n
  r.s = @s
  return
# (protected) r = this >> n*DB  (whole-digit right shift)
bnpDRShiftTo = (n, r) ->
  this_array = @array
  r_array = r.array
  i = n
  while i < @t
    r_array[i - n] = this_array[i]
    ++i
  r.t = Math.max(@t - n, 0)
  r.s = @s
  return
# (protected) r = this << n  (arbitrary bit shift)
bnpLShiftTo = (n, r) ->
  this_array = @array
  r_array = r.array
  bs = n % BI_DB
  cbs = BI_DB - bs
  bm = (1 << cbs) - 1
  ds = Math.floor(n / BI_DB)
  c = (@s << bs) & BI_DM
  i = undefined
  i = @t - 1
  while i >= 0
    # carry the bits shifted out of each digit into the next one up
    r_array[i + ds + 1] = (this_array[i] >> cbs) | c
    c = (this_array[i] & bm) << bs
    --i
  i = ds - 1
  while i >= 0
    r_array[i] = 0
    --i
  r_array[ds] = c
  r.t = @t + ds + 1
  r.s = @s
  r.clamp()
  return
# (protected) r = this >> n  (arbitrary bit shift)
bnpRShiftTo = (n, r) ->
  this_array = @array
  r_array = r.array
  r.s = @s
  ds = Math.floor(n / BI_DB)
  if ds >= @t
    # shifting out everything: result is 0 (or -1, encoded by r.s)
    r.t = 0
    return
  bs = n % BI_DB
  cbs = BI_DB - bs
  bm = (1 << bs) - 1
  r_array[0] = this_array[ds] >> bs
  i = ds + 1
  while i < @t
    r_array[i - ds - 1] |= (this_array[i] & bm) << cbs
    r_array[i - ds] = this_array[i] >> bs
    ++i
  # fill the vacated top bits from the sign word
  r_array[@t - ds - 1] |= (@s & bm) << cbs if bs > 0
  r.t = @t - ds
  r.clamp()
  return
# (protected) r = this - a
bnpSubTo = (a, r) ->
  this_array = @array
  r_array = r.array
  a_array = a.array
  i = 0
  c = 0 # borrow/carry accumulator
  m = Math.min(a.t, @t)
  while i < m
    c += this_array[i] - a_array[i]
    r_array[i++] = c & BI_DM
    c >>= BI_DB
  if a.t < @t
    c -= a.s
    while i < @t
      c += this_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c += @s
  else
    c += @s
    while i < a.t
      c -= a_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c -= a.s
  r.s = (if (c < 0) then -1 else 0)
  if c < -1
    r_array[i++] = BI_DV + c
  else r_array[i++] = c if c > 0
  r.t = i
  r.clamp()
  return
# (protected) r = this * a, r != this,a (HAC 14.12)
# "this" should be the larger one if appropriate.
bnpMultiplyTo = (a, r) ->
  this_array = @array
  r_array = r.array
  x = @abs()
  y = a.abs()
  y_array = y.array
  i = x.t
  r.t = i + y.t
  r_array[i] = 0 while --i >= 0
  i = 0
  while i < y.t
    # schoolbook multiply: one am() pass per digit of y
    r_array[i + x.t] = x.am(0, y_array[i], r, i, 0, x.t)
    ++i
  r.s = 0
  r.clamp()
  # fix the sign when exactly one operand was negative
  BigInteger.ZERO.subTo r, r unless @s is a.s
  return
# (protected) r = this^2, r != this (HAC 14.16)
bnpSquareTo = (r) ->
  x = @abs()
  x_array = x.array
  r_array = r.array
  i = r.t = 2 * x.t
  r_array[i] = 0 while --i >= 0
  i = 0
  while i < x.t - 1
    c = x.am(i, x_array[i], r, 2 * i, 0, 1)
    # cross terms appear twice, hence the 2*x_array[i] factor
    if (r_array[i + x.t] += x.am(i + 1, 2 * x_array[i], r, 2 * i + 1, c, x.t - i - 1)) >= BI_DV
      r_array[i + x.t] -= BI_DV
      r_array[i + x.t + 1] = 1
    ++i
  r_array[r.t - 1] += x.am(i, x_array[i], r, 2 * i, 0, 1) if r.t > 0
  r.s = 0
  r.clamp()
  return
# (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
# r != q, this != m. q or r may be null.
bnpDivRemTo = (m, q, r) ->
  pm = m.abs()
  return if pm.t <= 0 # division by zero: leave outputs untouched
  pt = @abs()
  if pt.t < pm.t
    # |this| < |m|: quotient 0, remainder is this
    q.fromInt 0 if q?
    @copyTo r if r?
    return
  r = nbi() unless r?
  y = nbi()
  ts = @s
  ms = m.s
  pm_array = pm.array
  nsh = BI_DB - nbits(pm_array[pm.t - 1]) # normalize modulus
  if nsh > 0
    pm.lShiftTo nsh, y
    pt.lShiftTo nsh, r
  else
    pm.copyTo y
    pt.copyTo r
  ys = y.t
  y_array = y.array
  y0 = y_array[ys - 1]
  return if y0 is 0
  yt = y0 * (1 << BI_F1) + ((if (ys > 1) then y_array[ys - 2] >> BI_F2 else 0))
  d1 = BI_FV / yt
  d2 = (1 << BI_F1) / yt
  e = 1 << BI_F2
  i = r.t
  j = i - ys
  t = (if (not (q?)) then nbi() else q)
  y.dlShiftTo j, t
  r_array = r.array
  if r.compareTo(t) >= 0
    r_array[r.t++] = 1
    r.subTo t, r
  BigInteger.ONE.dlShiftTo ys, t
  t.subTo y, y # "negative" y so we can replace sub with am later
  y_array[y.t++] = 0 while y.t < ys
  while --j >= 0
    # Estimate quotient digit
    qd = (if (r_array[--i] is y0) then BI_DM else Math.floor(r_array[i] * d1 + (r_array[i - 1] + e) * d2))
    if (r_array[i] += y.am(0, qd, r, j, 0, ys)) < qd # Try it out
      y.dlShiftTo j, t
      r.subTo t, r
      r.subTo t, r while r_array[i] < --qd
  if q?
    r.drShiftTo ys, q
    BigInteger.ZERO.subTo q, q unless ts is ms
  r.t = ys
  r.clamp()
  r.rShiftTo nsh, r if nsh > 0 # Denormalize remainder
  BigInteger.ZERO.subTo r, r if ts < 0
  return
# (public) this mod a — result is always non-negative
bnMod = (a) ->
  r = nbi()
  @abs().divRemTo a, null, r
  a.subTo r, r if @s < 0 and r.compareTo(BigInteger.ZERO) > 0
  r
# Modular reduction using "classic" algorithm
# (trial division on every reduce; used for small exponents)
Classic = (m) ->
  @m = m
  return
cConvert = (x) ->
  if x.s < 0 or x.compareTo(@m) >= 0
    x.mod @m
  else
    x
cRevert = (x) ->
  x
cReduce = (x) ->
  x.divRemTo @m, null, x
  return
cMulTo = (x, y, r) ->
  x.multiplyTo y, r
  @reduce r
  return
cSqrTo = (x, r) ->
  x.squareTo r
  @reduce r
  return
# (protected) return "-1/this % 2^DB"; useful for Mont. reduction
# justification:
# xy == 1 (mod m)
# xy = 1+km
# xy(2-xy) = (1+km)(1-km)
# x[y(2-xy)] = 1-k^2m^2
# x[y(2-xy)] == 1 (mod m^2)
# if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
# should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
# JS multiply "overflows" differently from C/C++, so care is needed here.
bnpInvDigit = ->
  this_array = @array
  return 0 if @t < 1
  x = this_array[0]
  return 0 if (x & 1) is 0 # no inverse exists for even x
  y = x & 3 # y == 1/x mod 2^2
  y = (y * (2 - (x & 0xf) * y)) & 0xf # y == 1/x mod 2^4
  y = (y * (2 - (x & 0xff) * y)) & 0xff # y == 1/x mod 2^8
  y = (y * (2 - (((x & 0xffff) * y) & 0xffff))) & 0xffff # y == 1/x mod 2^16
  # last step - calculate inverse mod DV directly;
  # assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
  y = (y * (2 - x * y % BI_DV)) % BI_DV # y == 1/x mod 2^dbits
  # we really want the negative inverse, and -DV < y < DV
  (if (y > 0) then BI_DV - y else -y)
# Montgomery reduction
Montgomery = (m) ->
  @m = m
  @mp = m.invDigit()
  # split mp into 15-bit halves for overflow-safe multiplies in reduce()
  @mpl = @mp & 0x7fff
  @mph = @mp >> 15
  @um = (1 << (BI_DB - 15)) - 1
  @mt2 = 2 * m.t
  return
# xR mod m
montConvert = (x) ->
  r = nbi()
  x.abs().dlShiftTo @m.t, r
  r.divRemTo @m, null, r
  @m.subTo r, r if x.s < 0 and r.compareTo(BigInteger.ZERO) > 0
  r
# x/R mod m
montRevert = (x) ->
  r = nbi()
  x.copyTo r
  @reduce r
  r
# x = x/R mod m (HAC 14.32)
montReduce = (x) ->
  x_array = x.array
  # pad x so am has enough room later
  x_array[x.t++] = 0 while x.t <= @mt2
  i = 0
  while i < @m.t
    # faster way of calculating u0 = x[i]*mp mod DV
    j = x_array[i] & 0x7fff
    u0 = (j * @mpl + (((j * @mph + (x_array[i] >> 15) * @mpl) & @um) << 15)) & BI_DM
    # use am to combine the multiply-shift-add into one call
    j = i + @m.t
    x_array[j] += @m.am(0, u0, x, i, 0, @m.t)
    # propagate carry
    while x_array[j] >= BI_DV
      x_array[j] -= BI_DV
      x_array[++j]++
    ++i
  x.clamp()
  x.drShiftTo @m.t, x
  x.subTo @m, x if x.compareTo(@m) >= 0
  return
# r = "x^2/R mod m"; x != r
montSqrTo = (x, r) ->
  x.squareTo r
  @reduce r
  return
# r = "xy/R mod m"; x,y != r
montMulTo = (x, y, r) ->
  x.multiplyTo y, r
  @reduce r
  return
# (protected) true iff this is even
bnpIsEven = ->
  this_array = @array
  ((if (@t > 0) then (this_array[0] & 1) else @s)) is 0
# (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
# z is a reducer object (Classic/Montgomery/...) providing
# convert/revert/sqrTo/mulTo.
bnpExp = (e, z) ->
  return BigInteger.ONE if e > 0xffffffff or e < 1
  r = nbi()
  r2 = nbi()
  g = z.convert(this)
  i = nbits(e) - 1
  g.copyTo r
  # square-and-multiply from the most significant exponent bit down
  while --i >= 0
    z.sqrTo r, r2
    if (e & (1 << i)) > 0
      z.mulTo r2, g, r
    else
      # swap r/r2 instead of copying
      t = r
      r = r2
      r2 = t
  z.revert r
# (public) this^e % m, 0 <= e < 2^32
bnModPowInt = (e, m) ->
  z = undefined
  if e < 256 or m.isEven()
    z = new Classic(m)
  else
    z = new Montgomery(m)
  @exp e, z
# protected
# public
# "constants"
# Copyright (c) 2005 Tom Wu
# All Rights Reserved.
# See "LICENSE" for details.
# Extended JavaScript BN functions, required for RSA private ops.
# Extended JavaScript BN functions, required for RSA private ops.
# (public)
bnClone = ->
r = nbi()
@copyTo r
r
# (public) return value as integer
bnIntValue = ->
this_array = @array
if @s < 0
if @t is 1
return this_array[0] - BI_DV
else return -1 if @t is 0
else if @t is 1
return this_array[0]
else return 0 if @t is 0
# assumes 16 < DB < 32
((this_array[1] & ((1 << (32 - BI_DB)) - 1)) << BI_DB) | this_array[0]
# (public) return value as byte
bnByteValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 24) >> 24)
# (public) return value as short (assumes DB>=16)
bnShortValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 16) >> 16)
# (protected) return x s.t. r^x < DV
bnpChunkSize = (r) ->
Math.floor Math.LN2 * BI_DB / Math.log(r)
# (public) 0 if this == 0, 1 if this > 0
bnSigNum = ->
this_array = @array
if @s < 0
-1
else if @t <= 0 or (@t is 1 and this_array[0] <= 0)
0
else
1
# (protected) convert to radix string
bnpToRadix = (b) ->
b = 10 unless b?
return "0" if @signum() is 0 or b < 2 or b > 36
cs = @chunkSize(b)
a = Math.pow(b, cs)
d = nbv(a)
y = nbi()
z = nbi()
r = ""
@divRemTo d, y, z
while y.signum() > 0
r = (a + z.intValue()).toString(b).substr(1) + r
y.divRemTo d, y, z
z.intValue().toString(b) + r
# (protected) convert from radix string
# Accumulates chunkSize(b) characters into w, then folds each full chunk in
# with dMultiply/dAddOffset. A leading '-' (only while the value is still 0)
# marks the result negative.
bnpFromRadix = (s, b) ->
  @fromInt 0
  b = 10 unless b?
  cs = @chunkSize(b)
  d = Math.pow(b, cs)
  mi = false
  j = 0
  w = 0
  i = 0
  while i < s.length
    x = intAt(s, i)
    if x < 0
      mi = true if s.charAt(i) is "-" and @signum() is 0
      # BUG FIX: the original `while` port placed ++i after `continue`,
      # so any '-' or non-digit character caused an infinite loop
      # (the JS source was a `for` loop whose increment always runs).
      ++i
      continue
    w = b * w + x
    if ++j >= cs
      @dMultiply d
      @dAddOffset w, 0
      j = 0
      w = 0
    ++i
  if j > 0
    # flush the final, partial chunk
    @dMultiply Math.pow(b, j)
    @dAddOffset w, 0
  BigInteger.ZERO.subTo this, this if mi
  return
# (protected) alternate constructor
# (a=bitLength, b=certainty, c=rng) -> random probable prime;
# (a=bitLength, b=rng) -> random number of at most a bits.
bnpFromNumber = (a, b, c) ->
  if "number" is typeof b
    # new BigInteger(int,int,RNG)
    if a < 2
      @fromInt 1
    else
      @fromNumber a, c
      # force MSB set
      @bitwiseTo BigInteger.ONE.shiftLeft(a - 1), op_or, this unless @testBit(a - 1)
      @dAddOffset 1, 0 if @isEven() # force odd
      until @isProbablePrime(b)
        @dAddOffset 2, 0
        # keep the candidate within a bits while stepping upward
        @subTo BigInteger.ONE.shiftLeft(a - 1), this if @bitLength() > a
  else
    # new BigInteger(int,RNG)
    x = new Array()
    t = a & 7
    x.length = (a >> 3) + 1
    b.nextBytes x
    if t > 0
      # mask the top byte down to the requested bit count
      x[0] &= ((1 << t) - 1)
    else
      x[0] = 0
    @fromString x, 256
  return
# (public) convert to bigendian byte array (two's complement)
bnToByteArray = ->
  this_array = @array
  i = @t
  r = new Array()
  r[0] = @s
  p = BI_DB - (i * BI_DB) % 8
  d = undefined
  k = 0
  if i-- > 0
    r[k++] = d | (@s << (BI_DB - p)) if p < BI_DB and (d = this_array[i] >> p) isnt (@s & BI_DM) >> p
    while i >= 0
      if p < 8
        # output byte straddles a digit boundary
        d = (this_array[i] & ((1 << p) - 1)) << (8 - p)
        d |= this_array[--i] >> (p += BI_DB - 8)
      else
        d = (this_array[i] >> (p -= 8)) & 0xff
        if p <= 0
          p += BI_DB
          --i
      d |= -256 unless (d & 0x80) is 0
      # emit a sign byte only when needed; suppress redundant leading bytes
      ++k if k is 0 and (@s & 0x80) isnt (d & 0x80)
      r[k++] = d if k > 0 or d isnt @s
  r
bnEquals = (a) ->
  @compareTo(a) is 0
bnMin = (a) ->
  (if (@compareTo(a) < 0) then this else a)
bnMax = (a) ->
  (if (@compareTo(a) > 0) then this else a)
# (protected) r = this op a (bitwise)
# op is one of op_and/op_or/op_xor/op_andnot; the shorter operand is
# sign-extended via its sign mask.
bnpBitwiseTo = (a, op, r) ->
  this_array = @array
  a_array = a.array
  r_array = r.array
  i = undefined
  f = undefined
  m = Math.min(a.t, @t)
  i = 0
  while i < m
    r_array[i] = op(this_array[i], a_array[i])
    ++i
  if a.t < @t
    f = a.s & BI_DM
    i = m
    while i < @t
      r_array[i] = op(this_array[i], f)
      ++i
    r.t = @t
  else
    f = @s & BI_DM
    i = m
    while i < a.t
      r_array[i] = op(f, a_array[i])
      ++i
    r.t = a.t
  r.s = op(@s, a.s)
  r.clamp()
  return
# (public) this & a
op_and = (x, y) ->
  x & y
bnAnd = (a) ->
  r = nbi()
  @bitwiseTo a, op_and, r
  r
# (public) this | a
op_or = (x, y) ->
  x | y
bnOr = (a) ->
  r = nbi()
  @bitwiseTo a, op_or, r
  r
# (public) this ^ a
op_xor = (x, y) ->
  x ^ y
bnXor = (a) ->
  r = nbi()
  @bitwiseTo a, op_xor, r
  r
# (public) this & ~a
op_andnot = (x, y) ->
  x & ~y
bnAndNot = (a) ->
  r = nbi()
  @bitwiseTo a, op_andnot, r
  r
# (public) ~this
bnNot = ->
  this_array = @array
  r = nbi()
  r_array = r.array
  i = 0
  while i < @t
    r_array[i] = BI_DM & ~this_array[i]
    ++i
  r.t = @t
  r.s = ~@s
  r
# (public) this << n  (negative n shifts right)
bnShiftLeft = (n) ->
  r = nbi()
  if n < 0
    @rShiftTo -n, r
  else
    @lShiftTo n, r
  r
# (public) this >> n  (negative n shifts left)
bnShiftRight = (n) ->
  r = nbi()
  if n < 0
    @lShiftTo -n, r
  else
    @rShiftTo n, r
  r
# return index of lowest 1-bit in x, x < 2^31
# Strip trailing zeros in power-of-two sized steps, summing the shifts.
lbit = (x) ->
  return -1 if x is 0
  idx = 0
  for [mask, step] in [[0xffff, 16], [0xff, 8], [0xf, 4], [3, 2]]
    if (x & mask) is 0
      x >>= step
      idx += step
  idx++ if (x & 1) is 0
  idx
# (public) returns index of lowest 1-bit (or -1 if none)
bnGetLowestSetBit = ->
  this_array = @array
  i = 0
  while i < @t
    # first nonzero digit contains the lowest set bit
    return i * BI_DB + lbit(this_array[i]) unless this_array[i] is 0
    ++i
  # negative values have an implicit infinite run of 1s above the digits
  return @t * BI_DB if @s < 0
  -1
# return number of 1 bits in x
# Kernighan's method: each x &= x-1 clears exactly one set bit.
cbit = (x) ->
  count = 0
  while x isnt 0
    x &= x - 1
    count++
  count
# (public) return number of set bits
# (XOR with the sign mask counts magnitude bits for negative values too.)
bnBitCount = ->
  # BUG FIX: this_array was referenced without ever being bound — a defect
  # inherited from the v8 benchmark's crypto.js, where every other method
  # starts with this binding. Without it, calling bitCount on a nonzero
  # value throws a ReferenceError.
  this_array = @array
  r = 0
  x = @s & BI_DM
  i = 0
  while i < @t
    r += cbit(this_array[i] ^ x)
    ++i
  r
# (public) true iff nth bit is set
bnTestBit = (n) ->
  this_array = @array
  j = Math.floor(n / BI_DB)
  # beyond the stored digits, bits follow the sign extension
  return (@s isnt 0) if j >= @t
  (this_array[j] & (1 << (n % BI_DB))) isnt 0
# (protected) this op (1<<n)
bnpChangeBit = (n, op) ->
  r = BigInteger.ONE.shiftLeft(n)
  @bitwiseTo r, op, r
  r
# (public) this | (1<<n)
bnSetBit = (n) ->
  @changeBit n, op_or
# (public) this & ~(1<<n)
bnClearBit = (n) ->
  @changeBit n, op_andnot
# (public) this ^ (1<<n)
bnFlipBit = (n) ->
  @changeBit n, op_xor
# (protected) r = this + a
bnpAddTo = (a, r) ->
  this_array = @array
  a_array = a.array
  r_array = r.array
  i = 0
  c = 0 # carry accumulator
  m = Math.min(a.t, @t)
  while i < m
    c += this_array[i] + a_array[i]
    r_array[i++] = c & BI_DM
    c >>= BI_DB
  if a.t < @t
    c += a.s
    while i < @t
      c += this_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c += @s
  else
    c += @s
    while i < a.t
      c += a_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c += a.s
  r.s = (if (c < 0) then -1 else 0)
  if c > 0
    r_array[i++] = c
  else r_array[i++] = BI_DV + c if c < -1
  r.t = i
  r.clamp()
  return
# (public) this + a
bnAdd = (a) ->
  r = nbi()
  @addTo a, r
  r
# (public) this - a
bnSubtract = (a) ->
  r = nbi()
  @subTo a, r
  r
# (public) this * a
bnMultiply = (a) ->
  r = nbi()
  @multiplyTo a, r
  r
# (public) this / a
bnDivide = (a) ->
  r = nbi()
  @divRemTo a, r, null
  r
# (public) this % a
bnRemainder = (a) ->
  r = nbi()
  @divRemTo a, null, r
  r
# (public) [this/a,this%a]
bnDivideAndRemainder = (a) ->
  q = nbi()
  r = nbi()
  @divRemTo a, q, r
  new Array(q, r)
# (protected) this *= n, this >= 0, 1 < n < DV
bnpDMultiply = (n) ->
  this_array = @array
  # am(0, n-1, this, 0, 0, t) computes this*(n-1)+this == this*n in place
  this_array[@t] = @am(0, n - 1, this, 0, 0, @t)
  ++@t
  @clamp()
  return
# (protected) this += n << w words, this >= 0
bnpDAddOffset = (n, w) ->
  this_array = @array
  this_array[@t++] = 0 while @t <= w
  this_array[w] += n
  # ripple the carry upward
  while this_array[w] >= BI_DV
    this_array[w] -= BI_DV
    this_array[@t++] = 0 if ++w >= @t
    ++this_array[w]
  return
# A "null" reducer — plain (unreduced) arithmetic, used by pow()
NullExp = ->
nNop = (x) ->
  x
nMulTo = (x, y, r) ->
  x.multiplyTo y, r
  return
nSqrTo = (x, r) ->
  x.squareTo r
  return
# (public) this^e
bnPow = (e) ->
  @exp e, new NullExp()
# (protected) r = lower n words of "this * a", a.t <= n
# "this" should be the larger one if appropriate.
bnpMultiplyLowerTo = (a, n, r) ->
  r_array = r.array
  a_array = a.array
  i = Math.min(@t + a.t, n)
  r.s = 0 # assumes a,this >= 0
  r.t = i
  r_array[--i] = 0 while i > 0
  j = undefined
  j = r.t - @t
  while i < j
    r_array[i + @t] = @am(0, a_array[i], r, i, 0, @t)
    ++i
  j = Math.min(a.t, n)
  while i < j
    # partial products whose high words fall beyond n are truncated
    @am 0, a_array[i], r, i, 0, n - i
    ++i
  r.clamp()
  return
# (protected) r = "this * a" without lower n words, n > 0
# "this" should be the larger one if appropriate.
bnpMultiplyUpperTo = (a, n, r) ->
  r_array = r.array
  a_array = a.array
  --n
  i = r.t = @t + a.t - n
  r.s = 0 # assumes a,this >= 0
  r_array[i] = 0 while --i >= 0
  i = Math.max(n - @t, 0)
  while i < a.t
    r_array[@t + i - n] = @am(n - i, a_array[i], r, 0, 0, @t + i - n)
    ++i
  r.clamp()
  r.drShiftTo 1, r
  return
# Barrett modular reduction
Barrett = (m) ->
  # setup Barrett
  @r2 = nbi()
  @q3 = nbi()
  BigInteger.ONE.dlShiftTo 2 * m.t, @r2
  # mu = floor(R^2 / m), precomputed once
  @mu = @r2.divide(m)
  @m = m
  return
barrettConvert = (x) ->
  if x.s < 0 or x.t > 2 * @m.t
    x.mod @m
  else if x.compareTo(@m) < 0
    x
  else
    r = nbi()
    x.copyTo r
    @reduce r
    r
barrettRevert = (x) ->
  x
# x = x mod m (HAC 14.42)
barrettReduce = (x) ->
  x.drShiftTo @m.t - 1, @r2
  if x.t > @m.t + 1
    x.t = @m.t + 1
    x.clamp()
  @mu.multiplyUpperTo @r2, @m.t + 1, @q3
  @m.multiplyLowerTo @q3, @m.t + 1, @r2
  x.dAddOffset 1, @m.t + 1 while x.compareTo(@r2) < 0
  x.subTo @r2, x
  # at most two correction subtractions are ever needed
  x.subTo @m, x while x.compareTo(@m) >= 0
  return
# r = x^2 mod m; x != r
barrettSqrTo = (x, r) ->
  x.squareTo r
  @reduce r
  return
# r = x*y mod m; x,y != r
barrettMulTo = (x, y, r) ->
  x.multiplyTo y, r
  @reduce r
  return
# (public) this^e % m (HAC 14.85)
# Sliding-window exponentiation; window width k grows with the exponent
# size, and the reducer is chosen by modulus parity/size.
bnModPow = (e, m) ->
  e_array = e.array
  i = e.bitLength()
  k = undefined
  r = nbv(1)
  z = undefined
  if i <= 0
    return r
  else if i < 18
    k = 1
  else if i < 48
    k = 3
  else if i < 144
    k = 4
  else if i < 768
    k = 5
  else
    k = 6
  if i < 8
    z = new Classic(m)
  else if m.isEven()
    z = new Barrett(m)
  else
    z = new Montgomery(m)
  # precomputation
  # g[odd n] = this^n in the reducer's domain
  g = new Array()
  n = 3
  k1 = k - 1
  km = (1 << k) - 1
  g[1] = z.convert(this)
  if k > 1
    g2 = nbi()
    z.sqrTo g[1], g2
    while n <= km
      g[n] = nbi()
      z.mulTo g2, g[n - 2], g[n]
      n += 2
  j = e.t - 1
  w = undefined
  is1 = true
  r2 = nbi()
  t = undefined
  i = nbits(e_array[j]) - 1
  while j >= 0
    # extract the next window of up to k bits at position (j, i)
    if i >= k1
      w = (e_array[j] >> (i - k1)) & km
    else
      w = (e_array[j] & ((1 << (i + 1)) - 1)) << (k1 - i)
      w |= e_array[j - 1] >> (BI_DB + i - k1) if j > 0
    n = k
    # shrink the window so it ends on a set bit (keeps w odd)
    while (w & 1) is 0
      w >>= 1
      --n
    if (i -= n) < 0
      i += BI_DB
      --j
    if is1 # ret == 1, don't bother squaring or multiplying it
      g[w].copyTo r
      is1 = false
    else
      while n > 1
        z.sqrTo r, r2
        z.sqrTo r2, r
        n -= 2
      if n > 0
        z.sqrTo r, r2
      else
        t = r
        r = r2
        r2 = t
      z.mulTo r2, g[w], r
    # square through any run of 0 bits between windows
    while j >= 0 and (e_array[j] & (1 << i)) is 0
      z.sqrTo r, r2
      t = r
      r = r2
      r2 = t
      if --i < 0
        i = BI_DB - 1
        --j
  z.revert r
# (public) gcd(this,a) (HAC 14.54) — binary GCD
bnGCD = (a) ->
  x = (if (@s < 0) then @negate() else @clone())
  y = (if (a.s < 0) then a.negate() else a.clone())
  if x.compareTo(y) < 0
    t = x
    x = y
    y = t
  i = x.getLowestSetBit()
  g = y.getLowestSetBit()
  return x if g < 0 # y == 0: gcd is x
  g = i if i < g # g = shared power of two
  if g > 0
    x.rShiftTo g, x
    y.rShiftTo g, y
  while x.signum() > 0
    x.rShiftTo i, x if (i = x.getLowestSetBit()) > 0
    y.rShiftTo i, y if (i = y.getLowestSetBit()) > 0
    if x.compareTo(y) >= 0
      x.subTo y, x
      x.rShiftTo 1, x
    else
      y.subTo x, y
      y.rShiftTo 1, y
  # reapply the shared factor of two
  y.lShiftTo g, y if g > 0
  y
# (protected) this % n, n < 2^26
bnpModInt = (n) ->
  this_array = @array
  return 0 if n <= 0
  d = BI_DV % n
  r = (if (@s < 0) then n - 1 else 0)
  if @t > 0
    if d is 0
      # DV divisible by n: only the lowest digit matters
      r = this_array[0] % n
    else
      # Horner evaluation of the digits modulo n
      i = @t - 1
      while i >= 0
        r = (d * r + this_array[i]) % n
        --i
  r
# (public) 1/this % m (HAC 14.61) — binary extended GCD
bnModInverse = (m) ->
  ac = m.isEven()
  return BigInteger.ZERO if (@isEven() and ac) or m.signum() is 0
  u = m.clone()
  v = @clone()
  a = nbv(1)
  b = nbv(0)
  c = nbv(0)
  d = nbv(1)
  until u.signum() is 0
    while u.isEven()
      u.rShiftTo 1, u
      if ac
        if not a.isEven() or not b.isEven()
          a.addTo this, a
          b.subTo m, b
        a.rShiftTo 1, a
      else b.subTo m, b unless b.isEven()
      b.rShiftTo 1, b
    while v.isEven()
      v.rShiftTo 1, v
      if ac
        if not c.isEven() or not d.isEven()
          c.addTo this, c
          d.subTo m, d
        c.rShiftTo 1, c
      else d.subTo m, d unless d.isEven()
      d.rShiftTo 1, d
    if u.compareTo(v) >= 0
      u.subTo v, u
      a.subTo c, a if ac
      b.subTo d, b
    else
      v.subTo u, v
      c.subTo a, c if ac
      d.subTo b, d
  # gcd != 1: no inverse exists
  return BigInteger.ZERO unless v.compareTo(BigInteger.ONE) is 0
  return d.subtract(m) if d.compareTo(m) >= 0
  if d.signum() < 0
    d.addTo m, d
  else
    return d
  if d.signum() < 0
    d.add m
  else
    d
# (public) test primality with certainty >= 1-.5^t
# Trial division against lowprimes (defined elsewhere), then Miller-Rabin.
bnIsProbablePrime = (t) ->
  i = undefined
  x = @abs()
  x_array = x.array
  if x.t is 1 and x_array[0] <= lowprimes[lowprimes.length - 1]
    # small enough to check against the table directly
    i = 0
    while i < lowprimes.length
      return true if x_array[0] is lowprimes[i]
      ++i
    return false
  return false if x.isEven()
  i = 1
  while i < lowprimes.length
    # batch several small primes into one modInt, then test each divisor
    m = lowprimes[i]
    j = i + 1
    m *= lowprimes[j++] while j < lowprimes.length and m < lplim
    m = x.modInt(m)
    return false if m % lowprimes[i++] is 0 while i < j
  x.millerRabin t
# (protected) true if probably prime (HAC 4.24, Miller-Rabin)
bnpMillerRabin = (t) ->
  n1 = @subtract(BigInteger.ONE)
  k = n1.getLowestSetBit()
  return false if k <= 0
  r = n1.shiftRight(k) # this-1 = r * 2^k with r odd
  t = (t + 1) >> 1
  t = lowprimes.length if t > lowprimes.length
  a = nbi()
  i = 0
  while i < t
    # deterministic bases: the first t small primes
    a.fromInt lowprimes[i]
    y = a.modPow(r, this)
    if y.compareTo(BigInteger.ONE) isnt 0 and y.compareTo(n1) isnt 0
      j = 1
      while j++ < k and y.compareTo(n1) isnt 0
        y = y.modPowInt(2, this)
        return false if y.compareTo(BigInteger.ONE) is 0
      return false unless y.compareTo(n1) is 0
    ++i
  true
# protected
# public
# BigInteger interfaces not implemented in jsbn:
# BigInteger(int signum, byte[] magnitude)
# double doubleValue()
# float floatValue()
# int hashCode()
# long longValue()
# static BigInteger valueOf(long val)
# prng4.js - uses Arcfour as a PRNG
Arcfour = ->
@i = 0
@j = 0
@S = new Array()
return
# Initialize arcfour context from key, an array of ints, each from [0..255]
# Standard RC4 key scheduling: seed S with the identity permutation,
# shuffle it under control of the (cyclically repeated) key bytes, then
# reset the stream counters.
ARC4init = (key) ->
  @S[idx] = idx for idx in [0...256]
  swapIdx = 0
  for idx in [0...256]
    swapIdx = (swapIdx + @S[idx] + key[idx % key.length]) & 255
    tmp = @S[idx]
    @S[idx] = @S[swapIdx]
    @S[swapIdx] = tmp
  @i = 0
  @j = 0
  return
# Produce the next RC4 keystream byte, advancing the (i, j, S) state:
# step the counters, swap S[i] and S[j], and emit S[(S[i]+S[j]) & 255].
ARC4next = ->
  @i = (@i + 1) & 255
  @j = (@j + @S[@i]) & 255
  tmp = @S[@i]
  @S[@i] = @S[@j]
  @S[@j] = tmp
  @S[(tmp + @S[@i]) & 255]
# Plug in your RNG constructor here
prng_newstate = ->
new Arcfour()
# Pool size must be a multiple of 4 and greater than 32.
# An array of bytes the size of the pool will be passed to init()
# Random number generator - requires a PRNG backend, e.g. prng4.js
# For best results, put code like
# <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
# in your main HTML document.
# Mix in a 32-bit integer into the pool
# XORs the four bytes of x (little-endian order) into consecutive pool
# slots, wrapping the write pointer at rng_psize.
rng_seed_int = (x) ->
  for shift in [0, 8, 16, 24]
    rng_pool[rng_pptr++] ^= (x >> shift) & 255
  rng_pptr -= rng_psize if rng_pptr >= rng_psize
  return
# Mix in the current time (w/milliseconds) into the pool
rng_seed_time = ->
# Use pre-computed date to avoid making the benchmark
# results dependent on the current date.
rng_seed_int 1122926989487
return
# Initialize the pool with junk if needed.
# extract some randomness from Math.random()
#rng_seed_int(window.screenX);
#rng_seed_int(window.screenY);
# Return one pseudo-random byte.  On first use, lazily builds the RC4
# state from the seed pool (mixing in the fixed "time" seed first), then
# zeroes the pool and resets its write pointer.
rng_get_byte = ->
  unless rng_state?
    rng_seed_time()
    rng_state = prng_newstate()
    rng_state.init rng_pool
    rng_pptr = 0
    while rng_pptr < rng_pool.length
      rng_pool[rng_pptr] = 0
      ++rng_pptr
    rng_pptr = 0
  #rng_pool = null;
  # TODO: allow reseeding after first request
  rng_state.next()
# Fill every slot of the byte array ba with a pseudo-random byte.
rng_get_bytes = (ba) ->
  ba[pos] = rng_get_byte() for pos in [0...ba.length]
  return
SecureRandom = ->
# Depends on jsbn.js and rng.js
# convert a (hex) string to a bignum object
parseBigInt = (str, r) ->
new BigInteger(str, r)
# Break string s into lines of at most n characters, joined by "\n".
linebrk = (s, n) ->
  pieces = []
  pos = 0
  while pos + n < s.length
    pieces.push s.substring(pos, pos + n) + "\n"
    pos += n
  pieces.join("") + s.substring(pos, s.length)
# Render a byte value (0..255) as exactly two lowercase hex digits.
byte2Hex = (b) ->
  hex = b.toString(16)
  if b < 0x10 then "0" + hex else hex
# PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
# Produces the block 00 02 <nonzero random bytes> 00 <message>, filled
# from the right: message char codes first, then the zero separator, then
# random non-zero padding, then the 02 type byte and leading 00.
# Returns null if s cannot fit (needs at least 11 bytes of overhead).
# NOTE(review): uses `alert` on failure — browser-only; confirm intended
# for a Node/benchmark context.
pkcs1pad2 = (s, n) ->
  if n < s.length + 11
    alert "Message too long for RSA"
    return null
  ba = new Array()
  i = s.length - 1
  # copy message bytes into the tail of the block
  ba[--n] = s.charCodeAt(i--) while i >= 0 and n > 0
  ba[--n] = 0
  rng = new SecureRandom()
  x = new Array()
  while n > 2 # random non-zero pad
    x[0] = 0
    rng.nextBytes x while x[0] is 0
    ba[--n] = x[0]
  ba[--n] = 2
  ba[--n] = 0
  new BigInteger(ba)
# "empty" RSA key constructor
# Fields are filled in later by setPublic / setPrivate(Ex) / generate.
RSAKey = ->
  @n = null # modulus (BigInteger)
  @e = 0 # public exponent (plain int)
  @d = null # private exponent (BigInteger)
  @p = null # prime factor p
  @q = null # prime factor q
  @dmp1 = null # d mod (p-1), CRT exponent
  @dmq1 = null # d mod (q-1), CRT exponent
  @coeff = null # q^-1 mod p, CRT coefficient
  return
# Set the public key fields N and e from hex strings
# NOTE(review): reports bad input via `alert` (browser-only) and leaves
# the key unchanged — confirm this is acceptable outside a browser.
RSASetPublic = (N, E) ->
  if N? and E? and N.length > 0 and E.length > 0
    @n = parseBigInt(N, 16)
    @e = parseInt(E, 16)
  else
    alert "Invalid RSA public key"
  return
# Perform raw public operation on "x": return x^e (mod n)
RSADoPublic = (x) ->
x.modPowInt @e, @n
# Return the PKCS#1 RSA encryption of "text" as an even-length hex string
# Pads the text to the modulus byte length, applies the raw public
# operation, and hex-encodes the result, zero-padding to an even length.
# Returns null if padding or the public operation fails.
RSAEncrypt = (text) ->
  m = pkcs1pad2(text, (@n.bitLength() + 7) >> 3)
  return null unless m?
  c = @doPublic(m)
  return null unless c?
  h = c.toString(16)
  if (h.length & 1) is 0
    h
  else
    "0" + h
# Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
#function RSAEncryptB64(text) {
# var h = this.encrypt(text);
# if(h) return hex2b64(h); else return null;
#}
# protected
# public
#RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
# Depends on rsa.js and jsbn2.js
# Undo PKCS#1 (type 2, random) padding and, if valid, return the plaintext
# d: decrypted BigInteger block; n: expected block length in bytes.
# Validates the 00 02 prefix, skips the random non-zero pad up to the 00
# separator, and returns the remaining bytes as a string.  Returns null
# on any structural mismatch.
pkcs1unpad2 = (d, n) ->
  b = d.toByteArray()
  i = 0
  ++i while i < b.length and b[i] is 0
  return null if b.length - i isnt n - 1 or b[i] isnt 2
  ++i
  # skip the non-zero random padding; fail if the 00 separator is missing
  return null if ++i >= b.length until b[i] is 0
  ret = ""
  ret += String.fromCharCode(b[i]) while ++i < b.length
  ret
# Set the private key fields N, e, and d from hex strings
RSASetPrivate = (N, E, D) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
@d = parseBigInt(D, 16)
else
alert "Invalid RSA private key"
return
# Set the private key fields N, e, d and CRT params from hex strings
RSASetPrivateEx = (N, E, D, P, Q, DP, DQ, C) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
@d = parseBigInt(D, 16)
@p = parseBigInt(P, 16)
@q = parseBigInt(Q, 16)
@dmp1 = parseBigInt(DP, 16)
@dmq1 = parseBigInt(DQ, 16)
@coeff = parseBigInt(C, 16)
else
alert "Invalid RSA private key"
return
# Generate a new random private key B bits long, using public expt E
# (E is a hex string).  Draws candidate primes p and q of ~B/2 bits until
# both are probably prime with p-1 and q-1 coprime to e, orders them so
# p > q, and retries the whole draw if phi=(p-1)(q-1) shares a factor
# with e.  Fills in n, d and all CRT parameters.
RSAGenerate = (B, E) ->
  rng = new SecureRandom()
  qs = B >> 1
  @e = parseInt(E, 16)
  ee = new BigInteger(E, 16)
  loop
    # draw p: B-qs bits, gcd(p-1, e) == 1, passes 10 Miller-Rabin rounds
    loop
      @p = new BigInteger(B - qs, 1, rng)
      break if @p.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @p.isProbablePrime(10)
    # draw q: qs bits, same conditions
    loop
      @q = new BigInteger(qs, 1, rng)
      break if @q.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @q.isProbablePrime(10)
    # keep p as the larger prime (required for the CRT coeff below)
    if @p.compareTo(@q) <= 0
      t = @p
      @p = @q
      @q = t
    p1 = @p.subtract(BigInteger.ONE)
    q1 = @q.subtract(BigInteger.ONE)
    phi = p1.multiply(q1)
    if phi.gcd(ee).compareTo(BigInteger.ONE) is 0
      @n = @p.multiply(@q)
      @d = ee.modInverse(phi)
      @dmp1 = @d.mod(p1)
      @dmq1 = @d.mod(q1)
      @coeff = @q.modInverse(@p)
      break
  return
# Perform raw private operation on "x": return x^d (mod n)
# Uses the CRT speed-up when p and q are available: computes x^d mod p
# and mod q separately, then recombines with the precomputed coefficient
# coeff = q^-1 mod p.  Falls back to a plain modPow otherwise.
RSADoPrivate = (x) ->
  return x.modPow(@d, @n) if not @p? or not @q?
  # TODO: re-calculate any missing CRT params
  xp = x.mod(@p).modPow(@dmp1, @p)
  xq = x.mod(@q).modPow(@dmq1, @q)
  # lift xp above xq so the subtraction below stays non-negative
  xp = xp.add(@p) while xp.compareTo(xq) < 0
  xp.subtract(xq).multiply(@coeff).mod(@p).multiply(@q).add xq
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is an even-length hex string and the output is a plain string.
# Applies the raw private operation, then strips the PKCS#1 type-2
# padding; returns null if either step fails.
RSADecrypt = (ctext) ->
  c = parseBigInt(ctext, 16)
  m = @doPrivate(c)
  return null unless m?
  pkcs1unpad2 m, (@n.bitLength() + 7) >> 3
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is a Base64-encoded string and the output is a plain string.
#function RSAB64Decrypt(ctext) {
# var h = b64tohex(ctext);
# if(h) return this.decrypt(h); else return null;
#}
# protected
# public
#RSAKey.prototype.b64_decrypt = RSAB64Decrypt;
# Benchmark driver: build a key from the fixed hex constants below and
# encrypt TEXT, storing the ciphertext in the file-scoped `encrypted`.
encrypt = ->
  RSA = new RSAKey()
  RSA.setPublic nValue, eValue
  RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
  encrypted = RSA.encrypt(TEXT)
  return
# Benchmark driver: decrypt the ciphertext produced by encrypt() and
# verify the round trip recovers TEXT exactly.
decrypt = ->
  RSA = new RSAKey()
  RSA.setPublic nValue, eValue
  RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
  decrypted = RSA.decrypt(encrypted)
  throw new Error("Crypto operation failed") unless decrypted is TEXT
  return
Crypto = new BenchmarkSuite("Crypto", 266181, [
new Benchmark("Encrypt", encrypt)
new Benchmark("Decrypt", decrypt)
])
dbits = undefined
BI_DB = undefined
BI_DM = undefined
BI_DV = undefined
BI_FP = undefined
BI_FV = undefined
BI_F1 = undefined
BI_F2 = undefined
canary = 0xdeadbeefcafe
j_lm = ((canary & 0xffffff) is 0xefcafe)
setupEngine = (fn, bits) ->
BigInteger::am = fn
dbits = bits
BI_DB = dbits
BI_DM = ((1 << dbits) - 1)
BI_DV = (1 << dbits)
BI_FP = 52
BI_FV = Math.pow(2, BI_FP)
BI_F1 = BI_FP - dbits
BI_F2 = 2 * dbits - BI_FP
return
BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
BI_RC = new Array()
rr = undefined
vv = undefined
rr = "0".charCodeAt(0)
vv = 0
while vv <= 9
BI_RC[rr++] = vv
++vv
rr = "a".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
rr = "A".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
Classic::convert = cConvert
Classic::revert = cRevert
Classic::reduce = cReduce
Classic::mulTo = cMulTo
Classic::sqrTo = cSqrTo
Montgomery::convert = montConvert
Montgomery::revert = montRevert
Montgomery::reduce = montReduce
Montgomery::mulTo = montMulTo
Montgomery::sqrTo = montSqrTo
BigInteger::copyTo = bnpCopyTo
BigInteger::fromInt = bnpFromInt
BigInteger::fromString = bnpFromString
BigInteger::clamp = bnpClamp
BigInteger::dlShiftTo = bnpDLShiftTo
BigInteger::drShiftTo = bnpDRShiftTo
BigInteger::lShiftTo = bnpLShiftTo
BigInteger::rShiftTo = bnpRShiftTo
BigInteger::subTo = bnpSubTo
BigInteger::multiplyTo = bnpMultiplyTo
BigInteger::squareTo = bnpSquareTo
BigInteger::divRemTo = bnpDivRemTo
BigInteger::invDigit = bnpInvDigit
BigInteger::isEven = bnpIsEven
BigInteger::exp = bnpExp
BigInteger::toString = bnToString
BigInteger::negate = bnNegate
BigInteger::abs = bnAbs
BigInteger::compareTo = bnCompareTo
BigInteger::bitLength = bnBitLength
BigInteger::mod = bnMod
BigInteger::modPowInt = bnModPowInt
BigInteger.ZERO = nbv(0)
BigInteger.ONE = nbv(1)
NullExp::convert = nNop
NullExp::revert = nNop
NullExp::mulTo = nMulTo
NullExp::sqrTo = nSqrTo
Barrett::convert = barrettConvert
Barrett::revert = barrettRevert
Barrett::reduce = barrettReduce
Barrett::mulTo = barrettMulTo
Barrett::sqrTo = barrettSqrTo
lowprimes = [
2
3
5
7
11
13
17
19
23
29
31
37
41
43
47
53
59
61
67
71
73
79
83
89
97
101
103
107
109
113
127
131
137
139
149
151
157
163
167
173
179
181
191
193
197
199
211
223
227
229
233
239
241
251
257
263
269
271
277
281
283
293
307
311
313
317
331
337
347
349
353
359
367
373
379
383
389
397
401
409
419
421
431
433
439
443
449
457
461
463
467
479
487
491
499
503
509
]
lplim = (1 << 26) / lowprimes[lowprimes.length - 1]
BigInteger::chunkSize = bnpChunkSize
BigInteger::toRadix = bnpToRadix
BigInteger::fromRadix = bnpFromRadix
BigInteger::fromNumber = bnpFromNumber
BigInteger::bitwiseTo = bnpBitwiseTo
BigInteger::changeBit = bnpChangeBit
BigInteger::addTo = bnpAddTo
BigInteger::dMultiply = bnpDMultiply
BigInteger::dAddOffset = bnpDAddOffset
BigInteger::multiplyLowerTo = bnpMultiplyLowerTo
BigInteger::multiplyUpperTo = bnpMultiplyUpperTo
BigInteger::modInt = bnpModInt
BigInteger::millerRabin = bnpMillerRabin
BigInteger::clone = bnClone
BigInteger::intValue = bnIntValue
BigInteger::byteValue = bnByteValue
BigInteger::shortValue = bnShortValue
BigInteger::signum = bnSigNum
BigInteger::toByteArray = bnToByteArray
BigInteger::equals = bnEquals
BigInteger::min = bnMin
BigInteger::max = bnMax
BigInteger::and = bnAnd
BigInteger::or = bnOr
BigInteger::xor = bnXor
BigInteger::andNot = bnAndNot
BigInteger::not = bnNot
BigInteger::shiftLeft = bnShiftLeft
BigInteger::shiftRight = bnShiftRight
BigInteger::getLowestSetBit = bnGetLowestSetBit
BigInteger::bitCount = bnBitCount
BigInteger::testBit = bnTestBit
BigInteger::setBit = bnSetBit
BigInteger::clearBit = bnClearBit
BigInteger::flipBit = bnFlipBit
BigInteger::add = bnAdd
BigInteger::subtract = bnSubtract
BigInteger::multiply = bnMultiply
BigInteger::divide = bnDivide
BigInteger::remainder = bnRemainder
BigInteger::divideAndRemainder = bnDivideAndRemainder
BigInteger::modPow = bnModPow
BigInteger::modInverse = bnModInverse
BigInteger::pow = bnPow
BigInteger::gcd = bnGCD
BigInteger::isProbablePrime = bnIsProbablePrime
Arcfour::init = ARC4init
Arcfour::next = ARC4next
rng_psize = 256
rng_state = undefined
rng_pool = undefined
rng_pptr = undefined
unless rng_pool?
rng_pool = new Array()
rng_pptr = 0
t = undefined
while rng_pptr < rng_psize
t = Math.floor(65536 * Math.random())
rng_pool[rng_pptr++] = t >>> 8
rng_pool[rng_pptr++] = t & 255
rng_pptr = 0
rng_seed_time()
SecureRandom::nextBytes = rng_get_bytes
RSAKey::doPublic = RSADoPublic
RSAKey::setPublic = RSASetPublic
RSAKey::encrypt = RSAEncrypt
RSAKey::doPrivate = RSADoPrivate
RSAKey::setPrivate = RSASetPrivate
RSAKey::setPrivateEx = RSASetPrivateEx
RSAKey::generate = RSAGenerate
RSAKey::decrypt = RSADecrypt
nValue = "a5261939975948bb7a58dffe5ff54e65f0498f9175f5a09288810b8975871e99af3b5dd94057b0fc07535f5f97444504fa35169d461d0d30cf0192e307727c065168c788771c561a9400fb49175e9e6aa4e23fe11af69e9412dd23b0cb6684c4c2429bce139e848ab26d0829073351f4acd36074eafd036a5eb83359d2a698d3"
eValue = "10001"
dValue = "8e9912f6d3645894e8d38cb58c0db81ff516cf4c7e5a14c7f1eddb1459d2cded4d8d293fc97aee6aefb861859c8b6a3d1dfe710463e1f9ddc72048c09751971c4a580aa51eb523357a3cc48d31cfad1d4a165066ed92d4748fb6571211da5cb14bc11b6e2df7c1a559e6d5ac1cd5c94703a22891464fba23d0d965086277a161"
pValue = "d090ce58a92c75233a6486cb0a9209bf3583b64f540c76f5294bb97d285eed33aec220bde14b2417951178ac152ceab6da7090905b478195498b352048f15e7d"
qValue = "cab575dc652bb66df15a0359609d51d1db184750c00c6698b90ef3465c99655103edbf0d54c56aec0ce3c4d22592338092a126a0cc49f65a4a30d222b411e58f"
dmp1Value = "1a24bca8e273df2f0e47c199bbf678604e7df7215480c77c8db39f49b000ce2cf7500038acfff5433b7d582a01f1826e6f4d42e1c57f5e1fef7b12aabc59fd25"
dmq1Value = "3d06982efbbe47339e1f6d36b1216b8a741d410b0c662f54f7118b27b9a4ec9d914337eb39841d8666f3034408cf94f5b62f11c402fc994fe15a05493150d9fd"
coeffValue = "3a3e731acd8960b7ff9eb81a7ff93bd1cfa74cbd56987db58b4594fb09c09084db1734c8143f98b602b981aaa9243ca28deb69b5b280ee8dcee0fd2625e53250"
setupEngine am3, 28
TEXT = "The quick brown fox jumped over the extremely lazy frog! " + "Now is the time for all good men to come to the party."
encrypted = undefined
#
# * Copyright (c) 2003-2005 Tom Wu
# * All Rights Reserved.
# *
# * Permission is hereby granted, free of charge, to any person obtaining
# * a copy of this software and associated documentation files (the
# * "Software"), to deal in the Software without restriction, including
# * without limitation the rights to use, copy, modify, merge, publish,
# * distribute, sublicense, and/or sell copies of the Software, and to
# * permit persons to whom the Software is furnished to do so, subject to
# * the following conditions:
# *
# * The above copyright notice and this permission notice shall be
# * included in all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
# * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
# * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
# *
# * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
# * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
# * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
# * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
# * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# *
# * In addition, the following condition applies:
# *
# * All redistributions must retain an intact copy of this copyright notice
# * and disclaimer.
#
# The code has been adapted for use as a benchmark by Google.
# Basic JavaScript BN library - subset useful for RSA encryption.
# Bits per digit
# JavaScript engine analysis
# (public) Constructor
# Three call forms, mirroring jsbn:
#   new BigInteger(number, b, c) -> random/prime constructor (fromNumber)
#   new BigInteger(value)        -> value treated as a byte array, radix 256
#   new BigInteger(string, b)    -> parse string in radix b
# Digits are kept in @array (least-significant first).
BigInteger = (a, b, c) ->
  @array = new Array()
  if a?
    if "number" is typeof a
      @fromNumber a, b, c
    else if not b? and "string" isnt typeof a
      @fromString a, 256
    else
      @fromString a, b
  return
# return new, unset BigInteger
nbi = ->
new BigInteger(null)
# am: Compute w_j += (x*this_i), propagate carries,
# c is initial carry, returns final carry.
# c < 3*dvalue, x < 2*dvalue, this_i < dvalue
# We need to select the fastest one that works in this environment.
# am1: use a single mult and divide to get the high bits,
# max digit bits should be 26 because
# max internal value = 2*dvalue^2-2*dvalue (< 2^53)
am1 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
while --n >= 0
v = x * this_array[i++] + w_array[j] + c
c = Math.floor(v / 0x4000000)
w_array[j++] = v & 0x3ffffff
c
# am2 avoids a big mult-and-extract completely.
# Max digit bits should be <= 30 because we do bitwise ops
# on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
am2 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x7fff
xh = x >> 15
while --n >= 0
l = this_array[i] & 0x7fff
h = this_array[i++] >> 15
m = xh * l + h * xl
l = xl * l + ((m & 0x7fff) << 15) + w_array[j] + (c & 0x3fffffff)
c = (l >>> 30) + (m >>> 15) + xh * h + (c >>> 30)
w_array[j++] = l & 0x3fffffff
c
# Alternately, set max digit bits to 28 since some
# browsers slow down when dealing with 32-bit numbers.
am3 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x3fff
xh = x >> 14
while --n >= 0
l = this_array[i] & 0x3fff
h = this_array[i++] >> 14
m = xh * l + h * xl
l = xl * l + ((m & 0x3fff) << 14) + w_array[j] + c
c = (l >> 28) + (m >> 14) + xh * h
w_array[j++] = l & 0xfffffff
c
# This is tailored to VMs with 2-bit tagging. It makes sure
# that all the computations stay within the 29 bits available.
am4 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x1fff
xh = x >> 13
while --n >= 0
l = this_array[i] & 0x1fff
h = this_array[i++] >> 13
m = xh * l + h * xl
l = xl * l + ((m & 0x1fff) << 13) + w_array[j] + c
c = (l >> 26) + (m >> 13) + xh * h
w_array[j++] = l & 0x3ffffff
c
# am3/28 is best for SM, Rhino, but am4/26 is best for v8.
# Kestrel (Opera 9.5) gets its best result with am4/26.
# IE7 does 9% better with am3/28 than with am4/26.
# Firefox (SM) gets 10% faster with am3/28 than with am4/26.
# Digit conversions
int2char = (n) ->
BI_RM.charAt n
intAt = (s, i) ->
c = BI_RC[s.charCodeAt(i)]
(if (not (c?)) then -1 else c)
# (protected) copy this to r
bnpCopyTo = (r) ->
this_array = @array
r_array = r.array
i = @t - 1
while i >= 0
r_array[i] = this_array[i]
--i
r.t = @t
r.s = @s
return
# (protected) set from integer value x, -DV <= x < DV
# Stores x as a single digit: sign word @s is -1 for negatives, and a
# value in [-DV, -2] is stored as its two's-complement digit x + BI_DV.
bnpFromInt = (x) ->
  this_array = @array
  @t = 1
  @s = (if (x < 0) then -1 else 0)
  if x > 0
    this_array[0] = x
  else if x < -1
    # BUGFIX: was `x + DV` — no `DV` variable exists anywhere in this
    # file (setupEngine stores the digit radix in BI_DV), so any call
    # with x < -1 threw a ReferenceError.
    this_array[0] = x + BI_DV
  else
    @t = 0
  return
# return bigint initialized to value
nbv = (i) ->
r = nbi()
r.fromInt i
r
# (protected) set from string and radix
bnpFromString = (s, b) ->
this_array = @array
k = undefined
if b is 16
k = 4
else if b is 8
k = 3
else if b is 256 # byte array
k = 8
else if b is 2
k = 1
else if b is 32
k = 5
else if b is 4
k = 2
else
@fromRadix s, b
return
@t = 0
@s = 0
i = s.length
mi = false
sh = 0
while --i >= 0
x = (if (k is 8) then s[i] & 0xff else intAt(s, i))
if x < 0
mi = true if s.charAt(i) is "-"
continue
mi = false
if sh is 0
this_array[@t++] = x
else if sh + k > BI_DB
this_array[@t - 1] |= (x & ((1 << (BI_DB - sh)) - 1)) << sh
this_array[@t++] = (x >> (BI_DB - sh))
else
this_array[@t - 1] |= x << sh
sh += k
sh -= BI_DB if sh >= BI_DB
if k is 8 and (s[0] & 0x80) isnt 0
@s = -1
this_array[@t - 1] |= ((1 << (BI_DB - sh)) - 1) << sh if sh > 0
@clamp()
BigInteger.ZERO.subTo this, this if mi
return
# (protected) clamp off excess high words
bnpClamp = ->
this_array = @array
c = @s & BI_DM
--@t while @t > 0 and this_array[@t - 1] is c
return
# (public) return string representation in given radix
bnToString = (b) ->
this_array = @array
return "-" + @negate().toString(b) if @s < 0
k = undefined
if b is 16
k = 4
else if b is 8
k = 3
else if b is 2
k = 1
else if b is 32
k = 5
else if b is 4
k = 2
else
return @toRadix(b)
km = (1 << k) - 1
d = undefined
m = false
r = ""
i = @t
p = BI_DB - (i * BI_DB) % k
if i-- > 0
if p < BI_DB and (d = this_array[i] >> p) > 0
m = true
r = int2char(d)
while i >= 0
if p < k
d = (this_array[i] & ((1 << p) - 1)) << (k - p)
d |= this_array[--i] >> (p += BI_DB - k)
else
d = (this_array[i] >> (p -= k)) & km
if p <= 0
p += BI_DB
--i
m = true if d > 0
r += int2char(d) if m
(if m then r else "0")
# (public) -this
bnNegate = ->
r = nbi()
BigInteger.ZERO.subTo this, r
r
# (public) |this|
bnAbs = ->
(if (@s < 0) then @negate() else this)
# (public) return + if this > a, - if this < a, 0 if equal
bnCompareTo = (a) ->
this_array = @array
a_array = a.array
r = @s - a.s
return r unless r is 0
i = @t
r = i - a.t
return r unless r is 0
return r unless (r = this_array[i] - a_array[i]) is 0 while --i >= 0
0
# returns bit length of the integer x
nbits = (x) ->
r = 1
t = undefined
unless (t = x >>> 16) is 0
x = t
r += 16
unless (t = x >> 8) is 0
x = t
r += 8
unless (t = x >> 4) is 0
x = t
r += 4
unless (t = x >> 2) is 0
x = t
r += 2
unless (t = x >> 1) is 0
x = t
r += 1
r
# (public) return the number of bits in "this"
bnBitLength = ->
this_array = @array
return 0 if @t <= 0
BI_DB * (@t - 1) + nbits(this_array[@t - 1] ^ (@s & BI_DM))
# (protected) r = this << n*DB
bnpDLShiftTo = (n, r) ->
this_array = @array
r_array = r.array
i = undefined
i = @t - 1
while i >= 0
r_array[i + n] = this_array[i]
--i
i = n - 1
while i >= 0
r_array[i] = 0
--i
r.t = @t + n
r.s = @s
return
# (protected) r = this >> n*DB
bnpDRShiftTo = (n, r) ->
this_array = @array
r_array = r.array
i = n
while i < @t
r_array[i - n] = this_array[i]
++i
r.t = Math.max(@t - n, 0)
r.s = @s
return
# (protected) r = this << n
bnpLShiftTo = (n, r) ->
this_array = @array
r_array = r.array
bs = n % BI_DB
cbs = BI_DB - bs
bm = (1 << cbs) - 1
ds = Math.floor(n / BI_DB)
c = (@s << bs) & BI_DM
i = undefined
i = @t - 1
while i >= 0
r_array[i + ds + 1] = (this_array[i] >> cbs) | c
c = (this_array[i] & bm) << bs
--i
i = ds - 1
while i >= 0
r_array[i] = 0
--i
r_array[ds] = c
r.t = @t + ds + 1
r.s = @s
r.clamp()
return
# (protected) r = this >> n
bnpRShiftTo = (n, r) ->
this_array = @array
r_array = r.array
r.s = @s
ds = Math.floor(n / BI_DB)
if ds >= @t
r.t = 0
return
bs = n % BI_DB
cbs = BI_DB - bs
bm = (1 << bs) - 1
r_array[0] = this_array[ds] >> bs
i = ds + 1
while i < @t
r_array[i - ds - 1] |= (this_array[i] & bm) << cbs
r_array[i - ds] = this_array[i] >> bs
++i
r_array[@t - ds - 1] |= (@s & bm) << cbs if bs > 0
r.t = @t - ds
r.clamp()
return
# (protected) r = this - a
bnpSubTo = (a, r) ->
this_array = @array
r_array = r.array
a_array = a.array
i = 0
c = 0
m = Math.min(a.t, @t)
while i < m
c += this_array[i] - a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
if a.t < @t
c -= a.s
while i < @t
c += this_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c += @s
else
c += @s
while i < a.t
c -= a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c -= a.s
r.s = (if (c < 0) then -1 else 0)
if c < -1
r_array[i++] = BI_DV + c
else r_array[i++] = c if c > 0
r.t = i
r.clamp()
return
# (protected) r = this * a, r != this,a (HAC 14.12)
# "this" should be the larger one if appropriate.
bnpMultiplyTo = (a, r) ->
this_array = @array
r_array = r.array
x = @abs()
y = a.abs()
y_array = y.array
i = x.t
r.t = i + y.t
r_array[i] = 0 while --i >= 0
i = 0
while i < y.t
r_array[i + x.t] = x.am(0, y_array[i], r, i, 0, x.t)
++i
r.s = 0
r.clamp()
BigInteger.ZERO.subTo r, r unless @s is a.s
return
# (protected) r = this^2, r != this (HAC 14.16)
bnpSquareTo = (r) ->
x = @abs()
x_array = x.array
r_array = r.array
i = r.t = 2 * x.t
r_array[i] = 0 while --i >= 0
i = 0
while i < x.t - 1
c = x.am(i, x_array[i], r, 2 * i, 0, 1)
if (r_array[i + x.t] += x.am(i + 1, 2 * x_array[i], r, 2 * i + 1, c, x.t - i - 1)) >= BI_DV
r_array[i + x.t] -= BI_DV
r_array[i + x.t + 1] = 1
++i
r_array[r.t - 1] += x.am(i, x_array[i], r, 2 * i, 0, 1) if r.t > 0
r.s = 0
r.clamp()
return
# (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
# r != q, this != m. q or r may be null.
bnpDivRemTo = (m, q, r) ->
pm = m.abs()
return if pm.t <= 0
pt = @abs()
if pt.t < pm.t
q.fromInt 0 if q?
@copyTo r if r?
return
r = nbi() unless r?
y = nbi()
ts = @s
ms = m.s
pm_array = pm.array
nsh = BI_DB - nbits(pm_array[pm.t - 1]) # normalize modulus
if nsh > 0
pm.lShiftTo nsh, y
pt.lShiftTo nsh, r
else
pm.copyTo y
pt.copyTo r
ys = y.t
y_array = y.array
y0 = y_array[ys - 1]
return if y0 is 0
yt = y0 * (1 << BI_F1) + ((if (ys > 1) then y_array[ys - 2] >> BI_F2 else 0))
d1 = BI_FV / yt
d2 = (1 << BI_F1) / yt
e = 1 << BI_F2
i = r.t
j = i - ys
t = (if (not (q?)) then nbi() else q)
y.dlShiftTo j, t
r_array = r.array
if r.compareTo(t) >= 0
r_array[r.t++] = 1
r.subTo t, r
BigInteger.ONE.dlShiftTo ys, t
t.subTo y, y # "negative" y so we can replace sub with am later
y_array[y.t++] = 0 while y.t < ys
while --j >= 0
# Estimate quotient digit
qd = (if (r_array[--i] is y0) then BI_DM else Math.floor(r_array[i] * d1 + (r_array[i - 1] + e) * d2))
if (r_array[i] += y.am(0, qd, r, j, 0, ys)) < qd # Try it out
y.dlShiftTo j, t
r.subTo t, r
r.subTo t, r while r_array[i] < --qd
if q?
r.drShiftTo ys, q
BigInteger.ZERO.subTo q, q unless ts is ms
r.t = ys
r.clamp()
r.rShiftTo nsh, r if nsh > 0 # Denormalize remainder
BigInteger.ZERO.subTo r, r if ts < 0
return
# (public) this mod a
bnMod = (a) ->
r = nbi()
@abs().divRemTo a, null, r
a.subTo r, r if @s < 0 and r.compareTo(BigInteger.ZERO) > 0
r
# Modular reduction using "classic" algorithm
Classic = (m) ->
@m = m
return
cConvert = (x) ->
if x.s < 0 or x.compareTo(@m) >= 0
x.mod @m
else
x
cRevert = (x) ->
x
cReduce = (x) ->
x.divRemTo @m, null, x
return
cMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
cSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# (protected) return "-1/this % 2^DB"; useful for Mont. reduction
# justification:
# xy == 1 (mod m)
# xy = 1+km
# xy(2-xy) = (1+km)(1-km)
# x[y(2-xy)] = 1-k^2m^2
# x[y(2-xy)] == 1 (mod m^2)
# if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
# should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
# JS multiply "overflows" differently from C/C++, so care is needed here.
bnpInvDigit = ->
this_array = @array
return 0 if @t < 1
x = this_array[0]
return 0 if (x & 1) is 0
y = x & 3 # y == 1/x mod 2^2
y = (y * (2 - (x & 0xf) * y)) & 0xf # y == 1/x mod 2^4
y = (y * (2 - (x & 0xff) * y)) & 0xff # y == 1/x mod 2^8
y = (y * (2 - (((x & 0xffff) * y) & 0xffff))) & 0xffff # y == 1/x mod 2^16
# last step - calculate inverse mod DV directly;
# assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
y = (y * (2 - x * y % BI_DV)) % BI_DV # y == 1/x mod 2^dbits
# we really want the negative inverse, and -DV < y < DV
(if (y > 0) then BI_DV - y else -y)
# Montgomery reduction
Montgomery = (m) ->
@m = m
@mp = m.invDigit()
@mpl = @mp & 0x7fff
@mph = @mp >> 15
@um = (1 << (BI_DB - 15)) - 1
@mt2 = 2 * m.t
return
# xR mod m
montConvert = (x) ->
r = nbi()
x.abs().dlShiftTo @m.t, r
r.divRemTo @m, null, r
@m.subTo r, r if x.s < 0 and r.compareTo(BigInteger.ZERO) > 0
r
# x/R mod m
montRevert = (x) ->
r = nbi()
x.copyTo r
@reduce r
r
# x = x/R mod m (HAC 14.32)
montReduce = (x) ->
x_array = x.array
# pad x so am has enough room later
x_array[x.t++] = 0 while x.t <= @mt2
i = 0
while i < @m.t
# faster way of calculating u0 = x[i]*mp mod DV
j = x_array[i] & 0x7fff
u0 = (j * @mpl + (((j * @mph + (x_array[i] >> 15) * @mpl) & @um) << 15)) & BI_DM
# use am to combine the multiply-shift-add into one call
j = i + @m.t
x_array[j] += @m.am(0, u0, x, i, 0, @m.t)
# propagate carry
while x_array[j] >= BI_DV
x_array[j] -= BI_DV
x_array[++j]++
++i
x.clamp()
x.drShiftTo @m.t, x
x.subTo @m, x if x.compareTo(@m) >= 0
return
# r = "x^2/R mod m"; x != r
montSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# r = "xy/R mod m"; x,y != r
montMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
# (protected) true iff this is even
bnpIsEven = ->
this_array = @array
((if (@t > 0) then (this_array[0] & 1) else @s)) is 0
# (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
# Left-to-right binary exponentiation over a reduction engine z (Classic,
# Montgomery, Barrett or NullExp): square each step, multiply in the base
# when the exponent bit is set, and swap the scratch registers otherwise.
bnpExp = (e, z) ->
  return BigInteger.ONE if e > 0xffffffff or e < 1
  r = nbi()
  r2 = nbi()
  g = z.convert(this)
  i = nbits(e) - 1
  g.copyTo r
  while --i >= 0
    z.sqrTo r, r2
    if (e & (1 << i)) > 0
      z.mulTo r2, g, r
    else
      # bit not set: just adopt the squared value by swapping registers
      t = r
      r = r2
      r2 = t
  # convert out of the engine's internal representation
  z.revert r
# (public) this^e % m, 0 <= e < 2^32
# Picks the reduction engine: Montgomery needs an odd modulus and only
# pays off for larger exponents, so small e or even m use Classic.
bnModPowInt = (e, m) ->
  z = undefined
  if e < 256 or m.isEven()
    z = new Classic(m)
  else
    z = new Montgomery(m)
  @exp e, z
# protected
# public
# "constants"
# Copyright (c) 2005 Tom Wu
# All Rights Reserved.
# See "LICENSE" for details.
# Extended JavaScript BN functions, required for RSA private ops.
# (public)
bnClone = ->
r = nbi()
@copyTo r
r
# (public) return value as integer
bnIntValue = ->
this_array = @array
if @s < 0
if @t is 1
return this_array[0] - BI_DV
else return -1 if @t is 0
else if @t is 1
return this_array[0]
else return 0 if @t is 0
# assumes 16 < DB < 32
((this_array[1] & ((1 << (32 - BI_DB)) - 1)) << BI_DB) | this_array[0]
# (public) return value as byte
bnByteValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 24) >> 24)
# (public) return value as short (assumes DB>=16)
bnShortValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 16) >> 16)
# (protected) return x s.t. r^x < DV
bnpChunkSize = (r) ->
Math.floor Math.LN2 * BI_DB / Math.log(r)
# (public) 0 if this == 0, 1 if this > 0
bnSigNum = ->
this_array = @array
if @s < 0
-1
else if @t <= 0 or (@t is 1 and this_array[0] <= 0)
0
else
1
# (protected) convert to radix string
bnpToRadix = (b) ->
b = 10 unless b?
return "0" if @signum() is 0 or b < 2 or b > 36
cs = @chunkSize(b)
a = Math.pow(b, cs)
d = nbv(a)
y = nbi()
z = nbi()
r = ""
@divRemTo d, y, z
while y.signum() > 0
r = (a + z.intValue()).toString(b).substr(1) + r
y.divRemTo d, y, z
z.intValue().toString(b) + r
# (protected) convert from radix string
bnpFromRadix = (s, b) ->
@fromInt 0
b = 10 unless b?
cs = @chunkSize(b)
d = Math.pow(b, cs)
mi = false
j = 0
w = 0
i = 0
while i < s.length
x = intAt(s, i)
if x < 0
mi = true if s.charAt(i) is "-" and @signum() is 0
continue
w = b * w + x
if ++j >= cs
@dMultiply d
@dAddOffset w, 0
j = 0
w = 0
++i
if j > 0
@dMultiply Math.pow(b, j)
@dAddOffset w, 0
BigInteger.ZERO.subTo this, this if mi
return
# (protected) alternate constructor
bnpFromNumber = (a, b, c) ->
if "number" is typeof b
# new BigInteger(int,int,RNG)
if a < 2
@fromInt 1
else
@fromNumber a, c
# force MSB set
@bitwiseTo BigInteger.ONE.shiftLeft(a - 1), op_or, this unless @testBit(a - 1)
@dAddOffset 1, 0 if @isEven() # force odd
until @isProbablePrime(b)
@dAddOffset 2, 0
@subTo BigInteger.ONE.shiftLeft(a - 1), this if @bitLength() > a
else
# new BigInteger(int,RNG)
x = new Array()
t = a & 7
x.length = (a >> 3) + 1
b.nextBytes x
if t > 0
x[0] &= ((1 << t) - 1)
else
x[0] = 0
@fromString x, 256
return
# (public) convert to bigendian byte array
bnToByteArray = ->
this_array = @array
i = @t
r = new Array()
r[0] = @s
p = BI_DB - (i * BI_DB) % 8
d = undefined
k = 0
if i-- > 0
r[k++] = d | (@s << (BI_DB - p)) if p < BI_DB and (d = this_array[i] >> p) isnt (@s & BI_DM) >> p
while i >= 0
if p < 8
d = (this_array[i] & ((1 << p) - 1)) << (8 - p)
d |= this_array[--i] >> (p += BI_DB - 8)
else
d = (this_array[i] >> (p -= 8)) & 0xff
if p <= 0
p += BI_DB
--i
d |= -256 unless (d & 0x80) is 0
++k if k is 0 and (@s & 0x80) isnt (d & 0x80)
r[k++] = d if k > 0 or d isnt @s
r
bnEquals = (a) ->
@compareTo(a) is 0
bnMin = (a) ->
(if (@compareTo(a) < 0) then this else a)
bnMax = (a) ->
(if (@compareTo(a) > 0) then this else a)
# (protected) r = this op a (bitwise)
bnpBitwiseTo = (a, op, r) ->
this_array = @array
a_array = a.array
r_array = r.array
i = undefined
f = undefined
m = Math.min(a.t, @t)
i = 0
while i < m
r_array[i] = op(this_array[i], a_array[i])
++i
if a.t < @t
f = a.s & BI_DM
i = m
while i < @t
r_array[i] = op(this_array[i], f)
++i
r.t = @t
else
f = @s & BI_DM
i = m
while i < a.t
r_array[i] = op(f, a_array[i])
++i
r.t = a.t
r.s = op(@s, a.s)
r.clamp()
return
# (public) this & a
op_and = (x, y) ->
x & y
bnAnd = (a) ->
r = nbi()
@bitwiseTo a, op_and, r
r
# (public) this | a
op_or = (x, y) ->
x | y
bnOr = (a) ->
r = nbi()
@bitwiseTo a, op_or, r
r
# (public) this ^ a
op_xor = (x, y) ->
x ^ y
bnXor = (a) ->
r = nbi()
@bitwiseTo a, op_xor, r
r
# (public) this & ~a
op_andnot = (x, y) ->
x & ~y
bnAndNot = (a) ->
r = nbi()
@bitwiseTo a, op_andnot, r
r
# (public) ~this
bnNot = ->
this_array = @array
r = nbi()
r_array = r.array
i = 0
while i < @t
r_array[i] = BI_DM & ~this_array[i]
++i
r.t = @t
r.s = ~@s
r
# (public) this << n
bnShiftLeft = (n) ->
r = nbi()
if n < 0
@rShiftTo -n, r
else
@lShiftTo n, r
r
# (public) this >> n
bnShiftRight = (n) ->
r = nbi()
if n < 0
@lShiftTo -n, r
else
@rShiftTo n, r
r
# return index of lowest 1-bit in x, x < 2^31
# Binary search over halves: test progressively smaller low-bit masks,
# shifting past each all-zero chunk.  Returns -1 for x == 0.
lbit = (x) ->
  return -1 if x is 0
  idx = 0
  for [mask, shift] in [[0xffff, 16], [0xff, 8], [0xf, 4], [3, 2]]
    if (x & mask) is 0
      x >>= shift
      idx += shift
  idx += 1 if (x & 1) is 0
  idx
# (public) returns index of lowest 1-bit (or -1 if none)
bnGetLowestSetBit = ->
this_array = @array
i = 0
while i < @t
return i * BI_DB + lbit(this_array[i]) unless this_array[i] is 0
++i
return @t * BI_DB if @s < 0
-1
# return number of 1 bits in x
# Kernighan's method: each x &= x-1 clears the lowest set bit, so the
# loop runs once per set bit.
cbit = (x) ->
  count = 0
  while x isnt 0
    x &= x - 1
    ++count
  count
# (public) return number of set bits
# Counts bits of each digit XORed with the sign word, so negative values
# count their two's-complement zero bits, matching java.math.BigInteger.
bnBitCount = ->
  # BUGFIX: `this_array` was read without ever being initialized — every
  # sibling method starts with `this_array = @array` (see e.g.
  # bnGetLowestSetBit / bnTestBit) but this one omitted it, so calling
  # bitCount() always threw a ReferenceError.
  this_array = @array
  r = 0
  x = @s & BI_DM
  i = 0
  while i < @t
    r += cbit(this_array[i] ^ x)
    ++i
  r
# (public) true iff nth bit is set
bnTestBit = (n) ->
this_array = @array
j = Math.floor(n / BI_DB)
return (@s isnt 0) if j >= @t
(this_array[j] & (1 << (n % BI_DB))) isnt 0
# (protected) this op (1<<n)
bnpChangeBit = (n, op) ->
r = BigInteger.ONE.shiftLeft(n)
@bitwiseTo r, op, r
r
# (public) this | (1<<n)
bnSetBit = (n) ->
@changeBit n, op_or
# (public) this & ~(1<<n)
bnClearBit = (n) ->
@changeBit n, op_andnot
# (public) this ^ (1<<n)
bnFlipBit = (n) ->
@changeBit n, op_xor
# (protected) r = this + a
bnpAddTo = (a, r) ->
  # Schoolbook digit-wise addition: `c` is the running carry, each result
  # digit is masked to BI_DM and the carry shifted down by BI_DB bits.
  this_array = @array
  a_array = a.array
  r_array = r.array
  i = 0
  c = 0
  m = Math.min(a.t, @t)
  # Add the digits where both operands overlap.
  while i < m
    c += this_array[i] + a_array[i]
    r_array[i++] = c & BI_DM
    c >>= BI_DB
  # Continue through the longer operand, sign-extending the shorter one
  # with its sign word (a.s / @s).
  if a.t < @t
    c += a.s
    while i < @t
      c += this_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c += @s
  else
    c += @s
    while i < a.t
      c += a_array[i]
      r_array[i++] = c & BI_DM
      c >>= BI_DB
    c += a.s
  r.s = (if (c < 0) then -1 else 0)
  # A leftover positive carry becomes a new top digit; a residual value
  # below -1 is stored in digit form as BI_DV + c.
  if c > 0
    r_array[i++] = c
  else r_array[i++] = BI_DV + c if c < -1
  r.t = i
  r.clamp()
  return
# (public) this + a
bnAdd = (a) ->
  result = nbi()
  @addTo a, result
  result
# (public) this - a
bnSubtract = (a) ->
  result = nbi()
  @subTo a, result
  result
# (public) this * a
bnMultiply = (a) ->
  result = nbi()
  @multiplyTo a, result
  result
# (public) this / a
bnDivide = (a) ->
  quotient = nbi()
  @divRemTo a, quotient, null
  quotient
# (public) this % a
bnRemainder = (a) ->
  remainder = nbi()
  @divRemTo a, null, remainder
  remainder
# (public) [this/a,this%a]
bnDivideAndRemainder = (a) ->
  quotient = nbi()
  remainder = nbi()
  @divRemTo a, quotient, remainder
  new Array(quotient, remainder)
# (protected) this *= n, this >= 0, 1 < n < DV
bnpDMultiply = (n) ->
this_array = @array
this_array[@t] = @am(0, n - 1, this, 0, 0, @t)
++@t
@clamp()
return
# (protected) this += n << w words, this >= 0
bnpDAddOffset = (n, w) ->
this_array = @array
this_array[@t++] = 0 while @t <= w
this_array[w] += n
while this_array[w] >= BI_DV
this_array[w] -= BI_DV
this_array[@t++] = 0 if ++w >= @t
++this_array[w]
return
# A "null" reducer
NullExp = ->
nNop = (x) ->
x
nMulTo = (x, y, r) ->
x.multiplyTo y, r
return
nSqrTo = (x, r) ->
x.squareTo r
return
# (public) this^e
bnPow = (e) ->
@exp e, new NullExp()
# (protected) r = lower n words of "this * a", a.t <= n
# "this" should be the larger one if appropriate.
bnpMultiplyLowerTo = (a, n, r) ->
r_array = r.array
a_array = a.array
i = Math.min(@t + a.t, n)
r.s = 0 # assumes a,this >= 0
r.t = i
r_array[--i] = 0 while i > 0
j = undefined
j = r.t - @t
while i < j
r_array[i + @t] = @am(0, a_array[i], r, i, 0, @t)
++i
j = Math.min(a.t, n)
while i < j
@am 0, a_array[i], r, i, 0, n - i
++i
r.clamp()
return
# (protected) r = "this * a" without lower n words, n > 0
# "this" should be the larger one if appropriate.
bnpMultiplyUpperTo = (a, n, r) ->
r_array = r.array
a_array = a.array
--n
i = r.t = @t + a.t - n
r.s = 0 # assumes a,this >= 0
r_array[i] = 0 while --i >= 0
i = Math.max(n - @t, 0)
while i < a.t
r_array[@t + i - n] = @am(n - i, a_array[i], r, 0, 0, @t + i - n)
++i
r.clamp()
r.drShiftTo 1, r
return
# Barrett modular reduction
Barrett = (m) ->
# setup Barrett
@r2 = nbi()
@q3 = nbi()
BigInteger.ONE.dlShiftTo 2 * m.t, @r2
@mu = @r2.divide(m)
@m = m
return
barrettConvert = (x) ->
if x.s < 0 or x.t > 2 * @m.t
x.mod @m
else if x.compareTo(@m) < 0
x
else
r = nbi()
x.copyTo r
@reduce r
r
barrettRevert = (x) ->
x
# x = x mod m (HAC 14.42)
barrettReduce = (x) ->
x.drShiftTo @m.t - 1, @r2
if x.t > @m.t + 1
x.t = @m.t + 1
x.clamp()
@mu.multiplyUpperTo @r2, @m.t + 1, @q3
@m.multiplyLowerTo @q3, @m.t + 1, @r2
x.dAddOffset 1, @m.t + 1 while x.compareTo(@r2) < 0
x.subTo @r2, x
x.subTo @m, x while x.compareTo(@m) >= 0
return
# r = x^2 mod m; x != r
barrettSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# r = x*y mod m; x,y != r
barrettMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
# (public) this^e % m (HAC 14.85)
bnModPow = (e, m) ->
e_array = e.array
i = e.bitLength()
k = undefined
r = nbv(1)
z = undefined
if i <= 0
return r
else if i < 18
k = 1
else if i < 48
k = 3
else if i < 144
k = 4
else if i < 768
k = 5
else
k = 6
if i < 8
z = new Classic(m)
else if m.isEven()
z = new Barrett(m)
else
z = new Montgomery(m)
# precomputation
g = new Array()
n = 3
k1 = k - 1
km = (1 << k) - 1
g[1] = z.convert(this)
if k > 1
g2 = nbi()
z.sqrTo g[1], g2
while n <= km
g[n] = nbi()
z.mulTo g2, g[n - 2], g[n]
n += 2
j = e.t - 1
w = undefined
is1 = true
r2 = nbi()
t = undefined
i = nbits(e_array[j]) - 1
while j >= 0
if i >= k1
w = (e_array[j] >> (i - k1)) & km
else
w = (e_array[j] & ((1 << (i + 1)) - 1)) << (k1 - i)
w |= e_array[j - 1] >> (BI_DB + i - k1) if j > 0
n = k
while (w & 1) is 0
w >>= 1
--n
if (i -= n) < 0
i += BI_DB
--j
if is1 # ret == 1, don't bother squaring or multiplying it
g[w].copyTo r
is1 = false
else
while n > 1
z.sqrTo r, r2
z.sqrTo r2, r
n -= 2
if n > 0
z.sqrTo r, r2
else
t = r
r = r2
r2 = t
z.mulTo r2, g[w], r
while j >= 0 and (e_array[j] & (1 << i)) is 0
z.sqrTo r, r2
t = r
r = r2
r2 = t
if --i < 0
i = BI_DB - 1
--j
z.revert r
# (public) gcd(this,a) (HAC 14.54)
bnGCD = (a) ->
x = (if (@s < 0) then @negate() else @clone())
y = (if (a.s < 0) then a.negate() else a.clone())
if x.compareTo(y) < 0
t = x
x = y
y = t
i = x.getLowestSetBit()
g = y.getLowestSetBit()
return x if g < 0
g = i if i < g
if g > 0
x.rShiftTo g, x
y.rShiftTo g, y
while x.signum() > 0
x.rShiftTo i, x if (i = x.getLowestSetBit()) > 0
y.rShiftTo i, y if (i = y.getLowestSetBit()) > 0
if x.compareTo(y) >= 0
x.subTo y, x
x.rShiftTo 1, x
else
y.subTo x, y
y.rShiftTo 1, y
y.lShiftTo g, y if g > 0
y
# (protected) this % n, n < 2^26
bnpModInt = (n) ->
this_array = @array
return 0 if n <= 0
d = BI_DV % n
r = (if (@s < 0) then n - 1 else 0)
if @t > 0
if d is 0
r = this_array[0] % n
else
i = @t - 1
while i >= 0
r = (d * r + this_array[i]) % n
--i
r
# (public) 1/this % m (HAC 14.61)
bnModInverse = (m) ->
ac = m.isEven()
return BigInteger.ZERO if (@isEven() and ac) or m.signum() is 0
u = m.clone()
v = @clone()
a = nbv(1)
b = nbv(0)
c = nbv(0)
d = nbv(1)
until u.signum() is 0
while u.isEven()
u.rShiftTo 1, u
if ac
if not a.isEven() or not b.isEven()
a.addTo this, a
b.subTo m, b
a.rShiftTo 1, a
else b.subTo m, b unless b.isEven()
b.rShiftTo 1, b
while v.isEven()
v.rShiftTo 1, v
if ac
if not c.isEven() or not d.isEven()
c.addTo this, c
d.subTo m, d
c.rShiftTo 1, c
else d.subTo m, d unless d.isEven()
d.rShiftTo 1, d
if u.compareTo(v) >= 0
u.subTo v, u
a.subTo c, a if ac
b.subTo d, b
else
v.subTo u, v
c.subTo a, c if ac
d.subTo b, d
return BigInteger.ZERO unless v.compareTo(BigInteger.ONE) is 0
return d.subtract(m) if d.compareTo(m) >= 0
if d.signum() < 0
d.addTo m, d
else
return d
if d.signum() < 0
d.add m
else
d
# (public) test primality with certainty >= 1-.5^t
bnIsProbablePrime = (t) ->
i = undefined
x = @abs()
x_array = x.array
if x.t is 1 and x_array[0] <= lowprimes[lowprimes.length - 1]
i = 0
while i < lowprimes.length
return true if x_array[0] is lowprimes[i]
++i
return false
return false if x.isEven()
i = 1
while i < lowprimes.length
m = lowprimes[i]
j = i + 1
m *= lowprimes[j++] while j < lowprimes.length and m < lplim
m = x.modInt(m)
return false if m % lowprimes[i++] is 0 while i < j
x.millerRabin t
  # (protected) true if probably prime (HAC 4.24, Miller-Rabin)
bnpMillerRabin = (t) ->
n1 = @subtract(BigInteger.ONE)
k = n1.getLowestSetBit()
return false if k <= 0
r = n1.shiftRight(k)
t = (t + 1) >> 1
t = lowprimes.length if t > lowprimes.length
a = nbi()
i = 0
while i < t
a.fromInt lowprimes[i]
y = a.modPow(r, this)
if y.compareTo(BigInteger.ONE) isnt 0 and y.compareTo(n1) isnt 0
j = 1
while j++ < k and y.compareTo(n1) isnt 0
y = y.modPowInt(2, this)
return false if y.compareTo(BigInteger.ONE) is 0
return false unless y.compareTo(n1) is 0
++i
true
# protected
# public
# BigInteger interfaces not implemented in jsbn:
# BigInteger(int signum, byte[] magnitude)
# double doubleValue()
# float floatValue()
# int hashCode()
# long longValue()
# static BigInteger valueOf(long val)
# prng4.js - uses Arcfour as a PRNG
Arcfour = ->
  # RC4 cipher state: permutation S plus the two stream indices i, j.
  @i = 0
  @j = 0
  @S = []
  return
# Initialize arcfour context from key, an array of ints, each from [0..255]
ARC4init = (key) ->
  # Start from the identity permutation.
  n = 0
  while n < 256
    @S[n] = n
    ++n
  # Key-scheduling pass: swap-driven mixing keyed by `key` (repeated).
  j = 0
  n = 0
  while n < 256
    j = (j + @S[n] + key[n % key.length]) & 255
    swap = @S[n]
    @S[n] = @S[j]
    @S[j] = swap
    ++n
  @i = 0
  @j = 0
  return
ARC4next = ->
  # One step of the RC4 keystream generator: advance i and j, swap the
  # two state bytes, and emit S[(S[i]_old + S[i]_new) & 255].
  @i = (@i + 1) & 255
  @j = (@j + @S[@i]) & 255
  swap = @S[@i]
  @S[@i] = @S[@j]
  @S[@j] = swap
  @S[(swap + @S[@i]) & 255]
# Plug in your RNG constructor here
prng_newstate = ->
new Arcfour()
# Pool size must be a multiple of 4 and greater than 32.
# An array of bytes the size of the pool will be passed to init()
# Random number generator - requires a PRNG backend, e.g. prng4.js
# For best results, put code like
# <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
# in your main HTML document.
# Mix in a 32-bit integer into the pool
rng_seed_int = (x) ->
  # XOR each of the four bytes of x into consecutive pool slots,
  # wrapping the write pointer when it passes the pool size.
  for shift in [0, 8, 16, 24]
    rng_pool[rng_pptr++] ^= (x >> shift) & 255
  rng_pptr -= rng_psize if rng_pptr >= rng_psize
  return
# Mix in the current time (w/milliseconds) into the pool
rng_seed_time = ->
# Use pre-computed date to avoid making the benchmark
# results dependent on the current date.
rng_seed_int 1122926989487
return
# Initialize the pool with junk if needed.
# extract some randomness from Math.random()
#rng_seed_int(window.screenX);
#rng_seed_int(window.screenY);
rng_get_byte = ->
unless rng_state?
rng_seed_time()
rng_state = prng_newstate()
rng_state.init rng_pool
rng_pptr = 0
while rng_pptr < rng_pool.length
rng_pool[rng_pptr] = 0
++rng_pptr
rng_pptr = 0
#rng_pool = null;
# TODO: allow reseeding after first request
rng_state.next()
rng_get_bytes = (ba) ->
i = undefined
i = 0
while i < ba.length
ba[i] = rng_get_byte()
++i
return
SecureRandom = ->
# Depends on jsbn.js and rng.js
# convert a (hex) string to a bignum object
parseBigInt = (str, r) ->
new BigInteger(str, r)
linebrk = (s, n) ->
  # Insert "\n" after every n characters of s.
  out = ""
  pos = 0
  while pos + n < s.length
    out += s.substring(pos, pos + n) + "\n"
    pos += n
  out + s.substring(pos, s.length)
byte2Hex = (b) ->
  # Two-digit lowercase hex for a byte value.
  hex = b.toString(16)
  if b < 0x10 then "0" + hex else hex
# PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
pkcs1pad2 = (s, n) ->
  # Padding requires at least 11 bytes of overhead:
  # 0x00, 0x02, >= 8 random non-zero pad bytes, and a 0x00 separator.
  if n < s.length + 11
    alert "Message too long for RSA"
    return null
  ba = new Array()
  # Copy the message bytes into the tail of the block, last byte first.
  i = s.length - 1
  ba[--n] = s.charCodeAt(i--) while i >= 0 and n > 0
  # 0x00 separator between pad and message.
  ba[--n] = 0
  rng = new SecureRandom()
  x = new Array()
  # Fill the pad with random NON-zero bytes (zero would end the pad early).
  while n > 2 # random non-zero pad
    x[0] = 0
    rng.nextBytes x while x[0] is 0
    ba[--n] = x[0]
  # Block type 02 and the leading 0x00.
  ba[--n] = 2
  ba[--n] = 0
  new BigInteger(ba)
# "empty" RSA key constructor
RSAKey = ->
@n = null
@e = 0
@d = null
@p = null
@q = null
@dmp1 = null
@dmq1 = null
@coeff = null
return
# Set the public key fields N and e from hex strings
RSASetPublic = (N, E) ->
  # Guard clause: reject missing or empty inputs up front.
  if not N? or not E? or N.length <= 0 or E.length <= 0
    alert "Invalid RSA public key"
  else
    @n = parseBigInt(N, 16)
    @e = parseInt(E, 16)
  return
# Perform raw public operation on "x": return x^e (mod n)
RSADoPublic = (x) ->
x.modPowInt @e, @n
# Return the PKCS#1 RSA encryption of "text" as an even-length hex string
RSAEncrypt = (text) ->
  # Pad to the modulus byte length, apply the public operation, and
  # left-pad the hex result to an even number of digits.
  padded = pkcs1pad2(text, (@n.bitLength() + 7) >> 3)
  return null unless padded?
  cipher = @doPublic(padded)
  return null unless cipher?
  hex = cipher.toString(16)
  if (hex.length & 1) is 0 then hex else "0" + hex
# Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
#function RSAEncryptB64(text) {
# var h = this.encrypt(text);
# if(h) return hex2b64(h); else return null;
#}
# protected
# public
#RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
# Depends on rsa.js and jsbn2.js
# Undo PKCS#1 (type 2, random) padding and, if valid, return the plaintext
pkcs1unpad2 = (d, n) ->
  b = d.toByteArray()
  i = 0
  # Skip any leading zero bytes of the byte-array form.
  ++i while i < b.length and b[i] is 0
  # After the stripped leading 0x00 there must be exactly n-1 bytes left,
  # starting with block type 0x02.
  return null if b.length - i isnt n - 1 or b[i] isnt 2
  ++i
  # Skip the random pad up to its 0x00 terminator; fail if none found.
  return null if ++i >= b.length until b[i] is 0
  ret = ""
  # The remaining bytes are the plaintext characters.
  ret += String.fromCharCode(b[i]) while ++i < b.length
  ret
# Set the private key fields N, e, and d from hex strings
RSASetPrivate = (N, E, D) ->
  # Guard clause: reject missing or empty N/E up front (matches the
  # validation done by setPublic).
  if not N? or not E? or N.length <= 0 or E.length <= 0
    alert "Invalid RSA private key"
  else
    @n = parseBigInt(N, 16)
    @e = parseInt(E, 16)
    @d = parseBigInt(D, 16)
  return
# Set the private key fields N, e, d and CRT params from hex strings
RSASetPrivateEx = (N, E, D, P, Q, DP, DQ, C) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
@d = parseBigInt(D, 16)
@p = parseBigInt(P, 16)
@q = parseBigInt(Q, 16)
@dmp1 = parseBigInt(DP, 16)
@dmq1 = parseBigInt(DQ, 16)
@coeff = parseBigInt(C, 16)
else
alert "Invalid RSA private key"
return
# Generate a new random private key B bits long, using public expt E
RSAGenerate = (B, E) ->
rng = new SecureRandom()
qs = B >> 1
@e = parseInt(E, 16)
ee = new BigInteger(E, 16)
loop
loop
@p = new BigInteger(B - qs, 1, rng)
break if @p.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @p.isProbablePrime(10)
loop
@q = new BigInteger(qs, 1, rng)
break if @q.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @q.isProbablePrime(10)
if @p.compareTo(@q) <= 0
t = @p
@p = @q
@q = t
p1 = @p.subtract(BigInteger.ONE)
q1 = @q.subtract(BigInteger.ONE)
phi = p1.multiply(q1)
if phi.gcd(ee).compareTo(BigInteger.ONE) is 0
@n = @p.multiply(@q)
@d = ee.modInverse(phi)
@dmp1 = @d.mod(p1)
@dmq1 = @d.mod(q1)
@coeff = @q.modInverse(@p)
break
return
# Perform raw private operation on "x": return x^d (mod n)
RSADoPrivate = (x) ->
return x.modPow(@d, @n) if not @p? or not @q?
# TODO: re-calculate any missing CRT params
xp = x.mod(@p).modPow(@dmp1, @p)
xq = x.mod(@q).modPow(@dmq1, @q)
xp = xp.add(@p) while xp.compareTo(xq) < 0
xp.subtract(xq).multiply(@coeff).mod(@p).multiply(@q).add xq
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is an even-length hex string and the output is a plain string.
RSADecrypt = (ctext) ->
  cipher = parseBigInt(ctext, 16)
  plain = @doPrivate(cipher)
  return null unless plain?
  pkcs1unpad2 plain, (@n.bitLength() + 7) >> 3
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is a Base64-encoded string and the output is a plain string.
#function RSAB64Decrypt(ctext) {
# var h = b64tohex(ctext);
# if(h) return this.decrypt(h); else return null;
#}
# protected
# public
#RSAKey.prototype.b64_decrypt = RSAB64Decrypt;
encrypt = ->
  # Benchmark driver: rebuild a key from the fixed hex constants and store
  # the ciphertext in the module-level `encrypted` for decrypt() to use.
  RSA = new RSAKey()
  RSA.setPublic nValue, eValue
  RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
  encrypted = RSA.encrypt(TEXT)
  return
decrypt = ->
  # Benchmark driver: decrypt the ciphertext produced by encrypt() and
  # verify the round trip; throws to invalidate the benchmark on mismatch.
  RSA = new RSAKey()
  RSA.setPublic nValue, eValue
  RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
  decrypted = RSA.decrypt(encrypted)
  throw new Error("Crypto operation failed") unless decrypted is TEXT
  return
Crypto = new BenchmarkSuite("Crypto", 266181, [
new Benchmark("Encrypt", encrypt)
new Benchmark("Decrypt", decrypt)
])
dbits = undefined
BI_DB = undefined
BI_DM = undefined
BI_DV = undefined
BI_FP = undefined
BI_FV = undefined
BI_F1 = undefined
BI_F2 = undefined
canary = 0xdeadbeefcafe
j_lm = ((canary & 0xffffff) is 0xefcafe)
setupEngine = (fn, bits) ->
  # Install the selected am* multiply kernel on BigInteger and derive all
  # digit-size constants from `bits` (bits per internal digit).
  BigInteger::am = fn
  dbits = bits
  BI_DB = dbits
  # BI_DM: low-digit mask; BI_DV: digit modulus (2^dbits).
  BI_DM = ((1 << dbits) - 1)
  BI_DV = (1 << dbits)
  # Float-precision constants used by the quotient estimate in divRemTo;
  # 52 is the double significand width (see the 2^53 note at am1).
  BI_FP = 52
  BI_FV = Math.pow(2, BI_FP)
  BI_F1 = BI_FP - dbits
  BI_F2 = 2 * dbits - BI_FP
  return
BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
BI_RC = new Array()
rr = undefined
vv = undefined
rr = "0".charCodeAt(0)
vv = 0
while vv <= 9
BI_RC[rr++] = vv
++vv
rr = "a".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
rr = "A".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
Classic::convert = cConvert
Classic::revert = cRevert
Classic::reduce = cReduce
Classic::mulTo = cMulTo
Classic::sqrTo = cSqrTo
Montgomery::convert = montConvert
Montgomery::revert = montRevert
Montgomery::reduce = montReduce
Montgomery::mulTo = montMulTo
Montgomery::sqrTo = montSqrTo
BigInteger::copyTo = bnpCopyTo
BigInteger::fromInt = bnpFromInt
BigInteger::fromString = bnpFromString
BigInteger::clamp = bnpClamp
BigInteger::dlShiftTo = bnpDLShiftTo
BigInteger::drShiftTo = bnpDRShiftTo
BigInteger::lShiftTo = bnpLShiftTo
BigInteger::rShiftTo = bnpRShiftTo
BigInteger::subTo = bnpSubTo
BigInteger::multiplyTo = bnpMultiplyTo
BigInteger::squareTo = bnpSquareTo
BigInteger::divRemTo = bnpDivRemTo
BigInteger::invDigit = bnpInvDigit
BigInteger::isEven = bnpIsEven
BigInteger::exp = bnpExp
BigInteger::toString = bnToString
BigInteger::negate = bnNegate
BigInteger::abs = bnAbs
BigInteger::compareTo = bnCompareTo
BigInteger::bitLength = bnBitLength
BigInteger::mod = bnMod
BigInteger::modPowInt = bnModPowInt
BigInteger.ZERO = nbv(0)
BigInteger.ONE = nbv(1)
NullExp::convert = nNop
NullExp::revert = nNop
NullExp::mulTo = nMulTo
NullExp::sqrTo = nSqrTo
Barrett::convert = barrettConvert
Barrett::revert = barrettRevert
Barrett::reduce = barrettReduce
Barrett::mulTo = barrettMulTo
Barrett::sqrTo = barrettSqrTo
lowprimes = [
2
3
5
7
11
13
17
19
23
29
31
37
41
43
47
53
59
61
67
71
73
79
83
89
97
101
103
107
109
113
127
131
137
139
149
151
157
163
167
173
179
181
191
193
197
199
211
223
227
229
233
239
241
251
257
263
269
271
277
281
283
293
307
311
313
317
331
337
347
349
353
359
367
373
379
383
389
397
401
409
419
421
431
433
439
443
449
457
461
463
467
479
487
491
499
503
509
]
lplim = (1 << 26) / lowprimes[lowprimes.length - 1]
BigInteger::chunkSize = bnpChunkSize
BigInteger::toRadix = bnpToRadix
BigInteger::fromRadix = bnpFromRadix
BigInteger::fromNumber = bnpFromNumber
BigInteger::bitwiseTo = bnpBitwiseTo
BigInteger::changeBit = bnpChangeBit
BigInteger::addTo = bnpAddTo
BigInteger::dMultiply = bnpDMultiply
BigInteger::dAddOffset = bnpDAddOffset
BigInteger::multiplyLowerTo = bnpMultiplyLowerTo
BigInteger::multiplyUpperTo = bnpMultiplyUpperTo
BigInteger::modInt = bnpModInt
BigInteger::millerRabin = bnpMillerRabin
BigInteger::clone = bnClone
BigInteger::intValue = bnIntValue
BigInteger::byteValue = bnByteValue
BigInteger::shortValue = bnShortValue
BigInteger::signum = bnSigNum
BigInteger::toByteArray = bnToByteArray
BigInteger::equals = bnEquals
BigInteger::min = bnMin
BigInteger::max = bnMax
BigInteger::and = bnAnd
BigInteger::or = bnOr
BigInteger::xor = bnXor
BigInteger::andNot = bnAndNot
BigInteger::not = bnNot
BigInteger::shiftLeft = bnShiftLeft
BigInteger::shiftRight = bnShiftRight
BigInteger::getLowestSetBit = bnGetLowestSetBit
BigInteger::bitCount = bnBitCount
BigInteger::testBit = bnTestBit
BigInteger::setBit = bnSetBit
BigInteger::clearBit = bnClearBit
BigInteger::flipBit = bnFlipBit
BigInteger::add = bnAdd
BigInteger::subtract = bnSubtract
BigInteger::multiply = bnMultiply
BigInteger::divide = bnDivide
BigInteger::remainder = bnRemainder
BigInteger::divideAndRemainder = bnDivideAndRemainder
BigInteger::modPow = bnModPow
BigInteger::modInverse = bnModInverse
BigInteger::pow = bnPow
BigInteger::gcd = bnGCD
BigInteger::isProbablePrime = bnIsProbablePrime
Arcfour::init = ARC4init
Arcfour::next = ARC4next
rng_psize = 256
rng_state = undefined
rng_pool = undefined
rng_pptr = undefined
unless rng_pool?
rng_pool = new Array()
rng_pptr = 0
t = undefined
while rng_pptr < rng_psize
t = Math.floor(65536 * Math.random())
rng_pool[rng_pptr++] = t >>> 8
rng_pool[rng_pptr++] = t & 255
rng_pptr = 0
rng_seed_time()
SecureRandom::nextBytes = rng_get_bytes
RSAKey::doPublic = RSADoPublic
RSAKey::setPublic = RSASetPublic
RSAKey::encrypt = RSAEncrypt
RSAKey::doPrivate = RSADoPrivate
RSAKey::setPrivate = RSASetPrivate
RSAKey::setPrivateEx = RSASetPrivateEx
RSAKey::generate = RSAGenerate
RSAKey::decrypt = RSADecrypt
nValue = "<KEY>"
eValue = "10001"
dValue = "<KEY>6e2df7c1a559e6d5ac1cd5c94703a22891464fba23d0d965086277a161"
pValue = "d090ce58a92c75233a6486cb0a9209bf3583b64f540c76f5294bb97d285eed33aec220bde14b2417951178ac152ceab6da7090905b478195498b352048f15e7d"
qValue = "cab575dc652bb66df15a0359609d<KEY>1d1db184<KEY>0c00c66<KEY>b<KEY>0ef<KEY>c<KEY>0<KEY>edbf<KEY>aec<KEY>1<KEY>0cc<KEY>f6<KEY>22b<KEY>e<KEY>f"
dmp1Value = "1a24bca8e273df2f0e47c199bbf678604e7df7215480c77c8db39f49b000ce2cf7500038acfff5433b7d582a01f1826e6f4d42e1c57f5e1fef7b12aabc59fd25"
dmq1Value = "3d06982efbbe47339e1f6d36b1216b8a741d410b0c662f54f7118b27b9a4ec9d914337eb39841d8666f3034408cf94f5b<KEY>"
coeffValue = "<KEY>"
setupEngine am3, 28
TEXT = "The quick brown fox jumped over the extremely lazy frog! " + "Now is the time for all good men to come to the party."
encrypted = undefined
#
# * Copyright (c) 2003-2005 Tom Wu
# * All Rights Reserved.
# *
# * Permission is hereby granted, free of charge, to any person obtaining
# * a copy of this software and associated documentation files (the
# * "Software"), to deal in the Software without restriction, including
# * without limitation the rights to use, copy, modify, merge, publish,
# * distribute, sublicense, and/or sell copies of the Software, and to
# * permit persons to whom the Software is furnished to do so, subject to
# * the following conditions:
# *
# * The above copyright notice and this permission notice shall be
# * included in all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
# * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
# * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
# *
# * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
# * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
# * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
# * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
# * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# *
# * In addition, the following condition applies:
# *
# * All redistributions must retain an intact copy of this copyright notice
# * and disclaimer.
#
# The code has been adapted for use as a benchmark by Google.
# Basic JavaScript BN library - subset useful for RSA encryption.
# Bits per digit
# JavaScript engine analysis
# (public) Constructor
BigInteger = (a, b, c) ->
@array = new Array()
if a?
if "number" is typeof a
@fromNumber a, b, c
else if not b? and "string" isnt typeof a
@fromString a, 256
else
@fromString a, b
return
# return new, unset BigInteger
nbi = ->
new BigInteger(null)
# am: Compute w_j += (x*this_i), propagate carries,
# c is initial carry, returns final carry.
# c < 3*dvalue, x < 2*dvalue, this_i < dvalue
# We need to select the fastest one that works in this environment.
# am1: use a single mult and divide to get the high bits,
# max digit bits should be 26 because
# max internal value = 2*dvalue^2-2*dvalue (< 2^53)
am1 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
while --n >= 0
v = x * this_array[i++] + w_array[j] + c
c = Math.floor(v / 0x4000000)
w_array[j++] = v & 0x3ffffff
c
# am2 avoids a big mult-and-extract completely.
# Max digit bits should be <= 30 because we do bitwise ops
# on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
am2 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x7fff
xh = x >> 15
while --n >= 0
l = this_array[i] & 0x7fff
h = this_array[i++] >> 15
m = xh * l + h * xl
l = xl * l + ((m & 0x7fff) << 15) + w_array[j] + (c & 0x3fffffff)
c = (l >>> 30) + (m >>> 15) + xh * h + (c >>> 30)
w_array[j++] = l & 0x3fffffff
c
# Alternately, set max digit bits to 28 since some
# browsers slow down when dealing with 32-bit numbers.
am3 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x3fff
xh = x >> 14
while --n >= 0
l = this_array[i] & 0x3fff
h = this_array[i++] >> 14
m = xh * l + h * xl
l = xl * l + ((m & 0x3fff) << 14) + w_array[j] + c
c = (l >> 28) + (m >> 14) + xh * h
w_array[j++] = l & 0xfffffff
c
# This is tailored to VMs with 2-bit tagging. It makes sure
# that all the computations stay within the 29 bits available.
am4 = (i, x, w, j, c, n) ->
this_array = @array
w_array = w.array
xl = x & 0x1fff
xh = x >> 13
while --n >= 0
l = this_array[i] & 0x1fff
h = this_array[i++] >> 13
m = xh * l + h * xl
l = xl * l + ((m & 0x1fff) << 13) + w_array[j] + c
c = (l >> 26) + (m >> 13) + xh * h
w_array[j++] = l & 0x3ffffff
c
# am3/28 is best for SM, Rhino, but am4/26 is best for v8.
# Kestrel (Opera 9.5) gets its best result with am4/26.
# IE7 does 9% better with am3/28 than with am4/26.
# Firefox (SM) gets 10% faster with am3/28 than with am4/26.
# Digit conversions
int2char = (n) ->
BI_RM.charAt n
intAt = (s, i) ->
c = BI_RC[s.charCodeAt(i)]
(if (not (c?)) then -1 else c)
# (protected) copy this to r
bnpCopyTo = (r) ->
this_array = @array
r_array = r.array
i = @t - 1
while i >= 0
r_array[i] = this_array[i]
--i
r.t = @t
r.s = @s
return
# (protected) set from integer value x, -DV <= x < DV
bnpFromInt = (x) ->
  this_array = @array
  @t = 1
  @s = (if (x < 0) then -1 else 0)
  if x > 0
    this_array[0] = x
  else if x < -1
    # Store small negatives in two's-complement digit form.
    # Fixed: the original referenced a bare `DV`, which is undefined in
    # this file; the digit modulus set up by setupEngine is BI_DV.
    this_array[0] = x + BI_DV
  else
    # x is 0 or -1: both are fully represented by the sign word alone.
    @t = 0
  return
# return bigint initialized to value
nbv = (i) ->
r = nbi()
r.fromInt i
r
# (protected) set from string and radix
bnpFromString = (s, b) ->
this_array = @array
k = undefined
if b is 16
k = 4
else if b is 8
k = 3
else if b is 256 # byte array
k = 8
else if b is 2
k = 1
else if b is 32
k = 5
else if b is 4
k = 2
else
@fromRadix s, b
return
@t = 0
@s = 0
i = s.length
mi = false
sh = 0
while --i >= 0
x = (if (k is 8) then s[i] & 0xff else intAt(s, i))
if x < 0
mi = true if s.charAt(i) is "-"
continue
mi = false
if sh is 0
this_array[@t++] = x
else if sh + k > BI_DB
this_array[@t - 1] |= (x & ((1 << (BI_DB - sh)) - 1)) << sh
this_array[@t++] = (x >> (BI_DB - sh))
else
this_array[@t - 1] |= x << sh
sh += k
sh -= BI_DB if sh >= BI_DB
if k is 8 and (s[0] & 0x80) isnt 0
@s = -1
this_array[@t - 1] |= ((1 << (BI_DB - sh)) - 1) << sh if sh > 0
@clamp()
BigInteger.ZERO.subTo this, this if mi
return
# (protected) clamp off excess high words
bnpClamp = ->
this_array = @array
c = @s & BI_DM
--@t while @t > 0 and this_array[@t - 1] is c
return
# (public) return string representation in given radix
bnToString = (b) ->
this_array = @array
return "-" + @negate().toString(b) if @s < 0
k = undefined
if b is 16
k = 4
else if b is 8
k = 3
else if b is 2
k = 1
else if b is 32
k = 5
else if b is 4
k = 2
else
return @toRadix(b)
km = (1 << k) - 1
d = undefined
m = false
r = ""
i = @t
p = BI_DB - (i * BI_DB) % k
if i-- > 0
if p < BI_DB and (d = this_array[i] >> p) > 0
m = true
r = int2char(d)
while i >= 0
if p < k
d = (this_array[i] & ((1 << p) - 1)) << (k - p)
d |= this_array[--i] >> (p += BI_DB - k)
else
d = (this_array[i] >> (p -= k)) & km
if p <= 0
p += BI_DB
--i
m = true if d > 0
r += int2char(d) if m
(if m then r else "0")
# (public) -this
bnNegate = ->
r = nbi()
BigInteger.ZERO.subTo this, r
r
# (public) |this|
bnAbs = ->
(if (@s < 0) then @negate() else this)
# (public) return + if this > a, - if this < a, 0 if equal
bnCompareTo = (a) ->
this_array = @array
a_array = a.array
r = @s - a.s
return r unless r is 0
i = @t
r = i - a.t
return r unless r is 0
return r unless (r = this_array[i] - a_array[i]) is 0 while --i >= 0
0
# returns bit length of the integer x
nbits = (x) ->
  # Binary-search the top set bit using halving shift widths.
  r = 1
  t = x >>> 16
  unless t is 0
    x = t
    r += 16
  t = x >> 8
  unless t is 0
    x = t
    r += 8
  t = x >> 4
  unless t is 0
    x = t
    r += 4
  t = x >> 2
  unless t is 0
    x = t
    r += 2
  t = x >> 1
  unless t is 0
    x = t
    r += 1
  r
# (public) return the number of bits in "this"
bnBitLength = ->
this_array = @array
return 0 if @t <= 0
BI_DB * (@t - 1) + nbits(this_array[@t - 1] ^ (@s & BI_DM))
# (protected) r = this << n*DB
bnpDLShiftTo = (n, r) ->
this_array = @array
r_array = r.array
i = undefined
i = @t - 1
while i >= 0
r_array[i + n] = this_array[i]
--i
i = n - 1
while i >= 0
r_array[i] = 0
--i
r.t = @t + n
r.s = @s
return
# (protected) r = this >> n*DB
bnpDRShiftTo = (n, r) ->
this_array = @array
r_array = r.array
i = n
while i < @t
r_array[i - n] = this_array[i]
++i
r.t = Math.max(@t - n, 0)
r.s = @s
return
# (protected) r = this << n
bnpLShiftTo = (n, r) ->
this_array = @array
r_array = r.array
bs = n % BI_DB
cbs = BI_DB - bs
bm = (1 << cbs) - 1
ds = Math.floor(n / BI_DB)
c = (@s << bs) & BI_DM
i = undefined
i = @t - 1
while i >= 0
r_array[i + ds + 1] = (this_array[i] >> cbs) | c
c = (this_array[i] & bm) << bs
--i
i = ds - 1
while i >= 0
r_array[i] = 0
--i
r_array[ds] = c
r.t = @t + ds + 1
r.s = @s
r.clamp()
return
# (protected) r = this >> n
bnpRShiftTo = (n, r) ->
this_array = @array
r_array = r.array
r.s = @s
ds = Math.floor(n / BI_DB)
if ds >= @t
r.t = 0
return
bs = n % BI_DB
cbs = BI_DB - bs
bm = (1 << bs) - 1
r_array[0] = this_array[ds] >> bs
i = ds + 1
while i < @t
r_array[i - ds - 1] |= (this_array[i] & bm) << cbs
r_array[i - ds] = this_array[i] >> bs
++i
r_array[@t - ds - 1] |= (@s & bm) << cbs if bs > 0
r.t = @t - ds
r.clamp()
return
# (protected) r = this - a
bnpSubTo = (a, r) ->
this_array = @array
r_array = r.array
a_array = a.array
i = 0
c = 0
m = Math.min(a.t, @t)
while i < m
c += this_array[i] - a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
if a.t < @t
c -= a.s
while i < @t
c += this_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c += @s
else
c += @s
while i < a.t
c -= a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c -= a.s
r.s = (if (c < 0) then -1 else 0)
if c < -1
r_array[i++] = BI_DV + c
else r_array[i++] = c if c > 0
r.t = i
r.clamp()
return
# (protected) r = this * a, r != this,a (HAC 14.12)
# "this" should be the larger one if appropriate.
bnpMultiplyTo = (a, r) ->
this_array = @array
r_array = r.array
x = @abs()
y = a.abs()
y_array = y.array
i = x.t
r.t = i + y.t
r_array[i] = 0 while --i >= 0
i = 0
while i < y.t
r_array[i + x.t] = x.am(0, y_array[i], r, i, 0, x.t)
++i
r.s = 0
r.clamp()
BigInteger.ZERO.subTo r, r unless @s is a.s
return
# (protected) r = this^2, r != this (HAC 14.16)
bnpSquareTo = (r) ->
x = @abs()
x_array = x.array
r_array = r.array
i = r.t = 2 * x.t
r_array[i] = 0 while --i >= 0
i = 0
while i < x.t - 1
c = x.am(i, x_array[i], r, 2 * i, 0, 1)
if (r_array[i + x.t] += x.am(i + 1, 2 * x_array[i], r, 2 * i + 1, c, x.t - i - 1)) >= BI_DV
r_array[i + x.t] -= BI_DV
r_array[i + x.t + 1] = 1
++i
r_array[r.t - 1] += x.am(i, x_array[i], r, 2 * i, 0, 1) if r.t > 0
r.s = 0
r.clamp()
return
# (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
# r != q, this != m. q or r may be null.
bnpDivRemTo = (m, q, r) ->
pm = m.abs()
return if pm.t <= 0
pt = @abs()
if pt.t < pm.t
q.fromInt 0 if q?
@copyTo r if r?
return
r = nbi() unless r?
y = nbi()
ts = @s
ms = m.s
pm_array = pm.array
nsh = BI_DB - nbits(pm_array[pm.t - 1]) # normalize modulus
if nsh > 0
pm.lShiftTo nsh, y
pt.lShiftTo nsh, r
else
pm.copyTo y
pt.copyTo r
ys = y.t
y_array = y.array
y0 = y_array[ys - 1]
return if y0 is 0
yt = y0 * (1 << BI_F1) + ((if (ys > 1) then y_array[ys - 2] >> BI_F2 else 0))
d1 = BI_FV / yt
d2 = (1 << BI_F1) / yt
e = 1 << BI_F2
i = r.t
j = i - ys
t = (if (not (q?)) then nbi() else q)
y.dlShiftTo j, t
r_array = r.array
if r.compareTo(t) >= 0
r_array[r.t++] = 1
r.subTo t, r
BigInteger.ONE.dlShiftTo ys, t
t.subTo y, y # "negative" y so we can replace sub with am later
y_array[y.t++] = 0 while y.t < ys
while --j >= 0
# Estimate quotient digit
qd = (if (r_array[--i] is y0) then BI_DM else Math.floor(r_array[i] * d1 + (r_array[i - 1] + e) * d2))
if (r_array[i] += y.am(0, qd, r, j, 0, ys)) < qd # Try it out
y.dlShiftTo j, t
r.subTo t, r
r.subTo t, r while r_array[i] < --qd
if q?
r.drShiftTo ys, q
BigInteger.ZERO.subTo q, q unless ts is ms
r.t = ys
r.clamp()
r.rShiftTo nsh, r if nsh > 0 # Denormalize remainder
BigInteger.ZERO.subTo r, r if ts < 0
return
# (public) this mod a
bnMod = (a) ->
r = nbi()
@abs().divRemTo a, null, r
a.subTo r, r if @s < 0 and r.compareTo(BigInteger.ZERO) > 0
r
# Modular reduction using "classic" algorithm
Classic = (m) ->
@m = m
return
cConvert = (x) ->
if x.s < 0 or x.compareTo(@m) >= 0
x.mod @m
else
x
cRevert = (x) ->
x
cReduce = (x) ->
x.divRemTo @m, null, x
return
cMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
cSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# (protected) return "-1/this % 2^DB"; useful for Mont. reduction
# justification:
# xy == 1 (mod m)
# xy = 1+km
# xy(2-xy) = (1+km)(1-km)
# x[y(2-xy)] = 1-k^2m^2
# x[y(2-xy)] == 1 (mod m^2)
# if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
# should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
# JS multiply "overflows" differently from C/C++, so care is needed here.
bnpInvDigit = ->
this_array = @array
return 0 if @t < 1
x = this_array[0]
return 0 if (x & 1) is 0
y = x & 3 # y == 1/x mod 2^2
y = (y * (2 - (x & 0xf) * y)) & 0xf # y == 1/x mod 2^4
y = (y * (2 - (x & 0xff) * y)) & 0xff # y == 1/x mod 2^8
y = (y * (2 - (((x & 0xffff) * y) & 0xffff))) & 0xffff # y == 1/x mod 2^16
# last step - calculate inverse mod DV directly;
# assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
y = (y * (2 - x * y % BI_DV)) % BI_DV # y == 1/x mod 2^dbits
# we really want the negative inverse, and -DV < y < DV
(if (y > 0) then BI_DV - y else -y)
# Montgomery reduction
Montgomery = (m) ->
@m = m
@mp = m.invDigit()
@mpl = @mp & 0x7fff
@mph = @mp >> 15
@um = (1 << (BI_DB - 15)) - 1
@mt2 = 2 * m.t
return
# xR mod m
montConvert = (x) ->
r = nbi()
x.abs().dlShiftTo @m.t, r
r.divRemTo @m, null, r
@m.subTo r, r if x.s < 0 and r.compareTo(BigInteger.ZERO) > 0
r
# x/R mod m
montRevert = (x) ->
r = nbi()
x.copyTo r
@reduce r
r
# x = x/R mod m (HAC 14.32)
montReduce = (x) ->
  x_array = x.array
  # pad x so am has enough room later
  x_array[x.t++] = 0 while x.t <= @mt2
  i = 0
  while i < @m.t
    # faster way of calculating u0 = x[i]*mp mod DV
    # (split into 15-bit halves so the products stay inside exact doubles)
    j = x_array[i] & 0x7fff
    u0 = (j * @mpl + (((j * @mph + (x_array[i] >> 15) * @mpl) & @um) << 15)) & BI_DM
    # use am to combine the multiply-shift-add into one call
    j = i + @m.t
    x_array[j] += @m.am(0, u0, x, i, 0, @m.t)
    # propagate carry
    while x_array[j] >= BI_DV
      x_array[j] -= BI_DV
      x_array[++j]++
    ++i
  x.clamp()
  # drop the low m.t words (the division by R) ...
  x.drShiftTo @m.t, x
  # ... and do a final conditional subtraction to land in [0, m)
  x.subTo @m, x if x.compareTo(@m) >= 0
  return
# r = "x^2/R mod m"; x != r
montSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# r = "xy/R mod m"; x,y != r
montMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
# (protected) true iff this is even
bnpIsEven = ->
this_array = @array
((if (@t > 0) then (this_array[0] & 1) else @s)) is 0
# (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
bnpExp = (e, z) ->
return BigInteger.ONE if e > 0xffffffff or e < 1
r = nbi()
r2 = nbi()
g = z.convert(this)
i = nbits(e) - 1
g.copyTo r
while --i >= 0
z.sqrTo r, r2
if (e & (1 << i)) > 0
z.mulTo r2, g, r
else
t = r
r = r2
r2 = t
z.revert r
# (public) this^e % m, 0 <= e < 2^32
bnModPowInt = (e, m) ->
z = undefined
if e < 256 or m.isEven()
z = new Classic(m)
else
z = new Montgomery(m)
@exp e, z
# protected
# public
# "constants"
# Copyright (c) 2005 PI:NAME:<NAME>END_PI
# All Rights Reserved.
# See "LICENSE" for details.
# Extended JavaScript BN functions, required for RSA private ops.
# (public)
bnClone = ->
r = nbi()
@copyTo r
r
# (public) return value as integer
bnIntValue = ->
this_array = @array
if @s < 0
if @t is 1
return this_array[0] - BI_DV
else return -1 if @t is 0
else if @t is 1
return this_array[0]
else return 0 if @t is 0
# assumes 16 < DB < 32
((this_array[1] & ((1 << (32 - BI_DB)) - 1)) << BI_DB) | this_array[0]
# (public) return value as byte
bnByteValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 24) >> 24)
# (public) return value as short (assumes DB>=16)
bnShortValue = ->
this_array = @array
(if (@t is 0) then @s else (this_array[0] << 16) >> 16)
# (protected) return x s.t. r^x < DV
bnpChunkSize = (r) ->
Math.floor Math.LN2 * BI_DB / Math.log(r)
# (public) 0 if this == 0, 1 if this > 0
bnSigNum = ->
this_array = @array
if @s < 0
-1
else if @t <= 0 or (@t is 1 and this_array[0] <= 0)
0
else
1
# (protected) convert to radix string
bnpToRadix = (b) ->
b = 10 unless b?
return "0" if @signum() is 0 or b < 2 or b > 36
cs = @chunkSize(b)
a = Math.pow(b, cs)
d = nbv(a)
y = nbi()
z = nbi()
r = ""
@divRemTo d, y, z
while y.signum() > 0
r = (a + z.intValue()).toString(b).substr(1) + r
y.divRemTo d, y, z
z.intValue().toString(b) + r
# (protected) convert from radix string
bnpFromRadix = (s, b) ->
  # Parse the digit string s in base b (default 10) into this BigInteger.
  # Digits are accumulated cs at a time into the small integer w, then
  # folded in with dMultiply/dAddOffset so most work stays on JS numbers.
  @fromInt 0
  b = 10 unless b?
  cs = @chunkSize(b)
  d = Math.pow(b, cs)
  mi = false
  j = 0
  w = 0
  i = 0
  while i < s.length
    x = intAt(s, i)
    if x < 0
      # Non-digit character: only a leading "-" is meaningful.
      mi = true if s.charAt(i) is "-" and @signum() is 0
      # BUG FIX: advance i before `continue`. In a CoffeeScript `while`
      # loop `continue` jumps straight to the condition, skipping the
      # ++i at the bottom, so any non-digit char (including the minus
      # sign itself) previously caused an infinite loop.
      ++i
      continue
    w = b * w + x
    if ++j >= cs
      @dMultiply d
      @dAddOffset w, 0
      j = 0
      w = 0
    ++i
  # Flush any remaining partial chunk of fewer than cs digits.
  if j > 0
    @dMultiply Math.pow(b, j)
    @dAddOffset w, 0
  # Apply the sign recorded from a leading "-".
  BigInteger.ZERO.subTo this, this if mi
  return
# (protected) alternate constructor
bnpFromNumber = (a, b, c) ->
if "number" is typeof b
# new BigInteger(int,int,RNG)
if a < 2
@fromInt 1
else
@fromNumber a, c
# force MSB set
@bitwiseTo BigInteger.ONE.shiftLeft(a - 1), op_or, this unless @testBit(a - 1)
@dAddOffset 1, 0 if @isEven() # force odd
until @isProbablePrime(b)
@dAddOffset 2, 0
@subTo BigInteger.ONE.shiftLeft(a - 1), this if @bitLength() > a
else
# new BigInteger(int,RNG)
x = new Array()
t = a & 7
x.length = (a >> 3) + 1
b.nextBytes x
if t > 0
x[0] &= ((1 << t) - 1)
else
x[0] = 0
@fromString x, 256
return
# (public) convert to bigendian byte array
bnToByteArray = ->
this_array = @array
i = @t
r = new Array()
r[0] = @s
p = BI_DB - (i * BI_DB) % 8
d = undefined
k = 0
if i-- > 0
r[k++] = d | (@s << (BI_DB - p)) if p < BI_DB and (d = this_array[i] >> p) isnt (@s & BI_DM) >> p
while i >= 0
if p < 8
d = (this_array[i] & ((1 << p) - 1)) << (8 - p)
d |= this_array[--i] >> (p += BI_DB - 8)
else
d = (this_array[i] >> (p -= 8)) & 0xff
if p <= 0
p += BI_DB
--i
d |= -256 unless (d & 0x80) is 0
++k if k is 0 and (@s & 0x80) isnt (d & 0x80)
r[k++] = d if k > 0 or d isnt @s
r
# (public) true iff this equals a
bnEquals = (a) ->
  0 is @compareTo(a)
# (public) the smaller of this and a
bnMin = (a) ->
  if @compareTo(a) < 0 then this else a
# (public) the larger of this and a
bnMax = (a) ->
  if @compareTo(a) > 0 then this else a
# (protected) r = this op a (bitwise)
bnpBitwiseTo = (a, op, r) ->
this_array = @array
a_array = a.array
r_array = r.array
i = undefined
f = undefined
m = Math.min(a.t, @t)
i = 0
while i < m
r_array[i] = op(this_array[i], a_array[i])
++i
if a.t < @t
f = a.s & BI_DM
i = m
while i < @t
r_array[i] = op(this_array[i], f)
++i
r.t = @t
else
f = @s & BI_DM
i = m
while i < a.t
r_array[i] = op(f, a_array[i])
++i
r.t = a.t
r.s = op(@s, a.s)
r.clamp()
return
# (public) this & a
op_and = (x, y) ->
x & y
bnAnd = (a) ->
r = nbi()
@bitwiseTo a, op_and, r
r
# (public) this | a
op_or = (x, y) ->
x | y
bnOr = (a) ->
r = nbi()
@bitwiseTo a, op_or, r
r
# (public) this ^ a
op_xor = (x, y) ->
x ^ y
bnXor = (a) ->
r = nbi()
@bitwiseTo a, op_xor, r
r
# (public) this & ~a
op_andnot = (x, y) ->
x & ~y
bnAndNot = (a) ->
r = nbi()
@bitwiseTo a, op_andnot, r
r
# (public) ~this
bnNot = ->
this_array = @array
r = nbi()
r_array = r.array
i = 0
while i < @t
r_array[i] = BI_DM & ~this_array[i]
++i
r.t = @t
r.s = ~@s
r
# (public) this << n
bnShiftLeft = (n) ->
r = nbi()
if n < 0
@rShiftTo -n, r
else
@lShiftTo n, r
r
# (public) this >> n
bnShiftRight = (n) ->
r = nbi()
if n < 0
@lShiftTo -n, r
else
@rShiftTo n, r
r
# return index of lowest 1-bit in x, x < 2^31
# Index of the lowest set bit of x (x < 2^31), or -1 when x is 0.
# Binary-search style: peel off 16, 8, 4, then 2 trailing zero bits,
# then account for the final bit.
lbit = (x) ->
  return -1 if x is 0
  shift = 0
  for width in [16, 8, 4, 2]
    if (x & ((1 << width) - 1)) is 0
      x >>= width
      shift += width
  # After peeling, the lowest set bit is bit 0 or bit 1 of x.
  shift + 1 - (x & 1)
# (public) returns index of lowest 1-bit (or -1 if none)
bnGetLowestSetBit = ->
this_array = @array
i = 0
while i < @t
return i * BI_DB + lbit(this_array[i]) unless this_array[i] is 0
++i
return @t * BI_DB if @s < 0
-1
# return number of 1 bits in x
# Population count: number of 1 bits in x.
# Kernighan's method — each `x &= x - 1` clears the lowest set bit,
# so the loop runs once per set bit.
cbit = (x) ->
  count = 0
  while x isnt 0
    x &= x - 1
    ++count
  count
# (public) return number of set bits
bnBitCount = ->
  # Count of bits differing from the sign bit (two's-complement bit
  # count, matching java.math.BigInteger.bitCount()).
  # BUG FIX: this_array was referenced without being initialized from
  # @array — every other accessor in this file starts with
  # `this_array = @array`; without it this method threw a
  # ReferenceError on every call.
  this_array = @array
  r = 0
  x = @s & BI_DM
  i = 0
  while i < @t
    r += cbit(this_array[i] ^ x)
    ++i
  r
# (public) true iff nth bit is set
bnTestBit = (n) ->
this_array = @array
j = Math.floor(n / BI_DB)
return (@s isnt 0) if j >= @t
(this_array[j] & (1 << (n % BI_DB))) isnt 0
# (protected) this op (1<<n)
bnpChangeBit = (n, op) ->
r = BigInteger.ONE.shiftLeft(n)
@bitwiseTo r, op, r
r
# (public) this | (1<<n)
bnSetBit = (n) ->
@changeBit n, op_or
# (public) this & ~(1<<n)
bnClearBit = (n) ->
@changeBit n, op_andnot
# (public) this ^ (1<<n)
bnFlipBit = (n) ->
@changeBit n, op_xor
# (protected) r = this + a
bnpAddTo = (a, r) ->
this_array = @array
a_array = a.array
r_array = r.array
i = 0
c = 0
m = Math.min(a.t, @t)
while i < m
c += this_array[i] + a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
if a.t < @t
c += a.s
while i < @t
c += this_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c += @s
else
c += @s
while i < a.t
c += a_array[i]
r_array[i++] = c & BI_DM
c >>= BI_DB
c += a.s
r.s = (if (c < 0) then -1 else 0)
if c > 0
r_array[i++] = c
else r_array[i++] = BI_DV + c if c < -1
r.t = i
r.clamp()
return
# (public) this + a
bnAdd = (a) ->
r = nbi()
@addTo a, r
r
# (public) this - a
bnSubtract = (a) ->
r = nbi()
@subTo a, r
r
# (public) this * a
bnMultiply = (a) ->
r = nbi()
@multiplyTo a, r
r
# (public) this / a
bnDivide = (a) ->
r = nbi()
@divRemTo a, r, null
r
# (public) this % a
bnRemainder = (a) ->
r = nbi()
@divRemTo a, null, r
r
# (public) [this/a,this%a]
bnDivideAndRemainder = (a) ->
q = nbi()
r = nbi()
@divRemTo a, q, r
new Array(q, r)
# (protected) this *= n, this >= 0, 1 < n < DV
bnpDMultiply = (n) ->
this_array = @array
this_array[@t] = @am(0, n - 1, this, 0, 0, @t)
++@t
@clamp()
return
# (protected) this += n << w words, this >= 0
bnpDAddOffset = (n, w) ->
this_array = @array
this_array[@t++] = 0 while @t <= w
this_array[w] += n
while this_array[w] >= BI_DV
this_array[w] -= BI_DV
this_array[@t++] = 0 if ++w >= @t
++this_array[w]
return
# A "null" reducer
NullExp = ->
nNop = (x) ->
x
nMulTo = (x, y, r) ->
x.multiplyTo y, r
return
nSqrTo = (x, r) ->
x.squareTo r
return
# (public) this^e
bnPow = (e) ->
@exp e, new NullExp()
# (protected) r = lower n words of "this * a", a.t <= n
# "this" should be the larger one if appropriate.
bnpMultiplyLowerTo = (a, n, r) ->
r_array = r.array
a_array = a.array
i = Math.min(@t + a.t, n)
r.s = 0 # assumes a,this >= 0
r.t = i
r_array[--i] = 0 while i > 0
j = undefined
j = r.t - @t
while i < j
r_array[i + @t] = @am(0, a_array[i], r, i, 0, @t)
++i
j = Math.min(a.t, n)
while i < j
@am 0, a_array[i], r, i, 0, n - i
++i
r.clamp()
return
# (protected) r = "this * a" without lower n words, n > 0
# "this" should be the larger one if appropriate.
bnpMultiplyUpperTo = (a, n, r) ->
r_array = r.array
a_array = a.array
--n
i = r.t = @t + a.t - n
r.s = 0 # assumes a,this >= 0
r_array[i] = 0 while --i >= 0
i = Math.max(n - @t, 0)
while i < a.t
r_array[@t + i - n] = @am(n - i, a_array[i], r, 0, 0, @t + i - n)
++i
r.clamp()
r.drShiftTo 1, r
return
# Barrett modular reduction
Barrett = (m) ->
# setup Barrett
@r2 = nbi()
@q3 = nbi()
BigInteger.ONE.dlShiftTo 2 * m.t, @r2
@mu = @r2.divide(m)
@m = m
return
barrettConvert = (x) ->
if x.s < 0 or x.t > 2 * @m.t
x.mod @m
else if x.compareTo(@m) < 0
x
else
r = nbi()
x.copyTo r
@reduce r
r
barrettRevert = (x) ->
x
# x = x mod m (HAC 14.42)
barrettReduce = (x) ->
x.drShiftTo @m.t - 1, @r2
if x.t > @m.t + 1
x.t = @m.t + 1
x.clamp()
@mu.multiplyUpperTo @r2, @m.t + 1, @q3
@m.multiplyLowerTo @q3, @m.t + 1, @r2
x.dAddOffset 1, @m.t + 1 while x.compareTo(@r2) < 0
x.subTo @r2, x
x.subTo @m, x while x.compareTo(@m) >= 0
return
# r = x^2 mod m; x != r
barrettSqrTo = (x, r) ->
x.squareTo r
@reduce r
return
# r = x*y mod m; x,y != r
barrettMulTo = (x, y, r) ->
x.multiplyTo y, r
@reduce r
return
# (public) this^e % m (HAC 14.85)
bnModPow = (e, m) ->
e_array = e.array
i = e.bitLength()
k = undefined
r = nbv(1)
z = undefined
if i <= 0
return r
else if i < 18
k = 1
else if i < 48
k = 3
else if i < 144
k = 4
else if i < 768
k = 5
else
k = 6
if i < 8
z = new Classic(m)
else if m.isEven()
z = new Barrett(m)
else
z = new Montgomery(m)
# precomputation
g = new Array()
n = 3
k1 = k - 1
km = (1 << k) - 1
g[1] = z.convert(this)
if k > 1
g2 = nbi()
z.sqrTo g[1], g2
while n <= km
g[n] = nbi()
z.mulTo g2, g[n - 2], g[n]
n += 2
j = e.t - 1
w = undefined
is1 = true
r2 = nbi()
t = undefined
i = nbits(e_array[j]) - 1
while j >= 0
if i >= k1
w = (e_array[j] >> (i - k1)) & km
else
w = (e_array[j] & ((1 << (i + 1)) - 1)) << (k1 - i)
w |= e_array[j - 1] >> (BI_DB + i - k1) if j > 0
n = k
while (w & 1) is 0
w >>= 1
--n
if (i -= n) < 0
i += BI_DB
--j
if is1 # ret == 1, don't bother squaring or multiplying it
g[w].copyTo r
is1 = false
else
while n > 1
z.sqrTo r, r2
z.sqrTo r2, r
n -= 2
if n > 0
z.sqrTo r, r2
else
t = r
r = r2
r2 = t
z.mulTo r2, g[w], r
while j >= 0 and (e_array[j] & (1 << i)) is 0
z.sqrTo r, r2
t = r
r = r2
r2 = t
if --i < 0
i = BI_DB - 1
--j
z.revert r
# (public) gcd(this,a) (HAC 14.54)
bnGCD = (a) ->
x = (if (@s < 0) then @negate() else @clone())
y = (if (a.s < 0) then a.negate() else a.clone())
if x.compareTo(y) < 0
t = x
x = y
y = t
i = x.getLowestSetBit()
g = y.getLowestSetBit()
return x if g < 0
g = i if i < g
if g > 0
x.rShiftTo g, x
y.rShiftTo g, y
while x.signum() > 0
x.rShiftTo i, x if (i = x.getLowestSetBit()) > 0
y.rShiftTo i, y if (i = y.getLowestSetBit()) > 0
if x.compareTo(y) >= 0
x.subTo y, x
x.rShiftTo 1, x
else
y.subTo x, y
y.rShiftTo 1, y
y.lShiftTo g, y if g > 0
y
# (protected) this % n, n < 2^26
bnpModInt = (n) ->
this_array = @array
return 0 if n <= 0
d = BI_DV % n
r = (if (@s < 0) then n - 1 else 0)
if @t > 0
if d is 0
r = this_array[0] % n
else
i = @t - 1
while i >= 0
r = (d * r + this_array[i]) % n
--i
r
# (public) 1/this % m (HAC 14.61)
bnModInverse = (m) ->
ac = m.isEven()
return BigInteger.ZERO if (@isEven() and ac) or m.signum() is 0
u = m.clone()
v = @clone()
a = nbv(1)
b = nbv(0)
c = nbv(0)
d = nbv(1)
until u.signum() is 0
while u.isEven()
u.rShiftTo 1, u
if ac
if not a.isEven() or not b.isEven()
a.addTo this, a
b.subTo m, b
a.rShiftTo 1, a
else b.subTo m, b unless b.isEven()
b.rShiftTo 1, b
while v.isEven()
v.rShiftTo 1, v
if ac
if not c.isEven() or not d.isEven()
c.addTo this, c
d.subTo m, d
c.rShiftTo 1, c
else d.subTo m, d unless d.isEven()
d.rShiftTo 1, d
if u.compareTo(v) >= 0
u.subTo v, u
a.subTo c, a if ac
b.subTo d, b
else
v.subTo u, v
c.subTo a, c if ac
d.subTo b, d
return BigInteger.ZERO unless v.compareTo(BigInteger.ONE) is 0
return d.subtract(m) if d.compareTo(m) >= 0
if d.signum() < 0
d.addTo m, d
else
return d
if d.signum() < 0
d.add m
else
d
# (public) test primality with certainty >= 1-.5^t
bnIsProbablePrime = (t) ->
i = undefined
x = @abs()
x_array = x.array
if x.t is 1 and x_array[0] <= lowprimes[lowprimes.length - 1]
i = 0
while i < lowprimes.length
return true if x_array[0] is lowprimes[i]
++i
return false
return false if x.isEven()
i = 1
while i < lowprimes.length
m = lowprimes[i]
j = i + 1
m *= lowprimes[j++] while j < lowprimes.length and m < lplim
m = x.modInt(m)
return false if m % lowprimes[i++] is 0 while i < j
x.millerRabin t
# (protected) true if probably prime (HAC 4.24, PI:NAME:<NAME>END_PIiller-Rabin)
bnpMillerRabin = (t) ->
n1 = @subtract(BigInteger.ONE)
k = n1.getLowestSetBit()
return false if k <= 0
r = n1.shiftRight(k)
t = (t + 1) >> 1
t = lowprimes.length if t > lowprimes.length
a = nbi()
i = 0
while i < t
a.fromInt lowprimes[i]
y = a.modPow(r, this)
if y.compareTo(BigInteger.ONE) isnt 0 and y.compareTo(n1) isnt 0
j = 1
while j++ < k and y.compareTo(n1) isnt 0
y = y.modPowInt(2, this)
return false if y.compareTo(BigInteger.ONE) is 0
return false unless y.compareTo(n1) is 0
++i
true
# protected
# public
# BigInteger interfaces not implemented in jsbn:
# BigInteger(int signum, byte[] magnitude)
# double doubleValue()
# float floatValue()
# int hashCode()
# long longValue()
# static BigInteger valueOf(long val)
# prng4.js - uses Arcfour as a PRNG
Arcfour = ->
@i = 0
@j = 0
@S = new Array()
return
# Initialize arcfour context from key, an array of ints, each from [0..255]
ARC4init = (key) ->
i = undefined
j = undefined
t = undefined
i = 0
while i < 256
@S[i] = i
++i
j = 0
i = 0
while i < 256
j = (j + @S[i] + key[i % key.length]) & 255
t = @S[i]
@S[i] = @S[j]
@S[j] = t
++i
@i = 0
@j = 0
return
ARC4next = ->
t = undefined
@i = (@i + 1) & 255
@j = (@j + @S[@i]) & 255
t = @S[@i]
@S[@i] = @S[@j]
@S[@j] = t
@S[(t + @S[@i]) & 255]
# Plug in your RNG constructor here
prng_newstate = ->
new Arcfour()
# Pool size must be a multiple of 4 and greater than 32.
# An array of bytes the size of the pool will be passed to init()
# Random number generator - requires a PRNG backend, e.g. prng4.js
# For best results, put code like
# <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
# in your main HTML document.
# Mix in a 32-bit integer into the pool
rng_seed_int = (x) ->
rng_pool[rng_pptr++] ^= x & 255
rng_pool[rng_pptr++] ^= (x >> 8) & 255
rng_pool[rng_pptr++] ^= (x >> 16) & 255
rng_pool[rng_pptr++] ^= (x >> 24) & 255
rng_pptr -= rng_psize if rng_pptr >= rng_psize
return
# Mix in the current time (w/milliseconds) into the pool
rng_seed_time = ->
# Use pre-computed date to avoid making the benchmark
# results dependent on the current date.
rng_seed_int 1122926989487
return
# Initialize the pool with junk if needed.
# extract some randomness from Math.random()
#rng_seed_int(window.screenX);
#rng_seed_int(window.screenY);
rng_get_byte = ->
unless rng_state?
rng_seed_time()
rng_state = prng_newstate()
rng_state.init rng_pool
rng_pptr = 0
while rng_pptr < rng_pool.length
rng_pool[rng_pptr] = 0
++rng_pptr
rng_pptr = 0
#rng_pool = null;
# TODO: allow reseeding after first request
rng_state.next()
rng_get_bytes = (ba) ->
i = undefined
i = 0
while i < ba.length
ba[i] = rng_get_byte()
++i
return
SecureRandom = ->
# Depends on jsbn.js and rng.js
# convert a (hex) string to a bignum object
parseBigInt = (str, r) ->
new BigInteger(str, r)
# Break s into n-character lines joined by "\n"; the final (possibly
# shorter) piece carries no trailing newline.
linebrk = (s, n) ->
  chunks = []
  pos = 0
  while pos + n < s.length
    chunks.push s.substring(pos, pos + n)
    pos += n
  chunks.push s.substring(pos, s.length)
  chunks.join "\n"
# Render a byte value as two lowercase hex digits (zero-padded below 0x10).
byte2Hex = (b) ->
  hex = b.toString(16)
  if b < 0x10 then "0" + hex else hex
# PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
pkcs1pad2 = (s, n) ->
  # PKCS#1 v1.5 type-2 framing needs >= 8 random pad bytes + 3 framing bytes.
  if n < s.length + 11
    alert "Message too long for RSA" # NOTE(review): browser-only error path — assumes a global `alert`; confirm environment
    return null
  ba = new Array()
  i = s.length - 1
  # Copy message chars into the tail of the block, last char first.
  # NOTE(review): charCodeAt can exceed 255 — presumably s holds single-byte chars only; verify against callers.
  ba[--n] = s.charCodeAt(i--) while i >= 0 and n > 0
  ba[--n] = 0 # zero separator between padding and message
  rng = new SecureRandom()
  x = new Array()
  while n > 2 # random non-zero pad
    x[0] = 0
    rng.nextBytes x while x[0] is 0 # redraw until non-zero, as PKCS#1 requires
    ba[--n] = x[0]
  ba[--n] = 2 # block type 2 (public-key encryption)
  ba[--n] = 0 # leading zero byte
  new BigInteger(ba)
# "empty" RSA key constructor
RSAKey = ->
@n = null
@e = 0
@d = null
@p = null
@q = null
@dmp1 = null
@dmq1 = null
@coeff = null
return
# Set the public key fields N and e from hex strings
RSASetPublic = (N, E) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
else
alert "Invalid RSA public key"
return
# Perform raw public operation on "x": return x^e (mod n)
RSADoPublic = (x) ->
x.modPowInt @e, @n
# Return the PKCS#1 RSA encryption of "text" as an even-length hex string
RSAEncrypt = (text) ->
m = pkcs1pad2(text, (@n.bitLength() + 7) >> 3)
return null unless m?
c = @doPublic(m)
return null unless c?
h = c.toString(16)
if (h.length & 1) is 0
h
else
"0" + h
# Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
#function RSAEncryptB64(text) {
# var h = this.encrypt(text);
# if(h) return hex2b64(h); else return null;
#}
# protected
# public
#RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
# Depends on rsa.js and jsbn2.js
# Undo PKCS#1 (type 2, random) padding and, if valid, return the plaintext
pkcs1unpad2 = (d, n) ->
b = d.toByteArray()
i = 0
++i while i < b.length and b[i] is 0
return null if b.length - i isnt n - 1 or b[i] isnt 2
++i
return null if ++i >= b.length until b[i] is 0
ret = ""
ret += String.fromCharCode(b[i]) while ++i < b.length
ret
# Set the private key fields N, e, and d from hex strings
RSASetPrivate = (N, E, D) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
@d = parseBigInt(D, 16)
else
alert "Invalid RSA private key"
return
# Set the private key fields N, e, d and CRT params from hex strings
RSASetPrivateEx = (N, E, D, P, Q, DP, DQ, C) ->
if N? and E? and N.length > 0 and E.length > 0
@n = parseBigInt(N, 16)
@e = parseInt(E, 16)
@d = parseBigInt(D, 16)
@p = parseBigInt(P, 16)
@q = parseBigInt(Q, 16)
@dmp1 = parseBigInt(DP, 16)
@dmq1 = parseBigInt(DQ, 16)
@coeff = parseBigInt(C, 16)
else
alert "Invalid RSA private key"
return
# Generate a new random private key B bits long, using public expt E
RSAGenerate = (B, E) ->
rng = new SecureRandom()
qs = B >> 1
@e = parseInt(E, 16)
ee = new BigInteger(E, 16)
loop
loop
@p = new BigInteger(B - qs, 1, rng)
break if @p.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @p.isProbablePrime(10)
loop
@q = new BigInteger(qs, 1, rng)
break if @q.subtract(BigInteger.ONE).gcd(ee).compareTo(BigInteger.ONE) is 0 and @q.isProbablePrime(10)
if @p.compareTo(@q) <= 0
t = @p
@p = @q
@q = t
p1 = @p.subtract(BigInteger.ONE)
q1 = @q.subtract(BigInteger.ONE)
phi = p1.multiply(q1)
if phi.gcd(ee).compareTo(BigInteger.ONE) is 0
@n = @p.multiply(@q)
@d = ee.modInverse(phi)
@dmp1 = @d.mod(p1)
@dmq1 = @d.mod(q1)
@coeff = @q.modInverse(@p)
break
return
# Perform raw private operation on "x": return x^d (mod n)
RSADoPrivate = (x) ->
return x.modPow(@d, @n) if not @p? or not @q?
# TODO: re-calculate any missing CRT params
xp = x.mod(@p).modPow(@dmp1, @p)
xq = x.mod(@q).modPow(@dmq1, @q)
xp = xp.add(@p) while xp.compareTo(xq) < 0
xp.subtract(xq).multiply(@coeff).mod(@p).multiply(@q).add xq
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is an even-length hex string and the output is a plain string.
RSADecrypt = (ctext) ->
c = parseBigInt(ctext, 16)
m = @doPrivate(c)
return null unless m?
pkcs1unpad2 m, (@n.bitLength() + 7) >> 3
# Return the PKCS#1 RSA decryption of "ctext".
# "ctext" is a Base64-encoded string and the output is a plain string.
#function RSAB64Decrypt(ctext) {
# var h = b64tohex(ctext);
# if(h) return this.decrypt(h); else return null;
#}
# protected
# public
#RSAKey.prototype.b64_decrypt = RSAB64Decrypt;
encrypt = ->
RSA = new RSAKey()
RSA.setPublic nValue, eValue
RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
encrypted = RSA.encrypt(TEXT)
return
decrypt = ->
RSA = new RSAKey()
RSA.setPublic nValue, eValue
RSA.setPrivateEx nValue, eValue, dValue, pValue, qValue, dmp1Value, dmq1Value, coeffValue
decrypted = RSA.decrypt(encrypted)
throw new Error("Crypto operation failed") unless decrypted is TEXT
return
Crypto = new BenchmarkSuite("Crypto", 266181, [
new Benchmark("Encrypt", encrypt)
new Benchmark("Decrypt", decrypt)
])
dbits = undefined
BI_DB = undefined
BI_DM = undefined
BI_DV = undefined
BI_FP = undefined
BI_FV = undefined
BI_F1 = undefined
BI_F2 = undefined
canary = 0xdeadbeefcafe
j_lm = ((canary & 0xffffff) is 0xefcafe)
setupEngine = (fn, bits) ->
BigInteger::am = fn
dbits = bits
BI_DB = dbits
BI_DM = ((1 << dbits) - 1)
BI_DV = (1 << dbits)
BI_FP = 52
BI_FV = Math.pow(2, BI_FP)
BI_F1 = BI_FP - dbits
BI_F2 = 2 * dbits - BI_FP
return
BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
BI_RC = new Array()
rr = undefined
vv = undefined
rr = "0".charCodeAt(0)
vv = 0
while vv <= 9
BI_RC[rr++] = vv
++vv
rr = "a".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
rr = "A".charCodeAt(0)
vv = 10
while vv < 36
BI_RC[rr++] = vv
++vv
Classic::convert = cConvert
Classic::revert = cRevert
Classic::reduce = cReduce
Classic::mulTo = cMulTo
Classic::sqrTo = cSqrTo
Montgomery::convert = montConvert
Montgomery::revert = montRevert
Montgomery::reduce = montReduce
Montgomery::mulTo = montMulTo
Montgomery::sqrTo = montSqrTo
BigInteger::copyTo = bnpCopyTo
BigInteger::fromInt = bnpFromInt
BigInteger::fromString = bnpFromString
BigInteger::clamp = bnpClamp
BigInteger::dlShiftTo = bnpDLShiftTo
BigInteger::drShiftTo = bnpDRShiftTo
BigInteger::lShiftTo = bnpLShiftTo
BigInteger::rShiftTo = bnpRShiftTo
BigInteger::subTo = bnpSubTo
BigInteger::multiplyTo = bnpMultiplyTo
BigInteger::squareTo = bnpSquareTo
BigInteger::divRemTo = bnpDivRemTo
BigInteger::invDigit = bnpInvDigit
BigInteger::isEven = bnpIsEven
BigInteger::exp = bnpExp
BigInteger::toString = bnToString
BigInteger::negate = bnNegate
BigInteger::abs = bnAbs
BigInteger::compareTo = bnCompareTo
BigInteger::bitLength = bnBitLength
BigInteger::mod = bnMod
BigInteger::modPowInt = bnModPowInt
BigInteger.ZERO = nbv(0)
BigInteger.ONE = nbv(1)
NullExp::convert = nNop
NullExp::revert = nNop
NullExp::mulTo = nMulTo
NullExp::sqrTo = nSqrTo
Barrett::convert = barrettConvert
Barrett::revert = barrettRevert
Barrett::reduce = barrettReduce
Barrett::mulTo = barrettMulTo
Barrett::sqrTo = barrettSqrTo
lowprimes = [
2
3
5
7
11
13
17
19
23
29
31
37
41
43
47
53
59
61
67
71
73
79
83
89
97
101
103
107
109
113
127
131
137
139
149
151
157
163
167
173
179
181
191
193
197
199
211
223
227
229
233
239
241
251
257
263
269
271
277
281
283
293
307
311
313
317
331
337
347
349
353
359
367
373
379
383
389
397
401
409
419
421
431
433
439
443
449
457
461
463
467
479
487
491
499
503
509
]
lplim = (1 << 26) / lowprimes[lowprimes.length - 1]
BigInteger::chunkSize = bnpChunkSize
BigInteger::toRadix = bnpToRadix
BigInteger::fromRadix = bnpFromRadix
BigInteger::fromNumber = bnpFromNumber
BigInteger::bitwiseTo = bnpBitwiseTo
BigInteger::changeBit = bnpChangeBit
BigInteger::addTo = bnpAddTo
BigInteger::dMultiply = bnpDMultiply
BigInteger::dAddOffset = bnpDAddOffset
BigInteger::multiplyLowerTo = bnpMultiplyLowerTo
BigInteger::multiplyUpperTo = bnpMultiplyUpperTo
BigInteger::modInt = bnpModInt
BigInteger::millerRabin = bnpMillerRabin
BigInteger::clone = bnClone
BigInteger::intValue = bnIntValue
BigInteger::byteValue = bnByteValue
BigInteger::shortValue = bnShortValue
BigInteger::signum = bnSigNum
BigInteger::toByteArray = bnToByteArray
BigInteger::equals = bnEquals
BigInteger::min = bnMin
BigInteger::max = bnMax
BigInteger::and = bnAnd
BigInteger::or = bnOr
BigInteger::xor = bnXor
BigInteger::andNot = bnAndNot
BigInteger::not = bnNot
BigInteger::shiftLeft = bnShiftLeft
BigInteger::shiftRight = bnShiftRight
BigInteger::getLowestSetBit = bnGetLowestSetBit
BigInteger::bitCount = bnBitCount
BigInteger::testBit = bnTestBit
BigInteger::setBit = bnSetBit
BigInteger::clearBit = bnClearBit
BigInteger::flipBit = bnFlipBit
BigInteger::add = bnAdd
BigInteger::subtract = bnSubtract
BigInteger::multiply = bnMultiply
BigInteger::divide = bnDivide
BigInteger::remainder = bnRemainder
BigInteger::divideAndRemainder = bnDivideAndRemainder
BigInteger::modPow = bnModPow
BigInteger::modInverse = bnModInverse
BigInteger::pow = bnPow
BigInteger::gcd = bnGCD
BigInteger::isProbablePrime = bnIsProbablePrime
Arcfour::init = ARC4init
Arcfour::next = ARC4next
rng_psize = 256
rng_state = undefined
rng_pool = undefined
rng_pptr = undefined
unless rng_pool?
rng_pool = new Array()
rng_pptr = 0
t = undefined
while rng_pptr < rng_psize
t = Math.floor(65536 * Math.random())
rng_pool[rng_pptr++] = t >>> 8
rng_pool[rng_pptr++] = t & 255
rng_pptr = 0
rng_seed_time()
SecureRandom::nextBytes = rng_get_bytes
RSAKey::doPublic = RSADoPublic
RSAKey::setPublic = RSASetPublic
RSAKey::encrypt = RSAEncrypt
RSAKey::doPrivate = RSADoPrivate
RSAKey::setPrivate = RSASetPrivate
RSAKey::setPrivateEx = RSASetPrivateEx
RSAKey::generate = RSAGenerate
RSAKey::decrypt = RSADecrypt
nValue = "PI:KEY:<KEY>END_PI"
eValue = "10001"
dValue = "PI:KEY:<KEY>END_PI6e2df7c1a559e6d5ac1cd5c94703a22891464fba23d0d965086277a161"
pValue = "d090ce58a92c75233a6486cb0a9209bf3583b64f540c76f5294bb97d285eed33aec220bde14b2417951178ac152ceab6da7090905b478195498b352048f15e7d"
qValue = "cab575dc652bb66df15a0359609dPI:KEY:<KEY>END_PI1d1db184PI:KEY:<KEY>END_PI0c00c66PI:KEY:<KEY>END_PIbPI:KEY:<KEY>END_PI0efPI:KEY:<KEY>END_PIcPI:KEY:<KEY>END_PI0PI:KEY:<KEY>END_PIedbfPI:KEY:<KEY>END_PIaecPI:KEY:<KEY>END_PI1PI:KEY:<KEY>END_PI0ccPI:KEY:<KEY>END_PIf6PI:KEY:<KEY>END_PI22bPI:KEY:<KEY>END_PIePI:KEY:<KEY>END_PIf"
dmp1Value = "1a24bca8e273df2f0e47c199bbf678604e7df7215480c77c8db39f49b000ce2cf7500038acfff5433b7d582a01f1826e6f4d42e1c57f5e1fef7b12aabc59fd25"
dmq1Value = "3d06982efbbe47339e1f6d36b1216b8a741d410b0c662f54f7118b27b9a4ec9d914337eb39841d8666f3034408cf94f5bPI:KEY:<KEY>END_PI"
coeffValue = "PI:KEY:<KEY>END_PI"
setupEngine am3, 28
TEXT = "The quick brown fox jumped over the extremely lazy frog! " + "Now is the time for all good men to come to the party."
encrypted = undefined
|
[
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com> \n\nLicensed under the A",
"end": 39,
"score": 0.9998880624771118,
"start": 26,
"tag": "NAME",
"value": "Stephan Jorek"
},
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com... | src/Action/Scope/Decorator/PropertyReferenceArgumentRecycle.coffee | sjorek/goatee.js | 0 | ###
© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{PropertyReferenceArgument} = require './PropertyReferenceArgument'
{Utility:{
arrayClear
}} = require '../../Core/Constants'
exports = module?.exports ? this
exports.PropertyReferenceArgumentRecycle = \
class PropertyReferenceArgumentRecycle extends PropertyReferenceArgument
_map = {}
_args = []
_vals = []
_clearMap = (key) ->
delete _map[key]
return
_collectProperties = (object) ->
for key of object
continue if _map[key]?
_map[key] = _args.length
_args.push key
_vals.push object[key]
return
build: (code) ->
self = @
return (scope...) ->
arrayClear _args
arrayClear _vals
scope.map _collectProperties
_args.map _clearMap
self.compile(_args, code).apply(@, _vals)
| 68049 | ###
© Copyright 2013-2014 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{PropertyReferenceArgument} = require './PropertyReferenceArgument'
{Utility:{
arrayClear
}} = require '../../Core/Constants'
exports = module?.exports ? this
exports.PropertyReferenceArgumentRecycle = \
class PropertyReferenceArgumentRecycle extends PropertyReferenceArgument
_map = {}
_args = []
_vals = []
_clearMap = (key) ->
delete _map[key]
return
_collectProperties = (object) ->
for key of object
continue if _map[key]?
_map[key] = _args.length
_args.push key
_vals.push object[key]
return
build: (code) ->
self = @
return (scope...) ->
arrayClear _args
arrayClear _vals
scope.map _collectProperties
_args.map _clearMap
self.compile(_args, code).apply(@, _vals)
| true | ###
© Copyright 2013-2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{PropertyReferenceArgument} = require './PropertyReferenceArgument'
{Utility:{
arrayClear
}} = require '../../Core/Constants'
exports = module?.exports ? this
exports.PropertyReferenceArgumentRecycle = \
class PropertyReferenceArgumentRecycle extends PropertyReferenceArgument
_map = {}
_args = []
_vals = []
_clearMap = (key) ->
delete _map[key]
return
_collectProperties = (object) ->
for key of object
continue if _map[key]?
_map[key] = _args.length
_args.push key
_vals.push object[key]
return
build: (code) ->
self = @
return (scope...) ->
arrayClear _args
arrayClear _vals
scope.map _collectProperties
_args.map _clearMap
self.compile(_args, code).apply(@, _vals)
|
[
{
"context": "n/env node\n\n###\n forecast-cli\n https://github.com/joergd/forecast-cli\n\n Copyright (c) 2013-2015 Joerg Diek",
"end": 65,
"score": 0.9996091723442078,
"start": 59,
"tag": "USERNAME",
"value": "joergd"
},
{
"context": ".com/joergd/forecast-cli\n\n Copyright (c) 2013-... | src/lib/forecast-cli.coffee | chauncey-garrett/bin-forecast-cli | 0 | #!/usr/bin/env node
###
forecast-cli
https://github.com/joergd/forecast-cli
Copyright (c) 2013-2015 Joerg Diekmann
Licensed under the MIT license.
###
program = require 'commander'
prompt = require 'prompt'
defaults = require './defaults'
forecast = require './forecast'
program.version(require('../package').version)
.option('--hourly', 'Hourly report for the next 48 hours')
.option('--units', 'Configure the units used in the forecast')
program.on('--help', () ->
console.log ' Examples:'
console.log ''
console.log ' $ forecast'
console.log ' $ forecast "Cape Town"'
console.log ' $ forecast --hourly "Cape Town"'
console.log ' $ forecast --units'
console.log ''
)
program.parse(process.argv)
if program.units
console.log "Choose the units you'd like to see in your forecasts:"
units = ["Fahrenheit (°F)", "Celsius (°C)"]
unitsValues = ["us", "si"]
program.choose units, (i) ->
defaults.saveUnits unitsValues[i]
console.log "Thanks - units have been configured to #{units[i]}."
process.exit()
else
if program.args.length is 1
defaults.savePlace program.args[0]
forecast.get program.args[0], program.hourly
else
console.log ''
defaultPlace = defaults.place()
if defaultPlace != ''
forecast.get defaultPlace, program.hourly
else
prompt.start()
prompt.get([{ name: 'place', description: 'Please enter a city name', default: defaultPlace }], (err, result) ->
if err
console.log err
else
if result.place.length > 0
defaults.savePlace result.place
forecast.get result.place, program.hourly
else
console.log "Ok, whatever."
)
| 8850 | #!/usr/bin/env node
###
forecast-cli
https://github.com/joergd/forecast-cli
Copyright (c) 2013-2015 <NAME>
Licensed under the MIT license.
###
program = require 'commander'
prompt = require 'prompt'
defaults = require './defaults'
forecast = require './forecast'
program.version(require('../package').version)
.option('--hourly', 'Hourly report for the next 48 hours')
.option('--units', 'Configure the units used in the forecast')
program.on('--help', () ->
console.log ' Examples:'
console.log ''
console.log ' $ forecast'
console.log ' $ forecast "Cape Town"'
console.log ' $ forecast --hourly "Cape Town"'
console.log ' $ forecast --units'
console.log ''
)
program.parse(process.argv)
if program.units
console.log "Choose the units you'd like to see in your forecasts:"
units = ["Fahrenheit (°F)", "Celsius (°C)"]
unitsValues = ["us", "si"]
program.choose units, (i) ->
defaults.saveUnits unitsValues[i]
console.log "Thanks - units have been configured to #{units[i]}."
process.exit()
else
if program.args.length is 1
defaults.savePlace program.args[0]
forecast.get program.args[0], program.hourly
else
console.log ''
defaultPlace = defaults.place()
if defaultPlace != ''
forecast.get defaultPlace, program.hourly
else
prompt.start()
prompt.get([{ name: 'place', description: 'Please enter a city name', default: defaultPlace }], (err, result) ->
if err
console.log err
else
if result.place.length > 0
defaults.savePlace result.place
forecast.get result.place, program.hourly
else
console.log "Ok, whatever."
)
| true | #!/usr/bin/env node
###
forecast-cli
https://github.com/joergd/forecast-cli
Copyright (c) 2013-2015 PI:NAME:<NAME>END_PI
Licensed under the MIT license.
###
program = require 'commander'
prompt = require 'prompt'
defaults = require './defaults'
forecast = require './forecast'
program.version(require('../package').version)
.option('--hourly', 'Hourly report for the next 48 hours')
.option('--units', 'Configure the units used in the forecast')
program.on('--help', () ->
console.log ' Examples:'
console.log ''
console.log ' $ forecast'
console.log ' $ forecast "Cape Town"'
console.log ' $ forecast --hourly "Cape Town"'
console.log ' $ forecast --units'
console.log ''
)
program.parse(process.argv)
if program.units
console.log "Choose the units you'd like to see in your forecasts:"
units = ["Fahrenheit (°F)", "Celsius (°C)"]
unitsValues = ["us", "si"]
program.choose units, (i) ->
defaults.saveUnits unitsValues[i]
console.log "Thanks - units have been configured to #{units[i]}."
process.exit()
else
if program.args.length is 1
defaults.savePlace program.args[0]
forecast.get program.args[0], program.hourly
else
console.log ''
defaultPlace = defaults.place()
if defaultPlace != ''
forecast.get defaultPlace, program.hourly
else
prompt.start()
prompt.get([{ name: 'place', description: 'Please enter a city name', default: defaultPlace }], (err, result) ->
if err
console.log err
else
if result.place.length > 0
defaults.savePlace result.place
forecast.get result.place, program.hourly
else
console.log "Ok, whatever."
)
|
[
{
"context": " user: authData.username\n pass: authData.password\n return\n\n {\n getPageById: (baseUrl, auth, ",
"end": 395,
"score": 0.9479831457138062,
"start": 387,
"tag": "PASSWORD",
"value": "password"
}
] | lib/job-dependencies/experimentalConfluence/dependency.coffee | andreineculau/atlasboard | 0 | # This dependency is experimental and not suitable for general consumption. The API available here is liable to change
# without notice.
module.exports = (job, io, config) ->
addAuthConfig = (auth, opts) ->
authData = job.config.globalAuth[auth]
if authData and authData.username and authData.password
opts.auth =
user: authData.username
pass: authData.password
return
{
getPageById: (baseUrl, auth, pageId, cb) ->
opts =
url: baseUrl + '/rest/api/content/' + pageId
qs: expand: 'body.view'
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
cb null,
title: body.title
content: body.body.view.value
return
return
getPageByCQL: (baseUrl, auth, query, cb) ->
opts =
url: baseUrl + '/rest/experimental/content'
qs:
expand: 'body.view'
limit: 1
cql: query
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
if body.results.length == 0
return cb(null, new Error('No page matching query ' + query))
result = body.results[0]
cb null,
title: result.title
content: result.body.view.value
webLink: result._links.webui
return
return
}
| 159503 | # This dependency is experimental and not suitable for general consumption. The API available here is liable to change
# without notice.
module.exports = (job, io, config) ->
addAuthConfig = (auth, opts) ->
authData = job.config.globalAuth[auth]
if authData and authData.username and authData.password
opts.auth =
user: authData.username
pass: authData.<PASSWORD>
return
{
getPageById: (baseUrl, auth, pageId, cb) ->
opts =
url: baseUrl + '/rest/api/content/' + pageId
qs: expand: 'body.view'
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
cb null,
title: body.title
content: body.body.view.value
return
return
getPageByCQL: (baseUrl, auth, query, cb) ->
opts =
url: baseUrl + '/rest/experimental/content'
qs:
expand: 'body.view'
limit: 1
cql: query
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
if body.results.length == 0
return cb(null, new Error('No page matching query ' + query))
result = body.results[0]
cb null,
title: result.title
content: result.body.view.value
webLink: result._links.webui
return
return
}
| true | # This dependency is experimental and not suitable for general consumption. The API available here is liable to change
# without notice.
module.exports = (job, io, config) ->
addAuthConfig = (auth, opts) ->
authData = job.config.globalAuth[auth]
if authData and authData.username and authData.password
opts.auth =
user: authData.username
pass: authData.PI:PASSWORD:<PASSWORD>END_PI
return
{
getPageById: (baseUrl, auth, pageId, cb) ->
opts =
url: baseUrl + '/rest/api/content/' + pageId
qs: expand: 'body.view'
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
cb null,
title: body.title
content: body.body.view.value
return
return
getPageByCQL: (baseUrl, auth, query, cb) ->
opts =
url: baseUrl + '/rest/experimental/content'
qs:
expand: 'body.view'
limit: 1
cql: query
addAuthConfig auth, opts
job.dependencies.easyRequest.JSON opts, (err, body) ->
if err
return cb(err)
if body.results.length == 0
return cb(null, new Error('No page matching query ' + query))
result = body.results[0]
cb null,
title: result.title
content: result.body.view.value
webLink: result._links.webui
return
return
}
|
[
{
"context": "er()\n\n allModels = [\n # {\n # key: 'coloana-infinitului'\n # position: new THREE.Vector3(2, -10, 0)",
"end": 138,
"score": 0.9989427924156189,
"start": 119,
"tag": "KEY",
"value": "coloana-infinitului"
},
{
"context": "r3(-0.1, 0, -0.3)\n #... | example/3d/project-multiplayer-menu/js/game/models/LandingModel.coffee | mess110/coffee-engine | 1 | class LandingModel extends BaseModel
constructor: (index)->
super()
allModels = [
# {
# key: 'coloana-infinitului'
# position: new THREE.Vector3(2, -10, 0)
# rotation: new THREE.Vector3(-0.1, 0, -0.3)
# }
{
key: 'putinei'
position: new THREE.Vector3(1.2, -1, 6)
rotation: new THREE.Vector3(0, 0, -0.2)
scale: new THREE.Vector3(2, 2, 2)
animate: 0
}
{
key: 'chest'
position: new THREE.Vector3(3, -2, 0)
rotation: new THREE.Vector3(0.3, 0, 0)
}
]
if index?
selected = allModels[index]
else
selected = allModels.shuffle().first()
@model = new THREE.Object3D()
@model.position.copy selected.position
# TODO: find out why rotation.copy doesn't work
@model.rotation.set selected.rotation.x, selected.rotation.y, selected.rotation.z
@model.scale.copy selected.scale if selected.scale?
@mesh = JsonModelManager.clone(selected.key)
@model.add @mesh
if selected.animate?
@animate(selected.animate)
tick: (tpf) ->
@mesh.rotation.y += tpf / 2
| 52156 | class LandingModel extends BaseModel
constructor: (index)->
super()
allModels = [
# {
# key: '<KEY>'
# position: new THREE.Vector3(2, -10, 0)
# rotation: new THREE.Vector3(-0.1, 0, -0.3)
# }
{
key: '<KEY>'
position: new THREE.Vector3(1.2, -1, 6)
rotation: new THREE.Vector3(0, 0, -0.2)
scale: new THREE.Vector3(2, 2, 2)
animate: 0
}
{
key: '<KEY>'
position: new THREE.Vector3(3, -2, 0)
rotation: new THREE.Vector3(0.3, 0, 0)
}
]
if index?
selected = allModels[index]
else
selected = allModels.shuffle().first()
@model = new THREE.Object3D()
@model.position.copy selected.position
# TODO: find out why rotation.copy doesn't work
@model.rotation.set selected.rotation.x, selected.rotation.y, selected.rotation.z
@model.scale.copy selected.scale if selected.scale?
@mesh = JsonModelManager.clone(selected.key)
@model.add @mesh
if selected.animate?
@animate(selected.animate)
tick: (tpf) ->
@mesh.rotation.y += tpf / 2
| true | class LandingModel extends BaseModel
constructor: (index)->
super()
allModels = [
# {
# key: 'PI:KEY:<KEY>END_PI'
# position: new THREE.Vector3(2, -10, 0)
# rotation: new THREE.Vector3(-0.1, 0, -0.3)
# }
{
key: 'PI:KEY:<KEY>END_PI'
position: new THREE.Vector3(1.2, -1, 6)
rotation: new THREE.Vector3(0, 0, -0.2)
scale: new THREE.Vector3(2, 2, 2)
animate: 0
}
{
key: 'PI:KEY:<KEY>END_PI'
position: new THREE.Vector3(3, -2, 0)
rotation: new THREE.Vector3(0.3, 0, 0)
}
]
if index?
selected = allModels[index]
else
selected = allModels.shuffle().first()
@model = new THREE.Object3D()
@model.position.copy selected.position
# TODO: find out why rotation.copy doesn't work
@model.rotation.set selected.rotation.x, selected.rotation.y, selected.rotation.z
@model.scale.copy selected.scale if selected.scale?
@mesh = JsonModelManager.clone(selected.key)
@model.add @mesh
if selected.animate?
@animate(selected.animate)
tick: (tpf) ->
@mesh.rotation.y += tpf / 2
|
[
{
"context": "# A safe setInterval Alternative\n# Written by Luke Morton and Richard Willis, MIT licensed.\n#\n# Usage:\n# ",
"end": 57,
"score": 0.9997897148132324,
"start": 46,
"tag": "NAME",
"value": "Luke Morton"
},
{
"context": "tInterval Alternative\n# Written by Luke Morton ... | src/interlude.coffee | drpheltright/interlude | 1 | # A safe setInterval Alternative
# Written by Luke Morton and Richard Willis, MIT licensed.
#
# Usage:
# // A JavaScript example
# var interval = new Interlude(500, function () {
# console.log('bob');
# });
# interval.stop();
#
# # A CoffeeScript example
# new Interlude 500, ->
# # @ or this represents the Interlude instance
# console.log 'bob'
# @count or= 0
# if @count++ is 5 then @stop()
#
# # Asynchronous interval
# Interlude.async 1500, ->
# $.get('/test', => @next())
#
((definition) ->
if module?.exports? then module.exports = definition();
else if typeof define is 'function' and define.amd? then define(definition)
else @Interlude = definition()
) ($) ->
return class
@async: (wait, fn) ->
interval = new Interlude(wait, fn)
interval.isAsync = true
return interval
isAsync: false
constructor: (@wait = 1000, @fn) -> @init()
init: ->
@timeout = setTimeout(@callback, @wait)
return @
callback: =>
@fn.call(@)
@timeout = setTimeout(arguments.callee, @wait) unless @isAsync
return @
next: ->
throw 'Interlude not async' unless @isAsync
@init()
return @
stop: () ->
clearTimeout(@timeout)
return @ | 162297 | # A safe setInterval Alternative
# Written by <NAME> and <NAME>, MIT licensed.
#
# Usage:
# // A JavaScript example
# var interval = new Interlude(500, function () {
# console.log('bob');
# });
# interval.stop();
#
# # A CoffeeScript example
# new Interlude 500, ->
# # @ or this represents the Interlude instance
# console.log 'bob'
# @count or= 0
# if @count++ is 5 then @stop()
#
# # Asynchronous interval
# Interlude.async 1500, ->
# $.get('/test', => @next())
#
((definition) ->
if module?.exports? then module.exports = definition();
else if typeof define is 'function' and define.amd? then define(definition)
else @Interlude = definition()
) ($) ->
return class
@async: (wait, fn) ->
interval = new Interlude(wait, fn)
interval.isAsync = true
return interval
isAsync: false
constructor: (@wait = 1000, @fn) -> @init()
init: ->
@timeout = setTimeout(@callback, @wait)
return @
callback: =>
@fn.call(@)
@timeout = setTimeout(arguments.callee, @wait) unless @isAsync
return @
next: ->
throw 'Interlude not async' unless @isAsync
@init()
return @
stop: () ->
clearTimeout(@timeout)
return @ | true | # A safe setInterval Alternative
# Written by PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI, MIT licensed.
#
# Usage:
# // A JavaScript example
# var interval = new Interlude(500, function () {
# console.log('bob');
# });
# interval.stop();
#
# # A CoffeeScript example
# new Interlude 500, ->
# # @ or this represents the Interlude instance
# console.log 'bob'
# @count or= 0
# if @count++ is 5 then @stop()
#
# # Asynchronous interval
# Interlude.async 1500, ->
# $.get('/test', => @next())
#
((definition) ->
if module?.exports? then module.exports = definition();
else if typeof define is 'function' and define.amd? then define(definition)
else @Interlude = definition()
) ($) ->
return class
@async: (wait, fn) ->
interval = new Interlude(wait, fn)
interval.isAsync = true
return interval
isAsync: false
constructor: (@wait = 1000, @fn) -> @init()
init: ->
@timeout = setTimeout(@callback, @wait)
return @
callback: =>
@fn.call(@)
@timeout = setTimeout(arguments.callee, @wait) unless @isAsync
return @
next: ->
throw 'Interlude not async' unless @isAsync
@init()
return @
stop: () ->
clearTimeout(@timeout)
return @ |
[
{
"context": "= [{id: 0, name: \"San Francisco\"}, {id: 1, name: \"Lyon\"}, {id: 2, name: \"Barcelona\"}]\n regexp = new",
"end": 175,
"score": 0.5834287405014038,
"start": 173,
"tag": "NAME",
"value": "Ly"
},
{
"context": "ch, callback) ->\n collection = [{id: 0, name: \"SoMa\... | examples/typeaheads/cityService.coffee | vedantchoubey098/react-form-builder | 44 | filter = require("lodash/filter")
module.exports = cityService =
loadCitiesByName: (search, callback) ->
collection = [{id: 0, name: "San Francisco"}, {id: 1, name: "Lyon"}, {id: 2, name: "Barcelona"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
loadNeighborhoodsByName: (search, callback) ->
collection = [{id: 0, name: "SoMa"}, {id: 1, name: "Mission"}, {id: 2, name: "Fillmore"}, {id: 3, name: "Les Pentes"}, {id: 4, name: "Presqu'ile"}, {id: 5, name: "Gerland"}, {id: 6, name: "Barri Gotic"}, {id: 7, name: "Passeig de Gracia"}, {id: 8, name: "Barceloneta"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
| 216983 | filter = require("lodash/filter")
module.exports = cityService =
loadCitiesByName: (search, callback) ->
collection = [{id: 0, name: "San Francisco"}, {id: 1, name: "<NAME>on"}, {id: 2, name: "Barcelona"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
loadNeighborhoodsByName: (search, callback) ->
collection = [{id: 0, name: "<NAME>"}, {id: 1, name: "Mission"}, {id: 2, name: "<NAME>"}, {id: 3, name: "Les Pentes"}, {id: 4, name: "Presqu'ile"}, {id: 5, name: "Gerland"}, {id: 6, name: "<NAME>"}, {id: 7, name: "<NAME>"}, {id: 8, name: "<NAME>"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
| true | filter = require("lodash/filter")
module.exports = cityService =
loadCitiesByName: (search, callback) ->
collection = [{id: 0, name: "San Francisco"}, {id: 1, name: "PI:NAME:<NAME>END_PIon"}, {id: 2, name: "Barcelona"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
loadNeighborhoodsByName: (search, callback) ->
collection = [{id: 0, name: "PI:NAME:<NAME>END_PI"}, {id: 1, name: "Mission"}, {id: 2, name: "PI:NAME:<NAME>END_PI"}, {id: 3, name: "Les Pentes"}, {id: 4, name: "Presqu'ile"}, {id: 5, name: "Gerland"}, {id: 6, name: "PI:NAME:<NAME>END_PI"}, {id: 7, name: "PI:NAME:<NAME>END_PI"}, {id: 8, name: "PI:NAME:<NAME>END_PI"}]
regexp = new RegExp(search[0]+'+', 'i') if search[0] != ''
callback.success filter collection, (item) ->
search[0] != '' && regexp.test(item.name)
|
[
{
"context": " [\n\t\t_id: '53b54577f5adc6a9932b1aec'\n\t\tusername: 'admin'\n\t\temail: 'admin@admin.com'\n\t\tpassword: '202cb962",
"end": 662,
"score": 0.9987016320228577,
"start": 657,
"tag": "USERNAME",
"value": "admin"
},
{
"context": "77f5adc6a9932b1aec'\n\t\tusername: 'admin'... | meta/migrate.coffee | winnlab/Optimeal | 0 | mongoose = require 'mongoose'
module.exports = [
modelName: 'permission'
data: [
_id: 'denied'
name: 'access_denied'
,
_id: 'dashboard'
name: 'dashboard'
,
_id: 'users'
name: 'users'
,
_id: 'clients'
name: 'clients'
,
_id: 'cache'
name: 'cache'
,
_id: 'roles'
name: 'roles'
,
_id: 'permisions'
name: 'permissions'
]
,
modelName: 'role'
data: [
_id: 'admin'
name: 'admin'
'permissions': [
'denied'
'dashboard'
'users'
'clients'
'cache'
'roles'
'permisions'
]
,
_id: 'user'
name: 'user'
permissions: []
]
,
modelName: 'user'
data: [
_id: '53b54577f5adc6a9932b1aec'
username: 'admin'
email: 'admin@admin.com'
password: '202cb962ac59075b964b07152d234b70'
role: 'admin'
status: 1
]
,
modelName: 'language'
data: [
'_id' : '53db9c277ae86638de0fc923'
'name' : 'Русский'
'isoCode' : 'ru'
'active' : true
'default': true
,
'_id' : '53db9bd57ae86638de0fc922'
'name' : 'Українська'
'isoCode' : 'ua'
'active' : true
'default': false
]
] | 217433 | mongoose = require 'mongoose'
module.exports = [
modelName: 'permission'
data: [
_id: 'denied'
name: 'access_denied'
,
_id: 'dashboard'
name: 'dashboard'
,
_id: 'users'
name: 'users'
,
_id: 'clients'
name: 'clients'
,
_id: 'cache'
name: 'cache'
,
_id: 'roles'
name: 'roles'
,
_id: 'permisions'
name: 'permissions'
]
,
modelName: 'role'
data: [
_id: 'admin'
name: 'admin'
'permissions': [
'denied'
'dashboard'
'users'
'clients'
'cache'
'roles'
'permisions'
]
,
_id: 'user'
name: 'user'
permissions: []
]
,
modelName: 'user'
data: [
_id: '53b54577f5adc6a9932b1aec'
username: 'admin'
email: '<EMAIL>'
password: '<PASSWORD>'
role: 'admin'
status: 1
]
,
modelName: 'language'
data: [
'_id' : '53db9c277ae86638de0fc923'
'name' : 'Русский'
'isoCode' : 'ru'
'active' : true
'default': true
,
'_id' : '53db9bd57ae86638de0fc922'
'name' : 'Українська'
'isoCode' : 'ua'
'active' : true
'default': false
]
] | true | mongoose = require 'mongoose'
module.exports = [
modelName: 'permission'
data: [
_id: 'denied'
name: 'access_denied'
,
_id: 'dashboard'
name: 'dashboard'
,
_id: 'users'
name: 'users'
,
_id: 'clients'
name: 'clients'
,
_id: 'cache'
name: 'cache'
,
_id: 'roles'
name: 'roles'
,
_id: 'permisions'
name: 'permissions'
]
,
modelName: 'role'
data: [
_id: 'admin'
name: 'admin'
'permissions': [
'denied'
'dashboard'
'users'
'clients'
'cache'
'roles'
'permisions'
]
,
_id: 'user'
name: 'user'
permissions: []
]
,
modelName: 'user'
data: [
_id: '53b54577f5adc6a9932b1aec'
username: 'admin'
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
role: 'admin'
status: 1
]
,
modelName: 'language'
data: [
'_id' : '53db9c277ae86638de0fc923'
'name' : 'Русский'
'isoCode' : 'ru'
'active' : true
'default': true
,
'_id' : '53db9bd57ae86638de0fc922'
'name' : 'Українська'
'isoCode' : 'ua'
'active' : true
'default': false
]
] |
[
{
"context": "params: {id: 'foobar'} , query: { outcome_token: '123123', option: 'selected+option' }\n routes.user_o",
"end": 1146,
"score": 0.9434285163879395,
"start": 1140,
"tag": "PASSWORD",
"value": "123123"
}
] | apps/inquiry/test/routes.coffee | l2succes/force | 0 | sinon = require 'sinon'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
routes = require '../routes'
describe 'inquiry routes', ->
beforeEach ->
@req = params: 'foobar'
@res = render: sinon.stub(), locals: sd: {}
@next = sinon.stub()
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#index', ->
it 'handles errors', ->
Backbone.sync.yieldsTo 'error'
routes.index @req, @res, @next
@res.render.called.should.be.false()
it 'fetches the artwork and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'artwork', id: 'foobar'
routes.index @req, @res, @next
@res.locals.sd.ARTWORK.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'index'
@res.render.args[0][1].artwork.id.should.equal 'foobar'
describe '#user_outcome', ->
it 'fetches the inquiry and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'inquiry', id: 'foobar'
@req = params: {id: 'foobar'} , query: { outcome_token: '123123', option: 'selected+option' }
routes.user_outcome @req, @res, @next
@res.locals.sd.INQUIRY.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'user_outcome'
@res.render.args[0][1].inquiry.id.should.equal 'foobar'
| 68847 | sinon = require 'sinon'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
routes = require '../routes'
describe 'inquiry routes', ->
beforeEach ->
@req = params: 'foobar'
@res = render: sinon.stub(), locals: sd: {}
@next = sinon.stub()
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#index', ->
it 'handles errors', ->
Backbone.sync.yieldsTo 'error'
routes.index @req, @res, @next
@res.render.called.should.be.false()
it 'fetches the artwork and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'artwork', id: 'foobar'
routes.index @req, @res, @next
@res.locals.sd.ARTWORK.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'index'
@res.render.args[0][1].artwork.id.should.equal 'foobar'
describe '#user_outcome', ->
it 'fetches the inquiry and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'inquiry', id: 'foobar'
@req = params: {id: 'foobar'} , query: { outcome_token: '<PASSWORD>', option: 'selected+option' }
routes.user_outcome @req, @res, @next
@res.locals.sd.INQUIRY.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'user_outcome'
@res.render.args[0][1].inquiry.id.should.equal 'foobar'
| true | sinon = require 'sinon'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
routes = require '../routes'
describe 'inquiry routes', ->
beforeEach ->
@req = params: 'foobar'
@res = render: sinon.stub(), locals: sd: {}
@next = sinon.stub()
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#index', ->
it 'handles errors', ->
Backbone.sync.yieldsTo 'error'
routes.index @req, @res, @next
@res.render.called.should.be.false()
it 'fetches the artwork and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'artwork', id: 'foobar'
routes.index @req, @res, @next
@res.locals.sd.ARTWORK.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'index'
@res.render.args[0][1].artwork.id.should.equal 'foobar'
describe '#user_outcome', ->
it 'fetches the inquiry and renders the template', ->
Backbone.sync.yieldsTo 'success',
fabricate 'inquiry', id: 'foobar'
@req = params: {id: 'foobar'} , query: { outcome_token: 'PI:PASSWORD:<PASSWORD>END_PI', option: 'selected+option' }
routes.user_outcome @req, @res, @next
@res.locals.sd.INQUIRY.id.should.equal 'foobar'
@res.render.called.should.be.true()
@res.render.args[0][0].should.equal 'user_outcome'
@res.render.args[0][1].inquiry.id.should.equal 'foobar'
|
[
{
"context": " @fileoverview Tests for jsx-pascal-case\n# @author Jake Marsh\n###\n'use strict'\n\n# -----------------------------",
"end": 67,
"score": 0.9997727274894714,
"start": 57,
"tag": "NAME",
"value": "Jake Marsh"
}
] | src/tests/rules/jsx-pascal-case.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for jsx-pascal-case
# @author Jake Marsh
###
'use strict'
# ------------------------------------------------------------------------------
# Requirements
# ------------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/jsx-pascal-case'
{RuleTester} = require 'eslint'
path = require 'path'
# ------------------------------------------------------------------------------
# Tests
# ------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'jsx-pascal-case', rule,
valid: [
code: '<testComponent />'
,
code: '<test_component />'
,
code: '<TestComponent />'
,
code: '<CSSTransitionGroup />'
,
code: '<BetterThanCSS />'
,
code: '<TestComponent><div /></TestComponent>'
,
code: '<Test1Component />'
,
code: '<TestComponent1 />'
,
code: '<T3stComp0nent />'
,
code: '<T />'
,
code: '<YMCA />'
options: [allowAllCaps: yes]
,
code: '<Modal.Header />'
,
# ,
# code: '<Modal:Header />'
code: '<IGNORED />'
options: [ignore: ['IGNORED']]
]
invalid: [
code: '<Test_component />'
errors: [
message: 'Imported JSX component Test_component must be in PascalCase'
]
,
code: '<TEST_COMPONENT />'
errors: [
message: 'Imported JSX component TEST_COMPONENT must be in PascalCase'
]
,
code: '<YMCA />'
errors: [message: 'Imported JSX component YMCA must be in PascalCase']
]
| 100173 | ###*
# @fileoverview Tests for jsx-pascal-case
# @author <NAME>
###
'use strict'
# ------------------------------------------------------------------------------
# Requirements
# ------------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/jsx-pascal-case'
{RuleTester} = require 'eslint'
path = require 'path'
# ------------------------------------------------------------------------------
# Tests
# ------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'jsx-pascal-case', rule,
valid: [
code: '<testComponent />'
,
code: '<test_component />'
,
code: '<TestComponent />'
,
code: '<CSSTransitionGroup />'
,
code: '<BetterThanCSS />'
,
code: '<TestComponent><div /></TestComponent>'
,
code: '<Test1Component />'
,
code: '<TestComponent1 />'
,
code: '<T3stComp0nent />'
,
code: '<T />'
,
code: '<YMCA />'
options: [allowAllCaps: yes]
,
code: '<Modal.Header />'
,
# ,
# code: '<Modal:Header />'
code: '<IGNORED />'
options: [ignore: ['IGNORED']]
]
invalid: [
code: '<Test_component />'
errors: [
message: 'Imported JSX component Test_component must be in PascalCase'
]
,
code: '<TEST_COMPONENT />'
errors: [
message: 'Imported JSX component TEST_COMPONENT must be in PascalCase'
]
,
code: '<YMCA />'
errors: [message: 'Imported JSX component YMCA must be in PascalCase']
]
| true | ###*
# @fileoverview Tests for jsx-pascal-case
# @author PI:NAME:<NAME>END_PI
###
'use strict'
# ------------------------------------------------------------------------------
# Requirements
# ------------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/jsx-pascal-case'
{RuleTester} = require 'eslint'
path = require 'path'
# ------------------------------------------------------------------------------
# Tests
# ------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'jsx-pascal-case', rule,
valid: [
code: '<testComponent />'
,
code: '<test_component />'
,
code: '<TestComponent />'
,
code: '<CSSTransitionGroup />'
,
code: '<BetterThanCSS />'
,
code: '<TestComponent><div /></TestComponent>'
,
code: '<Test1Component />'
,
code: '<TestComponent1 />'
,
code: '<T3stComp0nent />'
,
code: '<T />'
,
code: '<YMCA />'
options: [allowAllCaps: yes]
,
code: '<Modal.Header />'
,
# ,
# code: '<Modal:Header />'
code: '<IGNORED />'
options: [ignore: ['IGNORED']]
]
invalid: [
code: '<Test_component />'
errors: [
message: 'Imported JSX component Test_component must be in PascalCase'
]
,
code: '<TEST_COMPONENT />'
errors: [
message: 'Imported JSX component TEST_COMPONENT must be in PascalCase'
]
,
code: '<YMCA />'
errors: [message: 'Imported JSX component YMCA must be in PascalCase']
]
|
[
{
"context": "bjects sent and received with XMPP:\n# addresses: \"{profileId}@chat.grindr.com\"\n# password: one-time token (see authentication",
"end": 112,
"score": 0.9977695345878601,
"start": 84,
"tag": "EMAIL",
"value": "\"{profileId}@chat.grindr.com"
},
{
"context": "esses:... | fuckr/services/chat.coffee | grr69/originatr | 0 | #Grindr™ chat messages are JSON objects sent and received with XMPP:
# addresses: "{profileId}@chat.grindr.com"
# password: one-time token (see authentication)
#Grindr™ chat uses HTTP to:
# - get messages sent while you were offline (/undeliveredChatMessages)
# - confirm receiption (/confirmChatMessagesDelivered)
# - notify Grindr™ you blocked someone (managed by profiles controller)
jacasr = require('jacasr')
nwWindow = gui = require('nw.gui').Window.get()
chat = ($http, $localStorage, $rootScope, $q, profiles, authentication, API_URL) ->
s4 = -> Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1)
uuid = -> "#{s4()}#{s4()}-#{s4()}-#{s4()}-#{s4()}-#{s4()}#{s4()}#{s4()}".toUpperCase()
client = {}
$localStorage.conversations = $localStorage.conversations || {}
$localStorage.sentImages = $localStorage.sentImages || []
createConversation = (id) ->
$localStorage.conversations[id] =
id: id
messages: []
profiles.get(id).then (profile) ->
$localStorage.conversations[id].thumbnail = profile.profileImageMediaHash
addMessage = (message) ->
if message.sourceProfileId == $rootScope.profileId
fromMe = true
id = message.targetProfileId
else
fromMe = false
id = message.sourceProfileId
return if profiles.isBlocked(id)
if message.type == 'block'
delete $localStorage.conversations[id]
if fromMe then profiles.block(id) else profiles.blockedBy(id)
else
createConversation(id) unless $localStorage.conversations[id]
timestamp = message.timestamp
$localStorage.conversations[id].lastTimeActive = timestamp
message = switch message.type
when 'text' then {text: message.body}
when 'map' then {location: angular.fromJson(message.body)}
when 'image' then {image: angular.fromJson(message.body).imageHash}
else {text: message.type + ' ' + message.body}
message.fromMe = fromMe
message.timestamp = timestamp
$localStorage.conversations[id].messages.push(message)
unless fromMe
$localStorage.conversations[id].unread = true
document.getElementById('notification').play()
$rootScope.$broadcast('new_message')
acknowledgeMessages = (messageIds) ->
$http.put(API_URL + 'me/chat/messages?confirmed=true', {messageIds: messageIds})
lastConnection = null
$rootScope.$on 'authenticated', (event, token) ->
lastConnection ||= Date.now()
loggingOut = false
client = new jacasr.Client
login: $rootScope.profileId
password: token
domain: 'chat.grindr.com'
client.on 'ready', ->
chat.connected = true
$http.get(API_URL + 'me/chat/messages?undelivered=true').then (response) ->
messageIds = []
_(response.data.messages).sortBy((message) -> message.timestamp).forEach (message) ->
addMessage(message)
messageIds.push(message.messageId)
if messageIds.length > 0
acknowledgeMessages(messageIds)
client.on 'message', (_, json) ->
message = angular.fromJson(json)
addMessage(message)
client.on 'close', ->
now = Date.now()
return if loggingOut
if (now - lastConnection) < 60000
$rootScope.chatError = true
alert("XMPP chat error. If you're using public wifi, XMPP protocol is probably blocked.")
else
lastConnection = now
client.disconnect()
authentication.login()
$rootScope.$on 'logout', ->
loggingOut = true
client.disconnect()
window.onbeforeunload = ->
client.disconnect()
nwWindow.on 'close', ->
client.disconnect()
this.close(true)
sendMessage = (type, body, to, save=true) ->
message =
targetProfileId: String(to)
type: type
messageId: uuid()
timestamp: Date.now()
sourceDisplayName: ''
sourceProfileId: String($rootScope.profileId)
body: body
client.write """<message from='#{$rootScope.profileId}@chat.grindr.com/jacasr' to='#{to}@chat.grindr.com' xml:lang='' type='chat' id='#{message.messageId}'><body>#{_.escape angular.toJson(message)}</body><markable xmlns='urn:xmpp:chat-markers:0'/></message>"""
#TODO: send read message confirmation
addMessage(message) if save
return {
sendText: (text, to, save=true) ->
sendMessage('text', text, to, save)
getConversation: (id) ->
$localStorage.conversations[id]
lastestConversations: ->
_.sortBy $localStorage.conversations, (conversation) -> - conversation.lastTimeActive
sentImages: $localStorage.sentImages
sendImage: (imageHash, to) ->
messageBody = angular.toJson({imageHash: imageHash})
sendMessage('image', messageBody, to)
sendLocation: (to) ->
messageBody = angular.toJson
lat: $localStorage.location.lat
lon: $localStorage.location.lon
sendMessage('map', messageBody, to)
block: (id) ->
sendMessage('block', null, id)
delete: (id) ->
delete $localStorage.conversations[id]
}
fuckr.factory('chat', ['$http', '$localStorage', '$rootScope', '$q', 'profiles', 'authentication', 'API_URL', chat])
| 5081 | #Grindr™ chat messages are JSON objects sent and received with XMPP:
# addresses: <EMAIL>"
# password: <PASSWORD> (see authentication)
#Grindr™ chat uses HTTP to:
# - get messages sent while you were offline (/undeliveredChatMessages)
# - confirm receiption (/confirmChatMessagesDelivered)
# - notify Grindr™ you blocked someone (managed by profiles controller)
jacasr = require('jacasr')
nwWindow = gui = require('nw.gui').Window.get()
chat = ($http, $localStorage, $rootScope, $q, profiles, authentication, API_URL) ->
s4 = -> Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1)
uuid = -> "#{s4()}#{s4()}-#{s4()}-#{s4()}-#{s4()}-#{s4()}#{s4()}#{s4()}".toUpperCase()
client = {}
$localStorage.conversations = $localStorage.conversations || {}
$localStorage.sentImages = $localStorage.sentImages || []
createConversation = (id) ->
$localStorage.conversations[id] =
id: id
messages: []
profiles.get(id).then (profile) ->
$localStorage.conversations[id].thumbnail = profile.profileImageMediaHash
addMessage = (message) ->
if message.sourceProfileId == $rootScope.profileId
fromMe = true
id = message.targetProfileId
else
fromMe = false
id = message.sourceProfileId
return if profiles.isBlocked(id)
if message.type == 'block'
delete $localStorage.conversations[id]
if fromMe then profiles.block(id) else profiles.blockedBy(id)
else
createConversation(id) unless $localStorage.conversations[id]
timestamp = message.timestamp
$localStorage.conversations[id].lastTimeActive = timestamp
message = switch message.type
when 'text' then {text: message.body}
when 'map' then {location: angular.fromJson(message.body)}
when 'image' then {image: angular.fromJson(message.body).imageHash}
else {text: message.type + ' ' + message.body}
message.fromMe = fromMe
message.timestamp = timestamp
$localStorage.conversations[id].messages.push(message)
unless fromMe
$localStorage.conversations[id].unread = true
document.getElementById('notification').play()
$rootScope.$broadcast('new_message')
acknowledgeMessages = (messageIds) ->
$http.put(API_URL + 'me/chat/messages?confirmed=true', {messageIds: messageIds})
lastConnection = null
$rootScope.$on 'authenticated', (event, token) ->
lastConnection ||= Date.now()
loggingOut = false
client = new jacasr.Client
login: $rootScope.profileId
password: <PASSWORD>
domain: 'chat.grindr.com'
client.on 'ready', ->
chat.connected = true
$http.get(API_URL + 'me/chat/messages?undelivered=true').then (response) ->
messageIds = []
_(response.data.messages).sortBy((message) -> message.timestamp).forEach (message) ->
addMessage(message)
messageIds.push(message.messageId)
if messageIds.length > 0
acknowledgeMessages(messageIds)
client.on 'message', (_, json) ->
message = angular.fromJson(json)
addMessage(message)
client.on 'close', ->
now = Date.now()
return if loggingOut
if (now - lastConnection) < 60000
$rootScope.chatError = true
alert("XMPP chat error. If you're using public wifi, XMPP protocol is probably blocked.")
else
lastConnection = now
client.disconnect()
authentication.login()
$rootScope.$on 'logout', ->
loggingOut = true
client.disconnect()
window.onbeforeunload = ->
client.disconnect()
nwWindow.on 'close', ->
client.disconnect()
this.close(true)
sendMessage = (type, body, to, save=true) ->
message =
targetProfileId: String(to)
type: type
messageId: uuid()
timestamp: Date.now()
sourceDisplayName: ''
sourceProfileId: String($rootScope.profileId)
body: body
client.write """<message from='#{<EMAIL>/jacasr' to='<EMAIL>' xml:lang='' type='chat' id='#{message.messageId}'><body>#{_.escape angular.toJson(message)}</body><markable xmlns='urn:xmpp:chat-markers:0'/></message>"""
#TODO: send read message confirmation
addMessage(message) if save
return {
sendText: (text, to, save=true) ->
sendMessage('text', text, to, save)
getConversation: (id) ->
$localStorage.conversations[id]
lastestConversations: ->
_.sortBy $localStorage.conversations, (conversation) -> - conversation.lastTimeActive
sentImages: $localStorage.sentImages
sendImage: (imageHash, to) ->
messageBody = angular.toJson({imageHash: imageHash})
sendMessage('image', messageBody, to)
sendLocation: (to) ->
messageBody = angular.toJson
lat: $localStorage.location.lat
lon: $localStorage.location.lon
sendMessage('map', messageBody, to)
block: (id) ->
sendMessage('block', null, id)
delete: (id) ->
delete $localStorage.conversations[id]
}
fuckr.factory('chat', ['$http', '$localStorage', '$rootScope', '$q', 'profiles', 'authentication', 'API_URL', chat])
| true | #Grindr™ chat messages are JSON objects sent and received with XMPP:
# addresses: PI:EMAIL:<EMAIL>END_PI"
# password: PI:PASSWORD:<PASSWORD>END_PI (see authentication)
#Grindr™ chat uses HTTP to:
# - get messages sent while you were offline (/undeliveredChatMessages)
# - confirm receiption (/confirmChatMessagesDelivered)
# - notify Grindr™ you blocked someone (managed by profiles controller)
jacasr = require('jacasr')
nwWindow = gui = require('nw.gui').Window.get()
chat = ($http, $localStorage, $rootScope, $q, profiles, authentication, API_URL) ->
s4 = -> Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1)
uuid = -> "#{s4()}#{s4()}-#{s4()}-#{s4()}-#{s4()}-#{s4()}#{s4()}#{s4()}".toUpperCase()
client = {}
$localStorage.conversations = $localStorage.conversations || {}
$localStorage.sentImages = $localStorage.sentImages || []
createConversation = (id) ->
$localStorage.conversations[id] =
id: id
messages: []
profiles.get(id).then (profile) ->
$localStorage.conversations[id].thumbnail = profile.profileImageMediaHash
addMessage = (message) ->
if message.sourceProfileId == $rootScope.profileId
fromMe = true
id = message.targetProfileId
else
fromMe = false
id = message.sourceProfileId
return if profiles.isBlocked(id)
if message.type == 'block'
delete $localStorage.conversations[id]
if fromMe then profiles.block(id) else profiles.blockedBy(id)
else
createConversation(id) unless $localStorage.conversations[id]
timestamp = message.timestamp
$localStorage.conversations[id].lastTimeActive = timestamp
message = switch message.type
when 'text' then {text: message.body}
when 'map' then {location: angular.fromJson(message.body)}
when 'image' then {image: angular.fromJson(message.body).imageHash}
else {text: message.type + ' ' + message.body}
message.fromMe = fromMe
message.timestamp = timestamp
$localStorage.conversations[id].messages.push(message)
unless fromMe
$localStorage.conversations[id].unread = true
document.getElementById('notification').play()
$rootScope.$broadcast('new_message')
acknowledgeMessages = (messageIds) ->
$http.put(API_URL + 'me/chat/messages?confirmed=true', {messageIds: messageIds})
lastConnection = null
$rootScope.$on 'authenticated', (event, token) ->
lastConnection ||= Date.now()
loggingOut = false
client = new jacasr.Client
login: $rootScope.profileId
password: PI:PASSWORD:<PASSWORD>END_PI
domain: 'chat.grindr.com'
client.on 'ready', ->
chat.connected = true
$http.get(API_URL + 'me/chat/messages?undelivered=true').then (response) ->
messageIds = []
_(response.data.messages).sortBy((message) -> message.timestamp).forEach (message) ->
addMessage(message)
messageIds.push(message.messageId)
if messageIds.length > 0
acknowledgeMessages(messageIds)
client.on 'message', (_, json) ->
message = angular.fromJson(json)
addMessage(message)
client.on 'close', ->
now = Date.now()
return if loggingOut
if (now - lastConnection) < 60000
$rootScope.chatError = true
alert("XMPP chat error. If you're using public wifi, XMPP protocol is probably blocked.")
else
lastConnection = now
client.disconnect()
authentication.login()
$rootScope.$on 'logout', ->
loggingOut = true
client.disconnect()
window.onbeforeunload = ->
client.disconnect()
nwWindow.on 'close', ->
client.disconnect()
this.close(true)
sendMessage = (type, body, to, save=true) ->
message =
targetProfileId: String(to)
type: type
messageId: uuid()
timestamp: Date.now()
sourceDisplayName: ''
sourceProfileId: String($rootScope.profileId)
body: body
client.write """<message from='#{PI:EMAIL:<EMAIL>END_PI/jacasr' to='PI:EMAIL:<EMAIL>END_PI' xml:lang='' type='chat' id='#{message.messageId}'><body>#{_.escape angular.toJson(message)}</body><markable xmlns='urn:xmpp:chat-markers:0'/></message>"""
#TODO: send read message confirmation
addMessage(message) if save
return {
sendText: (text, to, save=true) ->
sendMessage('text', text, to, save)
getConversation: (id) ->
$localStorage.conversations[id]
lastestConversations: ->
_.sortBy $localStorage.conversations, (conversation) -> - conversation.lastTimeActive
sentImages: $localStorage.sentImages
sendImage: (imageHash, to) ->
messageBody = angular.toJson({imageHash: imageHash})
sendMessage('image', messageBody, to)
sendLocation: (to) ->
messageBody = angular.toJson
lat: $localStorage.location.lat
lon: $localStorage.location.lon
sendMessage('map', messageBody, to)
block: (id) ->
sendMessage('block', null, id)
delete: (id) ->
delete $localStorage.conversations[id]
}
fuckr.factory('chat', ['$http', '$localStorage', '$rootScope', '$q', 'profiles', 'authentication', 'API_URL', chat])
|
[
{
"context": "### Copyright (c) 2015 Magnus Leo. All rights reserved. ###\n\nmodule.exports = envir",
"end": 33,
"score": 0.9998666644096375,
"start": 23,
"tag": "NAME",
"value": "Magnus Leo"
}
] | src/modules/environment.coffee | magnusleo/Leo-Engine | 1 | ### Copyright (c) 2015 Magnus Leo. All rights reserved. ###
module.exports = environment =
gravity: 60 # Tiles per second^2
| 221569 | ### Copyright (c) 2015 <NAME>. All rights reserved. ###
module.exports = environment =
gravity: 60 # Tiles per second^2
| true | ### Copyright (c) 2015 PI:NAME:<NAME>END_PI. All rights reserved. ###
module.exports = environment =
gravity: 60 # Tiles per second^2
|
[
{
"context": "esents the costs to move between hexes\n#\n# @author Cédric ZUGER\n#\n\nclass @MovementGraph\n\n # Build the movement g",
"end": 80,
"score": 0.9998307228088379,
"start": 68,
"tag": "NAME",
"value": "Cédric ZUGER"
}
] | app/assets/javascripts/local_libs/movement_graph.coffee | czuger/coffhex-dev | 1 | # This class represents the costs to move between hexes
#
# @author Cédric ZUGER
#
class @MovementGraph
# Build the movement graph
constructor: ( loaded_data ) ->
@movement_graph = loaded_data.json_movement_graph
# Build the key for the movement graph
# from and to are AxialHexes
can_move: ( from, to ) ->
@cost( from, to ) <= 2
# Build the key for the movement graph
# from and to are AxialHexes
cost: ( from, to ) ->
@movement_graph[ @movement_key( from, to ) ]
# Build the key for the movement graph
# from and to are AxialHexes
movement_key: ( from, to ) ->
[ from.hex_key(), to.hex_key() ].join( '_' ) | 220800 | # This class represents the costs to move between hexes
#
# @author <NAME>
#
class @MovementGraph
# Build the movement graph
constructor: ( loaded_data ) ->
@movement_graph = loaded_data.json_movement_graph
# Build the key for the movement graph
# from and to are AxialHexes
can_move: ( from, to ) ->
@cost( from, to ) <= 2
# Build the key for the movement graph
# from and to are AxialHexes
cost: ( from, to ) ->
@movement_graph[ @movement_key( from, to ) ]
# Build the key for the movement graph
# from and to are AxialHexes
movement_key: ( from, to ) ->
[ from.hex_key(), to.hex_key() ].join( '_' ) | true | # This class represents the costs to move between hexes
#
# @author PI:NAME:<NAME>END_PI
#
class @MovementGraph
# Build the movement graph
constructor: ( loaded_data ) ->
@movement_graph = loaded_data.json_movement_graph
# Build the key for the movement graph
# from and to are AxialHexes
can_move: ( from, to ) ->
@cost( from, to ) <= 2
# Build the key for the movement graph
# from and to are AxialHexes
cost: ( from, to ) ->
@movement_graph[ @movement_key( from, to ) ]
# Build the key for the movement graph
# from and to are AxialHexes
movement_key: ( from, to ) ->
[ from.hex_key(), to.hex_key() ].join( '_' ) |
[
{
"context": "12\n expect(response.data.first_name).toBe 'maeby'\n done()\n\n\n it 'should call error callbac",
"end": 756,
"score": 0.9991765022277832,
"start": 751,
"tag": "NAME",
"value": "maeby"
},
{
"context": "l : 'http://reqr.es/api/users'\n data: name: 'mo... | src/tests/ajax/test_ajax.coffee | dashersw/spark | 1 | goog = goog or goog = require: ->
goog.require 'spark.ajax'
describe 'spark.ajax', ->
it 'should return default options', ->
return yes unless goog.DEBUG
defaults = spark.ajax.getOptions_()
expect(defaults.type).toBe 'GET'
expect(defaults.url).toBe ''
expect(defaults.data).toBeNull()
expect(defaults.dataType).toBeNull()
expect(typeof defaults.success).toBe 'function'
expect(typeof defaults.error).toBe 'function'
defaults.success()
defaults.error()
it 'should make request', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/12'
dataType: 'json'
success : (response) ->
expect(response.data.id).toBe 12
expect(response.data.first_name).toBe 'maeby'
done()
it 'should call error callback when request failed', (done) ->
succeed = no
spark.ajax.request
url : 'http://reqr.es/api/users/23'
success : -> succeed = yes
error : ->
expect(succeed).toBeFalsy()
done()
it 'should not try JSON.parse for html outputs', (done) ->
spark.ajax.request
url : '/'
success : (data) ->
expect(typeof data is 'string').toBeTruthy()
done()
it 'should do request with POST method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
type : 'POST'
success : (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
done()
it 'should do request with DELETE method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/2'
type : 'DELETE'
success : (data) -> done()
it 'should do POST request with object data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: name: 'morpheus', job : 'leader'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
expect(data.name).toBe 'morpheus'
expect(data.job).toBe 'leader'
done()
it 'should do POST request with a string data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: 'hello'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.hello).toBeDefined()
done()
it 'should add data url as query string if method is GET', (done) ->
spark.ajax.request
type : 'GET'
url : 'http://reqr.es/api/users'
data : a: 1, b: 2, c: 3
success : (data, e) ->
expect(data).toBeDefined()
expect(e.target.getLastUri()).toBe 'http://reqr.es/api/users?a=1&b=2&c=3' if goog.DEBUG
done()
| 24185 | goog = goog or goog = require: ->
goog.require 'spark.ajax'
describe 'spark.ajax', ->
it 'should return default options', ->
return yes unless goog.DEBUG
defaults = spark.ajax.getOptions_()
expect(defaults.type).toBe 'GET'
expect(defaults.url).toBe ''
expect(defaults.data).toBeNull()
expect(defaults.dataType).toBeNull()
expect(typeof defaults.success).toBe 'function'
expect(typeof defaults.error).toBe 'function'
defaults.success()
defaults.error()
it 'should make request', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/12'
dataType: 'json'
success : (response) ->
expect(response.data.id).toBe 12
expect(response.data.first_name).toBe '<NAME>'
done()
it 'should call error callback when request failed', (done) ->
succeed = no
spark.ajax.request
url : 'http://reqr.es/api/users/23'
success : -> succeed = yes
error : ->
expect(succeed).toBeFalsy()
done()
it 'should not try JSON.parse for html outputs', (done) ->
spark.ajax.request
url : '/'
success : (data) ->
expect(typeof data is 'string').toBeTruthy()
done()
it 'should do request with POST method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
type : 'POST'
success : (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
done()
it 'should do request with DELETE method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/2'
type : 'DELETE'
success : (data) -> done()
it 'should do POST request with object data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: name: '<NAME>', job : 'leader'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
expect(data.name).toBe '<NAME>'
expect(data.job).toBe 'leader'
done()
it 'should do POST request with a string data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: 'hello'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.hello).toBeDefined()
done()
it 'should add data url as query string if method is GET', (done) ->
spark.ajax.request
type : 'GET'
url : 'http://reqr.es/api/users'
data : a: 1, b: 2, c: 3
success : (data, e) ->
expect(data).toBeDefined()
expect(e.target.getLastUri()).toBe 'http://reqr.es/api/users?a=1&b=2&c=3' if goog.DEBUG
done()
| true | goog = goog or goog = require: ->
goog.require 'spark.ajax'
describe 'spark.ajax', ->
it 'should return default options', ->
return yes unless goog.DEBUG
defaults = spark.ajax.getOptions_()
expect(defaults.type).toBe 'GET'
expect(defaults.url).toBe ''
expect(defaults.data).toBeNull()
expect(defaults.dataType).toBeNull()
expect(typeof defaults.success).toBe 'function'
expect(typeof defaults.error).toBe 'function'
defaults.success()
defaults.error()
it 'should make request', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/12'
dataType: 'json'
success : (response) ->
expect(response.data.id).toBe 12
expect(response.data.first_name).toBe 'PI:NAME:<NAME>END_PI'
done()
it 'should call error callback when request failed', (done) ->
succeed = no
spark.ajax.request
url : 'http://reqr.es/api/users/23'
success : -> succeed = yes
error : ->
expect(succeed).toBeFalsy()
done()
it 'should not try JSON.parse for html outputs', (done) ->
spark.ajax.request
url : '/'
success : (data) ->
expect(typeof data is 'string').toBeTruthy()
done()
it 'should do request with POST method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
type : 'POST'
success : (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
done()
it 'should do request with DELETE method', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users/2'
type : 'DELETE'
success : (data) -> done()
it 'should do POST request with object data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: name: 'PI:NAME:<NAME>END_PI', job : 'leader'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.createdAt).toBeDefined()
expect(data.name).toBe 'PI:NAME:<NAME>END_PI'
expect(data.job).toBe 'leader'
done()
it 'should do POST request with a string data', (done) ->
spark.ajax.request
url : 'http://reqr.es/api/users'
data: 'hello'
type: 'POST'
success: (data) ->
expect(data.id).toBeDefined()
expect(data.hello).toBeDefined()
done()
it 'should add data url as query string if method is GET', (done) ->
spark.ajax.request
type : 'GET'
url : 'http://reqr.es/api/users'
data : a: 1, b: 2, c: 3
success : (data, e) ->
expect(data).toBeDefined()
expect(e.target.getLastUri()).toBe 'http://reqr.es/api/users?a=1&b=2&c=3' if goog.DEBUG
done()
|
[
{
"context": "nfig\n\n admin.setFieldValue\n 'email': \"admin@publish.org\"\n 'username': \"admin\"\n 'password': ",
"end": 855,
"score": 0.9999243021011353,
"start": 838,
"tag": "EMAIL",
"value": "admin@publish.org"
},
{
"context": "'email': \"admin@publish... | components/backend/modules/user/server.coffee | dni/sondling | 0 | Setting = require('./../../lib/model/Schema')("settings")
express = require 'express'
auth = require "../../utilities/auth"
utils = require "../../utilities/utils"
passport = require 'passport'
module.exports.setup = (app, config)->
User = require('../../lib/model/Schema')(config.dbTable)
# login
app.get '/login', (req, res)->
res.sendfile process.cwd()+'/components/backend/login.html'
app.post '/login', passport.authenticate('local', failureRedirect: '/login'), (req, res)->
res.redirect '/'
app.get '/logout', (req, res)->
req.logout()
res.redirect '/login'
app.get "/user", (req, res)->
res.send app.user
# create default admin user if no user exists
User.count {}, (err, count)->
if count == 0
admin = utils.createModel User, config
admin.setFieldValue
'email': "admin@publish.org"
'username': "admin"
'password': "password"
'title': "administrator"
admin.save()
console.log "admin user was created"
| 128663 | Setting = require('./../../lib/model/Schema')("settings")
express = require 'express'
auth = require "../../utilities/auth"
utils = require "../../utilities/utils"
passport = require 'passport'
module.exports.setup = (app, config)->
User = require('../../lib/model/Schema')(config.dbTable)
# login
app.get '/login', (req, res)->
res.sendfile process.cwd()+'/components/backend/login.html'
app.post '/login', passport.authenticate('local', failureRedirect: '/login'), (req, res)->
res.redirect '/'
app.get '/logout', (req, res)->
req.logout()
res.redirect '/login'
app.get "/user", (req, res)->
res.send app.user
# create default admin user if no user exists
User.count {}, (err, count)->
if count == 0
admin = utils.createModel User, config
admin.setFieldValue
'email': "<EMAIL>"
'username': "admin"
'password': "<PASSWORD>"
'title': "administrator"
admin.save()
console.log "admin user was created"
| true | Setting = require('./../../lib/model/Schema')("settings")
express = require 'express'
auth = require "../../utilities/auth"
utils = require "../../utilities/utils"
passport = require 'passport'
module.exports.setup = (app, config)->
User = require('../../lib/model/Schema')(config.dbTable)
# login
app.get '/login', (req, res)->
res.sendfile process.cwd()+'/components/backend/login.html'
app.post '/login', passport.authenticate('local', failureRedirect: '/login'), (req, res)->
res.redirect '/'
app.get '/logout', (req, res)->
req.logout()
res.redirect '/login'
app.get "/user", (req, res)->
res.send app.user
# create default admin user if no user exists
User.count {}, (err, count)->
if count == 0
admin = utils.createModel User, config
admin.setFieldValue
'email': "PI:EMAIL:<EMAIL>END_PI"
'username': "admin"
'password': "PI:PASSWORD:<PASSWORD>END_PI"
'title': "administrator"
admin.save()
console.log "admin user was created"
|
[
{
"context": "nLogins', ->\n describe 'when there is an author bob(012345), and replier jack(054321)', ->\n befo",
"end": 88,
"score": 0.8135949969291687,
"start": 85,
"tag": "NAME",
"value": "bob"
},
{
"context": " 'when there is an author bob(012345), and replier jack(05432... | spec/javascripts/app_spec.coffee | genewoo/ruby-china | 1 | describe 'App', ->
describe 'scanLogins', ->
describe 'when there is an author bob(012345), and replier jack(054321)', ->
beforeEach ->
@htmlContainer.append """
<div id="topic-show">
<div class="leader">
<a data-author="true" data-name="012345">bob</a>
</div>
</div>
<div id="replies">
<span class="name"><a data-name="054321">jack</a></span>
</div>
"""
@logins = App.scanLogins(@htmlContainer.find('a'))
@logins = ({login: k, name: v} for k, v of @logins)
it 'has 2 logins', ->
expect(@logins.length).toBe 2
it 'has the author with name 012345', ->
expect(@logins[0].name).toEqual '012345'
it 'has the author with login bob', ->
expect(@logins[0].login).toEqual 'bob'
it 'has the replier with name 054321', ->
expect(@logins[1].name).toEqual '054321'
it 'has the author with login jack', ->
expect(@logins[1].login).toEqual 'jack'
| 217675 | describe 'App', ->
describe 'scanLogins', ->
describe 'when there is an author <NAME>(012345), and replier <NAME>(054321)', ->
beforeEach ->
@htmlContainer.append """
<div id="topic-show">
<div class="leader">
<a data-author="true" data-name="012345">bob</a>
</div>
</div>
<div id="replies">
<span class="name"><a data-name="054321">jack</a></span>
</div>
"""
@logins = App.scanLogins(@htmlContainer.find('a'))
@logins = ({login: k, name: v} for k, v of @logins)
it 'has 2 logins', ->
expect(@logins.length).toBe 2
it 'has the author with name 012345', ->
expect(@logins[0].name).toEqual '012345'
it 'has the author with login bob', ->
expect(@logins[0].login).toEqual 'bob'
it 'has the replier with name 054321', ->
expect(@logins[1].name).toEqual '054321'
it 'has the author with login jack', ->
expect(@logins[1].login).toEqual 'jack'
| true | describe 'App', ->
describe 'scanLogins', ->
describe 'when there is an author PI:NAME:<NAME>END_PI(012345), and replier PI:NAME:<NAME>END_PI(054321)', ->
beforeEach ->
@htmlContainer.append """
<div id="topic-show">
<div class="leader">
<a data-author="true" data-name="012345">bob</a>
</div>
</div>
<div id="replies">
<span class="name"><a data-name="054321">jack</a></span>
</div>
"""
@logins = App.scanLogins(@htmlContainer.find('a'))
@logins = ({login: k, name: v} for k, v of @logins)
it 'has 2 logins', ->
expect(@logins.length).toBe 2
it 'has the author with name 012345', ->
expect(@logins[0].name).toEqual '012345'
it 'has the author with login bob', ->
expect(@logins[0].login).toEqual 'bob'
it 'has the replier with name 054321', ->
expect(@logins[1].name).toEqual '054321'
it 'has the author with login jack', ->
expect(@logins[1].login).toEqual 'jack'
|
[
{
"context": "rocess.coffee\n# processes crystal spec\n#\n# @author Chris Tate <chris@autocode.run>\n# @copyright 2015 Autocode\n#",
"end": 66,
"score": 0.9998694062232971,
"start": 56,
"tag": "NAME",
"value": "Chris Tate"
},
{
"context": "\n# processes crystal spec\n#\n# @author Chr... | src/autocode/process.coffee | crystal/autocode-js | 92 | #
# process.coffee
# processes crystal spec
#
# @author Chris Tate <chris@autocode.run>
# @copyright 2015 Autocode
# @license MIT
#
# load deps
crystal = {
format: require './format'
}
debug = require('debug')('build')
fs = require 'fs'
merge = require 'merge'
pluralize = require 'pluralize'
# valid detail types
detail_types = [
'bool'
'child'
'created'
'date'
'deleted'
'decimal'
'email'
'id'
'model'
'number'
'parent'
'password'
'select'
'string'
'text'
'time'
'updated'
]
access_exists = (role, arr) ->
for obj in arr
if obj.role.id == role
return true
false
process = (config, spec) ->
# get config
config = config or this.config
if !config then throw new Error '"config" required for process(config, spec)'
# get spec
spec = spec or this.spec
if !spec then throw new Error '"spec" required for process(config, spec)'
# create gen object
gen = {}
# process models
spec = merge.recursive spec, config.spec or {}
if spec.models
gen.model = {}
gen.models = []
for model_name of spec.models
model = processModel model_name, spec
gen.model[model_name] = model
gen.models.push model
# display processed spec in console
#console.log JSON.stringify gen, null, "\t"
gen
processModel = (model_name, spec) ->
# get model
model = spec.models[model_name]
# pluralize model name
if !model.plural
model.plural = pluralize model_name
model_gen = {
access: {}
has: {}
id: model_name
name: crystal.format model_name, model.plural
}
model_gen.model = model_gen
if model.details
model_gen.detail = {}
model_gen.details = []
for detail_name of model.details
detail = model.details[detail_name]
if !detail.plural
detail.plural = pluralize detail_name
detail_gen = {
access: {}
default: detail.default
id: detail_name
name: crystal.format detail_name, detail.plural
}
detail_gen.detail = detail_gen
detail_gen.model = model_gen
if model.access
for role of model.access
if Object.prototype.toString.call model.access[role].permissions == '[object Object]'
role_data = {
role: {
access: {
create: false
read: false
update: false
delete: false
}
name: crystal.format role
}
}
for access of model.access[role].permissions
if model.access[role].permissions[access] == '*' or model.access[role].permissions[access].indexOf(detail_name) != -1
if !detail_gen.access[access]
detail_gen.access[access] = {
roles: []
}
if !model_gen.access[access]
model_gen.access[access] = {
roles: []
}
if !access_exists role, detail_gen.access[access].roles
detail_gen.access[access].roles.push {
role: {
id: role
name: crystal.format(role)
}
}
if !access_exists role, model_gen.access[access].roles
model_gen.access[access].roles.push {
role: {
id: role,
name: crystal.format(role)
}
}
else if model.access[role].permissions == '*' or model.access[role].permissions.indexOf(detail_name) != -1
if !detail_gen.access.create
detail_gen.access.create = {
roles: []
}
detail_gen.access.create.detail = detail_gen
detail_gen.access.create.model = model_gen
if !detail_gen.access.read
detail_gen.access.read = {
roles: []
}
detail_gen.access.read.detail = detail_gen
detail_gen.access.read.model = model_gen
if !detail_gen.access.update
detail_gen.access.update = {
roles: []
}
detail_gen.access.update.detail = detail_gen
detail_gen.access.update.model = model_gen
if !detail_gen.access.delete
detail_gen.access.delete = {
roles: []
}
detail_gen.access.delete.detail = detail_gen
detail_gen.access.delete.model = model_gen
if !access_exists role, detail_gen.access.create.roles
detail_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.read.roles
detail_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.update.roles
detail_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.delete.roles
detail_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !model_gen.access.create
model_gen.access.create = {
roles: []
}
if !model_gen.access.read
model_gen.access.read = {
roles: []
}
if !model_gen.access.update
model_gen.access.update = {
roles: []
}
if !model_gen.access.delete
model_gen.access.delete = {
roles: []
}
if !access_exists role, model_gen.access.create.roles
model_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.read.roles
model_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.update.roles
model_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.delete.roles
model_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !detail_gen.access.create and !detail_gen.access.read and !detail_gen.access.update and !detail_gen.access.delete
delete detail_gen.access
if detail.detail
detail_gen.association = {}
detail_gen.association.detail = {
name: crystal.format detail.detail
}
if detail.model
if !detail_gen.association
detail_gen.association = {}
detail_gen.association.model = {
name: crystal.format detail.model
}
if detail.multiple != undefined
if detail.multiple == true
detail_gen.multiple = true
else if detail.multiple == false
detail_gen.multiple = false
else
debug 'Invalid value for "multiple": %s', detail.multiple
if detail.options
detail_options = []
for i of detail.options
detail_options.push {
option: detail.options[i]
}
detail_gen.options = detail_options
if detail.required != undefined
if detail.required == true
detail_gen.required = true
else if detail.required == false
detail_gen.required = false
else
debug 'Invalid value for "required": %s', detail.required
if detail.type
if detail_types.indexOf(detail.type) == -1
console.log "Unknown type (#{detail.type}) for detail (#{detail_name})"
else
detail_gen.is = {}
detail_gen.is[detail.type] = true
detail_gen.type = detail.type
model_gen.has[detail.type] = true
if detail.unique != undefined
if detail.unique == true
detail_gen.unique = {
is: { bool: true }
value: true
}
else if detail.unique == false
detail_gen.unique = {
is: { bool: true }
value: false
}
else
detail_gen.unique = {
is: { model: true }
model: {
name: crystal.format detail.unique
},
value: detail.unique
}
model_gen.detail[detail_name] = detail_gen
model_gen.details.push detail_gen
if !model_gen.access.create and !model_gen.access.read and !model_gen.access.update and !model_gen.access.delete
delete model_gen.access
model_gen
module.exports = process
| 35214 | #
# process.coffee
# processes crystal spec
#
# @author <NAME> <<EMAIL>>
# @copyright 2015 Autocode
# @license MIT
#
# load deps
crystal = {
format: require './format'
}
debug = require('debug')('build')
fs = require 'fs'
merge = require 'merge'
pluralize = require 'pluralize'
# valid detail types
detail_types = [
'bool'
'child'
'created'
'date'
'deleted'
'decimal'
'email'
'id'
'model'
'number'
'parent'
'password'
'select'
'string'
'text'
'time'
'updated'
]
access_exists = (role, arr) ->
for obj in arr
if obj.role.id == role
return true
false
process = (config, spec) ->
# get config
config = config or this.config
if !config then throw new Error '"config" required for process(config, spec)'
# get spec
spec = spec or this.spec
if !spec then throw new Error '"spec" required for process(config, spec)'
# create gen object
gen = {}
# process models
spec = merge.recursive spec, config.spec or {}
if spec.models
gen.model = {}
gen.models = []
for model_name of spec.models
model = processModel model_name, spec
gen.model[model_name] = model
gen.models.push model
# display processed spec in console
#console.log JSON.stringify gen, null, "\t"
gen
processModel = (model_name, spec) ->
# get model
model = spec.models[model_name]
# pluralize model name
if !model.plural
model.plural = pluralize model_name
model_gen = {
access: {}
has: {}
id: model_name
name: crystal.format model_name, model.plural
}
model_gen.model = model_gen
if model.details
model_gen.detail = {}
model_gen.details = []
for detail_name of model.details
detail = model.details[detail_name]
if !detail.plural
detail.plural = pluralize detail_name
detail_gen = {
access: {}
default: detail.default
id: detail_name
name: crystal.format detail_name, detail.plural
}
detail_gen.detail = detail_gen
detail_gen.model = model_gen
if model.access
for role of model.access
if Object.prototype.toString.call model.access[role].permissions == '[object Object]'
role_data = {
role: {
access: {
create: false
read: false
update: false
delete: false
}
name: crystal.format role
}
}
for access of model.access[role].permissions
if model.access[role].permissions[access] == '*' or model.access[role].permissions[access].indexOf(detail_name) != -1
if !detail_gen.access[access]
detail_gen.access[access] = {
roles: []
}
if !model_gen.access[access]
model_gen.access[access] = {
roles: []
}
if !access_exists role, detail_gen.access[access].roles
detail_gen.access[access].roles.push {
role: {
id: role
name: crystal.format(role)
}
}
if !access_exists role, model_gen.access[access].roles
model_gen.access[access].roles.push {
role: {
id: role,
name: crystal.format(role)
}
}
else if model.access[role].permissions == '*' or model.access[role].permissions.indexOf(detail_name) != -1
if !detail_gen.access.create
detail_gen.access.create = {
roles: []
}
detail_gen.access.create.detail = detail_gen
detail_gen.access.create.model = model_gen
if !detail_gen.access.read
detail_gen.access.read = {
roles: []
}
detail_gen.access.read.detail = detail_gen
detail_gen.access.read.model = model_gen
if !detail_gen.access.update
detail_gen.access.update = {
roles: []
}
detail_gen.access.update.detail = detail_gen
detail_gen.access.update.model = model_gen
if !detail_gen.access.delete
detail_gen.access.delete = {
roles: []
}
detail_gen.access.delete.detail = detail_gen
detail_gen.access.delete.model = model_gen
if !access_exists role, detail_gen.access.create.roles
detail_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.read.roles
detail_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.update.roles
detail_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.delete.roles
detail_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !model_gen.access.create
model_gen.access.create = {
roles: []
}
if !model_gen.access.read
model_gen.access.read = {
roles: []
}
if !model_gen.access.update
model_gen.access.update = {
roles: []
}
if !model_gen.access.delete
model_gen.access.delete = {
roles: []
}
if !access_exists role, model_gen.access.create.roles
model_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.read.roles
model_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.update.roles
model_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.delete.roles
model_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !detail_gen.access.create and !detail_gen.access.read and !detail_gen.access.update and !detail_gen.access.delete
delete detail_gen.access
if detail.detail
detail_gen.association = {}
detail_gen.association.detail = {
name: crystal.format detail.detail
}
if detail.model
if !detail_gen.association
detail_gen.association = {}
detail_gen.association.model = {
name: crystal.format detail.model
}
if detail.multiple != undefined
if detail.multiple == true
detail_gen.multiple = true
else if detail.multiple == false
detail_gen.multiple = false
else
debug 'Invalid value for "multiple": %s', detail.multiple
if detail.options
detail_options = []
for i of detail.options
detail_options.push {
option: detail.options[i]
}
detail_gen.options = detail_options
if detail.required != undefined
if detail.required == true
detail_gen.required = true
else if detail.required == false
detail_gen.required = false
else
debug 'Invalid value for "required": %s', detail.required
if detail.type
if detail_types.indexOf(detail.type) == -1
console.log "Unknown type (#{detail.type}) for detail (#{detail_name})"
else
detail_gen.is = {}
detail_gen.is[detail.type] = true
detail_gen.type = detail.type
model_gen.has[detail.type] = true
if detail.unique != undefined
if detail.unique == true
detail_gen.unique = {
is: { bool: true }
value: true
}
else if detail.unique == false
detail_gen.unique = {
is: { bool: true }
value: false
}
else
detail_gen.unique = {
is: { model: true }
model: {
name: crystal.format detail.unique
},
value: detail.unique
}
model_gen.detail[detail_name] = detail_gen
model_gen.details.push detail_gen
if !model_gen.access.create and !model_gen.access.read and !model_gen.access.update and !model_gen.access.delete
delete model_gen.access
model_gen
module.exports = process
| true | #
# process.coffee
# processes crystal spec
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @copyright 2015 Autocode
# @license MIT
#
# load deps
crystal = {
format: require './format'
}
debug = require('debug')('build')
fs = require 'fs'
merge = require 'merge'
pluralize = require 'pluralize'
# valid detail types
detail_types = [
'bool'
'child'
'created'
'date'
'deleted'
'decimal'
'email'
'id'
'model'
'number'
'parent'
'password'
'select'
'string'
'text'
'time'
'updated'
]
access_exists = (role, arr) ->
for obj in arr
if obj.role.id == role
return true
false
process = (config, spec) ->
# get config
config = config or this.config
if !config then throw new Error '"config" required for process(config, spec)'
# get spec
spec = spec or this.spec
if !spec then throw new Error '"spec" required for process(config, spec)'
# create gen object
gen = {}
# process models
spec = merge.recursive spec, config.spec or {}
if spec.models
gen.model = {}
gen.models = []
for model_name of spec.models
model = processModel model_name, spec
gen.model[model_name] = model
gen.models.push model
# display processed spec in console
#console.log JSON.stringify gen, null, "\t"
gen
processModel = (model_name, spec) ->
# get model
model = spec.models[model_name]
# pluralize model name
if !model.plural
model.plural = pluralize model_name
model_gen = {
access: {}
has: {}
id: model_name
name: crystal.format model_name, model.plural
}
model_gen.model = model_gen
if model.details
model_gen.detail = {}
model_gen.details = []
for detail_name of model.details
detail = model.details[detail_name]
if !detail.plural
detail.plural = pluralize detail_name
detail_gen = {
access: {}
default: detail.default
id: detail_name
name: crystal.format detail_name, detail.plural
}
detail_gen.detail = detail_gen
detail_gen.model = model_gen
if model.access
for role of model.access
if Object.prototype.toString.call model.access[role].permissions == '[object Object]'
role_data = {
role: {
access: {
create: false
read: false
update: false
delete: false
}
name: crystal.format role
}
}
for access of model.access[role].permissions
if model.access[role].permissions[access] == '*' or model.access[role].permissions[access].indexOf(detail_name) != -1
if !detail_gen.access[access]
detail_gen.access[access] = {
roles: []
}
if !model_gen.access[access]
model_gen.access[access] = {
roles: []
}
if !access_exists role, detail_gen.access[access].roles
detail_gen.access[access].roles.push {
role: {
id: role
name: crystal.format(role)
}
}
if !access_exists role, model_gen.access[access].roles
model_gen.access[access].roles.push {
role: {
id: role,
name: crystal.format(role)
}
}
else if model.access[role].permissions == '*' or model.access[role].permissions.indexOf(detail_name) != -1
if !detail_gen.access.create
detail_gen.access.create = {
roles: []
}
detail_gen.access.create.detail = detail_gen
detail_gen.access.create.model = model_gen
if !detail_gen.access.read
detail_gen.access.read = {
roles: []
}
detail_gen.access.read.detail = detail_gen
detail_gen.access.read.model = model_gen
if !detail_gen.access.update
detail_gen.access.update = {
roles: []
}
detail_gen.access.update.detail = detail_gen
detail_gen.access.update.model = model_gen
if !detail_gen.access.delete
detail_gen.access.delete = {
roles: []
}
detail_gen.access.delete.detail = detail_gen
detail_gen.access.delete.model = model_gen
if !access_exists role, detail_gen.access.create.roles
detail_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.read.roles
detail_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.update.roles
detail_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, detail_gen.access.delete.roles
detail_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !model_gen.access.create
model_gen.access.create = {
roles: []
}
if !model_gen.access.read
model_gen.access.read = {
roles: []
}
if !model_gen.access.update
model_gen.access.update = {
roles: []
}
if !model_gen.access.delete
model_gen.access.delete = {
roles: []
}
if !access_exists role, model_gen.access.create.roles
model_gen.access.create.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.read.roles
model_gen.access.read.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.update.roles
model_gen.access.update.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !access_exists role, model_gen.access.delete.roles
model_gen.access.delete.roles.push {
role: {
id: role
name: crystal.format role
}
}
if !detail_gen.access.create and !detail_gen.access.read and !detail_gen.access.update and !detail_gen.access.delete
delete detail_gen.access
if detail.detail
detail_gen.association = {}
detail_gen.association.detail = {
name: crystal.format detail.detail
}
if detail.model
if !detail_gen.association
detail_gen.association = {}
detail_gen.association.model = {
name: crystal.format detail.model
}
if detail.multiple != undefined
if detail.multiple == true
detail_gen.multiple = true
else if detail.multiple == false
detail_gen.multiple = false
else
debug 'Invalid value for "multiple": %s', detail.multiple
if detail.options
detail_options = []
for i of detail.options
detail_options.push {
option: detail.options[i]
}
detail_gen.options = detail_options
if detail.required != undefined
if detail.required == true
detail_gen.required = true
else if detail.required == false
detail_gen.required = false
else
debug 'Invalid value for "required": %s', detail.required
if detail.type
if detail_types.indexOf(detail.type) == -1
console.log "Unknown type (#{detail.type}) for detail (#{detail_name})"
else
detail_gen.is = {}
detail_gen.is[detail.type] = true
detail_gen.type = detail.type
model_gen.has[detail.type] = true
if detail.unique != undefined
if detail.unique == true
detail_gen.unique = {
is: { bool: true }
value: true
}
else if detail.unique == false
detail_gen.unique = {
is: { bool: true }
value: false
}
else
detail_gen.unique = {
is: { model: true }
model: {
name: crystal.format detail.unique
},
value: detail.unique
}
model_gen.detail[detail_name] = detail_gen
model_gen.details.push detail_gen
if !model_gen.access.create and !model_gen.access.read and !model_gen.access.update and !model_gen.access.delete
delete model_gen.access
model_gen
module.exports = process
|
[
{
"context": "ollers/getter'\ntrack_sc = \"https://soundcloud.com/chrome-sparks/goddess-1\"\ntrack_yt = \"https://www.youtube.com/wa",
"end": 119,
"score": 0.9903866052627563,
"start": 106,
"tag": "USERNAME",
"value": "chrome-sparks"
},
{
"context": ") ->\n (res.title).should.... | test/getter.test.coffee | chriscx/Disko | 0 | should = require 'should'
Getter = require '../app/controllers/getter'
track_sc = "https://soundcloud.com/chrome-sparks/goddess-1"
track_yt = "https://www.youtube.com/watch?v=H7HmzwI67ec"
describe 'Getter', ->
it 'gets Soundcloud track info', (done) ->
Getter.dispatch track_sc, (res) ->
(res.title).should.be.eql 'goddess'
done()
it 'gets Youtube track info', (done) ->
Getter.dispatch track_yt, (res) ->
(res.title).should.be.eql 'Owl City & Carly Rae Jepsen - Good Time'
done() | 159169 | should = require 'should'
Getter = require '../app/controllers/getter'
track_sc = "https://soundcloud.com/chrome-sparks/goddess-1"
track_yt = "https://www.youtube.com/watch?v=H7HmzwI67ec"
describe 'Getter', ->
it 'gets Soundcloud track info', (done) ->
Getter.dispatch track_sc, (res) ->
(res.title).should.be.eql 'goddess'
done()
it 'gets Youtube track info', (done) ->
Getter.dispatch track_yt, (res) ->
(res.title).should.be.eql 'Owl City & <NAME> - Good Time'
done() | true | should = require 'should'
Getter = require '../app/controllers/getter'
track_sc = "https://soundcloud.com/chrome-sparks/goddess-1"
track_yt = "https://www.youtube.com/watch?v=H7HmzwI67ec"
describe 'Getter', ->
it 'gets Soundcloud track info', (done) ->
Getter.dispatch track_sc, (res) ->
(res.title).should.be.eql 'goddess'
done()
it 'gets Youtube track info', (done) ->
Getter.dispatch track_yt, (res) ->
(res.title).should.be.eql 'Owl City & PI:NAME:<NAME>END_PI - Good Time'
done() |
[
{
"context": "ective()\n @scope.cocktailRecipe =\n name: 'Margarita (Cadillac)'\n description: 'A classic marg wi",
"end": 148,
"score": 0.9934225678443909,
"start": 139,
"tag": "NAME",
"value": "Margarita"
},
{
"context": " @scope.cocktailRecipe =\n name: 'Margari... | spec/javascripts/directives/recipeCard_spec.coffee | baberthal/cocktails | 0 | #= require spec_helper
describe 'recipeCard directive', ->
beforeEach ->
@setupDirective()
@scope.cocktailRecipe =
name: 'Margarita (Cadillac)'
description: 'A classic marg with Grand Marnier'
it 'replaces the element with the appropriate content', ->
element = @compile("<recipe-card></recipe-card>")(@scope)
expect(element.html()).toContain("Details")
| 2207 | #= require spec_helper
describe 'recipeCard directive', ->
beforeEach ->
@setupDirective()
@scope.cocktailRecipe =
name: '<NAME> (<NAME>)'
description: 'A classic marg with Grand Marnier'
it 'replaces the element with the appropriate content', ->
element = @compile("<recipe-card></recipe-card>")(@scope)
expect(element.html()).toContain("Details")
| true | #= require spec_helper
describe 'recipeCard directive', ->
beforeEach ->
@setupDirective()
@scope.cocktailRecipe =
name: 'PI:NAME:<NAME>END_PI (PI:NAME:<NAME>END_PI)'
description: 'A classic marg with Grand Marnier'
it 'replaces the element with the appropriate content', ->
element = @compile("<recipe-card></recipe-card>")(@scope)
expect(element.html()).toContain("Details")
|
[
{
"context": "ssert.isTrue(\n gitSetEmailStub.calledWith('deploy@nrt.io'),\n \"Expected the git email to be set to d",
"end": 1042,
"score": 0.9998578429222107,
"start": 1029,
"tag": "EMAIL",
"value": "deploy@nrt.io"
},
{
"context": "io'),\n \"Expected the git em... | server/test/units/deploy.coffee | unepwcmc/NRT | 0 | assert = require('chai').assert
helpers = require '../helpers'
sinon = require 'sinon'
Promise = require 'bluebird'
CommandRunner = require('../../lib/command_runner')
Git = require('../../lib/git')
GitHubDeploy = require('../../lib/git_hub_deploy')
Deploy = require('../../lib/deploy')
suite('Deploy')
test('.updateFromTag sets the gits username,
pulls the given tag,
runs npm install in both client and server,
notifying github at each step', (done) ->
sandbox = sinon.sandbox.create()
tagName = "corporate-banana"
gitSetEmailStub = sandbox.stub(Git, 'setEmail', ->
new Promise((resolve)-> resolve())
)
gitFetchStub = sandbox.stub(Git, 'fetch', ->
new Promise((resolve)-> resolve())
)
gitCheckoutStub = sandbox.stub(Git, 'checkout', ->
new Promise((resolve)-> resolve())
)
deploy = {
updateDeployState: sandbox.spy(->
new Promise((resolve)-> resolve())
)
}
Deploy.updateFromTag(tagName, deploy).then( ->
try
assert.isTrue(
gitSetEmailStub.calledWith('deploy@nrt.io'),
"Expected the git email to be set to deploy@nrt.io"
)
assert.isTrue(
gitFetchStub.calledOnce,
"Expected git fetch to be called"
)
assert.isTrue(
gitCheckoutStub.calledWith(tagName),
"Expected the fetched tag to be checked out"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', 'Fetching tags'),
"Expected github to be notified of fetching tags"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', "Checking out tag '#{tagName}'"),
"Expected github to be notified of tag checkout "
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy starts a new deploy and checks out the given tag, then
installs the client and server NPM modules, and runs grunt before
reporting success', (done) ->
sandbox = sinon.sandbox.create()
tagName = 'twiki'
updateStub = sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve)-> resolve())
)
startGithubDeployStub = sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
deployUpdateStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve)-> resolve())
)
npmClientStub = sandbox.stub(Deploy, 'npmInstallClient', ->
new Promise((resolve) -> resolve())
)
gruntStub = sandbox.stub(Deploy, 'grunt', ->
new Promise((resolve) -> resolve())
)
npmServerStub = sandbox.stub(Deploy, 'npmInstallServer', ->
new Promise((resolve) -> resolve())
)
Deploy.deploy(tagName).then(->
try
assert.isTrue(
startGithubDeployStub.calledOnce,
"Expected GitHubDeploy::start to be called"
)
assert.isTrue(
updateStub.calledWith(tagName),
"Expected Deploy.updateFromtag to be called with the tagname"
)
assert.isTrue(
npmClientStub.calledOnce
"Expected Deploy.npmInstallClient to be called"
)
assert.isTrue(
gruntStub.calledOnce
"Expected Deploy.grunt to be called"
)
assert.isTrue(
npmServerStub.calledOnce
"Expected Deploy.npmInstallServer to be called"
)
assert.isTrue(
deployUpdateStateStub.callCount > 0,
"""Expected deploy.updateDeployState to be called at least once
(called #{deployUpdateStateStub.callCount})"""
)
assert.isTrue(
deployUpdateStateStub.calledWith('success'),
"Expected to be notified of successful deploy"
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy posts the error status if an error occurs', (done) ->
sandbox = sinon.sandbox.create()
sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
updateDeployStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve) -> resolve())
)
failMessage = "Big end has gone"
sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve, reject) -> reject(new Error(failMessage)))
)
Deploy.deploy().then(->
sandbox.restore()
done(new Error("Deploy should fail"))
).catch((err)->
try
assert.strictEqual err.message, failMessage,
"Expected the right error to be thrown"
assert.isTrue updateDeployStateStub.calledOnce,
"Expected updateDeployState to be called"
updateDeployStateCall = updateDeployStateStub.getCall(0)
assert.isTrue updateDeployStateStub.calledWith('failure', failMessage),
"""
Expected updateDeployState to be called with
'failure', #{failMessage}, but called with
#{updateDeployStateCall.args}
"""
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallClient changes into the client/ dir and calls
npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.npmInstallClient().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallClient rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallClient().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallServer calls npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
Deploy.npmInstallServer().then(->
try
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallServer rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallServer().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.grunt changes to the client/ directory and calls grunt', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.grunt().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['grunt'],
"Expected spawn to be called with grunt"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.grunt rejects if grunt fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.grunt().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "grunt exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
| 119399 | assert = require('chai').assert
helpers = require '../helpers'
sinon = require 'sinon'
Promise = require 'bluebird'
CommandRunner = require('../../lib/command_runner')
Git = require('../../lib/git')
GitHubDeploy = require('../../lib/git_hub_deploy')
Deploy = require('../../lib/deploy')
suite('Deploy')
test('.updateFromTag sets the gits username,
pulls the given tag,
runs npm install in both client and server,
notifying github at each step', (done) ->
sandbox = sinon.sandbox.create()
tagName = "corporate-banana"
gitSetEmailStub = sandbox.stub(Git, 'setEmail', ->
new Promise((resolve)-> resolve())
)
gitFetchStub = sandbox.stub(Git, 'fetch', ->
new Promise((resolve)-> resolve())
)
gitCheckoutStub = sandbox.stub(Git, 'checkout', ->
new Promise((resolve)-> resolve())
)
deploy = {
updateDeployState: sandbox.spy(->
new Promise((resolve)-> resolve())
)
}
Deploy.updateFromTag(tagName, deploy).then( ->
try
assert.isTrue(
gitSetEmailStub.calledWith('<EMAIL>'),
"Expected the git email to be set to <EMAIL>"
)
assert.isTrue(
gitFetchStub.calledOnce,
"Expected git fetch to be called"
)
assert.isTrue(
gitCheckoutStub.calledWith(tagName),
"Expected the fetched tag to be checked out"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', 'Fetching tags'),
"Expected github to be notified of fetching tags"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', "Checking out tag '#{tagName}'"),
"Expected github to be notified of tag checkout "
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy starts a new deploy and checks out the given tag, then
installs the client and server NPM modules, and runs grunt before
reporting success', (done) ->
sandbox = sinon.sandbox.create()
tagName = 'twiki'
updateStub = sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve)-> resolve())
)
startGithubDeployStub = sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
deployUpdateStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve)-> resolve())
)
npmClientStub = sandbox.stub(Deploy, 'npmInstallClient', ->
new Promise((resolve) -> resolve())
)
gruntStub = sandbox.stub(Deploy, 'grunt', ->
new Promise((resolve) -> resolve())
)
npmServerStub = sandbox.stub(Deploy, 'npmInstallServer', ->
new Promise((resolve) -> resolve())
)
Deploy.deploy(tagName).then(->
try
assert.isTrue(
startGithubDeployStub.calledOnce,
"Expected GitHubDeploy::start to be called"
)
assert.isTrue(
updateStub.calledWith(tagName),
"Expected Deploy.updateFromtag to be called with the tagname"
)
assert.isTrue(
npmClientStub.calledOnce
"Expected Deploy.npmInstallClient to be called"
)
assert.isTrue(
gruntStub.calledOnce
"Expected Deploy.grunt to be called"
)
assert.isTrue(
npmServerStub.calledOnce
"Expected Deploy.npmInstallServer to be called"
)
assert.isTrue(
deployUpdateStateStub.callCount > 0,
"""Expected deploy.updateDeployState to be called at least once
(called #{deployUpdateStateStub.callCount})"""
)
assert.isTrue(
deployUpdateStateStub.calledWith('success'),
"Expected to be notified of successful deploy"
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy posts the error status if an error occurs', (done) ->
sandbox = sinon.sandbox.create()
sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
updateDeployStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve) -> resolve())
)
failMessage = "Big end has gone"
sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve, reject) -> reject(new Error(failMessage)))
)
Deploy.deploy().then(->
sandbox.restore()
done(new Error("Deploy should fail"))
).catch((err)->
try
assert.strictEqual err.message, failMessage,
"Expected the right error to be thrown"
assert.isTrue updateDeployStateStub.calledOnce,
"Expected updateDeployState to be called"
updateDeployStateCall = updateDeployStateStub.getCall(0)
assert.isTrue updateDeployStateStub.calledWith('failure', failMessage),
"""
Expected updateDeployState to be called with
'failure', #{failMessage}, but called with
#{updateDeployStateCall.args}
"""
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallClient changes into the client/ dir and calls
npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.npmInstallClient().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallClient rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallClient().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallServer calls npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
Deploy.npmInstallServer().then(->
try
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallServer rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallServer().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.grunt changes to the client/ directory and calls grunt', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.grunt().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['grunt'],
"Expected spawn to be called with grunt"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.grunt rejects if grunt fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.grunt().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "grunt exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
| true | assert = require('chai').assert
helpers = require '../helpers'
sinon = require 'sinon'
Promise = require 'bluebird'
CommandRunner = require('../../lib/command_runner')
Git = require('../../lib/git')
GitHubDeploy = require('../../lib/git_hub_deploy')
Deploy = require('../../lib/deploy')
suite('Deploy')
test('.updateFromTag sets the gits username,
pulls the given tag,
runs npm install in both client and server,
notifying github at each step', (done) ->
sandbox = sinon.sandbox.create()
tagName = "corporate-banana"
gitSetEmailStub = sandbox.stub(Git, 'setEmail', ->
new Promise((resolve)-> resolve())
)
gitFetchStub = sandbox.stub(Git, 'fetch', ->
new Promise((resolve)-> resolve())
)
gitCheckoutStub = sandbox.stub(Git, 'checkout', ->
new Promise((resolve)-> resolve())
)
deploy = {
updateDeployState: sandbox.spy(->
new Promise((resolve)-> resolve())
)
}
Deploy.updateFromTag(tagName, deploy).then( ->
try
assert.isTrue(
gitSetEmailStub.calledWith('PI:EMAIL:<EMAIL>END_PI'),
"Expected the git email to be set to PI:EMAIL:<EMAIL>END_PI"
)
assert.isTrue(
gitFetchStub.calledOnce,
"Expected git fetch to be called"
)
assert.isTrue(
gitCheckoutStub.calledWith(tagName),
"Expected the fetched tag to be checked out"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', 'Fetching tags'),
"Expected github to be notified of fetching tags"
)
assert.isTrue(
deploy.updateDeployState.calledWith('pending', "Checking out tag '#{tagName}'"),
"Expected github to be notified of tag checkout "
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy starts a new deploy and checks out the given tag, then
installs the client and server NPM modules, and runs grunt before
reporting success', (done) ->
sandbox = sinon.sandbox.create()
tagName = 'twiki'
updateStub = sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve)-> resolve())
)
startGithubDeployStub = sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
deployUpdateStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve)-> resolve())
)
npmClientStub = sandbox.stub(Deploy, 'npmInstallClient', ->
new Promise((resolve) -> resolve())
)
gruntStub = sandbox.stub(Deploy, 'grunt', ->
new Promise((resolve) -> resolve())
)
npmServerStub = sandbox.stub(Deploy, 'npmInstallServer', ->
new Promise((resolve) -> resolve())
)
Deploy.deploy(tagName).then(->
try
assert.isTrue(
startGithubDeployStub.calledOnce,
"Expected GitHubDeploy::start to be called"
)
assert.isTrue(
updateStub.calledWith(tagName),
"Expected Deploy.updateFromtag to be called with the tagname"
)
assert.isTrue(
npmClientStub.calledOnce
"Expected Deploy.npmInstallClient to be called"
)
assert.isTrue(
gruntStub.calledOnce
"Expected Deploy.grunt to be called"
)
assert.isTrue(
npmServerStub.calledOnce
"Expected Deploy.npmInstallServer to be called"
)
assert.isTrue(
deployUpdateStateStub.callCount > 0,
"""Expected deploy.updateDeployState to be called at least once
(called #{deployUpdateStateStub.callCount})"""
)
assert.isTrue(
deployUpdateStateStub.calledWith('success'),
"Expected to be notified of successful deploy"
)
done()
catch err
done(err)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.deploy posts the error status if an error occurs', (done) ->
sandbox = sinon.sandbox.create()
sandbox.stub(GitHubDeploy::, 'start', ->
new Promise((resolve)->
@id = 5
resolve()
)
)
updateDeployStateStub = sandbox.stub(GitHubDeploy::, 'updateDeployState', ->
new Promise((resolve) -> resolve())
)
failMessage = "Big end has gone"
sandbox.stub(Deploy, 'updateFromTag', ->
new Promise((resolve, reject) -> reject(new Error(failMessage)))
)
Deploy.deploy().then(->
sandbox.restore()
done(new Error("Deploy should fail"))
).catch((err)->
try
assert.strictEqual err.message, failMessage,
"Expected the right error to be thrown"
assert.isTrue updateDeployStateStub.calledOnce,
"Expected updateDeployState to be called"
updateDeployStateCall = updateDeployStateStub.getCall(0)
assert.isTrue updateDeployStateStub.calledWith('failure', failMessage),
"""
Expected updateDeployState to be called with
'failure', #{failMessage}, but called with
#{updateDeployStateCall.args}
"""
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallClient changes into the client/ dir and calls
npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.npmInstallClient().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallClient rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallClient().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.npmInstallServer calls npm install', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
Deploy.npmInstallServer().then(->
try
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['npm', ['install']],
"Expected spawn to be called with npm install"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.npmInstallServer rejects if npm install fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.npmInstallServer().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "npm install exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
test('.grunt changes to the client/ directory and calls grunt', (done) ->
sandbox = sinon.sandbox.create()
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(0)
}
)
chdirStub = sandbox.stub(process, 'chdir', ->)
cwd = process.cwd()
Deploy.grunt().then(->
try
firstChdir = chdirStub.getCall(0)
assert.isNotNull firstChdir, "Expected chdir to be called at least once"
assert.strictEqual(
firstChdir.args[0],
'../client',
"Expected process to be changed to client dir"
)
spawnCall = spawnStub.getCall(0)
assert.isNotNull(spawnCall, "Expected spawn to be called at least once")
assert.deepEqual(
spawnCall.args,
['grunt'],
"Expected spawn to be called with grunt"
)
secondChdir = chdirStub.getCall(1)
assert.isNotNull secondChdir, "Expected chdir to be called a second time"
assert.strictEqual(
secondChdir.args[0],
cwd,
"Expected process to be change to server dir"
)
done()
catch assertErr
console.error assertErr.stack
done(assertErr)
finally
sandbox.restore()
).catch((err)->
sandbox.restore()
done(err)
)
)
test('.grunt rejects if grunt fails', (done) ->
sandbox = sinon.sandbox.create()
chdirStub = sandbox.stub(process, 'chdir', ->)
spawnStub = sandbox.stub(CommandRunner, 'spawn', ->
return {
on: (event, cb) ->
if event is 'close'
cb(1)
}
)
Deploy.grunt().then(->
sandbox.restore()
done(new Error("Expected npm install to fail"))
).catch((err)->
try
assert.strictEqual(err.constructor.name, "Error",
"Expected an error from reject"
)
assert.strictEqual(err.message, "grunt exited with status code 1")
done()
catch assertErr
done(assertErr)
finally
sandbox.restore()
)
)
|
[
{
"context": "ame}.sock\"\n env: 'development'\n cookie_secret: 'super secret cookie key'\n\n# get latest git commit hash and branch if in d",
"end": 217,
"score": 0.9973466992378235,
"start": 194,
"tag": "KEY",
"value": "super secret cookie key"
}
] | app/config/app.sample.coffee | cmckni3-boneyard/basic-cms | 1 | pkg = require '../../package.json'
conf =
appName: pkg.name
appUser: pkg.name
version: pkg.version
port: 3000
# socket: "/tmp/#{pkg.name}.sock"
env: 'development'
cookie_secret: 'super secret cookie key'
# get latest git commit hash and branch if in dev environment
if conf.env is 'development'
exec = require('child_process').exec
exec 'git rev-parse --short HEAD', (err, stdout, stderr) ->
conf.hash = stdout
conf.hashLink = "#{pkg.repository.web}/commit/#{stdout}"
exec 'git rev-parse --abbrev-ref HEAD', (err, stdout, stderr) ->
conf.branch = stdout
module.exports = conf
| 73092 | pkg = require '../../package.json'
conf =
appName: pkg.name
appUser: pkg.name
version: pkg.version
port: 3000
# socket: "/tmp/#{pkg.name}.sock"
env: 'development'
cookie_secret: '<KEY>'
# get latest git commit hash and branch if in dev environment
if conf.env is 'development'
exec = require('child_process').exec
exec 'git rev-parse --short HEAD', (err, stdout, stderr) ->
conf.hash = stdout
conf.hashLink = "#{pkg.repository.web}/commit/#{stdout}"
exec 'git rev-parse --abbrev-ref HEAD', (err, stdout, stderr) ->
conf.branch = stdout
module.exports = conf
| true | pkg = require '../../package.json'
conf =
appName: pkg.name
appUser: pkg.name
version: pkg.version
port: 3000
# socket: "/tmp/#{pkg.name}.sock"
env: 'development'
cookie_secret: 'PI:KEY:<KEY>END_PI'
# get latest git commit hash and branch if in dev environment
if conf.env is 'development'
exec = require('child_process').exec
exec 'git rev-parse --short HEAD', (err, stdout, stderr) ->
conf.hash = stdout
conf.hashLink = "#{pkg.repository.web}/commit/#{stdout}"
exec 'git rev-parse --abbrev-ref HEAD', (err, stdout, stderr) ->
conf.branch = stdout
module.exports = conf
|
[
{
"context": "s)\n ```\n neo\n .createNode({\n name: 'Kieve'\n })\n ```\n ###\n createNode: createNod",
"end": 410,
"score": 0.9973462820053101,
"start": 405,
"tag": "NAME",
"value": "Kieve"
},
{
"context": " ```\n neo.createUniqueNode('people', 'name', ... | src/node.coffee | kievechua/js-neo4j | 2 | utils = require './utils'
module.exports =
# ###Create node
###
Without properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node)
```
neo
.createNode()
```
With properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node-with-properties)
```
neo
.createNode({
name: 'Kieve'
})
```
###
createNode: createNode = (params) ->
utils.post(
"#{@url}/db/data/node",
params,
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
cNode: createNode
# Considering dropping this in favour of Constraint
# ###read or create unique node (create)
###
```
neo.createUniqueNode('people', 'name', 'Kieve', { age: 18 }, 'create_or_fail')
```
###
# createUniqueNode: createUniqueNode = (label, key, value, params, mode = "get_or_create") ->
# utils.post(
# "#{@url}/db/data/index/node/#{label}?uniqueness=#{mode}",
# {
# key: key
# value: value
# properties: params
# },
# (node) ->
# id = node.body.self.split('/')
# id = id[id.length - 1]
# node.body.data._id = id
# return node.body.data
# )
# cUniqueNode: createUniqueNode
# ###read node
# Note that the response contains URI/templates for the available operations for readting properties and relationships.
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-read-node)
```
neo.readNode(1)
```
###
readNode: readNode = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}",
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
rNode: readNode
# ###Delete node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-delete-node)
```
neo.deleteNode(1)
```
###
deleteNode: deleteNode = (nodeId) ->
utils.del("#{@url}/db/data/node/#{nodeId}", (node) -> node.ok)
dNode: deleteNode
# ###read properties for node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-read-properties-for-node)
```
neo.readNodeProperty(1)
```
###
readNodeProperty: readNodeProperty = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}/properties",
(node) ->
node.body._id = nodeId
return node.body
)
rNodeProperty: readNodeProperty
# ###Set property on node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-set-property-on-node)
```
neo
.updateNodeProperty(1, 'name', 'kieve')
```
###
# ###Update node properties
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-update-node-properties)
```
neo
.updateNodeProperty(1, { 'name': 'kieve' })
```
###
updateNodeProperty: updateNodeProperty = (nodeId, property, value) ->
if value
value = JSON.stringify(value)
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
value = property
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.put(
url,
value,
(node) -> node.ok
)
uNodeProperty: updateNodeProperty
# ###Delete all properties from node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-all-properties-from-node)
```
neo
.deleteNodeProperty(1)
```
###
# ###Delete a named property from a node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-a-named-property-from-a-node)
```
neo
.deleteNodeProperty(1, 'name')
```
###
deleteNodeProperty: deleteNodeProperty = (nodeId, property) ->
if property
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.del(url, (node) -> node.ok)
dNodeProperty: deleteNodeProperty
# # ###read all nodes with a label
# ###
# It crash the database, need further investigation
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-all-nodes-with-a-label)
# ```
# neo
# .readNodeByLabel('person')
# ```
# ###
# # ###read nodes by label and property
# ###
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-nodes-by-label-and-property)
# ```
# neo
# .readNodeByLabel('person', { name: 'kieve chua' })
# ```
# ###
# readNodeByLabel: readNodeByLabel = (label, property) ->
# if property
# utils.get("#{@url}/db/data/label/#{label}/nodes", property)
# else
# utils.get("#{@url}/db/data/label/#{label}/nodes")
# rNodeByLabel: readNodeByLabel
| 168126 | utils = require './utils'
module.exports =
# ###Create node
###
Without properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node)
```
neo
.createNode()
```
With properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node-with-properties)
```
neo
.createNode({
name: '<NAME>'
})
```
###
createNode: createNode = (params) ->
utils.post(
"#{@url}/db/data/node",
params,
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
cNode: createNode
# Considering dropping this in favour of Constraint
# ###read or create unique node (create)
###
```
neo.createUniqueNode('people', 'name', '<NAME>', { age: 18 }, 'create_or_fail')
```
###
# createUniqueNode: createUniqueNode = (label, key, value, params, mode = "get_or_create") ->
# utils.post(
# "#{@url}/db/data/index/node/#{label}?uniqueness=#{mode}",
# {
# key: key
# value: value
# properties: params
# },
# (node) ->
# id = node.body.self.split('/')
# id = id[id.length - 1]
# node.body.data._id = id
# return node.body.data
# )
# cUniqueNode: createUniqueNode
# ###read node
# Note that the response contains URI/templates for the available operations for readting properties and relationships.
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-read-node)
```
neo.readNode(1)
```
###
readNode: readNode = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}",
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
rNode: readNode
# ###Delete node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-delete-node)
```
neo.deleteNode(1)
```
###
deleteNode: deleteNode = (nodeId) ->
utils.del("#{@url}/db/data/node/#{nodeId}", (node) -> node.ok)
dNode: deleteNode
# ###read properties for node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-read-properties-for-node)
```
neo.readNodeProperty(1)
```
###
readNodeProperty: readNodeProperty = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}/properties",
(node) ->
node.body._id = nodeId
return node.body
)
rNodeProperty: readNodeProperty
# ###Set property on node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-set-property-on-node)
```
neo
.updateNodeProperty(1, 'name', 'kieve')
```
###
# ###Update node properties
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-update-node-properties)
```
neo
.updateNodeProperty(1, { 'name': 'kieve' })
```
###
updateNodeProperty: updateNodeProperty = (nodeId, property, value) ->
if value
value = JSON.stringify(value)
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
value = property
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.put(
url,
value,
(node) -> node.ok
)
uNodeProperty: updateNodeProperty
# ###Delete all properties from node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-all-properties-from-node)
```
neo
.deleteNodeProperty(1)
```
###
# ###Delete a named property from a node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-a-named-property-from-a-node)
```
neo
.deleteNodeProperty(1, 'name')
```
###
deleteNodeProperty: deleteNodeProperty = (nodeId, property) ->
if property
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.del(url, (node) -> node.ok)
dNodeProperty: deleteNodeProperty
# # ###read all nodes with a label
# ###
# It crash the database, need further investigation
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-all-nodes-with-a-label)
# ```
# neo
# .readNodeByLabel('person')
# ```
# ###
# # ###read nodes by label and property
# ###
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-nodes-by-label-and-property)
# ```
# neo
# .readNodeByLabel('person', { name: '<NAME>' })
# ```
# ###
# readNodeByLabel: readNodeByLabel = (label, property) ->
# if property
# utils.get("#{@url}/db/data/label/#{label}/nodes", property)
# else
# utils.get("#{@url}/db/data/label/#{label}/nodes")
# rNodeByLabel: readNodeByLabel
| true | utils = require './utils'
module.exports =
# ###Create node
###
Without properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node)
```
neo
.createNode()
```
With properties [Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-create-node-with-properties)
```
neo
.createNode({
name: 'PI:NAME:<NAME>END_PI'
})
```
###
createNode: createNode = (params) ->
utils.post(
"#{@url}/db/data/node",
params,
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
cNode: createNode
# Considering dropping this in favour of Constraint
# ###read or create unique node (create)
###
```
neo.createUniqueNode('people', 'name', 'PI:NAME:<NAME>END_PI', { age: 18 }, 'create_or_fail')
```
###
# createUniqueNode: createUniqueNode = (label, key, value, params, mode = "get_or_create") ->
# utils.post(
# "#{@url}/db/data/index/node/#{label}?uniqueness=#{mode}",
# {
# key: key
# value: value
# properties: params
# },
# (node) ->
# id = node.body.self.split('/')
# id = id[id.length - 1]
# node.body.data._id = id
# return node.body.data
# )
# cUniqueNode: createUniqueNode
# ###read node
# Note that the response contains URI/templates for the available operations for readting properties and relationships.
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-read-node)
```
neo.readNode(1)
```
###
readNode: readNode = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}",
(node) ->
id = node.body.self.split('/')
id = id[id.length - 1]
node.body.data._id = id
return node.body.data
)
rNode: readNode
# ###Delete node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-nodes.html#rest-api-delete-node)
```
neo.deleteNode(1)
```
###
deleteNode: deleteNode = (nodeId) ->
utils.del("#{@url}/db/data/node/#{nodeId}", (node) -> node.ok)
dNode: deleteNode
# ###read properties for node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-read-properties-for-node)
```
neo.readNodeProperty(1)
```
###
readNodeProperty: readNodeProperty = (nodeId) ->
utils.get(
"#{@url}/db/data/node/#{nodeId}/properties",
(node) ->
node.body._id = nodeId
return node.body
)
rNodeProperty: readNodeProperty
# ###Set property on node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-set-property-on-node)
```
neo
.updateNodeProperty(1, 'name', 'kieve')
```
###
# ###Update node properties
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-update-node-properties)
```
neo
.updateNodeProperty(1, { 'name': 'kieve' })
```
###
updateNodeProperty: updateNodeProperty = (nodeId, property, value) ->
if value
value = JSON.stringify(value)
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
value = property
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.put(
url,
value,
(node) -> node.ok
)
uNodeProperty: updateNodeProperty
# ###Delete all properties from node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-all-properties-from-node)
```
neo
.deleteNodeProperty(1)
```
###
# ###Delete a named property from a node
###
[Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-properties.html#rest-api-delete-a-named-property-from-a-node)
```
neo
.deleteNodeProperty(1, 'name')
```
###
deleteNodeProperty: deleteNodeProperty = (nodeId, property) ->
if property
url = "#{@url}/db/data/node/#{nodeId}/properties/#{property}"
else
url = "#{@url}/db/data/node/#{nodeId}/properties"
utils.del(url, (node) -> node.ok)
dNodeProperty: deleteNodeProperty
# # ###read all nodes with a label
# ###
# It crash the database, need further investigation
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-all-nodes-with-a-label)
# ```
# neo
# .readNodeByLabel('person')
# ```
# ###
# # ###read nodes by label and property
# ###
# [Details](http://docs.neo4j.org/chunked/milestone/rest-api-node-labels.html#rest-api-read-nodes-by-label-and-property)
# ```
# neo
# .readNodeByLabel('person', { name: 'PI:NAME:<NAME>END_PI' })
# ```
# ###
# readNodeByLabel: readNodeByLabel = (label, property) ->
# if property
# utils.get("#{@url}/db/data/label/#{label}/nodes", property)
# else
# utils.get("#{@url}/db/data/label/#{label}/nodes")
# rNodeByLabel: readNodeByLabel
|
[
{
"context": " filePath: mp3\n host: \"127.0.0.1\"\n port: master_info.source_p",
"end": 1349,
"score": 0.9997367858886719,
"start": 1340,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "aster_info.source_port\n ... | test/handoffs/slave.coffee | firebrandv2/FirebrandNetwork.ga | 342 | # Test handing off live listeners from one slave to another, as we would
# during a graceful restart. Test that the listener connection stays
# alive through the restart and keeps receiving data.
MasterMode = $src "modes/master"
SlaveMode = $src "modes/slave"
StreamListener = $src "util/stream_listener"
IcecastSource = $src "util/icecast_source"
mp3 = $file "mp3/mp3-44100-128-s.mp3"
MasterHelper = require "../helpers/master"
SlaveHelper = require "../helpers/slave"
RPC = require "ipc-rpc"
RPCProxy = $src "util/rpc_proxy"
cp = require "child_process"
_ = require "underscore"
util = require "util"
debug = require("debug")("sm:tests:handoffs:slave")
describe "Slave Handoffs/Respawns", ->
master_info = null
source = null
before (done) ->
# unfortunately, to test slave mode, we need a master. that means
# we get to do a lot here that hopefully gets tested elsewhere
MasterHelper.startMaster "mp3", (err,info) ->
throw err if err
master_info = info
debug "started master. Connect at: #{master_info.slave_uri}"
debug "Stream Key is #{master_info.stream_key}"
source = new IcecastSource
format: "mp3"
filePath: mp3
host: "127.0.0.1"
port: master_info.source_port
password: master_info.source_password
stream: master_info.stream_key
source.start (err) ->
throw err if err
done()
describe "Worker Respawns", ->
slave = null
slave_port = null
before (done) ->
this.timeout(10000)
SlaveHelper.startSlave master_info.slave_uri, 2, (err,slave_info) ->
throw err if err
slave = slave_info.slave
slave.once "full_strength", ->
slave_port = slave.slavePort()
done()
it "can accept a stream listener", (done) ->
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect 1500, (err) =>
throw err if err
listener.disconnect()
done()
it "does not disconnect a listener while shutting down a worker", (done) ->
this.timeout 5000
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
slave.status (err,status) ->
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
debug "listener is on worker #{ worker.id }"
# shut this worker down...
slave.shutdownWorker worker.id, (err) ->
throw err if err
debug "worker is shut down"
# our listener should still be getting data...
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "Slave Handoffs", ->
s1 = null
s1rpc = null
s2 = null
s2rpc = null
slave_config = null
slave_port = null
listener = null
before (done) ->
slave_config = [
"--mode=slave"
"--slave:master=#{master_info.slave_uri}",
"--port=0",
"--cluster=1",
"--no-log:stdout"
]
done()
describe "Initial Slave", ->
before (done) ->
this.timeout 20000
s1 = cp.fork "./index.js", slave_config
process.on "exit", -> s1.kill()
debug "Setting up s1 RPC"
new RPC s1, (err,r) ->
throw err if err
s1rpc = r
debug "Starting loop for s1 OK"
tries = 0
okF = ->
tries += 1
s1rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s1 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s1 OK success after #{tries} tries"
# FIXME: On Node 0.10, we run into an issue handling
# connections that arrive before we have a loaded
# worker. For the moment, simply make sure we have
# one here
s1rpc.request "ready", null, null, timeout:5000, (err) ->
throw err if err
debug "s1 is ready with worker"
done()
okF()
it "is listening on a port", (done) ->
s1rpc.request "slave_port", (err,port) ->
throw err if err
slave_port = port
expect(port).to.be.number
done()
it "can accept a listener connection", (done) ->
debug "Connecting listener to 127.0.0.1:#{slave_port}/#{master_info.stream_key}"
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "New Slave", ->
before (done) ->
this.timeout 20000
s2 = cp.fork "./index.js", ["--handoff",slave_config...]
process.on "exit", -> s2.kill()
new RPC s2, (err,r) ->
s2rpc = r
debug "Starting loop for s2 OK"
tries = 0
okF = ->
tries += 1
s2rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s2 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s2 OK success after #{tries} tries"
done()
okF()
it "should not immediately be listening on a port", (done) ->
s2rpc.request "slave_port", (err,port) ->
throw err if err
expect(port).to.be.undefined
done()
describe "Handoff", ->
proxy = null
before (done) ->
# we need to disconnect our listeners, and then attach our proxy
# RPC to bind the two together
s1rpc.disconnect()
s2rpc.disconnect()
proxy = new RPCProxy s1, s2
done()
it "should run a handoff and initial slave should exit", (done) ->
this.timeout 12000
# we trigger the handoff by sending USR2 to s1
s1.kill("SIGUSR2")
proxy.on "error", (msg) =>
err = new Error msg.error
err.stack = msg.err_stack
throw err
t = setTimeout =>
throw new Error "Initial slave did not exit within 10 seconds."
, 10000
s1.on "exit", (code) =>
expect(code).to.eql 0
clearTimeout t
proxy.disconnect()
new RPC s2, (err,r) ->
s2rpc = r
s2rpc.request "OK", (err,msg) ->
throw err if err
debug "s2rpc reconnected and got OK"
done()
it "new slave should show a connected listener", (done) ->
s2rpc.request "status", (err,status) ->
throw err if err
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
done()
it "listener should still receive data", (done) ->
this.timeout 3000
listener.once "bytes", ->
done()
it "new slave should accept a new listener connection", (done) ->
l2 = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
l2.connect 1500, (err) =>
throw err if err
l2.once "bytes", ->
done()
| 224467 | # Test handing off live listeners from one slave to another, as we would
# during a graceful restart. Test that the listener connection stays
# alive through the restart and keeps receiving data.
MasterMode = $src "modes/master"
SlaveMode = $src "modes/slave"
StreamListener = $src "util/stream_listener"
IcecastSource = $src "util/icecast_source"
mp3 = $file "mp3/mp3-44100-128-s.mp3"
MasterHelper = require "../helpers/master"
SlaveHelper = require "../helpers/slave"
RPC = require "ipc-rpc"
RPCProxy = $src "util/rpc_proxy"
cp = require "child_process"
_ = require "underscore"
util = require "util"
debug = require("debug")("sm:tests:handoffs:slave")
describe "Slave Handoffs/Respawns", ->
master_info = null
source = null
before (done) ->
# unfortunately, to test slave mode, we need a master. that means
# we get to do a lot here that hopefully gets tested elsewhere
MasterHelper.startMaster "mp3", (err,info) ->
throw err if err
master_info = info
debug "started master. Connect at: #{master_info.slave_uri}"
debug "Stream Key is #{master_info.stream_key}"
source = new IcecastSource
format: "mp3"
filePath: mp3
host: "127.0.0.1"
port: master_info.source_port
password: <PASSWORD>
stream: master_info.stream_key
source.start (err) ->
throw err if err
done()
describe "Worker Respawns", ->
slave = null
slave_port = null
before (done) ->
this.timeout(10000)
SlaveHelper.startSlave master_info.slave_uri, 2, (err,slave_info) ->
throw err if err
slave = slave_info.slave
slave.once "full_strength", ->
slave_port = slave.slavePort()
done()
it "can accept a stream listener", (done) ->
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect 1500, (err) =>
throw err if err
listener.disconnect()
done()
it "does not disconnect a listener while shutting down a worker", (done) ->
this.timeout 5000
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
slave.status (err,status) ->
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
debug "listener is on worker #{ worker.id }"
# shut this worker down...
slave.shutdownWorker worker.id, (err) ->
throw err if err
debug "worker is shut down"
# our listener should still be getting data...
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "Slave Handoffs", ->
s1 = null
s1rpc = null
s2 = null
s2rpc = null
slave_config = null
slave_port = null
listener = null
before (done) ->
slave_config = [
"--mode=slave"
"--slave:master=#{master_info.slave_uri}",
"--port=0",
"--cluster=1",
"--no-log:stdout"
]
done()
describe "Initial Slave", ->
before (done) ->
this.timeout 20000
s1 = cp.fork "./index.js", slave_config
process.on "exit", -> s1.kill()
debug "Setting up s1 RPC"
new RPC s1, (err,r) ->
throw err if err
s1rpc = r
debug "Starting loop for s1 OK"
tries = 0
okF = ->
tries += 1
s1rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s1 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s1 OK success after #{tries} tries"
# FIXME: On Node 0.10, we run into an issue handling
# connections that arrive before we have a loaded
# worker. For the moment, simply make sure we have
# one here
s1rpc.request "ready", null, null, timeout:5000, (err) ->
throw err if err
debug "s1 is ready with worker"
done()
okF()
it "is listening on a port", (done) ->
s1rpc.request "slave_port", (err,port) ->
throw err if err
slave_port = port
expect(port).to.be.number
done()
it "can accept a listener connection", (done) ->
debug "Connecting listener to 127.0.0.1:#{slave_port}/#{master_info.stream_key}"
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "New Slave", ->
before (done) ->
this.timeout 20000
s2 = cp.fork "./index.js", ["--handoff",slave_config...]
process.on "exit", -> s2.kill()
new RPC s2, (err,r) ->
s2rpc = r
debug "Starting loop for s2 OK"
tries = 0
okF = ->
tries += 1
s2rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s2 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s2 OK success after #{tries} tries"
done()
okF()
it "should not immediately be listening on a port", (done) ->
s2rpc.request "slave_port", (err,port) ->
throw err if err
expect(port).to.be.undefined
done()
describe "Handoff", ->
proxy = null
before (done) ->
# we need to disconnect our listeners, and then attach our proxy
# RPC to bind the two together
s1rpc.disconnect()
s2rpc.disconnect()
proxy = new RPCProxy s1, s2
done()
it "should run a handoff and initial slave should exit", (done) ->
this.timeout 12000
# we trigger the handoff by sending USR2 to s1
s1.kill("SIGUSR2")
proxy.on "error", (msg) =>
err = new Error msg.error
err.stack = msg.err_stack
throw err
t = setTimeout =>
throw new Error "Initial slave did not exit within 10 seconds."
, 10000
s1.on "exit", (code) =>
expect(code).to.eql 0
clearTimeout t
proxy.disconnect()
new RPC s2, (err,r) ->
s2rpc = r
s2rpc.request "OK", (err,msg) ->
throw err if err
debug "s2rpc reconnected and got OK"
done()
it "new slave should show a connected listener", (done) ->
s2rpc.request "status", (err,status) ->
throw err if err
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
done()
it "listener should still receive data", (done) ->
this.timeout 3000
listener.once "bytes", ->
done()
it "new slave should accept a new listener connection", (done) ->
l2 = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
l2.connect 1500, (err) =>
throw err if err
l2.once "bytes", ->
done()
| true | # Test handing off live listeners from one slave to another, as we would
# during a graceful restart. Test that the listener connection stays
# alive through the restart and keeps receiving data.
MasterMode = $src "modes/master"
SlaveMode = $src "modes/slave"
StreamListener = $src "util/stream_listener"
IcecastSource = $src "util/icecast_source"
mp3 = $file "mp3/mp3-44100-128-s.mp3"
MasterHelper = require "../helpers/master"
SlaveHelper = require "../helpers/slave"
RPC = require "ipc-rpc"
RPCProxy = $src "util/rpc_proxy"
cp = require "child_process"
_ = require "underscore"
util = require "util"
debug = require("debug")("sm:tests:handoffs:slave")
describe "Slave Handoffs/Respawns", ->
master_info = null
source = null
before (done) ->
# unfortunately, to test slave mode, we need a master. that means
# we get to do a lot here that hopefully gets tested elsewhere
MasterHelper.startMaster "mp3", (err,info) ->
throw err if err
master_info = info
debug "started master. Connect at: #{master_info.slave_uri}"
debug "Stream Key is #{master_info.stream_key}"
source = new IcecastSource
format: "mp3"
filePath: mp3
host: "127.0.0.1"
port: master_info.source_port
password: PI:PASSWORD:<PASSWORD>END_PI
stream: master_info.stream_key
source.start (err) ->
throw err if err
done()
describe "Worker Respawns", ->
slave = null
slave_port = null
before (done) ->
this.timeout(10000)
SlaveHelper.startSlave master_info.slave_uri, 2, (err,slave_info) ->
throw err if err
slave = slave_info.slave
slave.once "full_strength", ->
slave_port = slave.slavePort()
done()
it "can accept a stream listener", (done) ->
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect 1500, (err) =>
throw err if err
listener.disconnect()
done()
it "does not disconnect a listener while shutting down a worker", (done) ->
this.timeout 5000
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
slave.status (err,status) ->
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
debug "listener is on worker #{ worker.id }"
# shut this worker down...
slave.shutdownWorker worker.id, (err) ->
throw err if err
debug "worker is shut down"
# our listener should still be getting data...
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "Slave Handoffs", ->
s1 = null
s1rpc = null
s2 = null
s2rpc = null
slave_config = null
slave_port = null
listener = null
before (done) ->
slave_config = [
"--mode=slave"
"--slave:master=#{master_info.slave_uri}",
"--port=0",
"--cluster=1",
"--no-log:stdout"
]
done()
describe "Initial Slave", ->
before (done) ->
this.timeout 20000
s1 = cp.fork "./index.js", slave_config
process.on "exit", -> s1.kill()
debug "Setting up s1 RPC"
new RPC s1, (err,r) ->
throw err if err
s1rpc = r
debug "Starting loop for s1 OK"
tries = 0
okF = ->
tries += 1
s1rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s1 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s1 OK success after #{tries} tries"
# FIXME: On Node 0.10, we run into an issue handling
# connections that arrive before we have a loaded
# worker. For the moment, simply make sure we have
# one here
s1rpc.request "ready", null, null, timeout:5000, (err) ->
throw err if err
debug "s1 is ready with worker"
done()
okF()
it "is listening on a port", (done) ->
s1rpc.request "slave_port", (err,port) ->
throw err if err
slave_port = port
expect(port).to.be.number
done()
it "can accept a listener connection", (done) ->
debug "Connecting listener to 127.0.0.1:#{slave_port}/#{master_info.stream_key}"
listener = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
listener.connect (err) =>
throw err if err
debug "listener connected"
listener.once "bytes", ->
debug "listener got bytes"
done()
describe "New Slave", ->
before (done) ->
this.timeout 20000
s2 = cp.fork "./index.js", ["--handoff",slave_config...]
process.on "exit", -> s2.kill()
new RPC s2, (err,r) ->
s2rpc = r
debug "Starting loop for s2 OK"
tries = 0
okF = ->
tries += 1
s2rpc.request "OK", null, null, timeout:500, (err,msg) ->
if err
debug "s2 OK error. Tries: #{tries}"
okF() if tries < 20
else
debug "s2 OK success after #{tries} tries"
done()
okF()
it "should not immediately be listening on a port", (done) ->
s2rpc.request "slave_port", (err,port) ->
throw err if err
expect(port).to.be.undefined
done()
describe "Handoff", ->
proxy = null
before (done) ->
# we need to disconnect our listeners, and then attach our proxy
# RPC to bind the two together
s1rpc.disconnect()
s2rpc.disconnect()
proxy = new RPCProxy s1, s2
done()
it "should run a handoff and initial slave should exit", (done) ->
this.timeout 12000
# we trigger the handoff by sending USR2 to s1
s1.kill("SIGUSR2")
proxy.on "error", (msg) =>
err = new Error msg.error
err.stack = msg.err_stack
throw err
t = setTimeout =>
throw new Error "Initial slave did not exit within 10 seconds."
, 10000
s1.on "exit", (code) =>
expect(code).to.eql 0
clearTimeout t
proxy.disconnect()
new RPC s2, (err,r) ->
s2rpc = r
s2rpc.request "OK", (err,msg) ->
throw err if err
debug "s2rpc reconnected and got OK"
done()
it "new slave should show a connected listener", (done) ->
s2rpc.request "status", (err,status) ->
throw err if err
worker = _(status).find (s) ->
s.streams[master_info.stream_key].listeners == 1
throw new Error("Failed to find worker with listener") if !worker
done()
it "listener should still receive data", (done) ->
this.timeout 3000
listener.once "bytes", ->
done()
it "new slave should accept a new listener connection", (done) ->
l2 = new StreamListener "127.0.0.1", slave_port, master_info.stream_key
l2.connect 1500, (err) =>
throw err if err
l2.once "bytes", ->
done()
|
[
{
"context": "#\n# Project's main unit\n#\n# Copyright (C) 2013 Nikolay Nemshilov\n#\nclass Table extends Element\n\n constructor: (op",
"end": 64,
"score": 0.9998843669891357,
"start": 47,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | E008/table/src/table.coffee | lovely-io/lovely.io-show | 1 | #
# Project's main unit
#
# Copyright (C) 2013 Nikolay Nemshilov
#
class Table extends Element
constructor: (options)->
if options and options.nodeType is 1
element = options
options = undefined
else
element = 'table'
super element, options
header: ->
@find('thead tr')
footer: ->
@find('tfoot tr')
rows: ->
@find('tbody tr')
sort: (index, order)->
rows = @rows().sort (row_a, row_b)->
a = row_a.find('td')[index].text()
b = row_b.find('td')[index].text()
a = parseFloat(a) if /^\d+$/.test(a)
b = parseFloat(b) if /^\d+$/.test(b)
if order is 'desc'
if a < b then 1 else if a > b then -1 : 0
else # asc
if a > b then 1 else if a < b then -1 : 0
tbody = @first('tbody')
rows.forEach (row)->
tbody.append(row)
return @ # self
| 136832 | #
# Project's main unit
#
# Copyright (C) 2013 <NAME>
#
class Table extends Element
constructor: (options)->
if options and options.nodeType is 1
element = options
options = undefined
else
element = 'table'
super element, options
header: ->
@find('thead tr')
footer: ->
@find('tfoot tr')
rows: ->
@find('tbody tr')
sort: (index, order)->
rows = @rows().sort (row_a, row_b)->
a = row_a.find('td')[index].text()
b = row_b.find('td')[index].text()
a = parseFloat(a) if /^\d+$/.test(a)
b = parseFloat(b) if /^\d+$/.test(b)
if order is 'desc'
if a < b then 1 else if a > b then -1 : 0
else # asc
if a > b then 1 else if a < b then -1 : 0
tbody = @first('tbody')
rows.forEach (row)->
tbody.append(row)
return @ # self
| true | #
# Project's main unit
#
# Copyright (C) 2013 PI:NAME:<NAME>END_PI
#
class Table extends Element
constructor: (options)->
if options and options.nodeType is 1
element = options
options = undefined
else
element = 'table'
super element, options
header: ->
@find('thead tr')
footer: ->
@find('tfoot tr')
rows: ->
@find('tbody tr')
sort: (index, order)->
rows = @rows().sort (row_a, row_b)->
a = row_a.find('td')[index].text()
b = row_b.find('td')[index].text()
a = parseFloat(a) if /^\d+$/.test(a)
b = parseFloat(b) if /^\d+$/.test(b)
if order is 'desc'
if a < b then 1 else if a > b then -1 : 0
else # asc
if a > b then 1 else if a < b then -1 : 0
tbody = @first('tbody')
rows.forEach (row)->
tbody.append(row)
return @ # self
|
[
{
"context": " \"customs\",\n \"cv\",\n \"cw\",\n \"cx\",\n \"cy\",\n \"cyclone\",\n \"cz\",\n \"dagger\",\n \"dag",
"end": 7094,
"score": 0.5010324716567993,
"start": 7092,
"tag": "NAME",
"value": "cy"
},
{
"context": " \"cx\",\n \"cy\",\n \"cyclo... | spirit/core/static/spirit/scripts/src/emoji_list.coffee | Ke-xueting/Spirit | 974 | ###
Emoji list
###
stModules.emojiList = [
"+1",
"+1_tone1",
"+1_tone2",
"+1_tone3",
"+1_tone4",
"+1_tone5",
"-1",
"-1_tone1",
"-1_tone2",
"-1_tone3",
"-1_tone4",
"-1_tone5",
"100",
"1234",
"8ball",
"a",
"ab",
"abc",
"abcd",
"ac",
"accept",
"ad",
"admission_tickets",
"ae",
"aerial_tramway",
"af",
"ag",
"ai",
"airplane",
"airplane_arriving",
"airplane_departure",
"airplane_small",
"al",
"alarm_clock",
"alembic",
"alien",
"am",
"ambulance",
"amphora",
"anchor",
"angel",
"angel_tone1",
"angel_tone2",
"angel_tone3",
"angel_tone4",
"angel_tone5",
"anger",
"anger_right",
"angry",
"anguished",
"ant",
"ao",
"apple",
"aq",
"aquarius",
"ar",
"archery",
"aries",
"arrow_backward",
"arrow_double_down",
"arrow_double_up",
"arrow_down",
"arrow_down_small",
"arrow_forward",
"arrow_heading_down",
"arrow_heading_up",
"arrow_left",
"arrow_lower_left",
"arrow_lower_right",
"arrow_right",
"arrow_right_hook",
"arrow_up",
"arrow_up_down",
"arrow_up_small",
"arrow_upper_left",
"arrow_upper_right",
"arrows_clockwise",
"arrows_counterclockwise",
"art",
"articulated_lorry",
"as",
"asterisk",
"astonished",
"at",
"athletic_shoe",
"atm",
"atom",
"atom_symbol",
"au",
"aw",
"ax",
"az",
"b",
"ba",
"baby",
"baby_bottle",
"baby_chick",
"baby_symbol",
"baby_tone1",
"baby_tone2",
"baby_tone3",
"baby_tone4",
"baby_tone5",
"back",
"badminton",
"baggage_claim",
"balloon",
"ballot_box",
"ballot_box_with_ballot",
"ballot_box_with_check",
"bamboo",
"banana",
"bangbang",
"bank",
"bar_chart",
"barber",
"baseball",
"basketball",
"basketball_player",
"basketball_player_tone1",
"basketball_player_tone2",
"basketball_player_tone3",
"basketball_player_tone4",
"basketball_player_tone5",
"bath",
"bath_tone1",
"bath_tone2",
"bath_tone3",
"bath_tone4",
"bath_tone5",
"bathtub",
"battery",
"bb",
"bd",
"be",
"beach",
"beach_umbrella",
"beach_with_umbrella",
"bear",
"bed",
"bee",
"beer",
"beers",
"beetle",
"beginner",
"bell",
"bellhop",
"bellhop_bell",
"bento",
"bf",
"bg",
"bh",
"bi",
"bicyclist",
"bicyclist_tone1",
"bicyclist_tone2",
"bicyclist_tone3",
"bicyclist_tone4",
"bicyclist_tone5",
"bike",
"bikini",
"biohazard",
"biohazard_sign",
"bird",
"birthday",
"bj",
"bl",
"black_circle",
"black_joker",
"black_large_square",
"black_medium_small_square",
"black_medium_square",
"black_nib",
"black_small_square",
"black_square_button",
"blossom",
"blowfish",
"blue_book",
"blue_car",
"blue_heart",
"blush",
"bm",
"bn",
"bo",
"boar",
"bomb",
"book",
"bookmark",
"bookmark_tabs",
"books",
"boom",
"boot",
"bottle_with_popping_cork",
"bouquet",
"bow",
"bow_and_arrow",
"bow_tone1",
"bow_tone2",
"bow_tone3",
"bow_tone4",
"bow_tone5",
"bowling",
"boy",
"boy_tone1",
"boy_tone2",
"boy_tone3",
"boy_tone4",
"boy_tone5",
"bq",
"br",
"bread",
"bride_with_veil",
"bride_with_veil_tone1",
"bride_with_veil_tone2",
"bride_with_veil_tone3",
"bride_with_veil_tone4",
"bride_with_veil_tone5",
"bridge_at_night",
"briefcase",
"broken_heart",
"bs",
"bt",
"bug",
"building_construction",
"bulb",
"bullettrain_front",
"bullettrain_side",
"burrito",
"bus",
"busstop",
"bust_in_silhouette",
"busts_in_silhouette",
"bv",
"bw",
"by",
"bz",
"ca",
"cactus",
"cake",
"calendar",
"calendar_spiral",
"calling",
"camel",
"camera",
"camera_with_flash",
"camping",
"cancer",
"candle",
"candy",
"capital_abcd",
"capricorn",
"card_box",
"card_file_box",
"card_index",
"card_index_dividers",
"carousel_horse",
"cat",
"cat2",
"cc",
"cd",
"cf",
"cg",
"ch",
"chains",
"champagne",
"chart",
"chart_with_downwards_trend",
"chart_with_upwards_trend",
"checkered_flag",
"cheese",
"cheese_wedge",
"cherries",
"cherry_blossom",
"chestnut",
"chicken",
"children_crossing",
"chile",
"chipmunk",
"chocolate_bar",
"christmas_tree",
"church",
"ci",
"cinema",
"circus_tent",
"city_dusk",
"city_sunrise",
"city_sunset",
"cityscape",
"ck",
"cl",
"clap",
"clap_tone1",
"clap_tone2",
"clap_tone3",
"clap_tone4",
"clap_tone5",
"clapper",
"classical_building",
"clipboard",
"clock",
"clock1",
"clock10",
"clock1030",
"clock11",
"clock1130",
"clock12",
"clock1230",
"clock130",
"clock2",
"clock230",
"clock3",
"clock330",
"clock4",
"clock430",
"clock5",
"clock530",
"clock6",
"clock630",
"clock7",
"clock730",
"clock8",
"clock830",
"clock9",
"clock930",
"closed_book",
"closed_lock_with_key",
"closed_umbrella",
"cloud",
"cloud_lightning",
"cloud_rain",
"cloud_snow",
"cloud_tornado",
"cloud_with_lightning",
"cloud_with_rain",
"cloud_with_snow",
"cloud_with_tornado",
"clubs",
"cm",
"cn",
"co",
"cocktail",
"coffee",
"coffin",
"cold_sweat",
"comet",
"compression",
"computer",
"confetti_ball",
"confounded",
"confused",
"congo",
"congratulations",
"construction",
"construction_site",
"construction_worker",
"construction_worker_tone1",
"construction_worker_tone2",
"construction_worker_tone3",
"construction_worker_tone4",
"construction_worker_tone5",
"control_knobs",
"convenience_store",
"cookie",
"cool",
"cop",
"cop_tone1",
"cop_tone2",
"cop_tone3",
"cop_tone4",
"cop_tone5",
"copyright",
"corn",
"couch",
"couch_and_lamp",
"couple",
"couple_mm",
"couple_with_heart",
"couple_with_heart_mm",
"couple_with_heart_ww",
"couple_ww",
"couplekiss",
"couplekiss_mm",
"couplekiss_ww",
"cow",
"cow2",
"cp",
"cr",
"crab",
"crayon",
"credit_card",
"crescent_moon",
"cricket",
"cricket_bat_ball",
"crocodile",
"cross",
"crossed_flags",
"crossed_swords",
"crown",
"cruise_ship",
"cry",
"crying_cat_face",
"crystal_ball",
"cu",
"cupid",
"curly_loop",
"currency_exchange",
"curry",
"custard",
"customs",
"cv",
"cw",
"cx",
"cy",
"cyclone",
"cz",
"dagger",
"dagger_knife",
"dancer",
"dancer_tone1",
"dancer_tone2",
"dancer_tone3",
"dancer_tone4",
"dancer_tone5",
"dancers",
"dango",
"dark_sunglasses",
"dart",
"dash",
"date",
"de",
"deciduous_tree",
"department_store",
"derelict_house_building",
"desert",
"desert_island",
"desktop",
"desktop_computer",
"dg",
"diamond_shape_with_a_dot_inside",
"diamonds",
"disappointed",
"disappointed_relieved",
"dividers",
"dizzy",
"dizzy_face",
"dj",
"dk",
"dm",
"do",
"do_not_litter",
"dog",
"dog2",
"dollar",
"dolls",
"dolphin",
"door",
"double_vertical_bar",
"doughnut",
"dove",
"dove_of_peace",
"dragon",
"dragon_face",
"dress",
"dromedary_camel",
"droplet",
"dvd",
"dz",
"e-mail",
"ea",
"ear",
"ear_of_rice",
"ear_tone1",
"ear_tone2",
"ear_tone3",
"ear_tone4",
"ear_tone5",
"earth_africa",
"earth_americas",
"earth_asia",
"ec",
"ee",
"eg",
"egg",
"eggplant",
"eh",
"eight",
"eight_pointed_black_star",
"eight_spoked_asterisk",
"electric_plug",
"elephant",
"email",
"end",
"envelope",
"envelope_with_arrow",
"er",
"es",
"et",
"eu",
"euro",
"european_castle",
"european_post_office",
"evergreen_tree",
"exclamation",
"expressionless",
"eye",
"eye_in_speech_bubble",
"eyeglasses",
"eyes",
"face_with_head_bandage",
"face_with_rolling_eyes",
"face_with_thermometer",
"factory",
"fallen_leaf",
"family",
"family_mmb",
"family_mmbb",
"family_mmg",
"family_mmgb",
"family_mmgg",
"family_mwbb",
"family_mwg",
"family_mwgb",
"family_mwgg",
"family_wwb",
"family_wwbb",
"family_wwg",
"family_wwgb",
"family_wwgg",
"fast_forward",
"fax",
"fearful",
"feet",
"ferris_wheel",
"ferry",
"fi",
"field_hockey",
"file_cabinet",
"file_folder",
"film_frames",
"film_projector",
"fire",
"fire_engine",
"fireworks",
"first_quarter_moon",
"first_quarter_moon_with_face",
"fish",
"fish_cake",
"fishing_pole_and_fish",
"fist",
"fist_tone1",
"fist_tone2",
"fist_tone3",
"fist_tone4",
"fist_tone5",
"five",
"fj",
"fk",
"flag_ac",
"flag_ad",
"flag_ae",
"flag_af",
"flag_ag",
"flag_ai",
"flag_al",
"flag_am",
"flag_ao",
"flag_aq",
"flag_ar",
"flag_as",
"flag_at",
"flag_au",
"flag_aw",
"flag_ax",
"flag_az",
"flag_ba",
"flag_bb",
"flag_bd",
"flag_be",
"flag_bf",
"flag_bg",
"flag_bh",
"flag_bi",
"flag_bj",
"flag_bl",
"flag_black",
"flag_bm",
"flag_bn",
"flag_bo",
"flag_bq",
"flag_br",
"flag_bs",
"flag_bt",
"flag_bv",
"flag_bw",
"flag_by",
"flag_bz",
"flag_ca",
"flag_cc",
"flag_cd",
"flag_cf",
"flag_cg",
"flag_ch",
"flag_ci",
"flag_ck",
"flag_cl",
"flag_cm",
"flag_cn",
"flag_co",
"flag_cp",
"flag_cr",
"flag_cu",
"flag_cv",
"flag_cw",
"flag_cx",
"flag_cy",
"flag_cz",
"flag_de",
"flag_dg",
"flag_dj",
"flag_dk",
"flag_dm",
"flag_do",
"flag_dz",
"flag_ea",
"flag_ec",
"flag_ee",
"flag_eg",
"flag_eh",
"flag_er",
"flag_es",
"flag_et",
"flag_eu",
"flag_fi",
"flag_fj",
"flag_fk",
"flag_fm",
"flag_fo",
"flag_fr",
"flag_ga",
"flag_gb",
"flag_gd",
"flag_ge",
"flag_gf",
"flag_gg",
"flag_gh",
"flag_gi",
"flag_gl",
"flag_gm",
"flag_gn",
"flag_gp",
"flag_gq",
"flag_gr",
"flag_gs",
"flag_gt",
"flag_gu",
"flag_gw",
"flag_gy",
"flag_hk",
"flag_hm",
"flag_hn",
"flag_hr",
"flag_ht",
"flag_hu",
"flag_ic",
"flag_id",
"flag_ie",
"flag_il",
"flag_im",
"flag_in",
"flag_io",
"flag_iq",
"flag_ir",
"flag_is",
"flag_it",
"flag_je",
"flag_jm",
"flag_jo",
"flag_jp",
"flag_ke",
"flag_kg",
"flag_kh",
"flag_ki",
"flag_km",
"flag_kn",
"flag_kp",
"flag_kr",
"flag_kw",
"flag_ky",
"flag_kz",
"flag_la",
"flag_lb",
"flag_lc",
"flag_li",
"flag_lk",
"flag_lr",
"flag_ls",
"flag_lt",
"flag_lu",
"flag_lv",
"flag_ly",
"flag_ma",
"flag_mc",
"flag_md",
"flag_me",
"flag_mf",
"flag_mg",
"flag_mh",
"flag_mk",
"flag_ml",
"flag_mm",
"flag_mn",
"flag_mo",
"flag_mp",
"flag_mq",
"flag_mr",
"flag_ms",
"flag_mt",
"flag_mu",
"flag_mv",
"flag_mw",
"flag_mx",
"flag_my",
"flag_mz",
"flag_na",
"flag_nc",
"flag_ne",
"flag_nf",
"flag_ng",
"flag_ni",
"flag_nl",
"flag_no",
"flag_np",
"flag_nr",
"flag_nu",
"flag_nz",
"flag_om",
"flag_pa",
"flag_pe",
"flag_pf",
"flag_pg",
"flag_ph",
"flag_pk",
"flag_pl",
"flag_pm",
"flag_pn",
"flag_pr",
"flag_ps",
"flag_pt",
"flag_pw",
"flag_py",
"flag_qa",
"flag_re",
"flag_ro",
"flag_rs",
"flag_ru",
"flag_rw",
"flag_sa",
"flag_sb",
"flag_sc",
"flag_sd",
"flag_se",
"flag_sg",
"flag_sh",
"flag_si",
"flag_sj",
"flag_sk",
"flag_sl",
"flag_sm",
"flag_sn",
"flag_so",
"flag_sr",
"flag_ss",
"flag_st",
"flag_sv",
"flag_sx",
"flag_sy",
"flag_sz",
"flag_ta",
"flag_tc",
"flag_td",
"flag_tf",
"flag_tg",
"flag_th",
"flag_tj",
"flag_tk",
"flag_tl",
"flag_tm",
"flag_tn",
"flag_to",
"flag_tr",
"flag_tt",
"flag_tv",
"flag_tw",
"flag_tz",
"flag_ua",
"flag_ug",
"flag_um",
"flag_us",
"flag_uy",
"flag_uz",
"flag_va",
"flag_vc",
"flag_ve",
"flag_vg",
"flag_vi",
"flag_vn",
"flag_vu",
"flag_wf",
"flag_white",
"flag_ws",
"flag_xk",
"flag_ye",
"flag_yt",
"flag_za",
"flag_zm",
"flag_zw",
"flags",
"flame",
"flan",
"flashlight",
"fleur-de-lis",
"floppy_disk",
"flower_playing_cards",
"flushed",
"fm",
"fo",
"fog",
"foggy",
"football",
"footprints",
"fork_and_knife",
"fork_and_knife_with_plate",
"fork_knife_plate",
"fountain",
"four",
"four_leaf_clover",
"fr",
"frame_photo",
"frame_with_picture",
"free",
"fried_shrimp",
"fries",
"frog",
"frowning",
"frowning2",
"fuelpump",
"full_moon",
"full_moon_with_face",
"funeral_urn",
"ga",
"game_die",
"gb",
"gd",
"ge",
"gear",
"gem",
"gemini",
"gf",
"gg",
"gh",
"ghost",
"gi",
"gift",
"gift_heart",
"girl",
"girl_tone1",
"girl_tone2",
"girl_tone3",
"girl_tone4",
"girl_tone5",
"gl",
"globe_with_meridians",
"gm",
"gn",
"goat",
"golf",
"golfer",
"gp",
"gq",
"gr",
"grandma",
"grandma_tone1",
"grandma_tone2",
"grandma_tone3",
"grandma_tone4",
"grandma_tone5",
"grapes",
"green_apple",
"green_book",
"green_heart",
"grey_exclamation",
"grey_question",
"grimacing",
"grin",
"grinning",
"gs",
"gt",
"gu",
"guardsman",
"guardsman_tone1",
"guardsman_tone2",
"guardsman_tone3",
"guardsman_tone4",
"guardsman_tone5",
"guitar",
"gun",
"gw",
"gy",
"haircut",
"haircut_tone1",
"haircut_tone2",
"haircut_tone3",
"haircut_tone4",
"haircut_tone5",
"hamburger",
"hammer",
"hammer_and_pick",
"hammer_and_wrench",
"hammer_pick",
"hamster",
"hand_splayed",
"hand_splayed_tone1",
"hand_splayed_tone2",
"hand_splayed_tone3",
"hand_splayed_tone4",
"hand_splayed_tone5",
"handbag",
"hankey",
"hash",
"hatched_chick",
"hatching_chick",
"head_bandage",
"headphones",
"hear_no_evil",
"heart",
"heart_decoration",
"heart_exclamation",
"heart_eyes",
"heart_eyes_cat",
"heartbeat",
"heartpulse",
"hearts",
"heavy_check_mark",
"heavy_division_sign",
"heavy_dollar_sign",
"heavy_heart_exclamation_mark_ornament",
"heavy_minus_sign",
"heavy_multiplication_x",
"heavy_plus_sign",
"helicopter",
"helmet_with_cross",
"helmet_with_white_cross",
"herb",
"hibiscus",
"high_brightness",
"high_heel",
"hk",
"hm",
"hn",
"hockey",
"hole",
"homes",
"honey_pot",
"horse",
"horse_racing",
"horse_racing_tone1",
"horse_racing_tone2",
"horse_racing_tone3",
"horse_racing_tone4",
"horse_racing_tone5",
"hospital",
"hot_dog",
"hot_pepper",
"hotdog",
"hotel",
"hotsprings",
"hourglass",
"hourglass_flowing_sand",
"house",
"house_abandoned",
"house_buildings",
"house_with_garden",
"hr",
"ht",
"hu",
"hugging",
"hugging_face",
"hushed",
"ic",
"ice_cream",
"ice_skate",
"icecream",
"id",
"ideograph_advantage",
"ie",
"il",
"im",
"imp",
"in",
"inbox_tray",
"incoming_envelope",
"indonesia",
"information_desk_person",
"information_desk_person_tone1",
"information_desk_person_tone2",
"information_desk_person_tone3",
"information_desk_person_tone4",
"information_desk_person_tone5",
"information_source",
"innocent",
"interrobang",
"io",
"iphone",
"iq",
"ir",
"is",
"island",
"it",
"izakaya_lantern",
"jack_o_lantern",
"japan",
"japanese_castle",
"japanese_goblin",
"japanese_ogre",
"je",
"jeans",
"jm",
"jo",
"joy",
"joy_cat",
"joystick",
"jp",
"kaaba",
"ke",
"key",
"key2",
"keyboard",
"keycap_asterisk",
"kg",
"kh",
"ki",
"kimono",
"kiss",
"kiss_mm",
"kiss_ww",
"kissing",
"kissing_cat",
"kissing_closed_eyes",
"kissing_heart",
"kissing_smiling_eyes",
"km",
"kn",
"knife",
"koala",
"koko",
"kp",
"kr",
"kw",
"ky",
"kz",
"la",
"label",
"large_blue_circle",
"large_blue_diamond",
"large_orange_diamond",
"last_quarter_moon",
"last_quarter_moon_with_face",
"latin_cross",
"laughing",
"lb",
"lc",
"leaves",
"ledger",
"left_luggage",
"left_right_arrow",
"leftwards_arrow_with_hook",
"lemon",
"leo",
"leopard",
"level_slider",
"levitate",
"li",
"libra",
"lifter",
"lifter_tone1",
"lifter_tone2",
"lifter_tone3",
"lifter_tone4",
"lifter_tone5",
"light_rail",
"link",
"linked_paperclips",
"lion",
"lion_face",
"lips",
"lipstick",
"lk",
"lock",
"lock_with_ink_pen",
"lollipop",
"loop",
"loud_sound",
"loudspeaker",
"love_hotel",
"love_letter",
"low_brightness",
"lower_left_ballpoint_pen",
"lower_left_crayon",
"lower_left_fountain_pen",
"lower_left_paintbrush",
"lr",
"ls",
"lt",
"lu",
"lv",
"ly",
"m",
"ma",
"mag",
"mag_right",
"mahjong",
"mailbox",
"mailbox_closed",
"mailbox_with_mail",
"mailbox_with_no_mail",
"man",
"man_in_business_suit_levitating",
"man_tone1",
"man_tone2",
"man_tone3",
"man_tone4",
"man_tone5",
"man_with_gua_pi_mao",
"man_with_gua_pi_mao_tone1",
"man_with_gua_pi_mao_tone2",
"man_with_gua_pi_mao_tone3",
"man_with_gua_pi_mao_tone4",
"man_with_gua_pi_mao_tone5",
"man_with_turban",
"man_with_turban_tone1",
"man_with_turban_tone2",
"man_with_turban_tone3",
"man_with_turban_tone4",
"man_with_turban_tone5",
"mans_shoe",
"mantlepiece_clock",
"map",
"maple_leaf",
"mask",
"massage",
"massage_tone1",
"massage_tone2",
"massage_tone3",
"massage_tone4",
"massage_tone5",
"mc",
"md",
"me",
"meat_on_bone",
"medal",
"mega",
"melon",
"menorah",
"mens",
"metal",
"metal_tone1",
"metal_tone2",
"metal_tone3",
"metal_tone4",
"metal_tone5",
"metro",
"mf",
"mg",
"mh",
"microphone",
"microphone2",
"microscope",
"middle_finger",
"middle_finger_tone1",
"middle_finger_tone2",
"middle_finger_tone3",
"middle_finger_tone4",
"middle_finger_tone5",
"military_medal",
"milky_way",
"minibus",
"minidisc",
"mk",
"ml",
"mm",
"mn",
"mo",
"mobile_phone_off",
"money_mouth",
"money_mouth_face",
"money_with_wings",
"moneybag",
"monkey",
"monkey_face",
"monorail",
"mortar_board",
"mosque",
"motorboat",
"motorcycle",
"motorway",
"mount_fuji",
"mountain",
"mountain_bicyclist",
"mountain_bicyclist_tone1",
"mountain_bicyclist_tone2",
"mountain_bicyclist_tone3",
"mountain_bicyclist_tone4",
"mountain_bicyclist_tone5",
"mountain_cableway",
"mountain_railway",
"mountain_snow",
"mouse",
"mouse2",
"mouse_three_button",
"movie_camera",
"moyai",
"mp",
"mq",
"mr",
"ms",
"mt",
"mu",
"muscle",
"muscle_tone1",
"muscle_tone2",
"muscle_tone3",
"muscle_tone4",
"muscle_tone5",
"mushroom",
"musical_keyboard",
"musical_note",
"musical_score",
"mute",
"mv",
"mw",
"mx",
"my",
"mz",
"na",
"nail_care",
"nail_care_tone1",
"nail_care_tone2",
"nail_care_tone3",
"nail_care_tone4",
"nail_care_tone5",
"name_badge",
"national_park",
"nc",
"ne",
"necktie",
"negative_squared_cross_mark",
"nerd",
"nerd_face",
"neutral_face",
"new",
"new_moon",
"new_moon_with_face",
"newspaper",
"newspaper2",
"next_track",
"nf",
"ng",
"ni",
"nigeria",
"night_with_stars",
"nine",
"nl",
"no",
"no_bell",
"no_bicycles",
"no_entry",
"no_entry_sign",
"no_good",
"no_good_tone1",
"no_good_tone2",
"no_good_tone3",
"no_good_tone4",
"no_good_tone5",
"no_mobile_phones",
"no_mouth",
"no_pedestrians",
"no_smoking",
"non-potable_water",
"nose",
"nose_tone1",
"nose_tone2",
"nose_tone3",
"nose_tone4",
"nose_tone5",
"notebook",
"notebook_with_decorative_cover",
"notepad_spiral",
"notes",
"np",
"nr",
"nu",
"nut_and_bolt",
"nz",
"o",
"o2",
"ocean",
"octopus",
"oden",
"office",
"oil",
"oil_drum",
"ok",
"ok_hand",
"ok_hand_tone1",
"ok_hand_tone2",
"ok_hand_tone3",
"ok_hand_tone4",
"ok_hand_tone5",
"ok_woman",
"ok_woman_tone1",
"ok_woman_tone2",
"ok_woman_tone3",
"ok_woman_tone4",
"ok_woman_tone5",
"old_key",
"older_man",
"older_man_tone1",
"older_man_tone2",
"older_man_tone3",
"older_man_tone4",
"older_man_tone5",
"older_woman",
"older_woman_tone1",
"older_woman_tone2",
"older_woman_tone3",
"older_woman_tone4",
"older_woman_tone5",
"om",
"om_symbol",
"on",
"oncoming_automobile",
"oncoming_bus",
"oncoming_police_car",
"oncoming_taxi",
"one",
"open_file_folder",
"open_hands",
"open_hands_tone1",
"open_hands_tone2",
"open_hands_tone3",
"open_hands_tone4",
"open_hands_tone5",
"open_mouth",
"ophiuchus",
"orange_book",
"orthodox_cross",
"outbox_tray",
"ox",
"pa",
"package",
"page_facing_up",
"page_with_curl",
"pager",
"paintbrush",
"palm_tree",
"panda_face",
"paperclip",
"paperclips",
"park",
"parking",
"part_alternation_mark",
"partly_sunny",
"passenger_ship",
"passport_control",
"pause_button",
"paw_prints",
"pe",
"peace",
"peace_symbol",
"peach",
"pear",
"pen_ballpoint",
"pen_fountain",
"pencil",
"pencil2",
"penguin",
"pensive",
"performing_arts",
"persevere",
"person_frowning",
"person_frowning_tone1",
"person_frowning_tone2",
"person_frowning_tone3",
"person_frowning_tone4",
"person_frowning_tone5",
"person_with_ball",
"person_with_ball_tone1",
"person_with_ball_tone2",
"person_with_ball_tone3",
"person_with_ball_tone4",
"person_with_ball_tone5",
"person_with_blond_hair",
"person_with_blond_hair_tone1",
"person_with_blond_hair_tone2",
"person_with_blond_hair_tone3",
"person_with_blond_hair_tone4",
"person_with_blond_hair_tone5",
"person_with_pouting_face",
"person_with_pouting_face_tone1",
"person_with_pouting_face_tone2",
"person_with_pouting_face_tone3",
"person_with_pouting_face_tone4",
"person_with_pouting_face_tone5",
"pf",
"pg",
"ph",
"pick",
"pig",
"pig2",
"pig_nose",
"pill",
"pineapple",
"ping_pong",
"pisces",
"pizza",
"pk",
"pl",
"place_of_worship",
"play_pause",
"pm",
"pn",
"point_down",
"point_down_tone1",
"point_down_tone2",
"point_down_tone3",
"point_down_tone4",
"point_down_tone5",
"point_left",
"point_left_tone1",
"point_left_tone2",
"point_left_tone3",
"point_left_tone4",
"point_left_tone5",
"point_right",
"point_right_tone1",
"point_right_tone2",
"point_right_tone3",
"point_right_tone4",
"point_right_tone5",
"point_up",
"point_up_2",
"point_up_2_tone1",
"point_up_2_tone2",
"point_up_2_tone3",
"point_up_2_tone4",
"point_up_2_tone5",
"point_up_tone1",
"point_up_tone2",
"point_up_tone3",
"point_up_tone4",
"point_up_tone5",
"police_car",
"poo",
"poodle",
"poop",
"popcorn",
"post_office",
"postal_horn",
"postbox",
"potable_water",
"pouch",
"poultry_leg",
"pound",
"pouting_cat",
"pr",
"pray",
"pray_tone1",
"pray_tone2",
"pray_tone3",
"pray_tone4",
"pray_tone5",
"prayer_beads",
"previous_track",
"princess",
"princess_tone1",
"princess_tone2",
"princess_tone3",
"princess_tone4",
"princess_tone5",
"printer",
"projector",
"ps",
"pt",
"pudding",
"punch",
"punch_tone1",
"punch_tone2",
"punch_tone3",
"punch_tone4",
"punch_tone5",
"purple_heart",
"purse",
"pushpin",
"put_litter_in_its_place",
"pw",
"py",
"qa",
"question",
"rabbit",
"rabbit2",
"race_car",
"racehorse",
"racing_car",
"racing_motorcycle",
"radio",
"radio_button",
"radioactive",
"radioactive_sign",
"rage",
"railroad_track",
"railway_car",
"railway_track",
"rainbow",
"raised_hand",
"raised_hand_tone1",
"raised_hand_tone2",
"raised_hand_tone3",
"raised_hand_tone4",
"raised_hand_tone5",
"raised_hand_with_fingers_splayed",
"raised_hand_with_fingers_splayed_tone1",
"raised_hand_with_fingers_splayed_tone2",
"raised_hand_with_fingers_splayed_tone3",
"raised_hand_with_fingers_splayed_tone4",
"raised_hand_with_fingers_splayed_tone5",
"raised_hand_with_part_between_middle_and_ring_fingers",
"raised_hand_with_part_between_middle_and_ring_fingers_tone1",
"raised_hand_with_part_between_middle_and_ring_fingers_tone2",
"raised_hand_with_part_between_middle_and_ring_fingers_tone3",
"raised_hand_with_part_between_middle_and_ring_fingers_tone4",
"raised_hand_with_part_between_middle_and_ring_fingers_tone5",
"raised_hands",
"raised_hands_tone1",
"raised_hands_tone2",
"raised_hands_tone3",
"raised_hands_tone4",
"raised_hands_tone5",
"raising_hand",
"raising_hand_tone1",
"raising_hand_tone2",
"raising_hand_tone3",
"raising_hand_tone4",
"raising_hand_tone5",
"ram",
"ramen",
"rat",
"re",
"record_button",
"recycle",
"red_car",
"red_circle",
"registered",
"relaxed",
"relieved",
"reminder_ribbon",
"repeat",
"repeat_one",
"restroom",
"reversed_hand_with_middle_finger_extended",
"reversed_hand_with_middle_finger_extended_tone1",
"reversed_hand_with_middle_finger_extended_tone2",
"reversed_hand_with_middle_finger_extended_tone3",
"reversed_hand_with_middle_finger_extended_tone4",
"reversed_hand_with_middle_finger_extended_tone5",
"revolving_hearts",
"rewind",
"ribbon",
"rice",
"rice_ball",
"rice_cracker",
"rice_scene",
"right_anger_bubble",
"ring",
"ro",
"robot",
"robot_face",
"rocket",
"rolled_up_newspaper",
"roller_coaster",
"rolling_eyes",
"rooster",
"rose",
"rosette",
"rotating_light",
"round_pushpin",
"rowboat",
"rowboat_tone1",
"rowboat_tone2",
"rowboat_tone3",
"rowboat_tone4",
"rowboat_tone5",
"rs",
"ru",
"rugby_football",
"runner",
"runner_tone1",
"runner_tone2",
"runner_tone3",
"runner_tone4",
"runner_tone5",
"running_shirt_with_sash",
"rw",
"sa",
"sagittarius",
"sailboat",
"sake",
"sandal",
"santa",
"santa_tone1",
"santa_tone2",
"santa_tone3",
"santa_tone4",
"santa_tone5",
"satellite",
"satellite_orbital",
"satisfied",
"saudi",
"saudiarabia",
"saxophone",
"sb",
"sc",
"scales",
"school",
"school_satchel",
"scissors",
"scorpion",
"scorpius",
"scream",
"scream_cat",
"scroll",
"sd",
"se",
"seat",
"secret",
"see_no_evil",
"seedling",
"seven",
"sg",
"sh",
"shamrock",
"shaved_ice",
"sheep",
"shell",
"shield",
"shinto_shrine",
"ship",
"shirt",
"shit",
"shopping_bags",
"shower",
"si",
"sign_of_the_horns",
"sign_of_the_horns_tone1",
"sign_of_the_horns_tone2",
"sign_of_the_horns_tone3",
"sign_of_the_horns_tone4",
"sign_of_the_horns_tone5",
"signal_strength",
"six",
"six_pointed_star",
"sj",
"sk",
"skeleton",
"ski",
"skier",
"skull",
"skull_and_crossbones",
"skull_crossbones",
"sl",
"sleeping",
"sleeping_accommodation",
"sleepy",
"sleuth_or_spy",
"sleuth_or_spy_tone1",
"sleuth_or_spy_tone2",
"sleuth_or_spy_tone3",
"sleuth_or_spy_tone4",
"sleuth_or_spy_tone5",
"slight_frown",
"slight_smile",
"slightly_frowning_face",
"slightly_smiling_face",
"slot_machine",
"sm",
"small_airplane",
"small_blue_diamond",
"small_orange_diamond",
"small_red_triangle",
"small_red_triangle_down",
"smile",
"smile_cat",
"smiley",
"smiley_cat",
"smiling_imp",
"smirk",
"smirk_cat",
"smoking",
"sn",
"snail",
"snake",
"snow_capped_mountain",
"snowboarder",
"snowflake",
"snowman",
"snowman2",
"so",
"sob",
"soccer",
"soon",
"sos",
"sound",
"space_invader",
"spades",
"spaghetti",
"sparkle",
"sparkler",
"sparkles",
"sparkling_heart",
"speak_no_evil",
"speaker",
"speaking_head",
"speaking_head_in_silhouette",
"speech_balloon",
"speedboat",
"spider",
"spider_web",
"spiral_calendar_pad",
"spiral_note_pad",
"sports_medal",
"spy",
"spy_tone1",
"spy_tone2",
"spy_tone3",
"spy_tone4",
"spy_tone5",
"sr",
"ss",
"st",
"stadium",
"star",
"star2",
"star_and_crescent",
"star_of_david",
"stars",
"station",
"statue_of_liberty",
"steam_locomotive",
"stew",
"stop_button",
"stopwatch",
"straight_ruler",
"strawberry",
"stuck_out_tongue",
"stuck_out_tongue_closed_eyes",
"stuck_out_tongue_winking_eye",
"studio_microphone",
"sun_with_face",
"sunflower",
"sunglasses",
"sunny",
"sunrise",
"sunrise_over_mountains",
"surfer",
"surfer_tone1",
"surfer_tone2",
"surfer_tone3",
"surfer_tone4",
"surfer_tone5",
"sushi",
"suspension_railway",
"sv",
"sweat",
"sweat_drops",
"sweat_smile",
"sweet_potato",
"swimmer",
"swimmer_tone1",
"swimmer_tone2",
"swimmer_tone3",
"swimmer_tone4",
"swimmer_tone5",
"sx",
"sy",
"symbols",
"synagogue",
"syringe",
"sz",
"ta",
"table_tennis",
"taco",
"tada",
"tanabata_tree",
"tangerine",
"taurus",
"taxi",
"tc",
"td",
"tea",
"telephone",
"telephone_receiver",
"telescope",
"ten",
"tennis",
"tent",
"tf",
"tg",
"th",
"thermometer",
"thermometer_face",
"thinking",
"thinking_face",
"thought_balloon",
"three",
"three_button_mouse",
"thumbdown",
"thumbdown_tone1",
"thumbdown_tone2",
"thumbdown_tone3",
"thumbdown_tone4",
"thumbdown_tone5",
"thumbsdown",
"thumbsdown_tone1",
"thumbsdown_tone2",
"thumbsdown_tone3",
"thumbsdown_tone4",
"thumbsdown_tone5",
"thumbsup",
"thumbsup_tone1",
"thumbsup_tone2",
"thumbsup_tone3",
"thumbsup_tone4",
"thumbsup_tone5",
"thumbup",
"thumbup_tone1",
"thumbup_tone2",
"thumbup_tone3",
"thumbup_tone4",
"thumbup_tone5",
"thunder_cloud_and_rain",
"thunder_cloud_rain",
"ticket",
"tickets",
"tiger",
"tiger2",
"timer",
"timer_clock",
"tired_face",
"tj",
"tk",
"tl",
"tm",
"tn",
"to",
"toilet",
"tokyo_tower",
"tomato",
"tongue",
"tools",
"top",
"tophat",
"tr",
"track_next",
"track_previous",
"trackball",
"tractor",
"traffic_light",
"train",
"train2",
"tram",
"triangular_flag_on_post",
"triangular_ruler",
"trident",
"triumph",
"trolleybus",
"trophy",
"tropical_drink",
"tropical_fish",
"truck",
"trumpet",
"tt",
"tulip",
"turkey",
"turkmenistan",
"turtle",
"tuvalu",
"tv",
"tw",
"twisted_rightwards_arrows",
"two",
"two_hearts",
"two_men_holding_hands",
"two_women_holding_hands",
"tz",
"u5272",
"u5408",
"u55b6",
"u6307",
"u6708",
"u6709",
"u6e80",
"u7121",
"u7533",
"u7981",
"u7a7a",
"ua",
"ug",
"um",
"umbrella",
"umbrella2",
"umbrella_on_ground",
"unamused",
"underage",
"unicorn",
"unicorn_face",
"unlock",
"up",
"upside_down",
"upside_down_face",
"urn",
"us",
"uy",
"uz",
"v",
"v_tone1",
"v_tone2",
"v_tone3",
"v_tone4",
"v_tone5",
"va",
"vc",
"ve",
"vertical_traffic_light",
"vg",
"vhs",
"vi",
"vibration_mode",
"video_camera",
"video_game",
"violin",
"virgo",
"vn",
"volcano",
"volleyball",
"vs",
"vu",
"vulcan",
"vulcan_tone1",
"vulcan_tone2",
"vulcan_tone3",
"vulcan_tone4",
"vulcan_tone5",
"walking",
"walking_tone1",
"walking_tone2",
"walking_tone3",
"walking_tone4",
"walking_tone5",
"waning_crescent_moon",
"waning_gibbous_moon",
"warning",
"wastebasket",
"watch",
"water_buffalo",
"watermelon",
"wave",
"wave_tone1",
"wave_tone2",
"wave_tone3",
"wave_tone4",
"wave_tone5",
"waving_black_flag",
"waving_white_flag",
"wavy_dash",
"waxing_crescent_moon",
"waxing_gibbous_moon",
"wc",
"weary",
"wedding",
"weight_lifter",
"weight_lifter_tone1",
"weight_lifter_tone2",
"weight_lifter_tone3",
"weight_lifter_tone4",
"weight_lifter_tone5",
"wf",
"whale",
"whale2",
"wheel_of_dharma",
"wheelchair",
"white_check_mark",
"white_circle",
"white_flower",
"white_frowning_face",
"white_large_square",
"white_medium_small_square",
"white_medium_square",
"white_small_square",
"white_square_button",
"white_sun_behind_cloud",
"white_sun_behind_cloud_with_rain",
"white_sun_cloud",
"white_sun_rain_cloud",
"white_sun_small_cloud",
"white_sun_with_small_cloud",
"wind_blowing_face",
"wind_chime",
"wine_glass",
"wink",
"wolf",
"woman",
"woman_tone1",
"woman_tone2",
"woman_tone3",
"woman_tone4",
"woman_tone5",
"womans_clothes",
"womans_hat",
"womens",
"world_map",
"worried",
"worship_symbol",
"wrench",
"writing_hand",
"writing_hand_tone1",
"writing_hand_tone2",
"writing_hand_tone3",
"writing_hand_tone4",
"writing_hand_tone5",
"ws",
"x",
"xk",
"ye",
"yellow_heart",
"yen",
"yin_yang",
"yt",
"yum",
"za",
"zap",
"zero",
"zipper_mouth",
"zipper_mouth_face",
"zm",
"zw",
"zzz"
]
###
Emoji list
###
stModules.emojiList = [
"+1",
"+1_tone1",
"+1_tone2",
"+1_tone3",
"+1_tone4",
"+1_tone5",
"-1",
"-1_tone1",
"-1_tone2",
"-1_tone3",
"-1_tone4",
"-1_tone5",
"100",
"1234",
"8ball",
"a",
"ab",
"abc",
"abcd",
"ac",
"accept",
"ad",
"admission_tickets",
"ae",
"aerial_tramway",
"af",
"ag",
"ai",
"airplane",
"airplane_arriving",
"airplane_departure",
"airplane_small",
"al",
"alarm_clock",
"alembic",
"alien",
"am",
"ambulance",
"amphora",
"anchor",
"angel",
"angel_tone1",
"angel_tone2",
"angel_tone3",
"angel_tone4",
"angel_tone5",
"anger",
"anger_right",
"angry",
"anguished",
"ant",
"ao",
"apple",
"aq",
"aquarius",
"ar",
"archery",
"aries",
"arrow_backward",
"arrow_double_down",
"arrow_double_up",
"arrow_down",
"arrow_down_small",
"arrow_forward",
"arrow_heading_down",
"arrow_heading_up",
"arrow_left",
"arrow_lower_left",
"arrow_lower_right",
"arrow_right",
"arrow_right_hook",
"arrow_up",
"arrow_up_down",
"arrow_up_small",
"arrow_upper_left",
"arrow_upper_right",
"arrows_clockwise",
"arrows_counterclockwise",
"art",
"articulated_lorry",
"as",
"asterisk",
"astonished",
"at",
"athletic_shoe",
"atm",
"atom",
"atom_symbol",
"au",
"aw",
"ax",
"az",
"b",
"ba",
"baby",
"baby_bottle",
"baby_chick",
"baby_symbol",
"baby_tone1",
"baby_tone2",
"baby_tone3",
"baby_tone4",
"baby_tone5",
"back",
"badminton",
"baggage_claim",
"balloon",
"ballot_box",
"ballot_box_with_ballot",
"ballot_box_with_check",
"bamboo",
"banana",
"bangbang",
"bank",
"bar_chart",
"barber",
"baseball",
"basketball",
"basketball_player",
"basketball_player_tone1",
"basketball_player_tone2",
"basketball_player_tone3",
"basketball_player_tone4",
"basketball_player_tone5",
"bath",
"bath_tone1",
"bath_tone2",
"bath_tone3",
"bath_tone4",
"bath_tone5",
"bathtub",
"battery",
"bb",
"bd",
"be",
"beach",
"beach_umbrella",
"beach_with_umbrella",
"bear",
"bed",
"bee",
"beer",
"beers",
"beetle",
"beginner",
"bell",
"bellhop",
"bellhop_bell",
"bento",
"bf",
"bg",
"bh",
"bi",
"bicyclist",
"bicyclist_tone1",
"bicyclist_tone2",
"bicyclist_tone3",
"bicyclist_tone4",
"bicyclist_tone5",
"bike",
"bikini",
"biohazard",
"biohazard_sign",
"bird",
"birthday",
"bj",
"bl",
"black_circle",
"black_joker",
"black_large_square",
"black_medium_small_square",
"black_medium_square",
"black_nib",
"black_small_square",
"black_square_button",
"blossom",
"blowfish",
"blue_book",
"blue_car",
"blue_heart",
"blush",
"bm",
"bn",
"bo",
"boar",
"bomb",
"book",
"bookmark",
"bookmark_tabs",
"books",
"boom",
"boot",
"bottle_with_popping_cork",
"bouquet",
"bow",
"bow_and_arrow",
"bow_tone1",
"bow_tone2",
"bow_tone3",
"bow_tone4",
"bow_tone5",
"bowling",
"boy",
"boy_tone1",
"boy_tone2",
"boy_tone3",
"boy_tone4",
"boy_tone5",
"bq",
"br",
"bread",
"bride_with_veil",
"bride_with_veil_tone1",
"bride_with_veil_tone2",
"bride_with_veil_tone3",
"bride_with_veil_tone4",
"bride_with_veil_tone5",
"bridge_at_night",
"briefcase",
"broken_heart",
"bs",
"bt",
"bug",
"building_construction",
"bulb",
"bullettrain_front",
"bullettrain_side",
"burrito",
"bus",
"busstop",
"bust_in_silhouette",
"busts_in_silhouette",
"bv",
"bw",
"by",
"bz",
"ca",
"cactus",
"cake",
"calendar",
"calendar_spiral",
"calling",
"camel",
"camera",
"camera_with_flash",
"camping",
"cancer",
"candle",
"candy",
"capital_abcd",
"capricorn",
"card_box",
"card_file_box",
"card_index",
"card_index_dividers",
"carousel_horse",
"cat",
"cat2",
"cc",
"cd",
"cf",
"cg",
"ch",
"chains",
"champagne",
"chart",
"chart_with_downwards_trend",
"chart_with_upwards_trend",
"checkered_flag",
"cheese",
"cheese_wedge",
"cherries",
"cherry_blossom",
"chestnut",
"chicken",
"children_crossing",
"chile",
"chipmunk",
"chocolate_bar",
"christmas_tree",
"church",
"ci",
"cinema",
"circus_tent",
"city_dusk",
"city_sunrise",
"city_sunset",
"cityscape",
"ck",
"cl",
"clap",
"clap_tone1",
"clap_tone2",
"clap_tone3",
"clap_tone4",
"clap_tone5",
"clapper",
"classical_building",
"clipboard",
"clock",
"clock1",
"clock10",
"clock1030",
"clock11",
"clock1130",
"clock12",
"clock1230",
"clock130",
"clock2",
"clock230",
"clock3",
"clock330",
"clock4",
"clock430",
"clock5",
"clock530",
"clock6",
"clock630",
"clock7",
"clock730",
"clock8",
"clock830",
"clock9",
"clock930",
"closed_book",
"closed_lock_with_key",
"closed_umbrella",
"cloud",
"cloud_lightning",
"cloud_rain",
"cloud_snow",
"cloud_tornado",
"cloud_with_lightning",
"cloud_with_rain",
"cloud_with_snow",
"cloud_with_tornado",
"clubs",
"cm",
"cn",
"co",
"cocktail",
"coffee",
"coffin",
"cold_sweat",
"comet",
"compression",
"computer",
"confetti_ball",
"confounded",
"confused",
"congo",
"congratulations",
"construction",
"construction_site",
"construction_worker",
"construction_worker_tone1",
"construction_worker_tone2",
"construction_worker_tone3",
"construction_worker_tone4",
"construction_worker_tone5",
"control_knobs",
"convenience_store",
"cookie",
"cool",
"cop",
"cop_tone1",
"cop_tone2",
"cop_tone3",
"cop_tone4",
"cop_tone5",
"copyright",
"corn",
"couch",
"couch_and_lamp",
"couple",
"couple_mm",
"couple_with_heart",
"couple_with_heart_mm",
"couple_with_heart_ww",
"couple_ww",
"couplekiss",
"couplekiss_mm",
"couplekiss_ww",
"cow",
"cow2",
"cp",
"cr",
"crab",
"crayon",
"credit_card",
"crescent_moon",
"cricket",
"cricket_bat_ball",
"crocodile",
"cross",
"crossed_flags",
"crossed_swords",
"crown",
"cruise_ship",
"cry",
"crying_cat_face",
"crystal_ball",
"cu",
"cupid",
"curly_loop",
"currency_exchange",
"curry",
"custard",
"customs",
"cv",
"cw",
"cx",
"cy",
"cyclone",
"cz",
"dagger",
"dagger_knife",
"dancer",
"dancer_tone1",
"dancer_tone2",
"dancer_tone3",
"dancer_tone4",
"dancer_tone5",
"dancers",
"dango",
"dark_sunglasses",
"dart",
"dash",
"date",
"de",
"deciduous_tree",
"department_store",
"derelict_house_building",
"desert",
"desert_island",
"desktop",
"desktop_computer",
"dg",
"diamond_shape_with_a_dot_inside",
"diamonds",
"disappointed",
"disappointed_relieved",
"dividers",
"dizzy",
"dizzy_face",
"dj",
"dk",
"dm",
"do",
"do_not_litter",
"dog",
"dog2",
"dollar",
"dolls",
"dolphin",
"door",
"double_vertical_bar",
"doughnut",
"dove",
"dove_of_peace",
"dragon",
"dragon_face",
"dress",
"dromedary_camel",
"droplet",
"dvd",
"dz",
"e-mail",
"ea",
"ear",
"ear_of_rice",
"ear_tone1",
"ear_tone2",
"ear_tone3",
"ear_tone4",
"ear_tone5",
"earth_africa",
"earth_americas",
"earth_asia",
"ec",
"ee",
"eg",
"egg",
"eggplant",
"eh",
"eight",
"eight_pointed_black_star",
"eight_spoked_asterisk",
"electric_plug",
"elephant",
"email",
"end",
"envelope",
"envelope_with_arrow",
"er",
"es",
"et",
"eu",
"euro",
"european_castle",
"european_post_office",
"evergreen_tree",
"exclamation",
"expressionless",
"eye",
"eye_in_speech_bubble",
"eyeglasses",
"eyes",
"face_with_head_bandage",
"face_with_rolling_eyes",
"face_with_thermometer",
"factory",
"fallen_leaf",
"family",
"family_mmb",
"family_mmbb",
"family_mmg",
"family_mmgb",
"family_mmgg",
"family_mwbb",
"family_mwg",
"family_mwgb",
"family_mwgg",
"family_wwb",
"family_wwbb",
"family_wwg",
"family_wwgb",
"family_wwgg",
"fast_forward",
"fax",
"fearful",
"feet",
"ferris_wheel",
"ferry",
"fi",
"field_hockey",
"file_cabinet",
"file_folder",
"film_frames",
"film_projector",
"fire",
"fire_engine",
"fireworks",
"first_quarter_moon",
"first_quarter_moon_with_face",
"fish",
"fish_cake",
"fishing_pole_and_fish",
"fist",
"fist_tone1",
"fist_tone2",
"fist_tone3",
"fist_tone4",
"fist_tone5",
"five",
"fj",
"fk",
"flag_ac",
"flag_ad",
"flag_ae",
"flag_af",
"flag_ag",
"flag_ai",
"flag_al",
"flag_am",
"flag_ao",
"flag_aq",
"flag_ar",
"flag_as",
"flag_at",
"flag_au",
"flag_aw",
"flag_ax",
"flag_az",
"flag_ba",
"flag_bb",
"flag_bd",
"flag_be",
"flag_bf",
"flag_bg",
"flag_bh",
"flag_bi",
"flag_bj",
"flag_bl",
"flag_black",
"flag_bm",
"flag_bn",
"flag_bo",
"flag_bq",
"flag_br",
"flag_bs",
"flag_bt",
"flag_bv",
"flag_bw",
"flag_by",
"flag_bz",
"flag_ca",
"flag_cc",
"flag_cd",
"flag_cf",
"flag_cg",
"flag_ch",
"flag_ci",
"flag_ck",
"flag_cl",
"flag_cm",
"flag_cn",
"flag_co",
"flag_cp",
"flag_cr",
"flag_cu",
"flag_cv",
"flag_cw",
"flag_cx",
"flag_cy",
"flag_cz",
"flag_de",
"flag_dg",
"flag_dj",
"flag_dk",
"flag_dm",
"flag_do",
"flag_dz",
"flag_ea",
"flag_ec",
"flag_ee",
"flag_eg",
"flag_eh",
"flag_er",
"flag_es",
"flag_et",
"flag_eu",
"flag_fi",
"flag_fj",
"flag_fk",
"flag_fm",
"flag_fo",
"flag_fr",
"flag_ga",
"flag_gb",
"flag_gd",
"flag_ge",
"flag_gf",
"flag_gg",
"flag_gh",
"flag_gi",
"flag_gl",
"flag_gm",
"flag_gn",
"flag_gp",
"flag_gq",
"flag_gr",
"flag_gs",
"flag_gt",
"flag_gu",
"flag_gw",
"flag_gy",
"flag_hk",
"flag_hm",
"flag_hn",
"flag_hr",
"flag_ht",
"flag_hu",
"flag_ic",
"flag_id",
"flag_ie",
"flag_il",
"flag_im",
"flag_in",
"flag_io",
"flag_iq",
"flag_ir",
"flag_is",
"flag_it",
"flag_je",
"flag_jm",
"flag_jo",
"flag_jp",
"flag_ke",
"flag_kg",
"flag_kh",
"flag_ki",
"flag_km",
"flag_kn",
"flag_kp",
"flag_kr",
"flag_kw",
"flag_ky",
"flag_kz",
"flag_la",
"flag_lb",
"flag_lc",
"flag_li",
"flag_lk",
"flag_lr",
"flag_ls",
"flag_lt",
"flag_lu",
"flag_lv",
"flag_ly",
"flag_ma",
"flag_mc",
"flag_md",
"flag_me",
"flag_mf",
"flag_mg",
"flag_mh",
"flag_mk",
"flag_ml",
"flag_mm",
"flag_mn",
"flag_mo",
"flag_mp",
"flag_mq",
"flag_mr",
"flag_ms",
"flag_mt",
"flag_mu",
"flag_mv",
"flag_mw",
"flag_mx",
"flag_my",
"flag_mz",
"flag_na",
"flag_nc",
"flag_ne",
"flag_nf",
"flag_ng",
"flag_ni",
"flag_nl",
"flag_no",
"flag_np",
"flag_nr",
"flag_nu",
"flag_nz",
"flag_om",
"flag_pa",
"flag_pe",
"flag_pf",
"flag_pg",
"flag_ph",
"flag_pk",
"flag_pl",
"flag_pm",
"flag_pn",
"flag_pr",
"flag_ps",
"flag_pt",
"flag_pw",
"flag_py",
"flag_qa",
"flag_re",
"flag_ro",
"flag_rs",
"flag_ru",
"flag_rw",
"flag_sa",
"flag_sb",
"flag_sc",
"flag_sd",
"flag_se",
"flag_sg",
"flag_sh",
"flag_si",
"flag_sj",
"flag_sk",
"flag_sl",
"flag_sm",
"flag_sn",
"flag_so",
"flag_sr",
"flag_ss",
"flag_st",
"flag_sv",
"flag_sx",
"flag_sy",
"flag_sz",
"flag_ta",
"flag_tc",
"flag_td",
"flag_tf",
"flag_tg",
"flag_th",
"flag_tj",
"flag_tk",
"flag_tl",
"flag_tm",
"flag_tn",
"flag_to",
"flag_tr",
"flag_tt",
"flag_tv",
"flag_tw",
"flag_tz",
"flag_ua",
"flag_ug",
"flag_um",
"flag_us",
"flag_uy",
"flag_uz",
"flag_va",
"flag_vc",
"flag_ve",
"flag_vg",
"flag_vi",
"flag_vn",
"flag_vu",
"flag_wf",
"flag_white",
"flag_ws",
"flag_xk",
"flag_ye",
"flag_yt",
"flag_za",
"flag_zm",
"flag_zw",
"flags",
"flame",
"flan",
"flashlight",
"fleur-de-lis",
"floppy_disk",
"flower_playing_cards",
"flushed",
"fm",
"fo",
"fog",
"foggy",
"football",
"footprints",
"fork_and_knife",
"fork_and_knife_with_plate",
"fork_knife_plate",
"fountain",
"four",
"four_leaf_clover",
"fr",
"frame_photo",
"frame_with_picture",
"free",
"fried_shrimp",
"fries",
"frog",
"frowning",
"frowning2",
"fuelpump",
"full_moon",
"full_moon_with_face",
"funeral_urn",
"ga",
"game_die",
"gb",
"gd",
"ge",
"gear",
"gem",
"gemini",
"gf",
"gg",
"gh",
"ghost",
"gi",
"gift",
"gift_heart",
"girl",
"girl_tone1",
"girl_tone2",
"girl_tone3",
"girl_tone4",
"girl_tone5",
"gl",
"globe_with_meridians",
"gm",
"gn",
"goat",
"golf",
"golfer",
"gp",
"gq",
"gr",
"grandma",
"grandma_tone1",
"grandma_tone2",
"grandma_tone3",
"grandma_tone4",
"grandma_tone5",
"grapes",
"green_apple",
"green_book",
"green_heart",
"grey_exclamation",
"grey_question",
"grimacing",
"grin",
"grinning",
"gs",
"gt",
"gu",
"guardsman",
"guardsman_tone1",
"guardsman_tone2",
"guardsman_tone3",
"guardsman_tone4",
"guardsman_tone5",
"guitar",
"gun",
"gw",
"gy",
"haircut",
"haircut_tone1",
"haircut_tone2",
"haircut_tone3",
"haircut_tone4",
"haircut_tone5",
"hamburger",
"hammer",
"hammer_and_pick",
"hammer_and_wrench",
"hammer_pick",
"hamster",
"hand_splayed",
"hand_splayed_tone1",
"hand_splayed_tone2",
"hand_splayed_tone3",
"hand_splayed_tone4",
"hand_splayed_tone5",
"handbag",
"hankey",
"hash",
"hatched_chick",
"hatching_chick",
"head_bandage",
"headphones",
"hear_no_evil",
"heart",
"heart_decoration",
"heart_exclamation",
"heart_eyes",
"heart_eyes_cat",
"heartbeat",
"heartpulse",
"hearts",
"heavy_check_mark",
"heavy_division_sign",
"heavy_dollar_sign",
"heavy_heart_exclamation_mark_ornament",
"heavy_minus_sign",
"heavy_multiplication_x",
"heavy_plus_sign",
"helicopter",
"helmet_with_cross",
"helmet_with_white_cross",
"herb",
"hibiscus",
"high_brightness",
"high_heel",
"hk",
"hm",
"hn",
"hockey",
"hole",
"homes",
"honey_pot",
"horse",
"horse_racing",
"horse_racing_tone1",
"horse_racing_tone2",
"horse_racing_tone3",
"horse_racing_tone4",
"horse_racing_tone5",
"hospital",
"hot_dog",
"hot_pepper",
"hotdog",
"hotel",
"hotsprings",
"hourglass",
"hourglass_flowing_sand",
"house",
"house_abandoned",
"house_buildings",
"house_with_garden",
"hr",
"ht",
"hu",
"hugging",
"hugging_face",
"hushed",
"ic",
"ice_cream",
"ice_skate",
"icecream",
"id",
"ideograph_advantage",
"ie",
"il",
"im",
"imp",
"in",
"inbox_tray",
"incoming_envelope",
"indonesia",
"information_desk_person",
"information_desk_person_tone1",
"information_desk_person_tone2",
"information_desk_person_tone3",
"information_desk_person_tone4",
"information_desk_person_tone5",
"information_source",
"innocent",
"interrobang",
"io",
"iphone",
"iq",
"ir",
"is",
"island",
"it",
"izakaya_lantern",
"jack_o_lantern",
"japan",
"japanese_castle",
"japanese_goblin",
"japanese_ogre",
"je",
"jeans",
"jm",
"jo",
"joy",
"joy_cat",
"joystick",
"jp",
"kaaba",
"ke",
"key",
"key2",
"keyboard",
"keycap_asterisk",
"kg",
"kh",
"ki",
"kimono",
"kiss",
"kiss_mm",
"kiss_ww",
"kissing",
"kissing_cat",
"kissing_closed_eyes",
"kissing_heart",
"kissing_smiling_eyes",
"km",
"kn",
"knife",
"koala",
"koko",
"kp",
"kr",
"kw",
"ky",
"kz",
"la",
"label",
"large_blue_circle",
"large_blue_diamond",
"large_orange_diamond",
"last_quarter_moon",
"last_quarter_moon_with_face",
"latin_cross",
"laughing",
"lb",
"lc",
"leaves",
"ledger",
"left_luggage",
"left_right_arrow",
"leftwards_arrow_with_hook",
"lemon",
"leo",
"leopard",
"level_slider",
"levitate",
"li",
"libra",
"lifter",
"lifter_tone1",
"lifter_tone2",
"lifter_tone3",
"lifter_tone4",
"lifter_tone5",
"light_rail",
"link",
"linked_paperclips",
"lion",
"lion_face",
"lips",
"lipstick",
"lk",
"lock",
"lock_with_ink_pen",
"lollipop",
"loop",
"loud_sound",
"loudspeaker",
"love_hotel",
"love_letter",
"low_brightness",
"lower_left_ballpoint_pen",
"lower_left_crayon",
"lower_left_fountain_pen",
"lower_left_paintbrush",
"lr",
"ls",
"lt",
"lu",
"lv",
"ly",
"m",
"ma",
"mag",
"mag_right",
"mahjong",
"mailbox",
"mailbox_closed",
"mailbox_with_mail",
"mailbox_with_no_mail",
"man",
"man_in_business_suit_levitating",
"man_tone1",
"man_tone2",
"man_tone3",
"man_tone4",
"man_tone5",
"man_with_gua_pi_mao",
"man_with_gua_pi_mao_tone1",
"man_with_gua_pi_mao_tone2",
"man_with_gua_pi_mao_tone3",
"man_with_gua_pi_mao_tone4",
"man_with_gua_pi_mao_tone5",
"man_with_turban",
"man_with_turban_tone1",
"man_with_turban_tone2",
"man_with_turban_tone3",
"man_with_turban_tone4",
"man_with_turban_tone5",
"mans_shoe",
"mantlepiece_clock",
"map",
"maple_leaf",
"mask",
"massage",
"massage_tone1",
"massage_tone2",
"massage_tone3",
"massage_tone4",
"massage_tone5",
"mc",
"md",
"me",
"meat_on_bone",
"medal",
"mega",
"melon",
"menorah",
"mens",
"metal",
"metal_tone1",
"metal_tone2",
"metal_tone3",
"metal_tone4",
"metal_tone5",
"metro",
"mf",
"mg",
"mh",
"microphone",
"microphone2",
"microscope",
"middle_finger",
"middle_finger_tone1",
"middle_finger_tone2",
"middle_finger_tone3",
"middle_finger_tone4",
"middle_finger_tone5",
"military_medal",
"milky_way",
"minibus",
"minidisc",
"mk",
"ml",
"mm",
"mn",
"mo",
"mobile_phone_off",
"money_mouth",
"money_mouth_face",
"money_with_wings",
"moneybag",
"monkey",
"monkey_face",
"monorail",
"mortar_board",
"mosque",
"motorboat",
"motorcycle",
"motorway",
"mount_fuji",
"mountain",
"mountain_bicyclist",
"mountain_bicyclist_tone1",
"mountain_bicyclist_tone2",
"mountain_bicyclist_tone3",
"mountain_bicyclist_tone4",
"mountain_bicyclist_tone5",
"mountain_cableway",
"mountain_railway",
"mountain_snow",
"mouse",
"mouse2",
"mouse_three_button",
"movie_camera",
"moyai",
"mp",
"mq",
"mr",
"ms",
"mt",
"mu",
"muscle",
"muscle_tone1",
"muscle_tone2",
"muscle_tone3",
"muscle_tone4",
"muscle_tone5",
"mushroom",
"musical_keyboard",
"musical_note",
"musical_score",
"mute",
"mv",
"mw",
"mx",
"my",
"mz",
"na",
"nail_care",
"nail_care_tone1",
"nail_care_tone2",
"nail_care_tone3",
"nail_care_tone4",
"nail_care_tone5",
"name_badge",
"national_park",
"nc",
"ne",
"necktie",
"negative_squared_cross_mark",
"nerd",
"nerd_face",
"neutral_face",
"new",
"new_moon",
"new_moon_with_face",
"newspaper",
"newspaper2",
"next_track",
"nf",
"ng",
"ni",
"nigeria",
"night_with_stars",
"nine",
"nl",
"no",
"no_bell",
"no_bicycles",
"no_entry",
"no_entry_sign",
"no_good",
"no_good_tone1",
"no_good_tone2",
"no_good_tone3",
"no_good_tone4",
"no_good_tone5",
"no_mobile_phones",
"no_mouth",
"no_pedestrians",
"no_smoking",
"non-potable_water",
"nose",
"nose_tone1",
"nose_tone2",
"nose_tone3",
"nose_tone4",
"nose_tone5",
"notebook",
"notebook_with_decorative_cover",
"notepad_spiral",
"notes",
"np",
"nr",
"nu",
"nut_and_bolt",
"nz",
"o",
"o2",
"ocean",
"octopus",
"oden",
"office",
"oil",
"oil_drum",
"ok",
"ok_hand",
"ok_hand_tone1",
"ok_hand_tone2",
"ok_hand_tone3",
"ok_hand_tone4",
"ok_hand_tone5",
"ok_woman",
"ok_woman_tone1",
"ok_woman_tone2",
"ok_woman_tone3",
"ok_woman_tone4",
"ok_woman_tone5",
"old_key",
"older_man",
"older_man_tone1",
"older_man_tone2",
"older_man_tone3",
"older_man_tone4",
"older_man_tone5",
"older_woman",
"older_woman_tone1",
"older_woman_tone2",
"older_woman_tone3",
"older_woman_tone4",
"older_woman_tone5",
"om",
"om_symbol",
"on",
"oncoming_automobile",
"oncoming_bus",
"oncoming_police_car",
"oncoming_taxi",
"one",
"open_file_folder",
"open_hands",
"open_hands_tone1",
"open_hands_tone2",
"open_hands_tone3",
"open_hands_tone4",
"open_hands_tone5",
"open_mouth",
"ophiuchus",
"orange_book",
"orthodox_cross",
"outbox_tray",
"ox",
"pa",
"package",
"page_facing_up",
"page_with_curl",
"pager",
"paintbrush",
"palm_tree",
"panda_face",
"paperclip",
"paperclips",
"park",
"parking",
"part_alternation_mark",
"partly_sunny",
"passenger_ship",
"passport_control",
"pause_button",
"paw_prints",
"pe",
"peace",
"peace_symbol",
"peach",
"pear",
"pen_ballpoint",
"pen_fountain",
"pencil",
"pencil2",
"penguin",
"pensive",
"performing_arts",
"persevere",
"person_frowning",
"person_frowning_tone1",
"person_frowning_tone2",
"person_frowning_tone3",
"person_frowning_tone4",
"person_frowning_tone5",
"person_with_ball",
"person_with_ball_tone1",
"person_with_ball_tone2",
"person_with_ball_tone3",
"person_with_ball_tone4",
"person_with_ball_tone5",
"person_with_blond_hair",
"person_with_blond_hair_tone1",
"person_with_blond_hair_tone2",
"person_with_blond_hair_tone3",
"person_with_blond_hair_tone4",
"person_with_blond_hair_tone5",
"person_with_pouting_face",
"person_with_pouting_face_tone1",
"person_with_pouting_face_tone2",
"person_with_pouting_face_tone3",
"person_with_pouting_face_tone4",
"person_with_pouting_face_tone5",
"pf",
"pg",
"ph",
"pick",
"pig",
"pig2",
"pig_nose",
"pill",
"pineapple",
"ping_pong",
"pisces",
"pizza",
"pk",
"pl",
"place_of_worship",
"play_pause",
"pm",
"pn",
"point_down",
"point_down_tone1",
"point_down_tone2",
"point_down_tone3",
"point_down_tone4",
"point_down_tone5",
"point_left",
"point_left_tone1",
"point_left_tone2",
"point_left_tone3",
"point_left_tone4",
"point_left_tone5",
"point_right",
"point_right_tone1",
"point_right_tone2",
"point_right_tone3",
"point_right_tone4",
"point_right_tone5",
"point_up",
"point_up_2",
"point_up_2_tone1",
"point_up_2_tone2",
"point_up_2_tone3",
"point_up_2_tone4",
"point_up_2_tone5",
"point_up_tone1",
"point_up_tone2",
"point_up_tone3",
"point_up_tone4",
"point_up_tone5",
"police_car",
"poo",
"poodle",
"poop",
"popcorn",
"post_office",
"postal_horn",
"postbox",
"potable_water",
"pouch",
"poultry_leg",
"pound",
"pouting_cat",
"pr",
"pray",
"pray_tone1",
"pray_tone2",
"pray_tone3",
"pray_tone4",
"pray_tone5",
"prayer_beads",
"previous_track",
"princess",
"princess_tone1",
"princess_tone2",
"princess_tone3",
"princess_tone4",
"princess_tone5",
"printer",
"projector",
"ps",
"pt",
"pudding",
"punch",
"punch_tone1",
"punch_tone2",
"punch_tone3",
"punch_tone4",
"punch_tone5",
"purple_heart",
"purse",
"pushpin",
"put_litter_in_its_place",
"pw",
"py",
"qa",
"question",
"rabbit",
"rabbit2",
"race_car",
"racehorse",
"racing_car",
"racing_motorcycle",
"radio",
"radio_button",
"radioactive",
"radioactive_sign",
"rage",
"railroad_track",
"railway_car",
"railway_track",
"rainbow",
"raised_hand",
"raised_hand_tone1",
"raised_hand_tone2",
"raised_hand_tone3",
"raised_hand_tone4",
"raised_hand_tone5",
"raised_hand_with_fingers_splayed",
"raised_hand_with_fingers_splayed_tone1",
"raised_hand_with_fingers_splayed_tone2",
"raised_hand_with_fingers_splayed_tone3",
"raised_hand_with_fingers_splayed_tone4",
"raised_hand_with_fingers_splayed_tone5",
"raised_hand_with_part_between_middle_and_ring_fingers",
"raised_hand_with_part_between_middle_and_ring_fingers_tone1",
"raised_hand_with_part_between_middle_and_ring_fingers_tone2",
"raised_hand_with_part_between_middle_and_ring_fingers_tone3",
"raised_hand_with_part_between_middle_and_ring_fingers_tone4",
"raised_hand_with_part_between_middle_and_ring_fingers_tone5",
"raised_hands",
"raised_hands_tone1",
"raised_hands_tone2",
"raised_hands_tone3",
"raised_hands_tone4",
"raised_hands_tone5",
"raising_hand",
"raising_hand_tone1",
"raising_hand_tone2",
"raising_hand_tone3",
"raising_hand_tone4",
"raising_hand_tone5",
"ram",
"ramen",
"rat",
"re",
"record_button",
"recycle",
"red_car",
"red_circle",
"registered",
"relaxed",
"relieved",
"reminder_ribbon",
"repeat",
"repeat_one",
"restroom",
"reversed_hand_with_middle_finger_extended",
"reversed_hand_with_middle_finger_extended_tone1",
"reversed_hand_with_middle_finger_extended_tone2",
"reversed_hand_with_middle_finger_extended_tone3",
"reversed_hand_with_middle_finger_extended_tone4",
"reversed_hand_with_middle_finger_extended_tone5",
"revolving_hearts",
"rewind",
"ribbon",
"rice",
"rice_ball",
"rice_cracker",
"rice_scene",
"right_anger_bubble",
"ring",
"ro",
"robot",
"robot_face",
"rocket",
"rolled_up_newspaper",
"roller_coaster",
"rolling_eyes",
"rooster",
"rose",
"rosette",
"rotating_light",
"round_pushpin",
"rowboat",
"rowboat_tone1",
"rowboat_tone2",
"rowboat_tone3",
"rowboat_tone4",
"rowboat_tone5",
"rs",
"ru",
"rugby_football",
"runner",
"runner_tone1",
"runner_tone2",
"runner_tone3",
"runner_tone4",
"runner_tone5",
"running_shirt_with_sash",
"rw",
"sa",
"sagittarius",
"sailboat",
"sake",
"sandal",
"santa",
"santa_tone1",
"santa_tone2",
"santa_tone3",
"santa_tone4",
"santa_tone5",
"satellite",
"satellite_orbital",
"satisfied",
"saudi",
"saudiarabia",
"saxophone",
"sb",
"sc",
"scales",
"school",
"school_satchel",
"scissors",
"scorpion",
"scorpius",
"scream",
"scream_cat",
"scroll",
"sd",
"se",
"seat",
"secret",
"see_no_evil",
"seedling",
"seven",
"sg",
"sh",
"shamrock",
"shaved_ice",
"sheep",
"shell",
"shield",
"shinto_shrine",
"ship",
"shirt",
"shit",
"shopping_bags",
"shower",
"si",
"sign_of_the_horns",
"sign_of_the_horns_tone1",
"sign_of_the_horns_tone2",
"sign_of_the_horns_tone3",
"sign_of_the_horns_tone4",
"sign_of_the_horns_tone5",
"signal_strength",
"six",
"six_pointed_star",
"sj",
"sk",
"skeleton",
"ski",
"skier",
"skull",
"skull_and_crossbones",
"skull_crossbones",
"sl",
"sleeping",
"sleeping_accommodation",
"sleepy",
"sleuth_or_spy",
"sleuth_or_spy_tone1",
"sleuth_or_spy_tone2",
"sleuth_or_spy_tone3",
"sleuth_or_spy_tone4",
"sleuth_or_spy_tone5",
"slight_frown",
"slight_smile",
"slightly_frowning_face",
"slightly_smiling_face",
"slot_machine",
"sm",
"small_airplane",
"small_blue_diamond",
"small_orange_diamond",
"small_red_triangle",
"small_red_triangle_down",
"smile",
"smile_cat",
"smiley",
"smiley_cat",
"smiling_imp",
"smirk",
"smirk_cat",
"smoking",
"sn",
"snail",
"snake",
"snow_capped_mountain",
"snowboarder",
"snowflake",
"snowman",
"snowman2",
"so",
"sob",
"soccer",
"soon",
"sos",
"sound",
"space_invader",
"spades",
"spaghetti",
"sparkle",
"sparkler",
"sparkles",
"sparkling_heart",
"speak_no_evil",
"speaker",
"speaking_head",
"speaking_head_in_silhouette",
"speech_balloon",
"speedboat",
"spider",
"spider_web",
"spiral_calendar_pad",
"spiral_note_pad",
"sports_medal",
"spy",
"spy_tone1",
"spy_tone2",
"spy_tone3",
"spy_tone4",
"spy_tone5",
"sr",
"ss",
"st",
"stadium",
"star",
"star2",
"star_and_crescent",
"star_of_david",
"stars",
"station",
"statue_of_liberty",
"steam_locomotive",
"stew",
"stop_button",
"stopwatch",
"straight_ruler",
"strawberry",
"stuck_out_tongue",
"stuck_out_tongue_closed_eyes",
"stuck_out_tongue_winking_eye",
"studio_microphone",
"sun_with_face",
"sunflower",
"sunglasses",
"sunny",
"sunrise",
"sunrise_over_mountains",
"surfer",
"surfer_tone1",
"surfer_tone2",
"surfer_tone3",
"surfer_tone4",
"surfer_tone5",
"sushi",
"suspension_railway",
"sv",
"sweat",
"sweat_drops",
"sweat_smile",
"sweet_potato",
"swimmer",
"swimmer_tone1",
"swimmer_tone2",
"swimmer_tone3",
"swimmer_tone4",
"swimmer_tone5",
"sx",
"sy",
"symbols",
"synagogue",
"syringe",
"sz",
"ta",
"table_tennis",
"taco",
"tada",
"tanabata_tree",
"tangerine",
"taurus",
"taxi",
"tc",
"td",
"tea",
"telephone",
"telephone_receiver",
"telescope",
"ten",
"tennis",
"tent",
"tf",
"tg",
"th",
"thermometer",
"thermometer_face",
"thinking",
"thinking_face",
"thought_balloon",
"three",
"three_button_mouse",
"thumbdown",
"thumbdown_tone1",
"thumbdown_tone2",
"thumbdown_tone3",
"thumbdown_tone4",
"thumbdown_tone5",
"thumbsdown",
"thumbsdown_tone1",
"thumbsdown_tone2",
"thumbsdown_tone3",
"thumbsdown_tone4",
"thumbsdown_tone5",
"thumbsup",
"thumbsup_tone1",
"thumbsup_tone2",
"thumbsup_tone3",
"thumbsup_tone4",
"thumbsup_tone5",
"thumbup",
"thumbup_tone1",
"thumbup_tone2",
"thumbup_tone3",
"thumbup_tone4",
"thumbup_tone5",
"thunder_cloud_and_rain",
"thunder_cloud_rain",
"ticket",
"tickets",
"tiger",
"tiger2",
"timer",
"timer_clock",
"tired_face",
"tj",
"tk",
"tl",
"tm",
"tn",
"to",
"toilet",
"tokyo_tower",
"tomato",
"tongue",
"tools",
"top",
"tophat",
"tr",
"track_next",
"track_previous",
"trackball",
"tractor",
"traffic_light",
"train",
"train2",
"tram",
"triangular_flag_on_post",
"triangular_ruler",
"trident",
"triumph",
"trolleybus",
"trophy",
"tropical_drink",
"tropical_fish",
"truck",
"trumpet",
"tt",
"tulip",
"turkey",
"turkmenistan",
"turtle",
"tuvalu",
"tv",
"tw",
"twisted_rightwards_arrows",
"two",
"two_hearts",
"two_men_holding_hands",
"two_women_holding_hands",
"tz",
"u5272",
"u5408",
"u55b6",
"u6307",
"u6708",
"u6709",
"u6e80",
"u7121",
"u7533",
"u7981",
"u7a7a",
"ua",
"ug",
"um",
"umbrella",
"umbrella2",
"umbrella_on_ground",
"unamused",
"underage",
"unicorn",
"unicorn_face",
"unlock",
"up",
"upside_down",
"upside_down_face",
"urn",
"us",
"uy",
"uz",
"v",
"v_tone1",
"v_tone2",
"v_tone3",
"v_tone4",
"v_tone5",
"va",
"vc",
"ve",
"vertical_traffic_light",
"vg",
"vhs",
"vi",
"vibration_mode",
"video_camera",
"video_game",
"violin",
"virgo",
"vn",
"volcano",
"volleyball",
"vs",
"vu",
"vulcan",
"vulcan_tone1",
"vulcan_tone2",
"vulcan_tone3",
"vulcan_tone4",
"vulcan_tone5",
"walking",
"walking_tone1",
"walking_tone2",
"walking_tone3",
"walking_tone4",
"walking_tone5",
"waning_crescent_moon",
"waning_gibbous_moon",
"warning",
"wastebasket",
"watch",
"water_buffalo",
"watermelon",
"wave",
"wave_tone1",
"wave_tone2",
"wave_tone3",
"wave_tone4",
"wave_tone5",
"waving_black_flag",
"waving_white_flag",
"wavy_dash",
"waxing_crescent_moon",
"waxing_gibbous_moon",
"wc",
"weary",
"wedding",
"weight_lifter",
"weight_lifter_tone1",
"weight_lifter_tone2",
"weight_lifter_tone3",
"weight_lifter_tone4",
"weight_lifter_tone5",
"wf",
"whale",
"whale2",
"wheel_of_dharma",
"wheelchair",
"white_check_mark",
"white_circle",
"white_flower",
"white_frowning_face",
"white_large_square",
"white_medium_small_square",
"white_medium_square",
"white_small_square",
"white_square_button",
"white_sun_behind_cloud",
"white_sun_behind_cloud_with_rain",
"white_sun_cloud",
"white_sun_rain_cloud",
"white_sun_small_cloud",
"white_sun_with_small_cloud",
"wind_blowing_face",
"wind_chime",
"wine_glass",
"wink",
"wolf",
"woman",
"woman_tone1",
"woman_tone2",
"woman_tone3",
"woman_tone4",
"woman_tone5",
"womans_clothes",
"womans_hat",
"womens",
"world_map",
"worried",
"worship_symbol",
"wrench",
"writing_hand",
"writing_hand_tone1",
"writing_hand_tone2",
"writing_hand_tone3",
"writing_hand_tone4",
"writing_hand_tone5",
"ws",
"x",
"xk",
"ye",
"yellow_heart",
"yen",
"yin_yang",
"yt",
"yum",
"za",
"zap",
"zero",
"zipper_mouth",
"zipper_mouth_face",
"zm",
"zw",
"zzz"
]
###
Emoji list
###
stModules.emojiList = [
"+1",
"+1_tone1",
"+1_tone2",
"+1_tone3",
"+1_tone4",
"+1_tone5",
"-1",
"-1_tone1",
"-1_tone2",
"-1_tone3",
"-1_tone4",
"-1_tone5",
"100",
"1234",
"8ball",
"a",
"ab",
"abc",
"abcd",
"ac",
"accept",
"ad",
"admission_tickets",
"ae",
"aerial_tramway",
"af",
"ag",
"ai",
"airplane",
"airplane_arriving",
"airplane_departure",
"airplane_small",
"al",
"alarm_clock",
"alembic",
"alien",
"am",
"ambulance",
"amphora",
"anchor",
"angel",
"angel_tone1",
"angel_tone2",
"angel_tone3",
"angel_tone4",
"angel_tone5",
"anger",
"anger_right",
"angry",
"anguished",
"ant",
"ao",
"apple",
"aq",
"aquarius",
"ar",
"archery",
"aries",
"arrow_backward",
"arrow_double_down",
"arrow_double_up",
"arrow_down",
"arrow_down_small",
"arrow_forward",
"arrow_heading_down",
"arrow_heading_up",
"arrow_left",
"arrow_lower_left",
"arrow_lower_right",
"arrow_right",
"arrow_right_hook",
"arrow_up",
"arrow_up_down",
"arrow_up_small",
"arrow_upper_left",
"arrow_upper_right",
"arrows_clockwise",
"arrows_counterclockwise",
"art",
"articulated_lorry",
"as",
"asterisk",
"astonished",
"at",
"athletic_shoe",
"atm",
"atom",
"atom_symbol",
"au",
"aw",
"ax",
"az",
"b",
"ba",
"baby",
"baby_bottle",
"baby_chick",
"baby_symbol",
"baby_tone1",
"baby_tone2",
"baby_tone3",
"baby_tone4",
"baby_tone5",
"back",
"badminton",
"baggage_claim",
"balloon",
"ballot_box",
"ballot_box_with_ballot",
"ballot_box_with_check",
"bamboo",
"banana",
"bangbang",
"bank",
"bar_chart",
"barber",
"baseball",
"basketball",
"basketball_player",
"basketball_player_tone1",
"basketball_player_tone2",
"basketball_player_tone3",
"basketball_player_tone4",
"basketball_player_tone5",
"bath",
"bath_tone1",
"bath_tone2",
"bath_tone3",
"bath_tone4",
"bath_tone5",
"bathtub",
"battery",
"bb",
"bd",
"be",
"beach",
"beach_umbrella",
"beach_with_umbrella",
"bear",
"bed",
"bee",
"beer",
"beers",
"beetle",
"beginner",
"bell",
"bellhop",
"bellhop_bell",
"bento",
"bf",
"bg",
"bh",
"bi",
"bicyclist",
"bicyclist_tone1",
"bicyclist_tone2",
"bicyclist_tone3",
"bicyclist_tone4",
"bicyclist_tone5",
"bike",
"bikini",
"biohazard",
"biohazard_sign",
"bird",
"birthday",
"bj",
"bl",
"black_circle",
"black_joker",
"black_large_square",
"black_medium_small_square",
"black_medium_square",
"black_nib",
"black_small_square",
"black_square_button",
"blossom",
"blowfish",
"blue_book",
"blue_car",
"blue_heart",
"blush",
"bm",
"bn",
"bo",
"boar",
"bomb",
"book",
"bookmark",
"bookmark_tabs",
"books",
"boom",
"boot",
"bottle_with_popping_cork",
"bouquet",
"bow",
"bow_and_arrow",
"bow_tone1",
"bow_tone2",
"bow_tone3",
"bow_tone4",
"bow_tone5",
"bowling",
"boy",
"boy_tone1",
"boy_tone2",
"boy_tone3",
"boy_tone4",
"boy_tone5",
"bq",
"br",
"bread",
"bride_with_veil",
"bride_with_veil_tone1",
"bride_with_veil_tone2",
"bride_with_veil_tone3",
"bride_with_veil_tone4",
"bride_with_veil_tone5",
"bridge_at_night",
"briefcase",
"broken_heart",
"bs",
"bt",
"bug",
"building_construction",
"bulb",
"bullettrain_front",
"bullettrain_side",
"burrito",
"bus",
"busstop",
"bust_in_silhouette",
"busts_in_silhouette",
"bv",
"bw",
"by",
"bz",
"ca",
"cactus",
"cake",
"calendar",
"calendar_spiral",
"calling",
"camel",
"camera",
"camera_with_flash",
"camping",
"cancer",
"candle",
"candy",
"capital_abcd",
"capricorn",
"card_box",
"card_file_box",
"card_index",
"card_index_dividers",
"carousel_horse",
"cat",
"cat2",
"cc",
"cd",
"cf",
"cg",
"ch",
"chains",
"champagne",
"chart",
"chart_with_downwards_trend",
"chart_with_upwards_trend",
"checkered_flag",
"cheese",
"cheese_wedge",
"cherries",
"cherry_blossom",
"chestnut",
"chicken",
"children_crossing",
"chile",
"chipmunk",
"chocolate_bar",
"christmas_tree",
"church",
"ci",
"cinema",
"circus_tent",
"city_dusk",
"city_sunrise",
"city_sunset",
"cityscape",
"ck",
"cl",
"clap",
"clap_tone1",
"clap_tone2",
"clap_tone3",
"clap_tone4",
"clap_tone5",
"clapper",
"classical_building",
"clipboard",
"clock",
"clock1",
"clock10",
"clock1030",
"clock11",
"clock1130",
"clock12",
"clock1230",
"clock130",
"clock2",
"clock230",
"clock3",
"clock330",
"clock4",
"clock430",
"clock5",
"clock530",
"clock6",
"clock630",
"clock7",
"clock730",
"clock8",
"clock830",
"clock9",
"clock930",
"closed_book",
"closed_lock_with_key",
"closed_umbrella",
"cloud",
"cloud_lightning",
"cloud_rain",
"cloud_snow",
"cloud_tornado",
"cloud_with_lightning",
"cloud_with_rain",
"cloud_with_snow",
"cloud_with_tornado",
"clubs",
"cm",
"cn",
"co",
"cocktail",
"coffee",
"coffin",
"cold_sweat",
"comet",
"compression",
"computer",
"confetti_ball",
"confounded",
"confused",
"congo",
"congratulations",
"construction",
"construction_site",
"construction_worker",
"construction_worker_tone1",
"construction_worker_tone2",
"construction_worker_tone3",
"construction_worker_tone4",
"construction_worker_tone5",
"control_knobs",
"convenience_store",
"cookie",
"cool",
"cop",
"cop_tone1",
"cop_tone2",
"cop_tone3",
"cop_tone4",
"cop_tone5",
"copyright",
"corn",
"couch",
"couch_and_lamp",
"couple",
"couple_mm",
"couple_with_heart",
"couple_with_heart_mm",
"couple_with_heart_ww",
"couple_ww",
"couplekiss",
"couplekiss_mm",
"couplekiss_ww",
"cow",
"cow2",
"cp",
"cr",
"crab",
"crayon",
"credit_card",
"crescent_moon",
"cricket",
"cricket_bat_ball",
"crocodile",
"cross",
"crossed_flags",
"crossed_swords",
"crown",
"cruise_ship",
"cry",
"crying_cat_face",
"crystal_ball",
"cu",
"cupid",
"curly_loop",
"currency_exchange",
"curry",
"custard",
"customs",
"cv",
"cw",
"cx",
"PI:NAME:<NAME>END_PI",
"cyclone",
"cz",
"PI:NAME:<NAME>END_PI",
"dagger_knife",
"PI:NAME:<NAME>END_PI",
"dancer_tone1",
"dancer_tone2",
"dancer_tone3",
"dancer_tone4",
"dancer_tone5",
"PI:NAME:<NAME>END_PI",
"PI:NAME:<NAME>END_PI",
"dark_sunglasses",
"dart",
"dash",
"date",
"de",
"deciduous_tree",
"department_store",
"derelict_house_building",
"desert",
"desert_island",
"desktop",
"desktop_computer",
"dg",
"diamond_shape_with_a_dot_inside",
"diamonds",
"disappointed",
"disappointed_relieved",
"dividers",
"dizzy",
"dizzy_face",
"dj",
"dk",
"dm",
"do",
"do_not_litter",
"dog",
"dog2",
"dollar",
"dolls",
"dolphin",
"door",
"double_vertical_bar",
"doughnut",
"dove",
"dove_of_peace",
"dragon",
"dragon_face",
"dress",
"dromedary_camel",
"droplet",
"dvd",
"dz",
"e-mail",
"ea",
"ear",
"ear_of_rice",
"ear_tone1",
"ear_tone2",
"ear_tone3",
"ear_tone4",
"ear_tone5",
"earth_africa",
"earth_americas",
"earth_asia",
"ec",
"ee",
"eg",
"egg",
"eggplant",
"eh",
"eight",
"eight_pointed_black_star",
"eight_spoked_asterisk",
"electric_plug",
"elephant",
"email",
"end",
"envelope",
"envelope_with_arrow",
"er",
"es",
"et",
"eu",
"euro",
"european_castle",
"european_post_office",
"evergreen_tree",
"exclamation",
"expressionless",
"eye",
"eye_in_speech_bubble",
"eyeglasses",
"eyes",
"face_with_head_bandage",
"face_with_rolling_eyes",
"face_with_thermometer",
"factory",
"fallen_leaf",
"family",
"family_mmb",
"family_mmbb",
"family_mmg",
"family_mmgb",
"family_mmgg",
"family_mwbb",
"family_mwg",
"family_mwgb",
"family_mwgg",
"family_wwb",
"family_wwbb",
"family_wwg",
"family_wwgb",
"family_wwgg",
"fast_forward",
"fax",
"fearful",
"feet",
"ferris_wheel",
"ferry",
"fi",
"field_hockey",
"file_cabinet",
"file_folder",
"film_frames",
"film_projector",
"fire",
"fire_engine",
"fireworks",
"first_quarter_moon",
"first_quarter_moon_with_face",
"fish",
"fish_cake",
"fishing_pole_and_fish",
"fist",
"fist_tone1",
"fist_tone2",
"fist_tone3",
"fist_tone4",
"fist_tone5",
"five",
"fj",
"fk",
"flag_ac",
"flag_ad",
"flag_ae",
"flag_af",
"flag_ag",
"flag_ai",
"flag_al",
"flag_am",
"flag_ao",
"flag_aq",
"flag_ar",
"flag_as",
"flag_at",
"flag_au",
"flag_aw",
"flag_ax",
"flag_az",
"flag_ba",
"flag_bb",
"flag_bd",
"flag_be",
"flag_bf",
"flag_bg",
"flag_bh",
"flag_bi",
"flag_bj",
"flag_bl",
"flag_black",
"flag_bm",
"flag_bn",
"flag_bo",
"flag_bq",
"flag_br",
"flag_bs",
"flag_bt",
"flag_bv",
"flag_bw",
"flag_by",
"flag_bz",
"flag_ca",
"flag_cc",
"flag_cd",
"flag_cf",
"flag_cg",
"flag_ch",
"flag_ci",
"flag_ck",
"flag_cl",
"flag_cm",
"flag_cn",
"flag_co",
"flag_cp",
"flag_cr",
"flag_cu",
"flag_cv",
"flag_cw",
"flag_cx",
"flag_cy",
"flag_cz",
"flag_de",
"flag_dg",
"flag_dj",
"flag_dk",
"flag_dm",
"flag_do",
"flag_dz",
"flag_ea",
"flag_ec",
"flag_ee",
"flag_eg",
"flag_eh",
"flag_er",
"flag_es",
"flag_et",
"flag_eu",
"flag_fi",
"flag_fj",
"flag_fk",
"flag_fm",
"flag_fo",
"flag_fr",
"flag_ga",
"flag_gb",
"flag_gd",
"flag_ge",
"flag_gf",
"flag_gg",
"flag_gh",
"flag_gi",
"flag_gl",
"flag_gm",
"flag_gn",
"flag_gp",
"flag_gq",
"flag_gr",
"flag_gs",
"flag_gt",
"flag_gu",
"flag_gw",
"flag_gy",
"flag_hk",
"flag_hm",
"flag_hn",
"flag_hr",
"flag_ht",
"flag_hu",
"flag_ic",
"flag_id",
"flag_ie",
"flag_il",
"flag_im",
"flag_in",
"flag_io",
"flag_iq",
"flag_ir",
"flag_is",
"flag_it",
"flag_je",
"flag_jm",
"flag_jo",
"flag_jp",
"flag_ke",
"flag_kg",
"flag_kh",
"flag_ki",
"flag_km",
"flag_kn",
"flag_kp",
"flag_kr",
"flag_kw",
"flag_ky",
"flag_kz",
"flag_la",
"flag_lb",
"flag_lc",
"flag_li",
"flag_lk",
"flag_lr",
"flag_ls",
"flag_lt",
"flag_lu",
"flag_lv",
"flag_ly",
"flag_ma",
"flag_mc",
"flag_md",
"flag_me",
"flag_mf",
"flag_mg",
"flag_mh",
"flag_mk",
"flag_ml",
"flag_mm",
"flag_mn",
"flag_mo",
"flag_mp",
"flag_mq",
"flag_mr",
"flag_ms",
"flag_mt",
"flag_mu",
"flag_mv",
"flag_mw",
"flag_mx",
"flag_my",
"flag_mz",
"flag_na",
"flag_nc",
"flag_ne",
"flag_nf",
"flag_ng",
"flag_ni",
"flag_nl",
"flag_no",
"flag_np",
"flag_nr",
"flag_nu",
"flag_nz",
"flag_om",
"flag_pa",
"flag_pe",
"flag_pf",
"flag_pg",
"flag_ph",
"flag_pk",
"flag_pl",
"flag_pm",
"flag_pn",
"flag_pr",
"flag_ps",
"flag_pt",
"flag_pw",
"flag_py",
"flag_qa",
"flag_re",
"flag_ro",
"flag_rs",
"flag_ru",
"flag_rw",
"flag_sa",
"flag_sb",
"flag_sc",
"flag_sd",
"flag_se",
"flag_sg",
"flag_sh",
"flag_si",
"flag_sj",
"flag_sk",
"flag_sl",
"flag_sm",
"flag_sn",
"flag_so",
"flag_sr",
"flag_ss",
"flag_st",
"flag_sv",
"flag_sx",
"flag_sy",
"flag_sz",
"flag_ta",
"flag_tc",
"flag_td",
"flag_tf",
"flag_tg",
"flag_th",
"flag_tj",
"flag_tk",
"flag_tl",
"flag_tm",
"flag_tn",
"flag_to",
"flag_tr",
"flag_tt",
"flag_tv",
"flag_tw",
"flag_tz",
"flag_ua",
"flag_ug",
"flag_um",
"flag_us",
"flag_uy",
"flag_uz",
"flag_va",
"flag_vc",
"flag_ve",
"flag_vg",
"flag_vi",
"flag_vn",
"flag_vu",
"flag_wf",
"flag_white",
"flag_ws",
"flag_xk",
"flag_ye",
"flag_yt",
"flag_za",
"flag_zm",
"flag_zw",
"flags",
"flame",
"flan",
"flashlight",
"fleur-de-lis",
"floppy_disk",
"flower_playing_cards",
"flushed",
"fm",
"fo",
"fog",
"foggy",
"football",
"footprints",
"fork_and_knife",
"fork_and_knife_with_plate",
"fork_knife_plate",
"fountain",
"four",
"four_leaf_clover",
"fr",
"frame_photo",
"frame_with_picture",
"free",
"fried_shrimp",
"fries",
"frog",
"frowning",
"frowning2",
"fuelpump",
"full_moon",
"full_moon_with_face",
"funeral_urn",
"ga",
"game_die",
"gb",
"gd",
"ge",
"gear",
"gem",
"gemini",
"gf",
"gg",
"gh",
"ghost",
"gi",
"gift",
"gift_heart",
"girl",
"girl_tone1",
"girl_tone2",
"girl_tone3",
"girl_tone4",
"girl_tone5",
"gl",
"globe_with_meridians",
"gm",
"gn",
"goat",
"golf",
"golfer",
"gp",
"gq",
"gr",
"grandma",
"grandma_tone1",
"grandma_tone2",
"grandma_tone3",
"grandma_tone4",
"grandma_tone5",
"grapes",
"green_apple",
"green_book",
"green_heart",
"grey_exclamation",
"grey_question",
"grimacing",
"grin",
"grinning",
"gs",
"gt",
"gu",
"PI:NAME:<NAME>END_PIman",
"guardsman_tone1",
"guardsman_tone2",
"guardsman_tone3",
"guardsman_tone4",
"guardsman_tone5",
"guitar",
"gun",
"gw",
"gy",
"haircut",
"haircut_tone1",
"haircut_tone2",
"haircut_tone3",
"haircut_tone4",
"haircut_tone5",
"hamburger",
"hammer",
"hammer_and_pick",
"hammer_and_wrench",
"hammer_pick",
"hamster",
"hand_splayed",
"hand_splayed_tone1",
"hand_splayed_tone2",
"hand_splayed_tone3",
"hand_splayed_tone4",
"hand_splayed_tone5",
"handbag",
"hankey",
"hash",
"hatched_chick",
"hatching_chick",
"head_bandage",
"headphones",
"hear_no_evil",
"heart",
"heart_decoration",
"heart_exclamation",
"heart_eyes",
"heart_eyes_cat",
"heartbeat",
"heartpulse",
"hearts",
"heavy_check_mark",
"heavy_division_sign",
"heavy_dollar_sign",
"heavy_heart_exclamation_mark_ornament",
"heavy_minus_sign",
"heavy_multiplication_x",
"heavy_plus_sign",
"helicopter",
"helmet_with_cross",
"helmet_with_white_cross",
"herb",
"hibPI:NAME:<NAME>END_PI",
"high_brightness",
"high_heel",
"hk",
"hm",
"hn",
"hockey",
"hole",
"homes",
"honey_pot",
"horse",
"horse_racing",
"horse_racing_tone1",
"horse_racing_tone2",
"horse_racing_tone3",
"horse_racing_tone4",
"horse_racing_tone5",
"hospital",
"hot_dog",
"hot_pepper",
"hotdog",
"hotel",
"hotsprings",
"hourglass",
"hourglass_flowing_sand",
"house",
"house_abandoned",
"house_buildings",
"house_with_garden",
"hr",
"ht",
"hu",
"hugging",
"hugging_face",
"hushed",
"ic",
"ice_cream",
"ice_skate",
"icecream",
"id",
"ideograph_advantage",
"ie",
"il",
"im",
"imp",
"in",
"inbox_tray",
"incoming_envelope",
"indonesia",
"information_desk_person",
"information_desk_person_tone1",
"information_desk_person_tone2",
"information_desk_person_tone3",
"information_desk_person_tone4",
"information_desk_person_tone5",
"information_source",
"innocent",
"interrobang",
"io",
"iphone",
"iq",
"ir",
"is",
"island",
"it",
"izakaya_lantern",
"jack_o_lantern",
"japan",
"japanese_castle",
"japanese_goblin",
"japanese_ogre",
"je",
"jeans",
"jm",
"jo",
"joy",
"joy_cat",
"joystick",
"jp",
"kaaba",
"ke",
"key",
"key2",
"keyboard",
"keycap_asterisk",
"kg",
"kh",
"ki",
"kimono",
"kiss",
"kiss_mm",
"kiss_ww",
"kissing",
"kissing_cat",
"kissing_closed_eyes",
"kissing_heart",
"kissing_smiling_eyes",
"km",
"kn",
"knife",
"koala",
"koko",
"kp",
"kr",
"kw",
"ky",
"kz",
"la",
"label",
"large_blue_circle",
"large_blue_diamond",
"large_orange_diamond",
"last_quarter_moon",
"last_quarter_moon_with_face",
"latin_cross",
"laughing",
"lb",
"lc",
"leaves",
"ledger",
"left_luggage",
"left_right_arrow",
"leftwards_arrow_with_hook",
"lemon",
"leo",
"leopard",
"level_slider",
"levitate",
"li",
"libra",
"lifter",
"lifter_tone1",
"lifter_tone2",
"lifter_tone3",
"lifter_tone4",
"lifter_tone5",
"light_rail",
"link",
"linked_paperclips",
"lion",
"lion_face",
"lips",
"lipstick",
"lk",
"lock",
"lock_with_ink_pen",
"lollipop",
"loop",
"loud_sound",
"loudspeaker",
"love_hotel",
"love_letter",
"low_brightness",
"lower_left_ballpoint_pen",
"lower_left_crayon",
"lower_left_fountain_pen",
"lower_left_paintbrush",
"lr",
"ls",
"lt",
"lu",
"lv",
"ly",
"m",
"ma",
"mag",
"mag_right",
"mahjong",
"mailbox",
"mailbox_closed",
"mailbox_with_mail",
"mailbox_with_no_mail",
"man",
"man_in_business_suit_levitating",
"man_tone1",
"man_tone2",
"man_tone3",
"man_tone4",
"man_tone5",
"man_with_gua_pi_mao",
"man_with_gua_pi_mao_tone1",
"man_with_gua_pi_mao_tone2",
"man_with_gua_pi_mao_tone3",
"man_with_gua_pi_mao_tone4",
"man_with_gua_pi_mao_tone5",
"man_with_turban",
"man_with_turban_tone1",
"man_with_turban_tone2",
"man_with_turban_tone3",
"man_with_turban_tone4",
"man_with_turban_tone5",
"mans_shoe",
"mantlepiece_clock",
"map",
"maple_leaf",
"mask",
"massage",
"massage_tone1",
"massage_tone2",
"massage_tone3",
"massage_tone4",
"massage_tone5",
"mc",
"md",
"me",
"meat_on_bone",
"medal",
"mega",
"melon",
"menorah",
"mens",
"metal",
"metal_tone1",
"metal_tone2",
"metal_tone3",
"metal_tone4",
"metal_tone5",
"metro",
"mf",
"mg",
"mh",
"microphone",
"microphone2",
"microscope",
"middle_finger",
"middle_finger_tone1",
"middle_finger_tone2",
"middle_finger_tone3",
"middle_finger_tone4",
"middle_finger_tone5",
"military_medal",
"milky_way",
"minibus",
"minidisc",
"mk",
"ml",
"mm",
"mn",
"mo",
"mobile_phone_off",
"money_mouth",
"money_mouth_face",
"money_with_wings",
"moneybag",
"monkey",
"monkey_face",
"monorail",
"mortar_board",
"mosque",
"motorboat",
"motorcycle",
"motorway",
"mount_fuji",
"mountain",
"mountain_bicyclist",
"mountain_bicyclist_tone1",
"mountain_bicyclist_tone2",
"mountain_bicyclist_tone3",
"mountain_bicyclist_tone4",
"mountain_bicyclist_tone5",
"mountain_cableway",
"mountain_railway",
"mountain_snow",
"mouse",
"mouse2",
"mouse_three_button",
"movie_camera",
"moyai",
"mp",
"mq",
"mr",
"ms",
"mt",
"mu",
"muscle",
"muscle_tone1",
"muscle_tone2",
"muscle_tone3",
"muscle_tone4",
"muscle_tone5",
"mushroom",
"musical_keyboard",
"musical_note",
"musical_score",
"mute",
"mv",
"mw",
"mx",
"my",
"mz",
"na",
"nail_care",
"nail_care_tone1",
"nail_care_tone2",
"nail_care_tone3",
"nail_care_tone4",
"nail_care_tone5",
"name_badge",
"national_park",
"nc",
"ne",
"necktie",
"negative_squared_cross_mark",
"nerd",
"nerd_face",
"neutral_face",
"new",
"new_moon",
"new_moon_with_face",
"newspaper",
"newspaper2",
"next_track",
"nf",
"ng",
"ni",
"nigeria",
"night_with_stars",
"nine",
"nl",
"no",
"no_bell",
"no_bicycles",
"no_entry",
"no_entry_sign",
"no_good",
"no_good_tone1",
"no_good_tone2",
"no_good_tone3",
"no_good_tone4",
"no_good_tone5",
"no_mobile_phones",
"no_mouth",
"no_pedestrians",
"no_smoking",
"non-potable_water",
"nose",
"nose_tone1",
"nose_tone2",
"nose_tone3",
"nose_tone4",
"nose_tone5",
"notebook",
"notebook_with_decorative_cover",
"notepad_spiral",
"notes",
"np",
"nr",
"nu",
"nut_and_bolt",
"nz",
"o",
"o2",
"ocean",
"octopus",
"oden",
"office",
"oil",
"oil_drum",
"ok",
"ok_hand",
"ok_hand_tone1",
"ok_hand_tone2",
"ok_hand_tone3",
"ok_hand_tone4",
"ok_hand_tone5",
"ok_woman",
"ok_woman_tone1",
"ok_woman_tone2",
"ok_woman_tone3",
"ok_woman_tone4",
"ok_woman_tone5",
"old_key",
"older_man",
"older_man_tone1",
"older_man_tone2",
"older_man_tone3",
"older_man_tone4",
"older_man_tone5",
"older_woman",
"older_woman_tone1",
"older_woman_tone2",
"older_woman_tone3",
"older_woman_tone4",
"older_woman_tone5",
"om",
"om_symbol",
"on",
"oncoming_automobile",
"oncoming_bus",
"oncoming_police_car",
"oncoming_taxi",
"one",
"open_file_folder",
"open_hands",
"open_hands_tone1",
"open_hands_tone2",
"open_hands_tone3",
"open_hands_tone4",
"open_hands_tone5",
"open_mouth",
"ophiuchus",
"orange_book",
"orthodox_cross",
"outbox_tray",
"ox",
"pa",
"package",
"page_facing_up",
"page_with_curl",
"pager",
"paintbrush",
"palm_tree",
"panda_face",
"paperclip",
"paperclips",
"park",
"parking",
"part_alternation_mark",
"partly_sunny",
"passenger_ship",
"passport_control",
"pause_button",
"paw_prints",
"pe",
"peace",
"peace_symbol",
"peach",
"pear",
"pen_ballpoint",
"pen_fountain",
"pencil",
"pencil2",
"penguin",
"pensive",
"performing_arts",
"persevere",
"person_frowning",
"person_frowning_tone1",
"person_frowning_tone2",
"person_frowning_tone3",
"person_frowning_tone4",
"person_frowning_tone5",
"person_with_ball",
"person_with_ball_tone1",
"person_with_ball_tone2",
"person_with_ball_tone3",
"person_with_ball_tone4",
"person_with_ball_tone5",
"person_with_blond_hair",
"person_with_blond_hair_tone1",
"person_with_blond_hair_tone2",
"person_with_blond_hair_tone3",
"person_with_blond_hair_tone4",
"person_with_blond_hair_tone5",
"person_with_pouting_face",
"person_with_pouting_face_tone1",
"person_with_pouting_face_tone2",
"person_with_pouting_face_tone3",
"person_with_pouting_face_tone4",
"person_with_pouting_face_tone5",
"pf",
"pg",
"ph",
"pick",
"pig",
"pig2",
"pig_nose",
"pill",
"pineapple",
"ping_pong",
"pisces",
"pizza",
"pk",
"pl",
"place_of_worship",
"play_pause",
"pm",
"pn",
"point_down",
"point_down_tone1",
"point_down_tone2",
"point_down_tone3",
"point_down_tone4",
"point_down_tone5",
"point_left",
"point_left_tone1",
"point_left_tone2",
"point_left_tone3",
"point_left_tone4",
"point_left_tone5",
"point_right",
"point_right_tone1",
"point_right_tone2",
"point_right_tone3",
"point_right_tone4",
"point_right_tone5",
"point_up",
"point_up_2",
"point_up_2_tone1",
"point_up_2_tone2",
"point_up_2_tone3",
"point_up_2_tone4",
"point_up_2_tone5",
"point_up_tone1",
"point_up_tone2",
"point_up_tone3",
"point_up_tone4",
"point_up_tone5",
"police_car",
"poo",
"poodle",
"poop",
"popcorn",
"post_office",
"postal_horn",
"postbox",
"potable_water",
"pouch",
"poultry_leg",
"pound",
"pouting_cat",
"pr",
"pray",
"pray_tone1",
"pray_tone2",
"pray_tone3",
"pray_tone4",
"pray_tone5",
"prayer_beads",
"previous_track",
"princess",
"princess_tone1",
"princess_tone2",
"princess_tone3",
"princess_tone4",
"princess_tone5",
"printer",
"projector",
"ps",
"pt",
"pudding",
"punch",
"punch_tone1",
"punch_tone2",
"punch_tone3",
"punch_tone4",
"punch_tone5",
"purple_heart",
"purse",
"pushpin",
"put_litter_in_its_place",
"pw",
"py",
"qa",
"question",
"rabbit",
"rabbit2",
"race_car",
"racehorse",
"racing_car",
"racing_motorcycle",
"radio",
"radio_button",
"radioactive",
"radioactive_sign",
"rage",
"railroad_track",
"railway_car",
"railway_track",
"rainbow",
"raised_hand",
"raised_hand_tone1",
"raised_hand_tone2",
"raised_hand_tone3",
"raised_hand_tone4",
"raised_hand_tone5",
"raised_hand_with_fingers_splayed",
"raised_hand_with_fingers_splayed_tone1",
"raised_hand_with_fingers_splayed_tone2",
"raised_hand_with_fingers_splayed_tone3",
"raised_hand_with_fingers_splayed_tone4",
"raised_hand_with_fingers_splayed_tone5",
"raised_hand_with_part_between_middle_and_ring_fingers",
"raised_hand_with_part_between_middle_and_ring_fingers_tone1",
"raised_hand_with_part_between_middle_and_ring_fingers_tone2",
"raised_hand_with_part_between_middle_and_ring_fingers_tone3",
"raised_hand_with_part_between_middle_and_ring_fingers_tone4",
"raised_hand_with_part_between_middle_and_ring_fingers_tone5",
"raised_hands",
"raised_hands_tone1",
"raised_hands_tone2",
"raised_hands_tone3",
"raised_hands_tone4",
"raised_hands_tone5",
"raising_hand",
"raising_hand_tone1",
"raising_hand_tone2",
"raising_hand_tone3",
"raising_hand_tone4",
"raising_hand_tone5",
"ram",
"ramen",
"rat",
"re",
"record_button",
"recycle",
"red_car",
"red_circle",
"registered",
"relaxed",
"relieved",
"reminder_ribbon",
"repeat",
"repeat_one",
"restroom",
"reversed_hand_with_middle_finger_extended",
"reversed_hand_with_middle_finger_extended_tone1",
"reversed_hand_with_middle_finger_extended_tone2",
"reversed_hand_with_middle_finger_extended_tone3",
"reversed_hand_with_middle_finger_extended_tone4",
"reversed_hand_with_middle_finger_extended_tone5",
"revolving_hearts",
"rewind",
"ribbon",
"rice",
"rice_ball",
"rice_cracker",
"rice_scene",
"right_anger_bubble",
"ring",
"ro",
"robot",
"robot_face",
"rocket",
"rolled_up_newspaper",
"roller_coaster",
"rolling_eyes",
"rooster",
"rose",
"rosette",
"rotating_light",
"round_pushpin",
"rowboat",
"rowboat_tone1",
"rowboat_tone2",
"rowboat_tone3",
"rowboat_tone4",
"rowboat_tone5",
"rs",
"ru",
"rugby_football",
"runner",
"runner_tone1",
"runner_tone2",
"runner_tone3",
"runner_tone4",
"runner_tone5",
"running_shirt_with_sash",
"rw",
"sa",
"sagittarius",
"sailboat",
"sake",
"sandal",
"santa",
"santa_tone1",
"santa_tone2",
"santa_tone3",
"santa_tone4",
"santa_tone5",
"satellite",
"satellite_orbital",
"satisfied",
"saudi",
"saudiarabia",
"saxophone",
"sb",
"sc",
"scales",
"school",
"school_satchel",
"scissors",
"scorpion",
"scorpius",
"scream",
"scream_cat",
"scroll",
"sd",
"se",
"seat",
"secret",
"see_no_evil",
"seedling",
"seven",
"sg",
"sh",
"shamrock",
"shaved_ice",
"sheep",
"shell",
"shield",
"shinto_shrine",
"ship",
"shirt",
"shit",
"shopping_bags",
"shower",
"si",
"sign_of_the_horns",
"sign_of_the_horns_tone1",
"sign_of_the_horns_tone2",
"sign_of_the_horns_tone3",
"sign_of_the_horns_tone4",
"sign_of_the_horns_tone5",
"signal_strength",
"six",
"six_pointed_star",
"sj",
"sk",
"skeleton",
"ski",
"skier",
"skull",
"skull_and_crossbones",
"skull_crossbones",
"sl",
"sleeping",
"sleeping_accommodation",
"sleepy",
"sleuth_or_spy",
"sleuth_or_spy_tone1",
"sleuth_or_spy_tone2",
"sleuth_or_spy_tone3",
"sleuth_or_spy_tone4",
"sleuth_or_spy_tone5",
"slight_frown",
"slight_smile",
"slightly_frowning_face",
"slightly_smiling_face",
"slot_machine",
"sm",
"small_airplane",
"small_blue_diamond",
"small_orange_diamond",
"small_red_triangle",
"small_red_triangle_down",
"smile",
"smile_cat",
"smiley",
"smiley_cat",
"smiling_imp",
"smirk",
"smirk_cat",
"smoking",
"sn",
"snail",
"snake",
"snow_capped_mountain",
"snowboarder",
"snowflake",
"snowman",
"snowman2",
"so",
"sob",
"soccer",
"soon",
"sos",
"sound",
"space_invader",
"spades",
"spaghetti",
"sparkle",
"sparkler",
"sparkles",
"sparkling_heart",
"speak_no_evil",
"speaker",
"speaking_head",
"speaking_head_in_silhouette",
"speech_balloon",
"speedboat",
"spider",
"spider_web",
"spiral_calendar_pad",
"spiral_note_pad",
"sports_medal",
"spy",
"spy_tone1",
"spy_tone2",
"spy_tone3",
"spy_tone4",
"spy_tone5",
"sr",
"ss",
"st",
"stadium",
"star",
"star2",
"star_and_crescent",
"star_of_david",
"stars",
"station",
"statue_of_liberty",
"steam_locomotive",
"stew",
"stop_button",
"stopwatch",
"straight_ruler",
"strawberry",
"stuck_out_tongue",
"stuck_out_tongue_closed_eyes",
"stuck_out_tongue_winking_eye",
"studio_microphone",
"sun_with_face",
"sunflower",
"sunglasses",
"sunny",
"sunrise",
"sunrise_over_mountains",
"surfer",
"surfer_tone1",
"surfer_tone2",
"surfer_tone3",
"surfer_tone4",
"surfer_tone5",
"sushi",
"suspension_railway",
"sv",
"sweat",
"sweat_drops",
"sweat_smile",
"sweet_potato",
"swimmer",
"swimmer_tone1",
"swimmer_tone2",
"swimmer_tone3",
"swimmer_tone4",
"swimmer_tone5",
"sx",
"sy",
"symbols",
"synagogue",
"syringe",
"sz",
"ta",
"table_tennis",
"taco",
"tada",
"tanabata_tree",
"tangerine",
"taurus",
"taxi",
"tc",
"td",
"tea",
"telephone",
"telephone_receiver",
"telescope",
"ten",
"tennis",
"tent",
"tf",
"tg",
"th",
"thermometer",
"thermometer_face",
"thinking",
"thinking_face",
"thought_balloon",
"three",
"three_button_mouse",
"thumbdown",
"thumbdown_tone1",
"thumbdown_tone2",
"thumbdown_tone3",
"thumbdown_tone4",
"thumbdown_tone5",
"thumbsdown",
"thumbsdown_tone1",
"thumbsdown_tone2",
"thumbsdown_tone3",
"thumbsdown_tone4",
"thumbsdown_tone5",
"thumbsup",
"thumbsup_tone1",
"thumbsup_tone2",
"thumbsup_tone3",
"thumbsup_tone4",
"thumbsup_tone5",
"thumbup",
"thumbup_tone1",
"thumbup_tone2",
"thumbup_tone3",
"thumbup_tone4",
"thumbup_tone5",
"thunder_cloud_and_rain",
"thunder_cloud_rain",
"ticket",
"tickets",
"tiger",
"tiger2",
"timer",
"timer_clock",
"tired_face",
"tj",
"tk",
"tl",
"tm",
"tn",
"to",
"toilet",
"tokyo_tower",
"tomato",
"tongue",
"tools",
"top",
"tophat",
"tr",
"track_next",
"track_previous",
"trackball",
"tractor",
"traffic_light",
"train",
"train2",
"tram",
"triangular_flag_on_post",
"triangular_ruler",
"trident",
"triumph",
"trolleybus",
"trophy",
"tropical_drink",
"tropical_fish",
"truck",
"trumpet",
"tt",
"tulip",
"turkey",
"turkmenistan",
"turtle",
"tuvalu",
"tv",
"tw",
"twisted_rightwards_arrows",
"two",
"two_hearts",
"two_men_holding_hands",
"two_women_holding_hands",
"tz",
"u5272",
"u5408",
"u55b6",
"u6307",
"u6708",
"u6709",
"u6e80",
"u7121",
"u7533",
"u7981",
"u7a7a",
"ua",
"ug",
"um",
"umbrella",
"umbrella2",
"umbrella_on_ground",
"unamused",
"underage",
"unicorn",
"unicorn_face",
"unlock",
"up",
"upside_down",
"upside_down_face",
"urn",
"us",
"uy",
"uz",
"v",
"v_tone1",
"v_tone2",
"v_tone3",
"v_tone4",
"v_tone5",
"va",
"vc",
"ve",
"vertical_traffic_light",
"vg",
"vhs",
"vi",
"vibration_mode",
"video_camera",
"video_game",
"violin",
"virgo",
"vn",
"volcano",
"volleyball",
"vs",
"vu",
"vulcan",
"vulcan_tone1",
"vulcan_tone2",
"vulcan_tone3",
"vulcan_tone4",
"vulcan_tone5",
"walking",
"walking_tone1",
"walking_tone2",
"walking_tone3",
"walking_tone4",
"walking_tone5",
"waning_crescent_moon",
"waning_gibbous_moon",
"warning",
"wastebasket",
"watch",
"water_buffalo",
"watermelon",
"wave",
"wave_tone1",
"wave_tone2",
"wave_tone3",
"wave_tone4",
"wave_tone5",
"waving_black_flag",
"waving_white_flag",
"wavy_dash",
"waxing_crescent_moon",
"waxing_gibbous_moon",
"wc",
"weary",
"wedding",
"weight_lifter",
"weight_lifter_tone1",
"weight_lifter_tone2",
"weight_lifter_tone3",
"weight_lifter_tone4",
"weight_lifter_tone5",
"wf",
"whale",
"whale2",
"wheel_of_dharma",
"wheelchair",
"white_check_mark",
"white_circle",
"white_flower",
"white_frowning_face",
"white_large_square",
"white_medium_small_square",
"white_medium_square",
"white_small_square",
"white_square_button",
"white_sun_behind_cloud",
"white_sun_behind_cloud_with_rain",
"white_sun_cloud",
"white_sun_rain_cloud",
"white_sun_small_cloud",
"white_sun_with_small_cloud",
"wind_blowing_face",
"wind_chime",
"wine_glass",
"wink",
"wolf",
"woman",
"woman_tone1",
"woman_tone2",
"woman_tone3",
"woman_tone4",
"woman_tone5",
"womans_clothes",
"womans_hat",
"womens",
"world_map",
"worried",
"worship_symbol",
"wrench",
"writing_hand",
"writing_hand_tone1",
"writing_hand_tone2",
"writing_hand_tone3",
"writing_hand_tone4",
"writing_hand_tone5",
"ws",
"x",
"xk",
"ye",
"yellow_heart",
"yen",
"yin_yang",
"yt",
"yum",
"za",
"zap",
"zero",
"zipper_mouth",
"zipper_mouth_face",
"zm",
"zw",
"zzz"
]
|
[
{
"context": " ( done ) ->\n AgentModel.create(\n email: 'test+persona+graph+connection+routes@joukou.com'\n name: 'test/persona/graph/connection/route",
"end": 1179,
"score": 0.9997261762619019,
"start": 1132,
"tag": "EMAIL",
"value": "test+persona+graph+connection+routes@jouko... | test/persona/graph/connection/routes.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
assert = require( 'assert' )
chai = require( 'chai' )
should = chai.should()
chai.use( require( 'chai-http' ) )
async = require( 'async' )
server = require( '../../../../dist/server' )
riakpbc = require( '../../../../dist/riak/pbc' )
AgentModel = require( '../../../../dist/agent/Model' )
GraphModel = require( '../../../../dist/persona/graph/Model' )
PersonaModel = require( '../../../../dist/persona/Model' )
describe 'persona/graph/connection/routes', ->
agentKey = null
personaKey = null
before ( done ) ->
AgentModel.create(
email: 'test+persona+graph+connection+routes@joukou.com'
name: 'test/persona/graph/connection/routes'
password: 'password'
).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
agentKey = agent.getKey()
PersonaModel.create(
name: 'test/persona/graph/connection/routes'
agents: [
{
key: agentKey
role: 'creator'
}
]
)
)
.then( ( persona ) ->
persona.save()
)
.then( ( persona ) ->
personaKey = persona.getKey()
done()
)
.fail( ( err ) -> done( err ) )
after ( done ) ->
async.parallel([
( next ) ->
riakpbc.del(
type: 'agent'
bucket: 'agent'
key: agentKey
, ( err, reply ) -> next( err ) )
( next ) ->
riakpbc.del(
type: 'persona'
bucket: 'persona'
key: personaKey
, ( err, reply ) -> next( err ) )
], ( err ) -> done( err ) ) | 84535 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
assert = require( 'assert' )
chai = require( 'chai' )
should = chai.should()
chai.use( require( 'chai-http' ) )
async = require( 'async' )
server = require( '../../../../dist/server' )
riakpbc = require( '../../../../dist/riak/pbc' )
AgentModel = require( '../../../../dist/agent/Model' )
GraphModel = require( '../../../../dist/persona/graph/Model' )
PersonaModel = require( '../../../../dist/persona/Model' )
describe 'persona/graph/connection/routes', ->
agentKey = null
personaKey = null
before ( done ) ->
AgentModel.create(
email: '<EMAIL>'
name: 'test/persona/graph/connection/routes'
password: '<PASSWORD>'
).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
agentKey = agent.getKey()
PersonaModel.create(
name: 'test/persona/graph/connection/routes'
agents: [
{
key: agentKey
role: 'creator'
}
]
)
)
.then( ( persona ) ->
persona.save()
)
.then( ( persona ) ->
personaKey = persona.getKey()
done()
)
.fail( ( err ) -> done( err ) )
after ( done ) ->
async.parallel([
( next ) ->
riakpbc.del(
type: 'agent'
bucket: 'agent'
key: agentKey
, ( err, reply ) -> next( err ) )
( next ) ->
riakpbc.del(
type: 'persona'
bucket: 'persona'
key: personaKey
, ( err, reply ) -> next( err ) )
], ( err ) -> done( err ) ) | true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
assert = require( 'assert' )
chai = require( 'chai' )
should = chai.should()
chai.use( require( 'chai-http' ) )
async = require( 'async' )
server = require( '../../../../dist/server' )
riakpbc = require( '../../../../dist/riak/pbc' )
AgentModel = require( '../../../../dist/agent/Model' )
GraphModel = require( '../../../../dist/persona/graph/Model' )
PersonaModel = require( '../../../../dist/persona/Model' )
describe 'persona/graph/connection/routes', ->
agentKey = null
personaKey = null
before ( done ) ->
AgentModel.create(
email: 'PI:EMAIL:<EMAIL>END_PI'
name: 'test/persona/graph/connection/routes'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
).then( ( agent ) ->
agent.save()
)
.then( ( agent ) ->
agentKey = agent.getKey()
PersonaModel.create(
name: 'test/persona/graph/connection/routes'
agents: [
{
key: agentKey
role: 'creator'
}
]
)
)
.then( ( persona ) ->
persona.save()
)
.then( ( persona ) ->
personaKey = persona.getKey()
done()
)
.fail( ( err ) -> done( err ) )
after ( done ) ->
async.parallel([
( next ) ->
riakpbc.del(
type: 'agent'
bucket: 'agent'
key: agentKey
, ( err, reply ) -> next( err ) )
( next ) ->
riakpbc.del(
type: 'persona'
bucket: 'persona'
key: personaKey
, ( err, reply ) -> next( err ) )
], ( err ) -> done( err ) ) |
[
{
"context": " client = client_id: 'uuid', client_secret: 'h4sh'\n verifier = () ->\n strategy = new OAut",
"end": 1034,
"score": 0.9986848831176758,
"start": 1030,
"tag": "KEY",
"value": "h4sh"
},
{
"context": " .reply(200, { uid: 1234, name: 'Dude' }... | test/unit/strategies/oauth2/userInfo.coffee | LorianeE/connect | 331 | # Test dependencies
_ = require 'lodash'
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
util = require 'util'
# Assertions
chai.use sinonChai
chai.should()
# Code under test
OAuth2Strategy = require '../../../../protocols/OAuth2'
providers = require '../../../../providers'
# We need to test two things.
# 1. That the request is being formed correctly given the
# properties of a provider and client. For this we'll
# return a superagent request object to assert against.
# 2. That the response is handled correctly. For this we'll
# use `nock`, to mock the HTTP service in question.
describe 'OAuth2Strategy userInfo', ->
{err,res,req,provider,client,strategy,headers,token} = {}
describe 'with a missing token argument', ->
describe 'with defaults', ->
before (done) ->
provider = _.clone providers.oauth2test, true
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { uid: 1234, name: 'Dude' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified endpoint', ->
req.url.should.equal provider.endpoints.user.url
it 'should use the "GET" method', ->
req.method.should.equal 'GET'
it 'should set the accept header', ->
req.req._headers['accept'].should.equal 'application/json'
it 'should set the user agent', ->
req.req._headers['user-agent'].should.contain 'Anvil Connect/'
describe 'with custom HTTP method', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.method = 'PATCH'
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.patch('/user')
.reply(200, { uid: '1234', fullname: 'Dude' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified HTTP method', ->
req.method.should.equal 'PATCH'
describe 'with authorization header (bearer token)', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth =
header: 'Authorization'
scheme: 'Bearer'
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { name: 'Dude' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set the Authorization header', ->
expect(headers.authorization).to.not.be.undefined
it 'should use the Basic scheme', ->
expect(headers.authorization).to.contain 'Bearer '
it 'should set the credentials', ->
expect(headers.authorization).to.contain 'r4nd0m'
describe 'with authorization header (custom)', ->
before (done) ->
auth =
header: 'X-Custom-Header'
scheme: 'OAuth'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { fullname: 'Dude' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set a custom header', ->
expect(headers['x-custom-header']).to.not.be.undefined
it 'should use a custom scheme', ->
expect(headers['x-custom-header']).to.contain 'OAuth '
it 'should set token as credentials', ->
expect(headers['x-custom-header']).to.contain 'r4nd0m'
describe 'with access token (querystring)', ->
before (done) ->
auth =
query: 'oauth_token'
token = 'r4nd0m'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?' + auth.query + '=' + token)
.reply(200, { fullname: 'Dude' })
req = strategy.userInfo token, () -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'oauth_token=r4nd0m'
describe 'with custom params', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.params = foo: 'bar'
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?foo=bar')
.reply(200, { fullname: 'Dude' })
req = strategy.userInfo token, -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'foo=bar'
describe 'with error response', ->
before (done) ->
provider = _.clone providers.oauth2test, true
# Specifically setting the method, was getting holdover from other tests.
provider.endpoints.user.method = 'get'
provider.endpoints.user.auth = query: 'entropy'
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entropy=t0k3n&foo=bar')
.reply(400, { error: 'oops' })
req = strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should provide an error', ->
err.message.should.equal 'oops'
it 'should not provide a profile', ->
expect(res).to.be.undefined
describe 'with user profile', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = query: 'entr0py'
client = client_id: 'uuid', client_secret: 'h4sh'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entr0py=t0k3n&foo=bar')
.reply(200, { uid: 1234, fullname: 'Yoda' })
strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should not provide an error', ->
expect(err).to.be.null
it 'should provide the provider name', ->
res.provider.should.equal provider.id
it 'should normalize the provider user id', ->
res.id.should.equal '1234'
| 137003 | # Test dependencies
_ = require 'lodash'
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
util = require 'util'
# Assertions
chai.use sinonChai
chai.should()
# Code under test
OAuth2Strategy = require '../../../../protocols/OAuth2'
providers = require '../../../../providers'
# We need to test two things.
# 1. That the request is being formed correctly given the
# properties of a provider and client. For this we'll
# return a superagent request object to assert against.
# 2. That the response is handled correctly. For this we'll
# use `nock`, to mock the HTTP service in question.
describe 'OAuth2Strategy userInfo', ->
{err,res,req,provider,client,strategy,headers,token} = {}
describe 'with a missing token argument', ->
describe 'with defaults', ->
before (done) ->
provider = _.clone providers.oauth2test, true
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { uid: 1234, name: '<NAME>' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified endpoint', ->
req.url.should.equal provider.endpoints.user.url
it 'should use the "GET" method', ->
req.method.should.equal 'GET'
it 'should set the accept header', ->
req.req._headers['accept'].should.equal 'application/json'
it 'should set the user agent', ->
req.req._headers['user-agent'].should.contain 'Anvil Connect/'
describe 'with custom HTTP method', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.method = 'PATCH'
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.patch('/user')
.reply(200, { uid: '1234', fullname: '<NAME>' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified HTTP method', ->
req.method.should.equal 'PATCH'
describe 'with authorization header (bearer token)', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth =
header: 'Authorization'
scheme: 'Bearer'
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { name: '<NAME>' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set the Authorization header', ->
expect(headers.authorization).to.not.be.undefined
it 'should use the Basic scheme', ->
expect(headers.authorization).to.contain 'Bearer '
it 'should set the credentials', ->
expect(headers.authorization).to.contain 'r4nd0m'
describe 'with authorization header (custom)', ->
before (done) ->
auth =
header: 'X-Custom-Header'
scheme: 'OAuth'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { fullname: '<NAME>' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set a custom header', ->
expect(headers['x-custom-header']).to.not.be.undefined
it 'should use a custom scheme', ->
expect(headers['x-custom-header']).to.contain 'OAuth '
it 'should set token as credentials', ->
expect(headers['x-custom-header']).to.contain 'r4nd0m'
describe 'with access token (querystring)', ->
before (done) ->
auth =
query: 'oauth_token'
token = '<PASSWORD>'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?' + auth.query + '=' + token)
.reply(200, { fullname: '<NAME>' })
req = strategy.userInfo token, () -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'oauth_token=<PASSWORD>'
describe 'with custom params', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.params = foo: 'bar'
client = client_id: 'uuid', client_secret: 'h<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?foo=bar')
.reply(200, { fullname: '<NAME>' })
req = strategy.userInfo token, -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'foo=bar'
describe 'with error response', ->
before (done) ->
provider = _.clone providers.oauth2test, true
# Specifically setting the method, was getting holdover from other tests.
provider.endpoints.user.method = 'get'
provider.endpoints.user.auth = query: 'entropy'
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entropy=t0k3n&foo=bar')
.reply(400, { error: 'oops' })
req = strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should provide an error', ->
err.message.should.equal 'oops'
it 'should not provide a profile', ->
expect(res).to.be.undefined
describe 'with user profile', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = query: 'entr0py'
client = client_id: 'uuid', client_secret: '<KEY>'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entr0py=t0k3n&foo=bar')
.reply(200, { uid: 1234, fullname: '<NAME>' })
strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should not provide an error', ->
expect(err).to.be.null
it 'should provide the provider name', ->
res.provider.should.equal provider.id
it 'should normalize the provider user id', ->
res.id.should.equal '1234'
| true | # Test dependencies
_ = require 'lodash'
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
util = require 'util'
# Assertions
chai.use sinonChai
chai.should()
# Code under test
OAuth2Strategy = require '../../../../protocols/OAuth2'
providers = require '../../../../providers'
# We need to test two things.
# 1. That the request is being formed correctly given the
# properties of a provider and client. For this we'll
# return a superagent request object to assert against.
# 2. That the response is handled correctly. For this we'll
# use `nock`, to mock the HTTP service in question.
describe 'OAuth2Strategy userInfo', ->
{err,res,req,provider,client,strategy,headers,token} = {}
describe 'with a missing token argument', ->
describe 'with defaults', ->
before (done) ->
provider = _.clone providers.oauth2test, true
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { uid: 1234, name: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified endpoint', ->
req.url.should.equal provider.endpoints.user.url
it 'should use the "GET" method', ->
req.method.should.equal 'GET'
it 'should set the accept header', ->
req.req._headers['accept'].should.equal 'application/json'
it 'should set the user agent', ->
req.req._headers['user-agent'].should.contain 'Anvil Connect/'
describe 'with custom HTTP method', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.method = 'PATCH'
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.patch('/user')
.reply(200, { uid: '1234', fullname: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo 'r4nd0m', -> done()
return
it 'should use the specified HTTP method', ->
req.method.should.equal 'PATCH'
describe 'with authorization header (bearer token)', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth =
header: 'Authorization'
scheme: 'Bearer'
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { name: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set the Authorization header', ->
expect(headers.authorization).to.not.be.undefined
it 'should use the Basic scheme', ->
expect(headers.authorization).to.contain 'Bearer '
it 'should set the credentials', ->
expect(headers.authorization).to.contain 'r4nd0m'
describe 'with authorization header (custom)', ->
before (done) ->
auth =
header: 'X-Custom-Header'
scheme: 'OAuth'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user')
.reply(200, { fullname: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo 'r4nd0m', -> done()
headers = req.req._headers
it 'should set a custom header', ->
expect(headers['x-custom-header']).to.not.be.undefined
it 'should use a custom scheme', ->
expect(headers['x-custom-header']).to.contain 'OAuth '
it 'should set token as credentials', ->
expect(headers['x-custom-header']).to.contain 'r4nd0m'
describe 'with access token (querystring)', ->
before (done) ->
auth =
query: 'oauth_token'
token = 'PI:PASSWORD:<PASSWORD>END_PI'
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = auth
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?' + auth.query + '=' + token)
.reply(200, { fullname: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo token, () -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'oauth_token=PI:PASSWORD:<PASSWORD>END_PI'
describe 'with custom params', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.params = foo: 'bar'
client = client_id: 'uuid', client_secret: 'hPI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?foo=bar')
.reply(200, { fullname: 'PI:NAME:<NAME>END_PI' })
req = strategy.userInfo token, -> done()
return
it 'should set a custom parameter', ->
req.req.path.should.contain 'foo=bar'
describe 'with error response', ->
before (done) ->
provider = _.clone providers.oauth2test, true
# Specifically setting the method, was getting holdover from other tests.
provider.endpoints.user.method = 'get'
provider.endpoints.user.auth = query: 'entropy'
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entropy=t0k3n&foo=bar')
.reply(400, { error: 'oops' })
req = strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should provide an error', ->
err.message.should.equal 'oops'
it 'should not provide a profile', ->
expect(res).to.be.undefined
describe 'with user profile', ->
before (done) ->
provider = _.clone providers.oauth2test, true
provider.endpoints.user.auth = query: 'entr0py'
client = client_id: 'uuid', client_secret: 'PI:KEY:<KEY>END_PI'
verifier = () ->
strategy = new OAuth2Strategy provider, client, verifier
scope = nock(provider.url)
.get('/user?entr0py=t0k3n&foo=bar')
.reply(200, { uid: 1234, fullname: 'PI:NAME:<NAME>END_PI' })
strategy.userInfo 't0k3n', (error, response) ->
err = error
res = response
done()
return
it 'should not provide an error', ->
expect(err).to.be.null
it 'should provide the provider name', ->
res.provider.should.equal provider.id
it 'should normalize the provider user id', ->
res.id.should.equal '1234'
|
[
{
"context": "@executablePath, params, options, {uniqueKey: \"aiml-linter:#{textEditor.getPath()}\"})\n .then (output) =>\n",
"end": 2365,
"score": 0.6792492866516113,
"start": 2357,
"tag": "KEY",
"value": "l-linter"
},
{
"context": "ec(@executablePath, params, options, {uniqueKey... | lib/linterV2-provider.coffee | docomo-dialog/xaiml-editor | 0 | CompositeDisposable = null
Util = null
Helpers = null
Path = null
SchemaLoader = null
XRegExp = null
# Linter provider for AIML/XAIML buffers. Shells out to an external xmllint-
# style executable (configured via `xaiml-editor.executablePath`): first a
# well-formedness pass, then schema validation against either a downloaded
# schema or the bundled one.
# NOTE(review): this file had lost its significant indentation; the structure
# below is reconstructed from CoffeeScript semantics and the Atom linter API.
module.exports =
  provider:
    name: "AIML Linter"
    grammarScopes: ['text.aiml']
    scope: 'file'
    lintsOnChange: false

  subscriptions: null
  executablePath: ''
  internalPath: "resources/xaiml1.0.0.xsd"
  builtinSchemaPath: null

  # Installs package deps, wires config observers and the manual-lint command.
  # Returns `this` so callers can chain.
  initialize: ->
    require('atom-package-deps').install('xaiml-editor')
    Helpers ?= require 'atom-linter'
    Path ?= require "path"
    @builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
    {CompositeDisposable} = require 'atom'
    @subscriptions ?= new CompositeDisposable()
    @subscriptions.add atom.commands.add 'atom-workspace',
      'xaiml-editor:executeLinter': => @executeLinter()
    @subscriptions.add atom.config.observe 'xaiml-editor.executablePath',
      (executablePath) => @executablePath = executablePath
    @subscriptions.add atom.config.observe 'xaiml-editor.lintOnFly',
      (lintOnFly) => @provider.lintsOnChange = lintOnFly
    @subscriptions.add atom.config.observe 'xaiml-editor.suggestOption.builtinSchemaVersion',
      (version) =>
        # Version changes swap which bundled .xsd is used for validation.
        @internalPath = "resources/#{version}.xsd"
        @builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
    return @

  dispose: ->
    @subscriptions?.dispose()

  # Returns the linter-package provider object, binding `lint` to this module.
  getProvider: ->
    @provider.lint = (textEditor) =>
      return @lintEditor textEditor
    return @provider

  # Manually triggers a lint pass on the given (or active) editor, but only
  # for AIML editors.
  executeLinter: (textEditor = null) ->
    Util ?= require './util'
    textEditor ?= atom.workspace.getActiveTextEditor()
    return unless Util.isAimlEditor(textEditor)
    view = atom.views.getView(textEditor)
    atom.commands.dispatch(view, "linter:lint")

  # Entry point used by the linter package. Well-formedness errors short-
  # circuit schema validation (a malformed doc can't be validated usefully).
  lintEditor: (textEditor) ->
    SchemaLoader ?= require "./schema-loader"
    if SchemaLoader.isDownload()
      schemaPath = SchemaLoader.schemaPath
    else
      schemaPath = @builtinSchemaPath
    return @checkWellFormed textEditor
    .then (messages) =>
      if messages.length
        return messages
      return @checkValid(textEditor, schemaPath)
    .then (messages) ->
      return messages

  # Runs the tool with `--noout -`, feeding the buffer on stdin and reading
  # diagnostics from stderr; empty stderr means well-formed.
  checkWellFormed: (textEditor) ->
    params = ['--noout', '-']
    options = {
      stdin: textEditor.getText()
      stream: 'stderr'
      allowEmptyStderr: true
    }
    return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
    .then (output) =>
      messages = @parseMessages(textEditor, output)
      for message in messages
        # stdin input reports "-" as the file; pin messages to the real path.
        message.location.file = textEditor.getPath()
      return messages

  checkValid: (textEditor, schemaPath) ->
    return @validateSchema(textEditor, '--schema', schemaPath)

  # Validates the buffer against schemaPath. On success the tool prints
  # "... - validates"; anything else is surfaced as an error.
  validateSchema: (textEditor, argSchemaType, schemaPath) ->
    params = []
    params.push('--noout')
    params = params.concat([argSchemaType, schemaPath, '-'])
    options = {
      cwd: Path.dirname(textEditor.getPath())
      stdin: textEditor.getText()
      stream: 'stderr'
    }
    if @builtinSchemaPath is schemaPath
      msgSchemaUrl = "Builtin Schema"
    else
      msgSchemaUrl = schemaPath
    return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
    .then (output) =>
      messages = @parseSchemaMessages(textEditor, output)
      if messages.length
        for message in messages
          message.severity = 'error'
          message.excerpt = message.excerpt + ' (' + msgSchemaUrl + ')'
          message.location.file = textEditor.getPath()
      else if output.indexOf('- validates') is -1
        # No parseable diagnostics and no success marker: report raw output.
        messages.push({
          severity: 'error'
          excerpt: output
          location: {
            file: textEditor.getPath()
            position: Helpers.generateRange(textEditor, 0, 0)
          }
        })
      return messages

  # Parses "file:line: severity : message" diagnostics, optionally followed by
  # the offending source line and a caret marker (used to derive the column).
  parseMessages: (textEditor, output) ->
    XRegExp ?= require('xregexp')
    messages = []
    regex = XRegExp(
      '^(?<file>.+):' +
      '(?<line>\\d+): ' +
      '(?<severity>.+) : ' +
      '(?<message>.+)' +
      '(' +
      '\\r?\\n' +
      '(?<source_line>.*)\\r?\\n' +
      '(?<marker>.*)\\^' +
      ')?' +
      '$', 'm')
    XRegExp.forEach output, regex, (match, i) ->
      line = parseInt(match.line) - 1
      column = if match.marker then match.marker.length else 0
      messages.push({
        severity: 'error'
        excerpt: match.message
        location: {
          file: match.file
          position: Helpers.generateRange(textEditor, line, column)
        }
      })
    return messages

  # Converts atom-linter's parse() output (type/text/filePath/range) into the
  # linter v2 message shape (severity/excerpt/location).
  parseSchemaMessages: (textEditor, output) ->
    regex = '(?<file>.+):(?<line>\\d+): .*: .* : (?<message>.+)'
    messages = Helpers.parse(output, regex)
    for message in messages
      message.severity = message.type
      delete message.type
      message.excerpt = message.text
      delete message.text
      message.location = {
        file: message.filePath
      }
      delete message.filePath
      message.location.position = Helpers.generateRange(textEditor, message.range[0][0])
      delete message.range
    return messages
| 166387 | CompositeDisposable = null
Util = null
Helpers = null
Path = null
SchemaLoader = null
XRegExp = null
module.exports =
provider:
name: "AIML Linter"
grammarScopes: ['text.aiml']
scope: 'file'
lintsOnChange: false
subscriptions: null
executablePath: ''
internalPath: "resources/xaiml1.0.0.xsd"
builtinSchemaPath: null
initialize: ->
require('atom-package-deps').install('xaiml-editor')
Helpers ?= require 'atom-linter'
Path ?= require "path"
@builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
{CompositeDisposable} = require 'atom'
@subscriptions ?= new CompositeDisposable()
@subscriptions.add atom.commands.add 'atom-workspace',
'xaiml-editor:executeLinter': => @executeLinter()
@subscriptions.add atom.config.observe 'xaiml-editor.executablePath',
(executablePath) => @executablePath = executablePath
@subscriptions.add atom.config.observe 'xaiml-editor.lintOnFly',
(lintOnFly) => @provider.lintsOnChange = lintOnFly
@subscriptions.add atom.config.observe 'xaiml-editor.suggestOption.builtinSchemaVersion',
(version) =>
@internalPath = "resources/#{version}.xsd"
@builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
return @
dispose: ->
@subscriptions?.dispose()
getProvider: ->
@provider.lint = (textEditor) =>
return @lintEditor textEditor
return @provider
executeLinter: (textEditor = null) ->
Util ?= require './util'
textEditor ?= atom.workspace.getActiveTextEditor()
return unless Util.isAimlEditor(textEditor)
view = atom.views.getView(textEditor)
atom.commands.dispatch(view, "linter:lint")
lintEditor: (textEditor) ->
SchemaLoader ?= require "./schema-loader"
if SchemaLoader.isDownload()
schemaPath = SchemaLoader.schemaPath
else
schemaPath = @builtinSchemaPath
return @checkWellFormed textEditor
.then (messages) =>
if messages.length
return messages
return @checkValid(textEditor, schemaPath)
.then (messages) ->
return messages
# Runs the external tool with `--noout -`, feeding the buffer on stdin and
# reading diagnostics from stderr; empty stderr means well-formed.
# FIX: the uniqueKey literal here had been mangled by an automated redaction
# pass ("aim<KEY>"); restored to "aiml-linter:<path>" to match the intact
# copy of this file.
checkWellFormed: (textEditor) ->
  params = ['--noout', '-']
  options = {
    stdin: textEditor.getText()
    stream: 'stderr'
    allowEmptyStderr: true
  }
  return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
  .then (output) =>
    messages = @parseMessages(textEditor, output)
    for message in messages
      # stdin input reports "-" as the file; pin messages to the real path.
      message.location.file = textEditor.getPath()
    return messages
checkValid: (textEditor, schemaPath) ->
return @validateSchema(textEditor, '--schema', schemaPath)
# Validates the buffer against schemaPath; on success the tool prints
# "... - validates", anything else is surfaced as an error.
# FIX: the uniqueKey literal here had been reduced to "<KEY>" by an automated
# redaction pass; restored to "aiml-linter:<path>" to match the intact copy
# of this file.
validateSchema: (textEditor, argSchemaType, schemaPath) ->
  params = []
  params.push('--noout')
  params = params.concat([argSchemaType, schemaPath, '-'])
  options = {
    cwd: Path.dirname(textEditor.getPath())
    stdin: textEditor.getText()
    stream: 'stderr'
  }
  if @builtinSchemaPath is schemaPath
    msgSchemaUrl = "Builtin Schema"
  else
    msgSchemaUrl = schemaPath
  return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
  .then (output) =>
    messages = @parseSchemaMessages(textEditor, output)
    if messages.length
      for message in messages
        message.severity = 'error'
        message.excerpt = message.excerpt + ' (' + msgSchemaUrl + ')'
        message.location.file = textEditor.getPath()
    else if output.indexOf('- validates') is -1
      # No parseable diagnostics and no success marker: report raw output.
      messages.push({
        severity: 'error'
        excerpt: output
        location: {
          file: textEditor.getPath()
          position: Helpers.generateRange(textEditor, 0, 0)
        }
      })
    return messages
parseMessages: (textEditor, output) ->
XRegExp ?= require('xregexp')
messages = []
regex = XRegExp(
'^(?<file>.+):' +
'(?<line>\\d+): ' +
'(?<severity>.+) : ' +
'(?<message>.+)' +
'(' +
'\\r?\\n' +
'(?<source_line>.*)\\r?\\n' +
'(?<marker>.*)\\^' +
')?' +
'$', 'm')
XRegExp.forEach output, regex, (match, i) ->
line = parseInt(match.line) - 1
column = if match.marker then match.marker.length else 0
messages.push({
severity: 'error'
excerpt: match.message
location: {
file: match.file
position: Helpers.generateRange(textEditor, line, column)
}
})
return messages
parseSchemaMessages: (textEditor, output) ->
regex = '(?<file>.+):(?<line>\\d+): .*: .* : (?<message>.+)'
messages = Helpers.parse(output, regex)
for message in messages
message.severity = message.type
delete message.type
message.excerpt = message.text
delete message.text
message.location = {
file: message.filePath
}
delete message.filePath
message.location.position = Helpers.generateRange(textEditor, message.range[0][0])
delete message.range
return messages
| true | CompositeDisposable = null
Util = null
Helpers = null
Path = null
SchemaLoader = null
XRegExp = null
module.exports =
provider:
name: "AIML Linter"
grammarScopes: ['text.aiml']
scope: 'file'
lintsOnChange: false
subscriptions: null
executablePath: ''
internalPath: "resources/xaiml1.0.0.xsd"
builtinSchemaPath: null
initialize: ->
require('atom-package-deps').install('xaiml-editor')
Helpers ?= require 'atom-linter'
Path ?= require "path"
@builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
{CompositeDisposable} = require 'atom'
@subscriptions ?= new CompositeDisposable()
@subscriptions.add atom.commands.add 'atom-workspace',
'xaiml-editor:executeLinter': => @executeLinter()
@subscriptions.add atom.config.observe 'xaiml-editor.executablePath',
(executablePath) => @executablePath = executablePath
@subscriptions.add atom.config.observe 'xaiml-editor.lintOnFly',
(lintOnFly) => @provider.lintsOnChange = lintOnFly
@subscriptions.add atom.config.observe 'xaiml-editor.suggestOption.builtinSchemaVersion',
(version) =>
@internalPath = "resources/#{version}.xsd"
@builtinSchemaPath = Path.resolve(__dirname, '..', @internalPath)
return @
dispose: ->
@subscriptions?.dispose()
getProvider: ->
@provider.lint = (textEditor) =>
return @lintEditor textEditor
return @provider
executeLinter: (textEditor = null) ->
Util ?= require './util'
textEditor ?= atom.workspace.getActiveTextEditor()
return unless Util.isAimlEditor(textEditor)
view = atom.views.getView(textEditor)
atom.commands.dispatch(view, "linter:lint")
lintEditor: (textEditor) ->
SchemaLoader ?= require "./schema-loader"
if SchemaLoader.isDownload()
schemaPath = SchemaLoader.schemaPath
else
schemaPath = @builtinSchemaPath
return @checkWellFormed textEditor
.then (messages) =>
if messages.length
return messages
return @checkValid(textEditor, schemaPath)
.then (messages) ->
return messages
# Runs the external tool with `--noout -`, feeding the buffer on stdin and
# reading diagnostics from stderr; empty stderr means well-formed.
# FIX: the uniqueKey literal had been destroyed by an automated redaction
# pass ("aimPI:KEY:…END_PI"); restored to "aiml-linter:<path>" to match the
# intact copy of this file.
checkWellFormed: (textEditor) ->
  params = ['--noout', '-']
  options = {
    stdin: textEditor.getText()
    stream: 'stderr'
    allowEmptyStderr: true
  }
  return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
  .then (output) =>
    messages = @parseMessages(textEditor, output)
    for message in messages
      # stdin input reports "-" as the file; pin messages to the real path.
      message.location.file = textEditor.getPath()
    return messages
checkValid: (textEditor, schemaPath) ->
return @validateSchema(textEditor, '--schema', schemaPath)
# Validates the buffer against schemaPath; on success the tool prints
# "... - validates", anything else is surfaced as an error.
# FIX: the uniqueKey literal had been destroyed by an automated redaction
# pass ("PI:KEY:…END_PI"); restored to "aiml-linter:<path>" to match the
# intact copy of this file.
validateSchema: (textEditor, argSchemaType, schemaPath) ->
  params = []
  params.push('--noout')
  params = params.concat([argSchemaType, schemaPath, '-'])
  options = {
    cwd: Path.dirname(textEditor.getPath())
    stdin: textEditor.getText()
    stream: 'stderr'
  }
  if @builtinSchemaPath is schemaPath
    msgSchemaUrl = "Builtin Schema"
  else
    msgSchemaUrl = schemaPath
  return Helpers.exec(@executablePath, params, options, {uniqueKey: "aiml-linter:#{textEditor.getPath()}"})
  .then (output) =>
    messages = @parseSchemaMessages(textEditor, output)
    if messages.length
      for message in messages
        message.severity = 'error'
        message.excerpt = message.excerpt + ' (' + msgSchemaUrl + ')'
        message.location.file = textEditor.getPath()
    else if output.indexOf('- validates') is -1
      # No parseable diagnostics and no success marker: report raw output.
      messages.push({
        severity: 'error'
        excerpt: output
        location: {
          file: textEditor.getPath()
          position: Helpers.generateRange(textEditor, 0, 0)
        }
      })
    return messages
parseMessages: (textEditor, output) ->
XRegExp ?= require('xregexp')
messages = []
regex = XRegExp(
'^(?<file>.+):' +
'(?<line>\\d+): ' +
'(?<severity>.+) : ' +
'(?<message>.+)' +
'(' +
'\\r?\\n' +
'(?<source_line>.*)\\r?\\n' +
'(?<marker>.*)\\^' +
')?' +
'$', 'm')
XRegExp.forEach output, regex, (match, i) ->
line = parseInt(match.line) - 1
column = if match.marker then match.marker.length else 0
messages.push({
severity: 'error'
excerpt: match.message
location: {
file: match.file
position: Helpers.generateRange(textEditor, line, column)
}
})
return messages
parseSchemaMessages: (textEditor, output) ->
regex = '(?<file>.+):(?<line>\\d+): .*: .* : (?<message>.+)'
messages = Helpers.parse(output, regex)
for message in messages
message.severity = message.type
delete message.type
message.excerpt = message.text
delete message.text
message.location = {
file: message.filePath
}
delete message.filePath
message.location.position = Helpers.generateRange(textEditor, message.range[0][0])
delete message.range
return messages
|
[
{
"context": " provId = values.pop()\n params[@key] = (\"G#{v}-#{provId}\" for v in values)\n\n class CwicGranuleIdLi",
"end": 2780,
"score": 0.7979442477226257,
"start": 2772,
"tag": "KEY",
"value": "G#{v}-#{"
},
{
"context": "values.pop()\n params[@key] = (\"G#{v}-... | app/assets/javascripts/util/url.js.coffee | johnmelodyme/earthdata-search | 1 | #= require util/xhr
this.edsc.util.url = do(window
document
History
extend = jQuery.extend
param = jQuery.param
deparam = @edsc.util.deparam
murmurhash3 = @edsc.util.murmurhash3
config = @edsc.config
ajax = @edsc.util.xhr.ajax
) ->
# Renames a query parameter between its long form (@from) and its short URL
# form (@to). Re-indented: significant whitespace had been stripped.
class ParamNameCompressor
  constructor: (@from, @to) ->
  # Moves params[from] to params[to]; no-op when the names match or the
  # value is null/undefined (the key is still removed in that case).
  swap: (params, from, to) ->
    return if from == to
    value = params[from]
    delete params[from]
    params[to] = value if value?
  compress: (params) -> @swap(params, @from, @to)
  inflate: (params) -> @swap(params, @to, @from)
# Like ParamNameCompressor, but also joins array values with '!' so they fit
# a single query parameter. Re-indented: significant whitespace was stripped.
class ArrayJoiner extends ParamNameCompressor
  compress: (params) ->
    value = params[@from]
    params[@from] = value.join('!') if value?
    super(params)
  inflate: (params) ->
    super(params)
    value = params[@from]
    # A single all-digit segment deparams as a Number; normalize for split().
    value = value.toString() if typeof value == 'number'
    params[@from] = value.split('!') if value?

# Hoists a nested value (at @path) up to a top-level short parameter @to,
# optionally treating it as a joinable array (@isArray).
class ParamFlattener extends ArrayJoiner
  constructor: (@path, @to, @isArray) ->
    @from = @to
  compress: (params) ->
    path = @path
    parent = params
    for key in path[...-1]
      parent = parent[key]
      # Guard inside the loop: a missing intermediate means nothing to hoist.
      return unless parent?
    value = parent[path[path.length - 1]]
    delete parent[path[path.length - 1]]
    params[@to] = value if value?
    super(params) if @isArray
  inflate: (params) ->
    return unless params[@to]
    super(params) if @isArray
    path = @path
    parent = params
    for key in path[...-1]
      parent[key] ?= {}
      parent = parent[key]
    parent[path[path.length - 1]] = params[@to]
    delete(params[@to])
# Applies a wrapped compressor to each non-null element of the array stored
# at params[@key]. Re-indented: significant whitespace had been stripped.
class ChildCompressor
  constructor: (@key, @compressor) ->
  eachChild: (params, method) ->
    children = params[@key]
    compressor = @compressor
    if children? && Array.isArray(children)
      compressor[method](child) for child in children when child?
    # Explicit null keeps CoffeeScript from accumulating a comprehension result.
    null
  compress: (params) -> @eachChild(params, 'compress')
  inflate: (params) -> @eachChild(params, 'inflate')
# Specific compression for CMR granule ids: a list like
# ["G1000-PROV", "G2000-PROV"] becomes "1000!2000!PROV" — the leading "G" and
# the shared provider suffix are stored once. Assumes all ids share one
# provider (true for a single collection's exclusion list).
class CmrGranuleIdListCompressor
  constructor: (@key) ->
  compress: (params) ->
    values = params[@key]
    if values && values.length > 0
      provId = values[0].split('-')[1]
      compressedValues = (v.split('-')[0][1...] for v in values)
      compressedValues.push(provId)
      params[@key] = compressedValues.join('!')
  inflate: (params) ->
    value = params[@key]
    if value
      values = value.split('!')
      provId = values.pop()
      params[@key] = ("G#{v}-#{provId}" for v in values)
# CWIC granule ids are arbitrary strings; non-numeric ids are hashed with
# murmurhash3 so they fit compactly in the URL (lossy — inflate returns the
# hashes, which is all the exclusion filter needs to match against).
class CwicGranuleIdListCompressor
  constructor: (@key) ->
  compress: (params) ->
    values = params[@key]
    compressedValues = []
    if values && values.length > 0
      values.map (v) -> if v.match(/^[0-9]+$/) then compressedValues.push(v) else compressedValues.push(murmurhash3(v))
      params[@key] = compressedValues.join('!')
  inflate: (params) ->
    value = params[@key]
    # A single numeric hash deparams as a Number; normalize before split().
    value = value.toString() if value?.constructor == Number
    params[@key] = value?.split('!')
# The order here matters: inflate applies these in reverse, so later entries
# must not depend on keys an earlier entry has already renamed away.
compressors = [
  new ParamNameCompressor('placename', 'qp')
  new ParamNameCompressor('temporal', 'qt')
  new ParamNameCompressor('override_temporal', 'ot')
  new ParamNameCompressor('free_text', 'q')
  new ParamNameCompressor('original_keyword', 'ok')
  new ParamNameCompressor('point', 'sp')
  new ParamNameCompressor('bounding_box', 'sb')
  new ParamNameCompressor('line', 'sl')
  new ParamNameCompressor('line', 'sg')
  new ParamNameCompressor('all_collections', 'ac')
  new ParamFlattener(['two_d_coordinate_system', 'name'], 's2n')
  new ParamFlattener(['two_d_coordinate_system', 'coordinates'], 's2c')
  new ArrayJoiner('features', 'ff')
  new ArrayJoiner('data_center_h', 'fdc')
  new ArrayJoiner('project_h', 'fpj')
  new ArrayJoiner('platform_h', 'fp')
  new ArrayJoiner('instrument_h', 'fi')
  # new ArrayJoiner('sensor_h', 'fs')
  new ArrayJoiner('processing_level_id_h', 'fl')
  new ChildCompressor('pg', new ParamNameCompressor('temporal', 'qt'))
  new ChildCompressor('pg', new ParamNameCompressor('day_night_flag', 'dnf'))
  new ChildCompressor('pg', new ParamNameCompressor('browse_only', 'bo'))
  new ChildCompressor('pg', new ParamNameCompressor('online_only', 'oo'))
  new ChildCompressor('pg', new ParamNameCompressor('cloud_cover', 'cc'))
  new ChildCompressor('pg', new ParamNameCompressor('orbit_number', 'on'))
  new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_longitude', 'ecl'))
  new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_date', 'ecd'))
  new ChildCompressor('pg', new ParamNameCompressor('variables', 'uv'))
  new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'id'))
  new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'ur'))
  new ChildCompressor('pg', new ParamFlattener(['exclude', 'echo_granule_id'], 'x'))
  new ChildCompressor('pg', new ParamFlattener(['exclude', 'cwic_granule_id'], 'cx'))
  new ChildCompressor('pg', new CmrGranuleIdListCompressor('x'))
  new ChildCompressor('pg', new CwicGranuleIdListCompressor('cx'))
]

# new ParamFlattener(['science_keywords_h', '0', 'category'], 'fsc', false)
# Science-keyword facets are flattened per index (up to 30 keyword groups).
keywords = []
for index in [0...30]
  keywords.push new ParamFlattener(['science_keywords_h', index, 'topic'], "fst#{index}", false)
  keywords.push new ParamFlattener(['science_keywords_h', index, 'term'], "fsm#{index}", false)
  keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_1'], "fs1#{index}", false)
  keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_2'], "fs2#{index}", false)
  keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_3'], "fs3#{index}", false)
  keywords.push new ParamFlattener(['science_keywords_h', index, 'detailed_variable'], "fsd#{index}", false)
compressors = compressors.concat keywords
# NOTE(review): significant indentation was lost in this region, so the
# nesting of `_removeNullScienceKeywords` (inside `alter` vs. module scope)
# cannot be determined here; compress/inflate both call it, which suggests it
# is module-scoped — confirm against VCS history. Code left byte-identical.
alter = (params, method) ->
# Drops null holes left in the sparse science_keywords_h array by deparam.
_removeNullScienceKeywords = (params) ->
if params['science_keywords_h']
(tmp or tmp = []).push sk for sk in params['science_keywords_h'] when sk
params['science_keywords_h'] = tmp
# Deep-copies params and applies every compressor in order (long -> short).
compress = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressor.compress(params) for compressor in compressors
params
# Deep-copies params and applies compressors in REVERSE (short -> long).
inflate = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressors[i].inflate(params) for i in [compressors.length-1..0]
params
# Current path+query from History, host stripped, with %5B/%5D decoded back
# to brackets. Re-indented: significant whitespace had been stripped.
realPath = ->
  # Remove everything up to the third slash
  History.getState().cleanUrl.replace(/^[^\/]*\/\/[^\/]*/, '').replace(/%5B/g, '[').replace(/%5D/g, ']')

realQuery = ->
  realPath().split('?')[1] ? ''

# Project id from either a ?projectId= query param or a /projects/<id> path.
projectId = ->
  return deparam(realQuery()).projectId if realQuery() != ''
  realPath().match(/\/projects\/(\d+)$/)?[1]

# Cache of the last saved/fetched project (path, id, workspace name).
savedPath = null
savedId = null
savedName = null

getProjectName = ->
  savedName
# Normalizes a path for the current portal: strips any embedded /portal/<name>
# prefix and portal= params, then re-applies the portal prefix taken from
# window.location. Re-indented: significant whitespace had been stripped.
fullPath = (path) ->
  return '' unless path?
  path = path.replace(/^\/portal\/[\w]+/, '')
  path = path.replace(/([?&])portal=[^&]*&?/g, '$1')
  path = path.replace(/\?$/, '')
  portalPrefix = window.location.pathname.match(/^\/?portal\/[\w]+/)?[0] || ''
  portalPrefix = '/' + portalPrefix if portalPrefix.length > 0 && portalPrefix.indexOf('/') != 0
  "#{portalPrefix}#{path}".replace(/\/\//g, '/')
# Loads a saved project by id (GET /projects/<id>.json), restores its path
# into History and the saved* cache, then fires 'edsc.pagechange'.
# NOTE(review): indentation was stripped; the success-callback body's exact
# nesting is reconstructed mentally but the code is left byte-identical.
fetchId = (id, params) ->
return if savedId == id
console.log "Fetching project #{id}"
savedId = id
ajax
method: 'get'
dataType: 'json'
url: "/projects/#{id}.json"
success: (data) ->
# Extra live query params are appended onto the stored project path.
if params.length > 0
prefix = '&'
prefix = '?' if data.path.indexOf('?') == -1
data.path += prefix + params
if data.new_id?
savedId = data.new_id
History.pushState('', '', "/#{data.path.split('?')[0]}?projectId=#{savedId}")
# user_id == -1 marks an anonymous project: show its raw path instead.
if data.user_id? && data.user_id == -1
History.pushState('', '', data.path)
savedPath = data.path
savedName = data.name
console.log "Fetched project #{id}"
console.log "Path: #{data.path}"
console.log "Project Name: #{data.name}"
$(window).trigger('edsc.pagechange')
# Persists a long URL as a server-side project (POST /projects) and rewrites
# the address bar to the short /projects/<id> or ?projectId=<id> form.
# NOTE(review): indentation was stripped; code left byte-identical.
shortenPath = (path, state, workspaceName = null) ->
id = savedId ? ''
savedPath = path
console.log "Saving project #{id}"
console.log "Path: #{path}"
console.log "Workspace Name: #{workspaceName}"
data = {path: fullPath(path), workspace_name: workspaceName}
ajax
method: 'post'
dataType: 'text'
url: "/projects?id=#{id}"
data: data
success: (data) ->
console.log "Saved project #{id}"
console.log "Path: #{path}"
# Response body is the (possibly new) project id as plain text.
savedId = data
if path.split('?')[0].match(/\/projects\/\d+/)
History.pushState(state, document.title, fullPath("/projects/#{savedId}"))
else
History.pushState(state, document.title, fullPath("/#{path.split('?')[0]}?projectId=#{savedId}"))
# Only an explicit "save workspace" action announces completion.
$(document).trigger('edsc.saved') if workspaceName?
# Resolves the address-bar path to the "real" application path: project
# references (?projectId= or /projects/<id>) are expanded via the saved cache
# or an async fetchId (which returns undefined until the fetch completes).
# NOTE(review): indentation was stripped; code left byte-identical.
cleanPathWithPortal = ->
path = realPath()
if path.indexOf("projectId=") != -1
params = deparam(path.split('?')[1])
id = params.projectId + ''
delete params.projectId
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, param(params))
else if path.match(/\/projects\/(\d+)\??.*/)
id = path.match(/\/projects\/(\d+)\??.*/)[1]
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, [])
else
result = path
result = result.replace(/^\/#/, '/') if result? # IE 9 bug with URL hashes
result
# Same as above but with the /portal/<name> prefix removed.
cleanPath = ->
path = cleanPathWithPortal()
path.replace(/^\/portal\/[\w]+/, '') if path
# Pushes a new path onto History, preserving the current query string.
# Re-indented: significant whitespace had been stripped.
pushPath = (path, title=document.title, data=null) ->
  clean = cleanPath()
  if clean?
    # Replace everything before the first ?
    path = cleanPath().replace(/^[^\?]*/, path)
    History.pushState(data, title, fullPath(path))
# Serializes app state into the URL (compressing param names first). When the
# result exceeds config.urlLimit, or an explicit workspace save is requested,
# the URL is persisted server-side via shortenPath instead. Map (m=) and
# timeline (tl=) params are ignored when deciding whether anything changed.
# NOTE(review): indentation was stripped; the guard/early-return nesting is
# non-trivial, so code is left byte-identical.
saveState = (path, state, push = false, workspaceName = null) ->
paramStr = param(compress(state)).replace(/%5B/g, '[').replace(/%5D/g, ']')
paramStr = '?' + paramStr if paramStr.length > 0
regex = /([?&])(m=[^&]*&?)|(tl=[^&]*&?)/g
tempNewParams = paramStr.replace(regex, '$1')
tempOldParams = "?#{realQuery()}".replace(regex, '$1')
tempNewParams = '?' if tempNewParams.length == 0 && tempOldParams == '?'
if realPath().split('?')[0] != path || tempOldParams != tempNewParams
$(document).trigger('pageview', [path, state])
path = path.replace(/^\/#/, '/') # IE 9 bug with URL hashes
path = path + paramStr
# Avoid shortening urls when cmr_env is set
isTooLong = path.length > config.urlLimit && path.indexOf('cmr_env=') == -1
if workspaceName || isTooLong
if path != savedPath || (workspaceName && savedName != workspaceName)
# assign a guid
shortenPath(path, state, workspaceName)
return
if cleanPath() && cleanPath() != path
savedPath = path
savedId = null
if push
History.pushState(state, document.title, fullPath(path))
else
History.replaceState(state, document.title, fullPath(path))
true
else
false
# Raise a new event to avoid getting a statechange event when we ourselves change the state
# (savedPath tracks our own pushes; only externally-driven changes re-trigger).
$(window).on 'statechange anchorchange', ->
if cleanPath() != savedPath
$(window).trigger('edsc.pagechange')
savedPath = cleanPath()
# Query string for the current clean path, with the portal name folded back in
# as a portal= parameter. Re-indented: significant whitespace was stripped.
currentQuery = ->
  path = cleanPathWithPortal()?.split('?')
  return '' unless path?
  portal = path[0].match(/^\/portal\/([\w]+)/)?[1]
  result = path[1] ? ''
  if portal
    portalParam = "portal=#{portal}"
    portalParam = "&#{portalParam}" if result.length > 0
    result += portalParam
  result

# Fully inflated (long-form) state object for the current URL.
currentParams = ->
  inflate(deparam(currentQuery()))

exports =
  getProjectName: getProjectName
  pushPath: pushPath
  saveState: saveState
  realQuery: realQuery
  projectId: projectId
  cleanPath: cleanPath
  currentParams: currentParams
  currentQuery: currentQuery
  fullPath: fullPath
| 86901 | #= require util/xhr
this.edsc.util.url = do(window
document
History
extend = jQuery.extend
param = jQuery.param
deparam = @edsc.util.deparam
murmurhash3 = @edsc.util.murmurhash3
config = @edsc.config
ajax = @edsc.util.xhr.ajax
) ->
class ParamNameCompressor
constructor: (@from, @to) ->
swap: (params, from, to) ->
return if from == to
value = params[from]
delete params[from]
params[to] = value if value?
compress: (params) -> @swap(params, @from, @to)
inflate: (params) -> @swap(params, @to, @from)
class ArrayJoiner extends ParamNameCompressor
compress: (params) ->
value = params[@from]
params[@from] = value.join('!') if value?
super(params)
inflate: (params) ->
super(params)
value = params[@from]
value = value.toString() if typeof value == 'number'
params[@from] = value.split('!') if value?
class ParamFlattener extends ArrayJoiner
constructor: (@path, @to, @isArray) ->
@from = @to
compress: (params) ->
path = @path
parent = params
for key in path[...-1]
parent = parent[key]
return unless parent?
value = parent[path[path.length - 1]]
delete parent[path[path.length - 1]]
params[@to] = value if value?
super(params) if @isArray
inflate: (params) ->
return unless params[@to]
super(params) if @isArray
path = @path
parent = params
for key in path[...-1]
parent[key] ?= {}
parent = parent[key]
parent[path[path.length - 1]] = params[@to]
delete(params[@to])
class ChildCompressor
constructor: (@key, @compressor) ->
eachChild: (params, method) ->
children = params[@key]
compressor = @compressor
if children? && Array.isArray(children)
compressor[method](child) for child in children when child?
null
compress: (params) -> @eachChild(params, 'compress')
inflate: (params) -> @eachChild(params, 'inflate')
# Specific compression for granule ids
# CMR granule id list compression: ["G1000-PROV", "G2000-PROV"] <-> "1000!2000!PROV".
# FIX: the inflate comprehension had been destroyed by an automated redaction
# pass ("<KEY>prov<KEY>"); restored from the intact copy of this file:
# ("G#{v}-#{provId}" for v in values).
class CmrGranuleIdListCompressor
  constructor: (@key) ->
  compress: (params) ->
    values = params[@key]
    if values && values.length > 0
      provId = values[0].split('-')[1]
      compressedValues = (v.split('-')[0][1...] for v in values)
      compressedValues.push(provId)
      params[@key] = compressedValues.join('!')
  inflate: (params) ->
    value = params[@key]
    if value
      values = value.split('!')
      provId = values.pop()
      params[@key] = ("G#{v}-#{provId}" for v in values)
class CwicGranuleIdListCompressor
constructor: (@key) ->
compress: (params) ->
values = params[@key]
compressedValues = []
if values && values.length > 0
values.map (v) -> if v.match(/^[0-9]+$/) then compressedValues.push(v) else compressedValues.push(murmurhash3(v))
params[@key] = compressedValues.join('!')
inflate: (params) ->
value = params[@key]
value = value.toString() if value?.constructor == Number
params[@key] = value?.split('!')
# The order here matters
compressors = [
new ParamNameCompressor('placename', 'qp')
new ParamNameCompressor('temporal', 'qt')
new ParamNameCompressor('override_temporal', 'ot')
new ParamNameCompressor('free_text', 'q')
new ParamNameCompressor('original_keyword', 'ok')
new ParamNameCompressor('point', 'sp')
new ParamNameCompressor('bounding_box', 'sb')
new ParamNameCompressor('line', 'sl')
new ParamNameCompressor('line', 'sg')
new ParamNameCompressor('all_collections', 'ac')
new ParamFlattener(['two_d_coordinate_system', 'name'], 's2n')
new ParamFlattener(['two_d_coordinate_system', 'coordinates'], 's2c')
new ArrayJoiner('features', 'ff')
new ArrayJoiner('data_center_h', 'fdc')
new ArrayJoiner('project_h', 'fpj')
new ArrayJoiner('platform_h', 'fp')
new ArrayJoiner('instrument_h', 'fi')
# new ArrayJoiner('sensor_h', 'fs')
new ArrayJoiner('processing_level_id_h', 'fl')
new ChildCompressor('pg', new ParamNameCompressor('temporal', 'qt'))
new ChildCompressor('pg', new ParamNameCompressor('day_night_flag', 'dnf'))
new ChildCompressor('pg', new ParamNameCompressor('browse_only', 'bo'))
new ChildCompressor('pg', new ParamNameCompressor('online_only', 'oo'))
new ChildCompressor('pg', new ParamNameCompressor('cloud_cover', 'cc'))
new ChildCompressor('pg', new ParamNameCompressor('orbit_number', 'on'))
new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_longitude', 'ecl'))
new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_date', 'ecd'))
new ChildCompressor('pg', new ParamNameCompressor('variables', 'uv'))
new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'id'))
new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'ur'))
new ChildCompressor('pg', new ParamFlattener(['exclude', 'echo_granule_id'], 'x'))
new ChildCompressor('pg', new ParamFlattener(['exclude', 'cwic_granule_id'], 'cx'))
new ChildCompressor('pg', new CmrGranuleIdListCompressor('x'))
new ChildCompressor('pg', new CwicGranuleIdListCompressor('cx'))
]
# new ParamFlattener(['science_keywords_h', '0', 'category'], 'fsc', false)
keywords = []
for index in [0...30]
keywords.push new ParamFlattener(['science_keywords_h', index, 'topic'], "fst#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'term'], "fsm#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_1'], "fs1#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_2'], "fs2#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_3'], "fs3#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'detailed_variable'], "fsd#{index}", false)
compressors = compressors.concat keywords
alter = (params, method) ->
_removeNullScienceKeywords = (params) ->
if params['science_keywords_h']
(tmp or tmp = []).push sk for sk in params['science_keywords_h'] when sk
params['science_keywords_h'] = tmp
compress = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressor.compress(params) for compressor in compressors
params
inflate = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressors[i].inflate(params) for i in [compressors.length-1..0]
params
realPath = ->
# Remove everything up to the third slash
History.getState().cleanUrl.replace(/^[^\/]*\/\/[^\/]*/, '').replace(/%5B/g, '[').replace(/%5D/g, ']')
realQuery = ->
realPath().split('?')[1] ? ''
projectId = ->
return deparam(realQuery()).projectId if realQuery() != ''
realPath().match(/\/projects\/(\d+)$/)?[1]
savedPath = null
savedId = null
savedName = null
getProjectName = ->
savedName
fullPath = (path) ->
return '' unless path?
path = path.replace(/^\/portal\/[\w]+/, '')
path = path.replace(/([?&])portal=[^&]*&?/g, '$1')
path = path.replace(/\?$/, '')
portalPrefix = window.location.pathname.match(/^\/?portal\/[\w]+/)?[0] || ''
portalPrefix = '/' + portalPrefix if portalPrefix.length > 0 && portalPrefix.indexOf('/') != 0
"#{portalPrefix}#{path}".replace(/\/\//g, '/')
fetchId = (id, params) ->
return if savedId == id
console.log "Fetching project #{id}"
savedId = id
ajax
method: 'get'
dataType: 'json'
url: "/projects/#{id}.json"
success: (data) ->
if params.length > 0
prefix = '&'
prefix = '?' if data.path.indexOf('?') == -1
data.path += prefix + params
if data.new_id?
savedId = data.new_id
History.pushState('', '', "/#{data.path.split('?')[0]}?projectId=#{savedId}")
if data.user_id? && data.user_id == -1
History.pushState('', '', data.path)
savedPath = data.path
savedName = data.name
console.log "Fetched project #{id}"
console.log "Path: #{data.path}"
console.log "Project Name: #{data.name}"
$(window).trigger('edsc.pagechange')
shortenPath = (path, state, workspaceName = null) ->
id = savedId ? ''
savedPath = path
console.log "Saving project #{id}"
console.log "Path: #{path}"
console.log "Workspace Name: #{workspaceName}"
data = {path: fullPath(path), workspace_name: workspaceName}
ajax
method: 'post'
dataType: 'text'
url: "/projects?id=#{id}"
data: data
success: (data) ->
console.log "Saved project #{id}"
console.log "Path: #{path}"
savedId = data
if path.split('?')[0].match(/\/projects\/\d+/)
History.pushState(state, document.title, fullPath("/projects/#{savedId}"))
else
History.pushState(state, document.title, fullPath("/#{path.split('?')[0]}?projectId=#{savedId}"))
$(document).trigger('edsc.saved') if workspaceName?
cleanPathWithPortal = ->
path = realPath()
if path.indexOf("projectId=") != -1
params = deparam(path.split('?')[1])
id = params.projectId + ''
delete params.projectId
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, param(params))
else if path.match(/\/projects\/(\d+)\??.*/)
id = path.match(/\/projects\/(\d+)\??.*/)[1]
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, [])
else
result = path
result = result.replace(/^\/#/, '/') if result? # IE 9 bug with URL hashes
result
cleanPath = ->
path = cleanPathWithPortal()
path.replace(/^\/portal\/[\w]+/, '') if path
pushPath = (path, title=document.title, data=null) ->
clean = cleanPath()
if clean?
# Replace everything before the first ?
path = cleanPath().replace(/^[^\?]*/, path)
History.pushState(data, title, fullPath(path))
saveState = (path, state, push = false, workspaceName = null) ->
paramStr = param(compress(state)).replace(/%5B/g, '[').replace(/%5D/g, ']')
paramStr = '?' + paramStr if paramStr.length > 0
regex = /([?&])(m=[^&]*&?)|(tl=[^&]*&?)/g
tempNewParams = paramStr.replace(regex, '$1')
tempOldParams = "?#{realQuery()}".replace(regex, '$1')
tempNewParams = '?' if tempNewParams.length == 0 && tempOldParams == '?'
if realPath().split('?')[0] != path || tempOldParams != tempNewParams
$(document).trigger('pageview', [path, state])
path = path.replace(/^\/#/, '/') # IE 9 bug with URL hashes
path = path + paramStr
# Avoid shortening urls when cmr_env is set
isTooLong = path.length > config.urlLimit && path.indexOf('cmr_env=') == -1
if workspaceName || isTooLong
if path != savedPath || (workspaceName && savedName != workspaceName)
# assign a guid
shortenPath(path, state, workspaceName)
return
if cleanPath() && cleanPath() != path
savedPath = path
savedId = null
if push
History.pushState(state, document.title, fullPath(path))
else
History.replaceState(state, document.title, fullPath(path))
true
else
false
# Raise a new event to avoid getting a statechange event when we ourselves change the state
$(window).on 'statechange anchorchange', ->
if cleanPath() != savedPath
$(window).trigger('edsc.pagechange')
savedPath = cleanPath()
currentQuery = ->
path = cleanPathWithPortal()?.split('?')
return '' unless path?
portal = path[0].match(/^\/portal\/([\w]+)/)?[1]
result = path[1] ? ''
if portal
portalParam = "portal=#{portal}"
portalParam = "&#{portalParam}" if result.length > 0
result += portalParam
result
currentParams = ->
inflate(deparam(currentQuery()))
exports =
getProjectName: getProjectName
pushPath: pushPath
saveState: saveState
realQuery: realQuery
projectId: projectId
cleanPath: cleanPath
currentParams: currentParams
currentQuery: currentQuery
fullPath: fullPath
| true | #= require util/xhr
this.edsc.util.url = do(window
document
History
extend = jQuery.extend
param = jQuery.param
deparam = @edsc.util.deparam
murmurhash3 = @edsc.util.murmurhash3
config = @edsc.config
ajax = @edsc.util.xhr.ajax
) ->
class ParamNameCompressor
constructor: (@from, @to) ->
swap: (params, from, to) ->
return if from == to
value = params[from]
delete params[from]
params[to] = value if value?
compress: (params) -> @swap(params, @from, @to)
inflate: (params) -> @swap(params, @to, @from)
class ArrayJoiner extends ParamNameCompressor
compress: (params) ->
value = params[@from]
params[@from] = value.join('!') if value?
super(params)
inflate: (params) ->
super(params)
value = params[@from]
value = value.toString() if typeof value == 'number'
params[@from] = value.split('!') if value?
class ParamFlattener extends ArrayJoiner
constructor: (@path, @to, @isArray) ->
@from = @to
compress: (params) ->
path = @path
parent = params
for key in path[...-1]
parent = parent[key]
return unless parent?
value = parent[path[path.length - 1]]
delete parent[path[path.length - 1]]
params[@to] = value if value?
super(params) if @isArray
inflate: (params) ->
return unless params[@to]
super(params) if @isArray
path = @path
parent = params
for key in path[...-1]
parent[key] ?= {}
parent = parent[key]
parent[path[path.length - 1]] = params[@to]
delete(params[@to])
class ChildCompressor
constructor: (@key, @compressor) ->
eachChild: (params, method) ->
children = params[@key]
compressor = @compressor
if children? && Array.isArray(children)
compressor[method](child) for child in children when child?
null
compress: (params) -> @eachChild(params, 'compress')
inflate: (params) -> @eachChild(params, 'inflate')
# Specific compression for granule ids
class CmrGranuleIdListCompressor
constructor: (@key) ->
compress: (params) ->
values = params[@key]
if values && values.length > 0
provId = values[0].split('-')[1]
compressedValues = (v.split('-')[0][1...] for v in values)
compressedValues.push(provId)
params[@key] = compressedValues.join('!')
inflate: (params) ->
value = params[@key]
if value
values = value.split('!')
provId = values.pop()
params[@key] = ("PI:KEY:<KEY>END_PIprovPI:KEY:<KEY>END_PI for v in values)
class CwicGranuleIdListCompressor
constructor: (@key) ->
compress: (params) ->
values = params[@key]
compressedValues = []
if values && values.length > 0
values.map (v) -> if v.match(/^[0-9]+$/) then compressedValues.push(v) else compressedValues.push(murmurhash3(v))
params[@key] = compressedValues.join('!')
inflate: (params) ->
value = params[@key]
value = value.toString() if value?.constructor == Number
params[@key] = value?.split('!')
# The order here matters
compressors = [
new ParamNameCompressor('placename', 'qp')
new ParamNameCompressor('temporal', 'qt')
new ParamNameCompressor('override_temporal', 'ot')
new ParamNameCompressor('free_text', 'q')
new ParamNameCompressor('original_keyword', 'ok')
new ParamNameCompressor('point', 'sp')
new ParamNameCompressor('bounding_box', 'sb')
new ParamNameCompressor('line', 'sl')
new ParamNameCompressor('line', 'sg')
new ParamNameCompressor('all_collections', 'ac')
new ParamFlattener(['two_d_coordinate_system', 'name'], 's2n')
new ParamFlattener(['two_d_coordinate_system', 'coordinates'], 's2c')
new ArrayJoiner('features', 'ff')
new ArrayJoiner('data_center_h', 'fdc')
new ArrayJoiner('project_h', 'fpj')
new ArrayJoiner('platform_h', 'fp')
new ArrayJoiner('instrument_h', 'fi')
# new ArrayJoiner('sensor_h', 'fs')
new ArrayJoiner('processing_level_id_h', 'fl')
new ChildCompressor('pg', new ParamNameCompressor('temporal', 'qt'))
new ChildCompressor('pg', new ParamNameCompressor('day_night_flag', 'dnf'))
new ChildCompressor('pg', new ParamNameCompressor('browse_only', 'bo'))
new ChildCompressor('pg', new ParamNameCompressor('online_only', 'oo'))
new ChildCompressor('pg', new ParamNameCompressor('cloud_cover', 'cc'))
new ChildCompressor('pg', new ParamNameCompressor('orbit_number', 'on'))
new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_longitude', 'ecl'))
new ChildCompressor('pg', new ParamNameCompressor('equator_crossing_date', 'ecd'))
new ChildCompressor('pg', new ParamNameCompressor('variables', 'uv'))
new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'id'))
new ChildCompressor('pg', new ArrayJoiner('readable_granule_name', 'ur'))
new ChildCompressor('pg', new ParamFlattener(['exclude', 'echo_granule_id'], 'x'))
new ChildCompressor('pg', new ParamFlattener(['exclude', 'cwic_granule_id'], 'cx'))
new ChildCompressor('pg', new CmrGranuleIdListCompressor('x'))
new ChildCompressor('pg', new CwicGranuleIdListCompressor('cx'))
]
# new ParamFlattener(['science_keywords_h', '0', 'category'], 'fsc', false)
keywords = []
for index in [0...30]
keywords.push new ParamFlattener(['science_keywords_h', index, 'topic'], "fst#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'term'], "fsm#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_1'], "fs1#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_2'], "fs2#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'variable_level_3'], "fs3#{index}", false)
keywords.push new ParamFlattener(['science_keywords_h', index, 'detailed_variable'], "fsd#{index}", false)
compressors = compressors.concat keywords
alter = (params, method) ->
_removeNullScienceKeywords = (params) ->
if params['science_keywords_h']
(tmp or tmp = []).push sk for sk in params['science_keywords_h'] when sk
params['science_keywords_h'] = tmp
compress = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressor.compress(params) for compressor in compressors
params
inflate = (params) ->
params = extend(true, {}, params)
_removeNullScienceKeywords params
compressors[i].inflate(params) for i in [compressors.length-1..0]
params
realPath = ->
# Remove everything up to the third slash
History.getState().cleanUrl.replace(/^[^\/]*\/\/[^\/]*/, '').replace(/%5B/g, '[').replace(/%5D/g, ']')
realQuery = ->
realPath().split('?')[1] ? ''
projectId = ->
return deparam(realQuery()).projectId if realQuery() != ''
realPath().match(/\/projects\/(\d+)$/)?[1]
savedPath = null
savedId = null
savedName = null
getProjectName = ->
savedName
fullPath = (path) ->
return '' unless path?
path = path.replace(/^\/portal\/[\w]+/, '')
path = path.replace(/([?&])portal=[^&]*&?/g, '$1')
path = path.replace(/\?$/, '')
portalPrefix = window.location.pathname.match(/^\/?portal\/[\w]+/)?[0] || ''
portalPrefix = '/' + portalPrefix if portalPrefix.length > 0 && portalPrefix.indexOf('/') != 0
"#{portalPrefix}#{path}".replace(/\/\//g, '/')
fetchId = (id, params) ->
return if savedId == id
console.log "Fetching project #{id}"
savedId = id
ajax
method: 'get'
dataType: 'json'
url: "/projects/#{id}.json"
success: (data) ->
if params.length > 0
prefix = '&'
prefix = '?' if data.path.indexOf('?') == -1
data.path += prefix + params
if data.new_id?
savedId = data.new_id
History.pushState('', '', "/#{data.path.split('?')[0]}?projectId=#{savedId}")
if data.user_id? && data.user_id == -1
History.pushState('', '', data.path)
savedPath = data.path
savedName = data.name
console.log "Fetched project #{id}"
console.log "Path: #{data.path}"
console.log "Project Name: #{data.name}"
$(window).trigger('edsc.pagechange')
shortenPath = (path, state, workspaceName = null) ->
id = savedId ? ''
savedPath = path
console.log "Saving project #{id}"
console.log "Path: #{path}"
console.log "Workspace Name: #{workspaceName}"
data = {path: fullPath(path), workspace_name: workspaceName}
ajax
method: 'post'
dataType: 'text'
url: "/projects?id=#{id}"
data: data
success: (data) ->
console.log "Saved project #{id}"
console.log "Path: #{path}"
savedId = data
if path.split('?')[0].match(/\/projects\/\d+/)
History.pushState(state, document.title, fullPath("/projects/#{savedId}"))
else
History.pushState(state, document.title, fullPath("/#{path.split('?')[0]}?projectId=#{savedId}"))
$(document).trigger('edsc.saved') if workspaceName?
cleanPathWithPortal = ->
path = realPath()
if path.indexOf("projectId=") != -1
params = deparam(path.split('?')[1])
id = params.projectId + ''
delete params.projectId
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, param(params))
else if path.match(/\/projects\/(\d+)\??.*/)
id = path.match(/\/projects\/(\d+)\??.*/)[1]
if savedPath? && savedId == id
result = savedPath
else
fetchId(id, [])
else
result = path
result = result.replace(/^\/#/, '/') if result? # IE 9 bug with URL hashes
result
cleanPath = ->
path = cleanPathWithPortal()
path.replace(/^\/portal\/[\w]+/, '') if path
pushPath = (path, title=document.title, data=null) ->
clean = cleanPath()
if clean?
# Replace everything before the first ?
path = cleanPath().replace(/^[^\?]*/, path)
History.pushState(data, title, fullPath(path))
saveState = (path, state, push = false, workspaceName = null) ->
paramStr = param(compress(state)).replace(/%5B/g, '[').replace(/%5D/g, ']')
paramStr = '?' + paramStr if paramStr.length > 0
regex = /([?&])(m=[^&]*&?)|(tl=[^&]*&?)/g
tempNewParams = paramStr.replace(regex, '$1')
tempOldParams = "?#{realQuery()}".replace(regex, '$1')
tempNewParams = '?' if tempNewParams.length == 0 && tempOldParams == '?'
if realPath().split('?')[0] != path || tempOldParams != tempNewParams
$(document).trigger('pageview', [path, state])
path = path.replace(/^\/#/, '/') # IE 9 bug with URL hashes
path = path + paramStr
# Avoid shortening urls when cmr_env is set
isTooLong = path.length > config.urlLimit && path.indexOf('cmr_env=') == -1
if workspaceName || isTooLong
if path != savedPath || (workspaceName && savedName != workspaceName)
# assign a guid
shortenPath(path, state, workspaceName)
return
if cleanPath() && cleanPath() != path
savedPath = path
savedId = null
if push
History.pushState(state, document.title, fullPath(path))
else
History.replaceState(state, document.title, fullPath(path))
true
else
false
# Raise a new event to avoid getting a statechange event when we ourselves change the state
$(window).on 'statechange anchorchange', ->
if cleanPath() != savedPath
$(window).trigger('edsc.pagechange')
savedPath = cleanPath()
currentQuery = ->
path = cleanPathWithPortal()?.split('?')
return '' unless path?
portal = path[0].match(/^\/portal\/([\w]+)/)?[1]
result = path[1] ? ''
if portal
portalParam = "portal=#{portal}"
portalParam = "&#{portalParam}" if result.length > 0
result += portalParam
result
currentParams = ->
inflate(deparam(currentQuery()))
exports =
getProjectName: getProjectName
pushPath: pushPath
saveState: saveState
realQuery: realQuery
projectId: projectId
cleanPath: cleanPath
currentParams: currentParams
currentQuery: currentQuery
fullPath: fullPath
|
[
{
"context": "__/_/_/|_|\\\\___/\n\n By Chris Reuter\n\n Age 12\n\n\n\n ",
"end": 1403,
"score": 0.9988706707954407,
"start": 1391,
"tag": "NAME",
"value": "Chris Reuter"
},
{
"context": "__/_/_/|_|\\\\___/\n\n ... | screens.coffee | suetanvil/exobike | 1 |
class FixedScreens
constructor: (pre) ->
NUMLINES = 21
@show = (doc, noIndent = false) =>
lines = doc.split("\n")
lines = lines[0..NUMLINES-1] if lines.length > NUMLINES
lines.push "" while lines.length < NUMLINES
lines = lines.map( (l) -> " " + l) unless noIndent
pre.textContent = lines.join("\n")
compositeLine = (fg, bg) ->
result = ''
for n in [0 .. Math.max(fg.length, bg.length)]
fc = fg[n] || ' '
bc = bg[n] || ' '
nc = bc
if fc == '.'
nc = ' '
else if fc != ' '
nc = fc
result += nc
return result
@superimpose = (doc) =>
bglines = pre.textContent.split("\n")
fglines = doc.split("\n")
newText = []
for i in [0 .. Math.max(bglines.length, fglines.length) - 1]
newline = compositeLine(fglines[i] || "", bglines[i] || "")
newText.push(newline)
@show(newText.join("\n"), true)
showTitle: ->
# ASCII art: http://patorjk.com/software/taag/
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By Chris Reuter
Age 12
*** ONE KEYSTROKE ONE PLAY ***
0 O
+|
O
''')
showTitleAlt: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By Chris Reuter
Age 12
0 O
`y
O
''')
showInstructions: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
A, D -> left and right
S -> wheelie
* puddles slow you down
/=/ ramps make you jump IF
DOING A WHEELIE. Otherwise,
they make you CRASH
Jumping speeds you up; wheelies
slow you down a bit.
''')
showCrashed: ->
crashed = '''
______ __ ________
/.____/________.______/./_..___..____/././././
/./.../.___/.__.`/.___/.__.\\/._.\\/.__.././././
/./___/./.././_/.(__..)./././..__/./_/./_/_/_/
\\____/_/...\\__,_/____/_/./_/\\___/\\__,_(_|_|_)
'''
# Put back the leading spacess coffeescript removes:
crashed = crashed.split("\n").map( (l) -> " " + l).join("\n")
@superimpose(crashed)
showWin: (time) ->
win = """
_______ _ __ __
__/|___/|___/|_ / ____(_)___ (_)____/ /_ ___ ____/ / __/|___/|___/|_
| / / / / /_ / / __ \\/ / ___/ __ \\/ _ \\/ __ / | / / /
/_ __/_ __/_ __| / __/ / / / / / (__ ) / / / __/ /_/ / /_ __/_ __/_ __|
|/ |/ |/ /_/ /_/_/ /_/_/____/_/ /_/\\___/\\__,_/ |/ |/ |/
Your time: #{time}
"""
@show(win)
# _______ _ _ _____ ______ _____ _ _ _______
# |______ \___/ | | |_____] | |____/ |______
# |______ _/ \_ |_____| |_____] __|__ | \_ |______
| 109721 |
class FixedScreens
constructor: (pre) ->
NUMLINES = 21
@show = (doc, noIndent = false) =>
lines = doc.split("\n")
lines = lines[0..NUMLINES-1] if lines.length > NUMLINES
lines.push "" while lines.length < NUMLINES
lines = lines.map( (l) -> " " + l) unless noIndent
pre.textContent = lines.join("\n")
compositeLine = (fg, bg) ->
result = ''
for n in [0 .. Math.max(fg.length, bg.length)]
fc = fg[n] || ' '
bc = bg[n] || ' '
nc = bc
if fc == '.'
nc = ' '
else if fc != ' '
nc = fc
result += nc
return result
@superimpose = (doc) =>
bglines = pre.textContent.split("\n")
fglines = doc.split("\n")
newText = []
for i in [0 .. Math.max(bglines.length, fglines.length) - 1]
newline = compositeLine(fglines[i] || "", bglines[i] || "")
newText.push(newline)
@show(newText.join("\n"), true)
showTitle: ->
# ASCII art: http://patorjk.com/software/taag/
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By <NAME>
Age 12
*** ONE KEYSTROKE ONE PLAY ***
0 O
+|
O
''')
showTitleAlt: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By <NAME>
Age 12
0 O
`y
O
''')
showInstructions: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
A, D -> left and right
S -> wheelie
* puddles slow you down
/=/ ramps make you jump IF
DOING A WHEELIE. Otherwise,
they make you CRASH
Jumping speeds you up; wheelies
slow you down a bit.
''')
showCrashed: ->
crashed = '''
______ __ ________
/.____/________.______/./_..___..____/././././
/./.../.___/.__.`/.___/.__.\\/._.\\/.__.././././
/./___/./.././_/.(__..)./././..__/./_/./_/_/_/
\\____/_/...\\__,_/____/_/./_/\\___/\\__,_(_|_|_)
'''
# Put back the leading spacess coffeescript removes:
crashed = crashed.split("\n").map( (l) -> " " + l).join("\n")
@superimpose(crashed)
showWin: (time) ->
win = """
_______ _ __ __
__/|___/|___/|_ / ____(_)___ (_)____/ /_ ___ ____/ / __/|___/|___/|_
| / / / / /_ / / __ \\/ / ___/ __ \\/ _ \\/ __ / | / / /
/_ __/_ __/_ __| / __/ / / / / / (__ ) / / / __/ /_/ / /_ __/_ __/_ __|
|/ |/ |/ /_/ /_/_/ /_/_/____/_/ /_/\\___/\\__,_/ |/ |/ |/
Your time: #{time}
"""
@show(win)
# _______ _ _ _____ ______ _____ _ _ _______
# |______ \___/ | | |_____] | |____/ |______
# |______ _/ \_ |_____| |_____] __|__ | \_ |______
| true |
class FixedScreens
constructor: (pre) ->
NUMLINES = 21
@show = (doc, noIndent = false) =>
lines = doc.split("\n")
lines = lines[0..NUMLINES-1] if lines.length > NUMLINES
lines.push "" while lines.length < NUMLINES
lines = lines.map( (l) -> " " + l) unless noIndent
pre.textContent = lines.join("\n")
compositeLine = (fg, bg) ->
result = ''
for n in [0 .. Math.max(fg.length, bg.length)]
fc = fg[n] || ' '
bc = bg[n] || ' '
nc = bc
if fc == '.'
nc = ' '
else if fc != ' '
nc = fc
result += nc
return result
@superimpose = (doc) =>
bglines = pre.textContent.split("\n")
fglines = doc.split("\n")
newText = []
for i in [0 .. Math.max(bglines.length, fglines.length) - 1]
newline = compositeLine(fglines[i] || "", bglines[i] || "")
newText.push(newline)
@show(newText.join("\n"), true)
showTitle: ->
# ASCII art: http://patorjk.com/software/taag/
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By PI:NAME:<NAME>END_PI
Age 12
*** ONE KEYSTROKE ONE PLAY ***
0 O
+|
O
''')
showTitleAlt: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
By PI:NAME:<NAME>END_PI
Age 12
0 O
`y
O
''')
showInstructions: ->
@show('''
______ __ _ __
/ ____/ ______ / /_ (_) /_____
/ __/ | |/_/ __ \\/ __ \\/ / //_/ _ \\
/ /____> </ /_/ / /_/ / / ,< / __/
/_____/_/|_|\\____/_.___/_/_/|_|\\___/
A, D -> left and right
S -> wheelie
* puddles slow you down
/=/ ramps make you jump IF
DOING A WHEELIE. Otherwise,
they make you CRASH
Jumping speeds you up; wheelies
slow you down a bit.
''')
showCrashed: ->
crashed = '''
______ __ ________
/.____/________.______/./_..___..____/././././
/./.../.___/.__.`/.___/.__.\\/._.\\/.__.././././
/./___/./.././_/.(__..)./././..__/./_/./_/_/_/
\\____/_/...\\__,_/____/_/./_/\\___/\\__,_(_|_|_)
'''
# Put back the leading spacess coffeescript removes:
crashed = crashed.split("\n").map( (l) -> " " + l).join("\n")
@superimpose(crashed)
showWin: (time) ->
win = """
_______ _ __ __
__/|___/|___/|_ / ____(_)___ (_)____/ /_ ___ ____/ / __/|___/|___/|_
| / / / / /_ / / __ \\/ / ___/ __ \\/ _ \\/ __ / | / / /
/_ __/_ __/_ __| / __/ / / / / / (__ ) / / / __/ /_/ / /_ __/_ __/_ __|
|/ |/ |/ /_/ /_/_/ /_/_/____/_/ /_/\\___/\\__,_/ |/ |/ |/
Your time: #{time}
"""
@show(win)
# _______ _ _ _____ ______ _____ _ _ _______
# |______ \___/ | | |_____] | |____/ |______
# |______ _/ \_ |_____| |_____] __|__ | \_ |______
|
[
{
"context": "ne) ->\n expect(xpubInstance._xpub58).toBe('xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or')\n done()\n\n it \"should get the first ",
"end": 885,
"score": 0.9997238516807556,
"start": 774,
"tag": "... | app/spec/wallet/extended_public_key_spec.coffee | romanornr/ledger-wallet-crw | 173 | describe "Extended public key", ->
xpubInstance = null
dongleInst = null
init = (pin, seed, pairingKey, callback) ->
chrome.storage.local.clear()
dongleInst = new ledger.dongle.MockDongle pin, seed, pairingKey
ledger.app.dongle = dongleInst
dongleInst.unlockWithPinCode '0000', callback
describe "Bitcoin MainNet", ->
if ledger.config.network.name is 'bitcoin'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.dongle1
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
expect(xpubInstance._xpub58).toBe('xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('151krzHgfkNoH3XHBzEVi6tSn4db7pVjmR')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
describe "Bitcoin TestNet", ->
if ledger.config.network.name is 'testnet'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.bitcoin_testnet
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
# 44'/0'/0'/0 => tpubDFkDnPNZGJLgqgPtRACibsu2Jb5U6qqEPpf3CuzCUNVnyx3Gg7CGo31mC6P1nyArzjtCzkiZTAEzUHwmBu84bYKQ9oyfuDh3XXaJj9ejYix
expect(xpubInstance._xpub58).toBe('tpubDC5zQEehVX1nttyguXZoLpJL6wCbf47jASUjdff6yJeeJqKmhkXBRjwjHKJHz1r74uidpjXh2zQoX2wrwJv1fnsEzth35qtUECV2qZDWqfV')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('mmc6jah6sCb9W8cia14MtY51QFytVsZ8Nk')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
| 37086 | describe "Extended public key", ->
xpubInstance = null
dongleInst = null
init = (pin, seed, pairingKey, callback) ->
chrome.storage.local.clear()
dongleInst = new ledger.dongle.MockDongle pin, seed, pairingKey
ledger.app.dongle = dongleInst
dongleInst.unlockWithPinCode '0000', callback
describe "Bitcoin MainNet", ->
if ledger.config.network.name is 'bitcoin'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.dongle1
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
expect(xpubInstance._xpub58).toBe('<KEY>')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('<KEY>')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
describe "Bitcoin TestNet", ->
if ledger.config.network.name is 'testnet'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.bitcoin_testnet
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
# 44'/0'/0'/0 => <KEY>
expect(xpubInstance._xpub58).toBe('<KEY>')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('<KEY>')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
| true | describe "Extended public key", ->
xpubInstance = null
dongleInst = null
init = (pin, seed, pairingKey, callback) ->
chrome.storage.local.clear()
dongleInst = new ledger.dongle.MockDongle pin, seed, pairingKey
ledger.app.dongle = dongleInst
dongleInst.unlockWithPinCode '0000', callback
describe "Bitcoin MainNet", ->
if ledger.config.network.name is 'bitcoin'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.dongle1
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
expect(xpubInstance._xpub58).toBe('PI:KEY:<KEY>END_PI')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('PI:KEY:<KEY>END_PI')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
describe "Bitcoin TestNet", ->
if ledger.config.network.name is 'testnet'
beforeAll (done) ->
dongle = ledger.specs.fixtures.dongles.bitcoin_testnet
init dongle.pin, dongle.masterSeed, dongle.pairingKeyHex, ->
xpubInstance = new ledger.wallet.ExtendedPublicKey(ledger.app.dongle, "44'/0'/0'", false)
xpubInstance.initialize(done)
it "should create an xPub", (done) ->
# 44'/0'/0'/0 => PI:KEY:<KEY>END_PI
expect(xpubInstance._xpub58).toBe('PI:KEY:<KEY>END_PI')
done()
it "should get the first public address", (done) ->
expect(xpubInstance.getPublicAddress("0/0")).toBe('PI:KEY:<KEY>END_PI')
done()
afterAll ->
chrome.storage.local.clear()
dongleInst = null
|
[
{
"context": " \"http://test.com/test.xml\"\n \"authors\": \"Danny Swaby\"\n )\n feed.on \"invalid\", (model, err",
"end": 1173,
"score": 0.999814510345459,
"start": 1162,
"tag": "NAME",
"value": "Danny Swaby"
}
] | test/coffee/feed.coffee | dswaby/swabcast-webapp | 0 | define ["app",
"./../../app/assets/bower_components/chai/chai",
"entities/feed"
], (Swabcast, chai) ->
#feeds = require("app","entities/feed")
expect = chai.expect;
describe "Feed Model", ->
describe "Creation", ->
feed = new Swabcast.Entities.Feed()
it "expect \"/feed\" to have default values", ->
expect(feed).to.be.ok
expect(feed.urlRoot).to.equal("feeds")
return
describe "Validation", ->
it "expect \'Invalid\' if the feed does not contain title", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": ""
"episodes": []
)
feed.on "invalid", (model, error) ->
throw Error(error[0].message)
return
fn = ->
feed.save()
return
expect(fn).to.throw("Feed Entity must have valid title")
it "expect \'Invalid\' if the feed does not contain episodes", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": "Test Feed"
"albumArt": "test.jpg"
"summary": "This is a test feed"
"feedUrl": "http://test.com/test.xml"
"authors": "Danny Swaby"
)
feed.on "invalid", (model, error) ->
throw Error(model.get("subscriptionTitle") + " " + error[0].message)
return
fn = ->
feed.save episodes: ""
return
expect(fn).to.throw("Test Feed must contain episodes attribute")
return
return
return | 212221 | define ["app",
"./../../app/assets/bower_components/chai/chai",
"entities/feed"
], (Swabcast, chai) ->
#feeds = require("app","entities/feed")
expect = chai.expect;
describe "Feed Model", ->
describe "Creation", ->
feed = new Swabcast.Entities.Feed()
it "expect \"/feed\" to have default values", ->
expect(feed).to.be.ok
expect(feed.urlRoot).to.equal("feeds")
return
describe "Validation", ->
it "expect \'Invalid\' if the feed does not contain title", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": ""
"episodes": []
)
feed.on "invalid", (model, error) ->
throw Error(error[0].message)
return
fn = ->
feed.save()
return
expect(fn).to.throw("Feed Entity must have valid title")
it "expect \'Invalid\' if the feed does not contain episodes", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": "Test Feed"
"albumArt": "test.jpg"
"summary": "This is a test feed"
"feedUrl": "http://test.com/test.xml"
"authors": "<NAME>"
)
feed.on "invalid", (model, error) ->
throw Error(model.get("subscriptionTitle") + " " + error[0].message)
return
fn = ->
feed.save episodes: ""
return
expect(fn).to.throw("Test Feed must contain episodes attribute")
return
return
return | true | define ["app",
"./../../app/assets/bower_components/chai/chai",
"entities/feed"
], (Swabcast, chai) ->
#feeds = require("app","entities/feed")
expect = chai.expect;
describe "Feed Model", ->
describe "Creation", ->
feed = new Swabcast.Entities.Feed()
it "expect \"/feed\" to have default values", ->
expect(feed).to.be.ok
expect(feed.urlRoot).to.equal("feeds")
return
describe "Validation", ->
it "expect \'Invalid\' if the feed does not contain title", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": ""
"episodes": []
)
feed.on "invalid", (model, error) ->
throw Error(error[0].message)
return
fn = ->
feed.save()
return
expect(fn).to.throw("Feed Entity must have valid title")
it "expect \'Invalid\' if the feed does not contain episodes", ->
feed = new Swabcast.Entities.Feed(
"subscriptionTitle": "Test Feed"
"albumArt": "test.jpg"
"summary": "This is a test feed"
"feedUrl": "http://test.com/test.xml"
"authors": "PI:NAME:<NAME>END_PI"
)
feed.on "invalid", (model, error) ->
throw Error(model.get("subscriptionTitle") + " " + error[0].message)
return
fn = ->
feed.save episodes: ""
return
expect(fn).to.throw("Test Feed must contain episodes attribute")
return
return
return |
[
{
"context": "###################################\n#\n# Created by Markus on 26/10/2016.\n#\n################################",
"end": 77,
"score": 0.9987019896507263,
"start": 71,
"tag": "NAME",
"value": "Markus"
}
] | server/3_database/field.coffee | agottschalk10/worklearn | 0 | #######################################################
#
# Created by Markus on 26/10/2016.
#
#######################################################
#######################################################
@modify_field = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
deny_action('modify', collection, id, field)
check value, Match.OneOf String, Number, Boolean
res = modify_field_unprotected collection, id, field, value
#if typeof value == "string"
#predaid_add_text collection, id, field
return res
#######################################################
@modify_field_unprotected = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
check id, String
s = {}
s[field] = value
s['modified'] = new Date
mod =
$set:s
n = collection.update(id, mod)
msg = "[collection_name] " + collection._name
msg += " [field] " + field + ' of ' +
msg += " [item] " + id
msg += " [value] " + value.toString().substr(0, 50)
log_event msg, event_edit, event_info
return n
#######################################################
@visible_fields = (collection, user_id, filter) ->
owner = false
if filter.owner_id
if filter.owner_id == user_id
owner = true
roles = ['all']
if owner
roles.push 'owner'
if user_id
user = Meteor.users.findOne(user_id)
if user
roles.push user.roles ...
roles.push 'anonymous'
res = {}
edit_fields = Permissions.find({}, {fields:{field:1}}).fetch()
for field in edit_fields
filter =
role:
$in: roles
field: field.field
collection_name: collection._name
permissions = Permissions.find(filter)
if permissions.count() == 0
continue
for permission in permissions.fetch()
if action_permitted permission, 'read'
res[field["field"]] = 1
mod =
fields: res
return mod
| 89284 | #######################################################
#
# Created by <NAME> on 26/10/2016.
#
#######################################################
#######################################################
@modify_field = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
deny_action('modify', collection, id, field)
check value, Match.OneOf String, Number, Boolean
res = modify_field_unprotected collection, id, field, value
#if typeof value == "string"
#predaid_add_text collection, id, field
return res
#######################################################
@modify_field_unprotected = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
check id, String
s = {}
s[field] = value
s['modified'] = new Date
mod =
$set:s
n = collection.update(id, mod)
msg = "[collection_name] " + collection._name
msg += " [field] " + field + ' of ' +
msg += " [item] " + id
msg += " [value] " + value.toString().substr(0, 50)
log_event msg, event_edit, event_info
return n
#######################################################
@visible_fields = (collection, user_id, filter) ->
owner = false
if filter.owner_id
if filter.owner_id == user_id
owner = true
roles = ['all']
if owner
roles.push 'owner'
if user_id
user = Meteor.users.findOne(user_id)
if user
roles.push user.roles ...
roles.push 'anonymous'
res = {}
edit_fields = Permissions.find({}, {fields:{field:1}}).fetch()
for field in edit_fields
filter =
role:
$in: roles
field: field.field
collection_name: collection._name
permissions = Permissions.find(filter)
if permissions.count() == 0
continue
for permission in permissions.fetch()
if action_permitted permission, 'read'
res[field["field"]] = 1
mod =
fields: res
return mod
| true | #######################################################
#
# Created by PI:NAME:<NAME>END_PI on 26/10/2016.
#
#######################################################
#######################################################
@modify_field = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
deny_action('modify', collection, id, field)
check value, Match.OneOf String, Number, Boolean
res = modify_field_unprotected collection, id, field, value
#if typeof value == "string"
#predaid_add_text collection, id, field
return res
#######################################################
@modify_field_unprotected = (collection, id, field, value) ->
if not collection
throw new Meteor.Error "Collection undefined."
check id, String
s = {}
s[field] = value
s['modified'] = new Date
mod =
$set:s
n = collection.update(id, mod)
msg = "[collection_name] " + collection._name
msg += " [field] " + field + ' of ' +
msg += " [item] " + id
msg += " [value] " + value.toString().substr(0, 50)
log_event msg, event_edit, event_info
return n
#######################################################
@visible_fields = (collection, user_id, filter) ->
owner = false
if filter.owner_id
if filter.owner_id == user_id
owner = true
roles = ['all']
if owner
roles.push 'owner'
if user_id
user = Meteor.users.findOne(user_id)
if user
roles.push user.roles ...
roles.push 'anonymous'
res = {}
edit_fields = Permissions.find({}, {fields:{field:1}}).fetch()
for field in edit_fields
filter =
role:
$in: roles
field: field.field
collection_name: collection._name
permissions = Permissions.find(filter)
if permissions.count() == 0
continue
for permission in permissions.fetch()
if action_permitted permission, 'read'
res[field["field"]] = 1
mod =
fields: res
return mod
|
[
{
"context": "place(/\\n/g, \"<br>\")\n\n\t\tmailer.sendMail\n\t\t\tfrom: \"ops@pdf.yt\"\n\t\t\tto: \"admin@cryto.net\"\n\t\t\tsubject: \"Automatic ",
"end": 886,
"score": 0.9999282956123352,
"start": 876,
"tag": "EMAIL",
"value": "ops@pdf.yt"
},
{
"context": "\n\n\t\tmailer.sendMail... | error-reporter.coffee | joepie91/pdfy2 | 11 | chokidar = require "chokidar"
nodemailer = require "nodemailer"
path = require "path"
fs = require "fs"
watcher = chokidar.watch "./errors", depth: 0, ignoreInitial: true
mailer = nodemailer.createTransport()
processFile = (filePath) ->
fs.readFile filePath, (err, data) ->
try
parsedData = JSON.parse(data)
catch error
console.log "Error report not complete yet, retrying #{filePath} in 1 second..."
setTimeout (->
processFile(filePath)
), 1000
return
errorMessage = parsedData?.message ? "UNKNOWN ERROR"
textStack = parsedData?.stack?.replace(/\u001b(?:\[\??(?:\d\;*)*[A-HJKSTfminsulh])?/g, "") ? ""
message = """
A failure occurred. #{filePath} is attached.
#{textStack}
"""
htmlMessage = """
A failure occurred. #{filePath} is attached.
<pre>#{textStack}</pre>
""".replace(/\n/g, "<br>")
mailer.sendMail
from: "ops@pdf.yt"
to: "admin@cryto.net"
subject: "Automatic failure report: #{errorMessage}"
text: message
html: htmlMessage
attachments: [
filename: path.basename(filePath)
path: filePath
contentType: "application/json"
]
watcher.on "add", (filePath) ->
console.log "PANIC! Sending report:", filePath
processFile(filePath)
console.log "Running..."
| 212154 | chokidar = require "chokidar"
nodemailer = require "nodemailer"
path = require "path"
fs = require "fs"
watcher = chokidar.watch "./errors", depth: 0, ignoreInitial: true
mailer = nodemailer.createTransport()
processFile = (filePath) ->
fs.readFile filePath, (err, data) ->
try
parsedData = JSON.parse(data)
catch error
console.log "Error report not complete yet, retrying #{filePath} in 1 second..."
setTimeout (->
processFile(filePath)
), 1000
return
errorMessage = parsedData?.message ? "UNKNOWN ERROR"
textStack = parsedData?.stack?.replace(/\u001b(?:\[\??(?:\d\;*)*[A-HJKSTfminsulh])?/g, "") ? ""
message = """
A failure occurred. #{filePath} is attached.
#{textStack}
"""
htmlMessage = """
A failure occurred. #{filePath} is attached.
<pre>#{textStack}</pre>
""".replace(/\n/g, "<br>")
mailer.sendMail
from: "<EMAIL>"
to: "<EMAIL>"
subject: "Automatic failure report: #{errorMessage}"
text: message
html: htmlMessage
attachments: [
filename: path.basename(filePath)
path: filePath
contentType: "application/json"
]
watcher.on "add", (filePath) ->
console.log "PANIC! Sending report:", filePath
processFile(filePath)
console.log "Running..."
| true | chokidar = require "chokidar"
nodemailer = require "nodemailer"
path = require "path"
fs = require "fs"
watcher = chokidar.watch "./errors", depth: 0, ignoreInitial: true
mailer = nodemailer.createTransport()
processFile = (filePath) ->
fs.readFile filePath, (err, data) ->
try
parsedData = JSON.parse(data)
catch error
console.log "Error report not complete yet, retrying #{filePath} in 1 second..."
setTimeout (->
processFile(filePath)
), 1000
return
errorMessage = parsedData?.message ? "UNKNOWN ERROR"
textStack = parsedData?.stack?.replace(/\u001b(?:\[\??(?:\d\;*)*[A-HJKSTfminsulh])?/g, "") ? ""
message = """
A failure occurred. #{filePath} is attached.
#{textStack}
"""
htmlMessage = """
A failure occurred. #{filePath} is attached.
<pre>#{textStack}</pre>
""".replace(/\n/g, "<br>")
mailer.sendMail
from: "PI:EMAIL:<EMAIL>END_PI"
to: "PI:EMAIL:<EMAIL>END_PI"
subject: "Automatic failure report: #{errorMessage}"
text: message
html: htmlMessage
attachments: [
filename: path.basename(filePath)
path: filePath
contentType: "application/json"
]
watcher.on "add", (filePath) ->
console.log "PANIC! Sending report:", filePath
processFile(filePath)
console.log "Running..."
|
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998874664306641,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/kanban/main.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/kanban/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
timeout = @.taiga.timeout
bindMethods = @.taiga.bindMethods
module = angular.module("taigaKanban")
# Vars
defaultViewMode = "maximized"
defaultViewModes = {
maximized: {
cardClass: "kanban-task-maximized"
}
minimized: {
cardClass: "kanban-task-minimized"
}
}
#############################################################################
## Kanban Controller
#############################################################################
class KanbanController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$appTitle",
"$tgNavUrls",
"$tgEvents",
"$tgAnalytics",
"tgLoader"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location,
@appTitle, @navUrls, @events, @analytics, tgLoader) ->
bindMethods(@)
@scope.sectionName = "Kanban"
@scope.statusViewModes = {}
@.initializeEventHandlers()
promise = @.loadInitialData()
# On Success
promise.then =>
@appTitle.set("Kanban - " + @scope.project.name)
tgLoader.pageLoaded()
# On Error
promise.then null, @.onInitialDataError.bind(@)
initializeEventHandlers: ->
@scope.$on "usform:new:success", =>
@.loadUserstories()
@.refreshTagsColors()
@analytics.trackEvent("userstory", "create", "create userstory on kanban", 1)
@scope.$on "usform:bulk:success", =>
@.loadUserstories()
@analytics.trackEvent("userstory", "create", "bulk create userstory on kanban", 1)
@scope.$on "usform:edit:success", =>
@.loadUserstories()
@.refreshTagsColors()
@scope.$on("assigned-to:added", @.onAssignedToChanged)
@scope.$on("kanban:us:move", @.moveUs)
# Template actions
addNewUs: (type, statusId) ->
switch type
when "standard" then @rootscope.$broadcast("usform:new", @scope.projectId, statusId, @scope.usStatusList)
when "bulk" then @rootscope.$broadcast("usform:bulk", @scope.projectId, statusId)
changeUsAssignedTo: (us) ->
@rootscope.$broadcast("assigned-to:add", us)
# Scope Events Handlers
onAssignedToChanged: (ctx, userid, us) ->
us.assigned_to = userid
promise = @repo.save(us)
promise.then null, ->
console.log "FAIL" # TODO
# Load data methods
loadProjectStats: ->
return @rs.projects.stats(@scope.projectId).then (stats) =>
@scope.stats = stats
if stats.total_points
completedPercentage = Math.round(100 * stats.closed_points / stats.total_points)
else
completedPercentage = 0
@scope.stats.completedPercentage = "#{completedPercentage}%"
return stats
refreshTagsColors: ->
return @rs.projects.tagsColors(@scope.projectId).then (tags_colors) =>
@scope.project.tags_colors = tags_colors
loadUserstories: ->
return @rs.userstories.listAll(@scope.projectId).then (userstories) =>
@scope.userstories = userstories
@scope.usByStatus = _.groupBy(userstories, "status")
for status in @scope.usStatusList
if not @scope.usByStatus[status.id]?
@scope.usByStatus[status.id] = []
@scope.usByStatus[status.id] = _.sortBy(@scope.usByStatus[status.id], "kanban_order")
# The broadcast must be executed when the DOM has been fully reloaded.
# We can't assure when this exactly happens so we need a defer
scopeDefer @scope, =>
@scope.$broadcast("userstories:loaded")
return userstories
loadKanban: ->
return @q.all([
@.refreshTagsColors(),
@.loadProjectStats(),
@.loadUserstories()
])
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.points = _.sortBy(project.points, "order")
@scope.pointsById = groupBy(project.points, (x) -> x.id)
@scope.usStatusById = groupBy(project.us_statuses, (x) -> x.id)
@scope.usStatusList = _.sortBy(project.us_statuses, "order")
@.generateStatusViewModes()
@scope.$emit("project:loaded", project)
return project
initializeSubscription: ->
routingKey1 = "changes.project.#{@scope.projectId}.userstories"
@events.subscribe @scope, routingKey1, (message) =>
@.loadUserstories()
loadInitialData: ->
# Resolve project slug
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
@.initializeSubscription()
return data
return promise.then(=> @.loadProject())
.then(=> @.loadUsersAndRoles())
.then(=> @.loadKanban())
.then(=> @scope.$broadcast("redraw:wip"))
## View Mode methods
generateStatusViewModes: ->
storedStatusViewModes = @rs.kanban.getStatusViewModes(@scope.projectId)
@scope.statusViewModes = {}
for status in @scope.usStatusList
mode = storedStatusViewModes[status.id]
@scope.statusViewModes[status.id] = if _.has(defaultViewModes, mode) then mode else defaultViewMode
@.storeStatusViewModes()
storeStatusViewModes: ->
@rs.kanban.storeStatusViewModes(@scope.projectId, @scope.statusViewModes)
updateStatusViewMode: (statusId, newViewMode) ->
@scope.statusViewModes[statusId] = newViewMode
@.storeStatusViewModes()
getCardClass: (statusId)->
mode = @scope.statusViewModes[statusId] or defaultViewMode
return defaultViewModes[mode].cardClass or defaultViewModes[defaultViewMode].cardClass
# Utils methods
prepareBulkUpdateData: (uses, field="kanban_order") ->
return _.map(uses, (x) -> {"us_id": x.id, "order": x[field]})
resortUserStories: (uses) ->
items = []
for item, index in uses
item.kanban_order = index
if item.isModified()
items.push(item)
return items
moveUs: (ctx, us, statusId, index) ->
if us.status != statusId
# Remove us from old status column
r = @scope.usByStatus[us.status].indexOf(us)
@scope.usByStatus[us.status].splice(r, 1)
# Add us to new status column.
@scope.usByStatus[statusId].splice(index, 0, us)
us.status = statusId
else
r = @scope.usByStatus[statusId].indexOf(us)
@scope.usByStatus[statusId].splice(r, 1)
@scope.usByStatus[statusId].splice(index, 0, us)
itemsToSave = @.resortUserStories(@scope.usByStatus[statusId])
@scope.usByStatus[statusId] = _.sortBy(@scope.usByStatus[statusId], "kanban_order")
# Persist the userstory
promise = @repo.save(us)
# Rehash userstories order field
# and persist in bulk all changes.
promise = promise.then =>
itemsToSave = _.reject(itemsToSave, {"id": us.id})
data = @.prepareBulkUpdateData(itemsToSave)
return @rs.userstories.bulkUpdateKanbanOrder(us.project, data).then =>
return itemsToSave
return promise
module.controller("KanbanController", KanbanController)
#############################################################################
## Kanban Directive
#############################################################################
KanbanDirective = ($repo, $rootscope) ->
link = ($scope, $el, $attrs) ->
tableBodyDom = $el.find(".kanban-table-body")
tableBodyDom.on "scroll", (event) ->
target = angular.element(event.currentTarget)
tableHeaderDom = $el.find(".kanban-table-header .kanban-table-inner")
tableHeaderDom.css("left", -1 * target.scrollLeft())
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgKanban", ["$tgRepo", "$rootScope", KanbanDirective])
#############################################################################
## Kanban Column Height Fixer Directive
#############################################################################
KanbanColumnHeightFixerDirective = ->
mainPadding = 32 # px
scrollPadding = 0 # px
renderSize = ($el) ->
elementOffset = $el.parent().parent().offset().top
windowHeight = angular.element(window).height()
columnHeight = windowHeight - elementOffset - mainPadding - scrollPadding
$el.css("height", "#{columnHeight}px")
link = ($scope, $el, $attrs) ->
timeout(500, -> renderSize($el))
$scope.$on "resize", ->
renderSize($el)
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgKanbanColumnHeightFixer", KanbanColumnHeightFixerDirective)
#############################################################################
## Kanban User Story Directive
#############################################################################
KanbanUserstoryDirective = ($rootscope) ->
link = ($scope, $el, $attrs, $model) ->
$el.disableSelection()
$scope.$watch "us", (us) ->
if us.is_blocked and not $el.hasClass("blocked")
$el.addClass("blocked")
else if not us.is_blocked and $el.hasClass("blocked")
$el.removeClass("blocked")
$el.find(".icon-edit").on "click", (event) ->
if $el.find(".icon-edit").hasClass("noclick")
return
$scope.$apply ->
$rootscope.$broadcast("usform:edit", $model.$modelValue)
$scope.$on "$destroy", ->
$el.off()
return {
templateUrl: "/partials/views/components/kanban-task.html"
link: link
require: "ngModel"
}
module.directive("tgKanbanUserstory", ["$rootScope", KanbanUserstoryDirective])
#############################################################################
## Kanban Squish Column Directive
#############################################################################
KanbanSquishColumnDirective = (rs) ->
link = ($scope, $el, $attrs) ->
$scope.$on "project:loaded", (event, project) ->
$scope.folds = rs.kanban.getStatusColumnModes(project.id)
updateTableWidth()
$scope.foldStatus = (status) ->
$scope.folds[status.id] = !!!$scope.folds[status.id]
rs.kanban.storeStatusColumnModes($scope.projectId, $scope.folds)
updateTableWidth()
return
updateTableWidth = ->
columnWidths = _.map $scope.usStatusList, (status) ->
if $scope.folds[status.id]
return 40
else
return 310
totalWidth = _.reduce columnWidths, (total, width) ->
return total + width
$el.find('.kanban-table-inner').css("width", totalWidth)
return {link: link}
module.directive("tgKanbanSquishColumn", ["$tgResources", KanbanSquishColumnDirective])
#############################################################################
## Kanban WIP Limit Directive
#############################################################################
KanbanWipLimitDirective = ->
link = ($scope, $el, $attrs) ->
$el.disableSelection()
redrawWipLimit = ->
$el.find(".kanban-wip-limit").remove()
timeout 200, ->
element = $el.find(".kanban-task")[$scope.$eval($attrs.tgKanbanWipLimit)]
if element
angular.element(element).before("<div class='kanban-wip-limit'></div>")
$scope.$on "redraw:wip", redrawWipLimit
$scope.$on "kanban:us:move", redrawWipLimit
$scope.$on "usform:new:success", redrawWipLimit
$scope.$on "usform:bulk:success", redrawWipLimit
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgKanbanWipLimit", KanbanWipLimitDirective)
#############################################################################
## Kanban User Directive
#############################################################################
KanbanUserDirective = ($log) ->
template = _.template("""
<figure class="avatar">
<a href="#" title="Assign User Story" <% if (!clickable) {%>class="not-clickable"<% } %>>
<img src="<%- imgurl %>" alt="<%- name %>" class="avatar">
</a>
</figure>
""") # TODO: i18n
clickable = false
link = ($scope, $el, $attrs, $model) ->
if not $attrs.tgKanbanUserAvatar
return $log.error "KanbanUserDirective: no attr is defined"
wtid = $scope.$watch $attrs.tgKanbanUserAvatar, (v) ->
if not $scope.usersById?
$log.error "KanbanUserDirective requires userById set in scope."
wtid()
else
user = $scope.usersById[v]
render(user)
render = (user) ->
if user is undefined
ctx = {name: "Unassigned", imgurl: "/images/unnamed.png", clickable: clickable}
else
ctx = {name: user.full_name_display, imgurl: user.photo, clickable: clickable}
html = template(ctx)
$el.html(html)
username_label = $el.parent().find("a.task-assigned")
username_label.html(ctx.name)
username_label.on "click", (event) ->
if $el.find("a").hasClass("noclick")
return
us = $model.$modelValue
$ctrl = $el.controller()
$ctrl.changeUsAssignedTo(us)
bindOnce $scope, "project", (project) ->
if project.my_permissions.indexOf("modify_us") > -1
clickable = true
$el.on "click", (event) =>
if $el.find("a").hasClass("noclick")
return
us = $model.$modelValue
$ctrl = $el.controller()
$ctrl.changeUsAssignedTo(us)
$scope.$on "$destroy", ->
$el.off()
return {link: link, require:"ngModel"}
module.directive("tgKanbanUserAvatar", ["$log", KanbanUserDirective])
| 29871 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/kanban/main.coffee
###
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
timeout = @.taiga.timeout
bindMethods = @.taiga.bindMethods
module = angular.module("taigaKanban")
# Vars
defaultViewMode = "maximized"
defaultViewModes = {
maximized: {
cardClass: "kanban-task-maximized"
}
minimized: {
cardClass: "kanban-task-minimized"
}
}
#############################################################################
## Kanban Controller
#############################################################################
class KanbanController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
@.$inject = [
"$scope",
"$rootScope",
"$tgRepo",
"$tgConfirm",
"$tgResources",
"$routeParams",
"$q",
"$tgLocation",
"$appTitle",
"$tgNavUrls",
"$tgEvents",
"$tgAnalytics",
"tgLoader"
]
constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location,
@appTitle, @navUrls, @events, @analytics, tgLoader) ->
bindMethods(@)
@scope.sectionName = "Kanban"
@scope.statusViewModes = {}
@.initializeEventHandlers()
promise = @.loadInitialData()
# On Success
promise.then =>
@appTitle.set("Kanban - " + @scope.project.name)
tgLoader.pageLoaded()
# On Error
promise.then null, @.onInitialDataError.bind(@)
initializeEventHandlers: ->
@scope.$on "usform:new:success", =>
@.loadUserstories()
@.refreshTagsColors()
@analytics.trackEvent("userstory", "create", "create userstory on kanban", 1)
@scope.$on "usform:bulk:success", =>
@.loadUserstories()
@analytics.trackEvent("userstory", "create", "bulk create userstory on kanban", 1)
@scope.$on "usform:edit:success", =>
@.loadUserstories()
@.refreshTagsColors()
@scope.$on("assigned-to:added", @.onAssignedToChanged)
@scope.$on("kanban:us:move", @.moveUs)
# Template actions
addNewUs: (type, statusId) ->
switch type
when "standard" then @rootscope.$broadcast("usform:new", @scope.projectId, statusId, @scope.usStatusList)
when "bulk" then @rootscope.$broadcast("usform:bulk", @scope.projectId, statusId)
changeUsAssignedTo: (us) ->
@rootscope.$broadcast("assigned-to:add", us)
# Scope Events Handlers
onAssignedToChanged: (ctx, userid, us) ->
us.assigned_to = userid
promise = @repo.save(us)
promise.then null, ->
console.log "FAIL" # TODO
# Load data methods
loadProjectStats: ->
return @rs.projects.stats(@scope.projectId).then (stats) =>
@scope.stats = stats
if stats.total_points
completedPercentage = Math.round(100 * stats.closed_points / stats.total_points)
else
completedPercentage = 0
@scope.stats.completedPercentage = "#{completedPercentage}%"
return stats
refreshTagsColors: ->
return @rs.projects.tagsColors(@scope.projectId).then (tags_colors) =>
@scope.project.tags_colors = tags_colors
loadUserstories: ->
return @rs.userstories.listAll(@scope.projectId).then (userstories) =>
@scope.userstories = userstories
@scope.usByStatus = _.groupBy(userstories, "status")
for status in @scope.usStatusList
if not @scope.usByStatus[status.id]?
@scope.usByStatus[status.id] = []
@scope.usByStatus[status.id] = _.sortBy(@scope.usByStatus[status.id], "kanban_order")
# The broadcast must be executed when the DOM has been fully reloaded.
# We can't assure when this exactly happens so we need a defer
scopeDefer @scope, =>
@scope.$broadcast("userstories:loaded")
return userstories
loadKanban: ->
return @q.all([
@.refreshTagsColors(),
@.loadProjectStats(),
@.loadUserstories()
])
loadProject: ->
return @rs.projects.get(@scope.projectId).then (project) =>
@scope.project = project
@scope.points = _.sortBy(project.points, "order")
@scope.pointsById = groupBy(project.points, (x) -> x.id)
@scope.usStatusById = groupBy(project.us_statuses, (x) -> x.id)
@scope.usStatusList = _.sortBy(project.us_statuses, "order")
@.generateStatusViewModes()
@scope.$emit("project:loaded", project)
return project
initializeSubscription: ->
routingKey1 = "changes.project.#{@scope.projectId}.userstories"
@events.subscribe @scope, routingKey1, (message) =>
@.loadUserstories()
loadInitialData: ->
# Resolve project slug
promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
@scope.projectId = data.project
@.initializeSubscription()
return data
return promise.then(=> @.loadProject())
.then(=> @.loadUsersAndRoles())
.then(=> @.loadKanban())
.then(=> @scope.$broadcast("redraw:wip"))
## View Mode methods
generateStatusViewModes: ->
storedStatusViewModes = @rs.kanban.getStatusViewModes(@scope.projectId)
@scope.statusViewModes = {}
for status in @scope.usStatusList
mode = storedStatusViewModes[status.id]
@scope.statusViewModes[status.id] = if _.has(defaultViewModes, mode) then mode else defaultViewMode
@.storeStatusViewModes()
storeStatusViewModes: ->
@rs.kanban.storeStatusViewModes(@scope.projectId, @scope.statusViewModes)
updateStatusViewMode: (statusId, newViewMode) ->
@scope.statusViewModes[statusId] = newViewMode
@.storeStatusViewModes()
getCardClass: (statusId)->
mode = @scope.statusViewModes[statusId] or defaultViewMode
return defaultViewModes[mode].cardClass or defaultViewModes[defaultViewMode].cardClass
# Utils methods
prepareBulkUpdateData: (uses, field="kanban_order") ->
return _.map(uses, (x) -> {"us_id": x.id, "order": x[field]})
resortUserStories: (uses) ->
items = []
for item, index in uses
item.kanban_order = index
if item.isModified()
items.push(item)
return items
moveUs: (ctx, us, statusId, index) ->
if us.status != statusId
# Remove us from old status column
r = @scope.usByStatus[us.status].indexOf(us)
@scope.usByStatus[us.status].splice(r, 1)
# Add us to new status column.
@scope.usByStatus[statusId].splice(index, 0, us)
us.status = statusId
else
r = @scope.usByStatus[statusId].indexOf(us)
@scope.usByStatus[statusId].splice(r, 1)
@scope.usByStatus[statusId].splice(index, 0, us)
itemsToSave = @.resortUserStories(@scope.usByStatus[statusId])
@scope.usByStatus[statusId] = _.sortBy(@scope.usByStatus[statusId], "kanban_order")
# Persist the userstory
promise = @repo.save(us)
# Rehash userstories order field
# and persist in bulk all changes.
promise = promise.then =>
itemsToSave = _.reject(itemsToSave, {"id": us.id})
data = @.prepareBulkUpdateData(itemsToSave)
return @rs.userstories.bulkUpdateKanbanOrder(us.project, data).then =>
return itemsToSave
return promise
module.controller("KanbanController", KanbanController)
#############################################################################
## Kanban Directive
#############################################################################
KanbanDirective = ($repo, $rootscope) ->
link = ($scope, $el, $attrs) ->
tableBodyDom = $el.find(".kanban-table-body")
tableBodyDom.on "scroll", (event) ->
target = angular.element(event.currentTarget)
tableHeaderDom = $el.find(".kanban-table-header .kanban-table-inner")
tableHeaderDom.css("left", -1 * target.scrollLeft())
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgKanban", ["$tgRepo", "$rootScope", KanbanDirective])
#############################################################################
## Kanban Column Height Fixer Directive
#############################################################################
KanbanColumnHeightFixerDirective = ->
mainPadding = 32 # px
scrollPadding = 0 # px
renderSize = ($el) ->
elementOffset = $el.parent().parent().offset().top
windowHeight = angular.element(window).height()
columnHeight = windowHeight - elementOffset - mainPadding - scrollPadding
$el.css("height", "#{columnHeight}px")
link = ($scope, $el, $attrs) ->
timeout(500, -> renderSize($el))
$scope.$on "resize", ->
renderSize($el)
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgKanbanColumnHeightFixer", KanbanColumnHeightFixerDirective)
#############################################################################
## Kanban User Story Directive
#############################################################################
KanbanUserstoryDirective = ($rootscope) ->
link = ($scope, $el, $attrs, $model) ->
$el.disableSelection()
$scope.$watch "us", (us) ->
if us.is_blocked and not $el.hasClass("blocked")
$el.addClass("blocked")
else if not us.is_blocked and $el.hasClass("blocked")
$el.removeClass("blocked")
$el.find(".icon-edit").on "click", (event) ->
if $el.find(".icon-edit").hasClass("noclick")
return
$scope.$apply ->
$rootscope.$broadcast("usform:edit", $model.$modelValue)
$scope.$on "$destroy", ->
$el.off()
return {
templateUrl: "/partials/views/components/kanban-task.html"
link: link
require: "ngModel"
}
module.directive("tgKanbanUserstory", ["$rootScope", KanbanUserstoryDirective])
#############################################################################
## Kanban Squish Column Directive
#############################################################################
KanbanSquishColumnDirective = (rs) ->
link = ($scope, $el, $attrs) ->
$scope.$on "project:loaded", (event, project) ->
$scope.folds = rs.kanban.getStatusColumnModes(project.id)
updateTableWidth()
$scope.foldStatus = (status) ->
$scope.folds[status.id] = !!!$scope.folds[status.id]
rs.kanban.storeStatusColumnModes($scope.projectId, $scope.folds)
updateTableWidth()
return
updateTableWidth = ->
columnWidths = _.map $scope.usStatusList, (status) ->
if $scope.folds[status.id]
return 40
else
return 310
totalWidth = _.reduce columnWidths, (total, width) ->
return total + width
$el.find('.kanban-table-inner').css("width", totalWidth)
return {link: link}
module.directive("tgKanbanSquishColumn", ["$tgResources", KanbanSquishColumnDirective])
#############################################################################
## Kanban WIP Limit Directive
#############################################################################
KanbanWipLimitDirective = ->
link = ($scope, $el, $attrs) ->
$el.disableSelection()
redrawWipLimit = ->
$el.find(".kanban-wip-limit").remove()
timeout 200, ->
element = $el.find(".kanban-task")[$scope.$eval($attrs.tgKanbanWipLimit)]
if element
angular.element(element).before("<div class='kanban-wip-limit'></div>")
$scope.$on "redraw:wip", redrawWipLimit
$scope.$on "kanban:us:move", redrawWipLimit
$scope.$on "usform:new:success", redrawWipLimit
$scope.$on "usform:bulk:success", redrawWipLimit
$scope.$on "$destroy", ->
$el.off()
return {link: link}
module.directive("tgKanbanWipLimit", KanbanWipLimitDirective)
#############################################################################
## Kanban User Directive
#############################################################################
KanbanUserDirective = ($log) ->
template = _.template("""
<figure class="avatar">
<a href="#" title="Assign User Story" <% if (!clickable) {%>class="not-clickable"<% } %>>
<img src="<%- imgurl %>" alt="<%- name %>" class="avatar">
</a>
</figure>
""") # TODO: i18n
clickable = false
link = ($scope, $el, $attrs, $model) ->
if not $attrs.tgKanbanUserAvatar
return $log.error "KanbanUserDirective: no attr is defined"
wtid = $scope.$watch $attrs.tgKanbanUserAvatar, (v) ->
if not $scope.usersById?
$log.error "KanbanUserDirective requires userById set in scope."
wtid()
else
user = $scope.usersById[v]
render(user)
render = (user) ->
if user is undefined
ctx = {name: "Unassigned", imgurl: "/images/unnamed.png", clickable: clickable}
else
ctx = {name: user.full_name_display, imgurl: user.photo, clickable: clickable}
html = template(ctx)
$el.html(html)
username_label = $el.parent().find("a.task-assigned")
username_label.html(ctx.name)
username_label.on "click", (event) ->
if $el.find("a").hasClass("noclick")
return
us = $model.$modelValue
$ctrl = $el.controller()
$ctrl.changeUsAssignedTo(us)
bindOnce $scope, "project", (project) ->
if project.my_permissions.indexOf("modify_us") > -1
clickable = true
$el.on "click", (event) =>
if $el.find("a").hasClass("noclick")
return
us = $model.$modelValue
$ctrl = $el.controller()
$ctrl.changeUsAssignedTo(us)
$scope.$on "$destroy", ->
$el.off()
return {link: link, require:"ngModel"}
module.directive("tgKanbanUserAvatar", ["$log", KanbanUserDirective])
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/kanban/main.coffee
###
# Local aliases for helpers exposed on the shared `@taiga` namespace
# (keeps the rest of the file short and avoids repeated property lookups).
taiga = @.taiga
mixOf = @.taiga.mixOf
toggleText = @.taiga.toggleText
scopeDefer = @.taiga.scopeDefer
bindOnce = @.taiga.bindOnce
groupBy = @.taiga.groupBy
timeout = @.taiga.timeout
bindMethods = @.taiga.bindMethods

module = angular.module("taigaKanban")

# Vars
# Per-status card rendering modes. `cardClass` is the CSS class applied to
# each task card in a column; "maximized" is the fallback everywhere.
defaultViewMode = "maximized"
defaultViewModes = {
    maximized: {
        cardClass: "kanban-task-maximized"
    }
    minimized: {
        cardClass: "kanban-task-minimized"
    }
}
#############################################################################
## Kanban Controller
#############################################################################
# Page controller for the kanban board: loads project/userstory data,
# reacts to form/assignment events, and persists drag&drop reordering.
class KanbanController extends mixOf(taiga.Controller, taiga.PageMixin, taiga.FiltersMixin)
    # Angular DI annotations; order must match the constructor signature.
    @.$inject = [
        "$scope",
        "$rootScope",
        "$tgRepo",
        "$tgConfirm",
        "$tgResources",
        "$routeParams",
        "$q",
        "$tgLocation",
        "$appTitle",
        "$tgNavUrls",
        "$tgEvents",
        "$tgAnalytics",
        "tgLoader"
    ]

    constructor: (@scope, @rootscope, @repo, @confirm, @rs, @params, @q, @location,
                  @appTitle, @navUrls, @events, @analytics, tgLoader) ->
        bindMethods(@)

        @scope.sectionName = "Kanban"
        @scope.statusViewModes = {}

        @.initializeEventHandlers()

        promise = @.loadInitialData()

        # On Success
        promise.then =>
            @appTitle.set("Kanban - " + @scope.project.name)
            tgLoader.pageLoaded()

        # On Error
        promise.then null, @.onInitialDataError.bind(@)

    # Wire scope listeners for the new/bulk/edit userstory lightboxes and
    # for assignment/move events coming from the directives below.
    initializeEventHandlers: ->
        @scope.$on "usform:new:success", =>
            @.loadUserstories()
            @.refreshTagsColors()
            @analytics.trackEvent("userstory", "create", "create userstory on kanban", 1)

        @scope.$on "usform:bulk:success", =>
            @.loadUserstories()
            @analytics.trackEvent("userstory", "create", "bulk create userstory on kanban", 1)

        @scope.$on "usform:edit:success", =>
            @.loadUserstories()
            @.refreshTagsColors()

        @scope.$on("assigned-to:added", @.onAssignedToChanged)
        @scope.$on("kanban:us:move", @.moveUs)

    # Template actions

    # Open the "new userstory" lightbox, either single ("standard") or bulk.
    addNewUs: (type, statusId) ->
        switch type
            when "standard" then @rootscope.$broadcast("usform:new", @scope.projectId, statusId, @scope.usStatusList)
            when "bulk" then @rootscope.$broadcast("usform:bulk", @scope.projectId, statusId)

    # Open the assign-to lightbox for the given userstory.
    changeUsAssignedTo: (us) ->
        @rootscope.$broadcast("assigned-to:add", us)

    # Scope Events Handlers

    # Persist a new assignee chosen in the lightbox.
    onAssignedToChanged: (ctx, userid, us) ->
        us.assigned_to = userid

        promise = @repo.save(us)
        promise.then null, ->
            console.log "FAIL" # TODO

    # Load data methods

    # Fetch project stats and derive a "NN%" completed string for the header.
    loadProjectStats: ->
        return @rs.projects.stats(@scope.projectId).then (stats) =>
            @scope.stats = stats

            if stats.total_points
                completedPercentage = Math.round(100 * stats.closed_points / stats.total_points)
            else
                completedPercentage = 0

            @scope.stats.completedPercentage = "#{completedPercentage}%"
            return stats

    refreshTagsColors: ->
        return @rs.projects.tagsColors(@scope.projectId).then (tags_colors) =>
            @scope.project.tags_colors = tags_colors

    # Fetch all userstories and bucket them by status, sorted by kanban_order.
    # Statuses with no stories get an explicit empty column.
    loadUserstories: ->
        return @rs.userstories.listAll(@scope.projectId).then (userstories) =>
            @scope.userstories = userstories

            @scope.usByStatus = _.groupBy(userstories, "status")

            for status in @scope.usStatusList
                if not @scope.usByStatus[status.id]?
                    @scope.usByStatus[status.id] = []
                @scope.usByStatus[status.id] = _.sortBy(@scope.usByStatus[status.id], "kanban_order")

            # The broadcast must be executed when the DOM has been fully reloaded.
            # We can't assure when this exactly happens so we need a defer
            scopeDefer @scope, =>
                @scope.$broadcast("userstories:loaded")

            return userstories

    # Refresh everything that depends on userstory data, in parallel.
    loadKanban: ->
        return @q.all([
            @.refreshTagsColors(),
            @.loadProjectStats(),
            @.loadUserstories()
        ])

    # Fetch the project and index its points/statuses for fast lookup.
    loadProject: ->
        return @rs.projects.get(@scope.projectId).then (project) =>
            @scope.project = project
            @scope.points = _.sortBy(project.points, "order")
            @scope.pointsById = groupBy(project.points, (x) -> x.id)
            @scope.usStatusById = groupBy(project.us_statuses, (x) -> x.id)
            @scope.usStatusList = _.sortBy(project.us_statuses, "order")

            @.generateStatusViewModes()

            @scope.$emit("project:loaded", project)
            return project

    # Subscribe to server-push changes on this project's userstories.
    initializeSubscription: ->
        routingKey1 = "changes.project.#{@scope.projectId}.userstories"
        @events.subscribe @scope, routingKey1, (message) =>
            @.loadUserstories()

    loadInitialData: ->
        # Resolve project slug
        promise = @repo.resolve({pslug: @params.pslug}).then (data) =>
            @scope.projectId = data.project
            @.initializeSubscription()
            return data

        return promise.then(=> @.loadProject())
                      .then(=> @.loadUsersAndRoles())
                      .then(=> @.loadKanban())
                      .then(=> @scope.$broadcast("redraw:wip"))

    ## View Mode methods

    # Rebuild @scope.statusViewModes from persisted values, falling back to
    # the default mode for unknown/invalid entries, then re-persist.
    generateStatusViewModes: ->
        storedStatusViewModes = @rs.kanban.getStatusViewModes(@scope.projectId)

        @scope.statusViewModes = {}
        for status in @scope.usStatusList
            mode = storedStatusViewModes[status.id]
            @scope.statusViewModes[status.id] = if _.has(defaultViewModes, mode) then mode else defaultViewMode

        @.storeStatusViewModes()

    storeStatusViewModes: ->
        @rs.kanban.storeStatusViewModes(@scope.projectId, @scope.statusViewModes)

    updateStatusViewMode: (statusId, newViewMode) ->
        @scope.statusViewModes[statusId] = newViewMode
        @.storeStatusViewModes()

    # CSS class for a column's cards, honoring its current view mode.
    getCardClass: (statusId)->
        mode = @scope.statusViewModes[statusId] or defaultViewMode
        return defaultViewModes[mode].cardClass or defaultViewModes[defaultViewMode].cardClass

    # Utils methods

    # Shape the bulk-update payload expected by the API.
    prepareBulkUpdateData: (uses, field="kanban_order") ->
        return _.map(uses, (x) -> {"us_id": x.id, "order": x[field]})

    # Renumber kanban_order by list position; return only the items whose
    # order actually changed (isModified) so we persist the minimum set.
    resortUserStories: (uses) ->
        items = []
        for item, index in uses
            item.kanban_order = index
            if item.isModified()
                items.push(item)

        return items

    # Drag&drop handler: move `us` to `statusId` at `index`, renumber the
    # affected column, and persist the story plus the reordered siblings.
    moveUs: (ctx, us, statusId, index) ->
        if us.status != statusId
            # Remove us from old status column
            r = @scope.usByStatus[us.status].indexOf(us)
            @scope.usByStatus[us.status].splice(r, 1)

            # Add us to new status column.
            @scope.usByStatus[statusId].splice(index, 0, us)
            us.status = statusId
        else
            r = @scope.usByStatus[statusId].indexOf(us)
            @scope.usByStatus[statusId].splice(r, 1)
            @scope.usByStatus[statusId].splice(index, 0, us)

        itemsToSave = @.resortUserStories(@scope.usByStatus[statusId])
        @scope.usByStatus[statusId] = _.sortBy(@scope.usByStatus[statusId], "kanban_order")

        # Persist the userstory
        promise = @repo.save(us)

        # Rehash userstories order field
        # and persist in bulk all changes.
        promise = promise.then =>
            # `us` is saved individually above; don't re-send it in the bulk call.
            itemsToSave = _.reject(itemsToSave, {"id": us.id})
            data = @.prepareBulkUpdateData(itemsToSave)
            return @rs.userstories.bulkUpdateKanbanOrder(us.project, data).then =>
                return itemsToSave

        return promise

module.controller("KanbanController", KanbanController)
#############################################################################
## Kanban Directive
#############################################################################
# Keeps the fixed kanban header horizontally in sync with the scrollable body.
KanbanDirective = ($repo, $rootscope) ->
    link = ($scope, $el, $attrs) ->
        body = $el.find(".kanban-table-body")
        body.on "scroll", (event) ->
            scrolled = angular.element(event.currentTarget)
            header = $el.find(".kanban-table-header .kanban-table-inner")
            # Mirror the body's horizontal scroll offset on the header row.
            header.css("left", -1 * scrolled.scrollLeft())

        # Remove every handler bound on the element when the scope dies.
        $scope.$on "$destroy", -> $el.off()

    return {link: link}

module.directive("tgKanban", ["$tgRepo", "$rootScope", KanbanDirective])
#############################################################################
## Kanban Column Height Fixer Directive
#############################################################################
# Sizes each kanban column to fill the viewport below its offset, so the
# columns (not the page) scroll vertically.
KanbanColumnHeightFixerDirective = ->
    mainPadding = 32 # px
    scrollPadding = 0 # px

    # Set the element's height to the window height minus its top offset
    # and the fixed paddings above.
    renderSize = ($el) ->
        elementOffset = $el.parent().parent().offset().top
        windowHeight = angular.element(window).height()
        columnHeight = windowHeight - elementOffset - mainPadding - scrollPadding
        $el.css("height", "#{columnHeight}px")

    link = ($scope, $el, $attrs) ->
        # Initial sizing is deferred so the surrounding layout has rendered.
        timeout(500, -> renderSize($el))

        $scope.$on "resize", ->
            renderSize($el)

        $scope.$on "$destroy", ->
            $el.off()

    return {link:link}

module.directive("tgKanbanColumnHeightFixer", KanbanColumnHeightFixerDirective)
#############################################################################
## Kanban User Story Directive
#############################################################################
# Renders one userstory card: toggles the "blocked" class with the model and
# opens the edit lightbox from the edit icon.
KanbanUserstoryDirective = ($rootscope) ->
    link = ($scope, $el, $attrs, $model) ->
        $el.disableSelection()

        # Keep the "blocked" CSS class in sync with us.is_blocked.
        $scope.$watch "us", (us) ->
            if us.is_blocked and not $el.hasClass("blocked")
                $el.addClass("blocked")
            else if not us.is_blocked and $el.hasClass("blocked")
                $el.removeClass("blocked")

        $el.find(".icon-edit").on "click", (event) ->
            # "noclick" marks elements disabled during drag; ignore those.
            if $el.find(".icon-edit").hasClass("noclick")
                return
            $scope.$apply ->
                $rootscope.$broadcast("usform:edit", $model.$modelValue)

        $scope.$on "$destroy", ->
            $el.off()

    return {
        templateUrl: "/partials/views/components/kanban-task.html"
        link: link
        require: "ngModel"
    }

module.directive("tgKanbanUserstory", ["$rootScope", KanbanUserstoryDirective])
#############################################################################
## Kanban Squish Column Directive
#############################################################################
# Lets users fold ("squish") kanban columns; fold state is persisted per
# project and the inner table width is recomputed on every change.
KanbanSquishColumnDirective = (rs) ->
    link = ($scope, $el, $attrs) ->
        # Restore the persisted per-status fold state once the project loads.
        $scope.$on "project:loaded", (event, project) ->
            $scope.folds = rs.kanban.getStatusColumnModes(project.id)
            updateTableWidth()

        # Toggle one column's folded state and persist the new map.
        $scope.foldStatus = (status) ->
            $scope.folds[status.id] = !!!$scope.folds[status.id]
            rs.kanban.storeStatusColumnModes($scope.projectId, $scope.folds)
            updateTableWidth()
            return

        # Recompute the inner table width: folded columns take 40px,
        # expanded ones 310px.
        updateTableWidth = ->
            columnWidths = _.map $scope.usStatusList, (status) ->
                if $scope.folds[status.id]
                    return 40
                else
                    return 310

            # Pass an explicit initial value: underscore's reduce without a
            # memo returns undefined for an empty list, which previously set
            # css("width", undefined) on projects with no statuses.
            totalWidth = _.reduce columnWidths, ((total, width) ->
                return total + width
            ), 0

            $el.find('.kanban-table-inner').css("width", totalWidth)

    return {link: link}

module.directive("tgKanbanSquishColumn", ["$tgResources", KanbanSquishColumnDirective])
#############################################################################
## Kanban WIP Limit Directive
#############################################################################
# Draws a WIP-limit separator inside a column after the Nth card, where N is
# the value of the tg-kanban-wip-limit attribute expression.
KanbanWipLimitDirective = ->
    link = ($scope, $el, $attrs) ->
        $el.disableSelection()

        redrawWipLimit = ->
            # Remove the old marker, then re-insert it after the DOM has been
            # re-rendered (hence the 200ms defer).
            $el.find(".kanban-wip-limit").remove()
            timeout 200, ->
                element = $el.find(".kanban-task")[$scope.$eval($attrs.tgKanbanWipLimit)]
                if element
                    angular.element(element).before("<div class='kanban-wip-limit'></div>")

        # Any event that changes column contents must redraw the marker.
        $scope.$on "redraw:wip", redrawWipLimit
        $scope.$on "kanban:us:move", redrawWipLimit
        $scope.$on "usform:new:success", redrawWipLimit
        $scope.$on "usform:bulk:success", redrawWipLimit

        $scope.$on "$destroy", ->
            $el.off()

    return {link: link}

module.directive("tgKanbanWipLimit", KanbanWipLimitDirective)
#############################################################################
## Kanban User Directive
#############################################################################
# Renders the assignee avatar for a userstory card and, when the user has
# "modify_us" permission, opens the assign-to lightbox on click.
KanbanUserDirective = ($log) ->
    template = _.template("""
    <figure class="avatar">
        <a href="#" title="Assign User Story" <% if (!clickable) {%>class="not-clickable"<% } %>>
            <img src="<%- imgurl %>" alt="<%- name %>" class="avatar">
        </a>
    </figure>
    """) # TODO: i18n

    # NOTE(review): `clickable` lives in the directive-factory closure, so it
    # is shared by every instance of this directive and is only flipped to
    # true once "project" resolves — renders before that show not-clickable.
    clickable = false

    link = ($scope, $el, $attrs, $model) ->
        if not $attrs.tgKanbanUserAvatar
            return $log.error "KanbanUserDirective: no attr is defined"

        # Watch the expression naming the assignee id; unwatch and bail if
        # usersById is missing from scope (directive misuse).
        wtid = $scope.$watch $attrs.tgKanbanUserAvatar, (v) ->
            if not $scope.usersById?
                $log.error "KanbanUserDirective requires userById set in scope."
                wtid()
            else
                user = $scope.usersById[v]
                render(user)

        # Render the avatar (placeholder when unassigned) and mirror the
        # assignee name into the sibling "a.task-assigned" label.
        # NOTE(review): this binds a new click handler on the label on every
        # render without unbinding the previous one — verify for handler
        # accumulation on reassignment.
        render = (user) ->
            if user is undefined
                ctx = {name: "Unassigned", imgurl: "/images/unnamed.png", clickable: clickable}
            else
                ctx = {name: user.full_name_display, imgurl: user.photo, clickable: clickable}

            html = template(ctx)
            $el.html(html)
            username_label = $el.parent().find("a.task-assigned")
            username_label.html(ctx.name)
            username_label.on "click", (event) ->
                if $el.find("a").hasClass("noclick")
                    return
                us = $model.$modelValue
                $ctrl = $el.controller()
                $ctrl.changeUsAssignedTo(us)

        # Enable the click-to-assign behavior only for users who may modify
        # userstories in this project.
        bindOnce $scope, "project", (project) ->
            if project.my_permissions.indexOf("modify_us") > -1
                clickable = true
                $el.on "click", (event) =>
                    if $el.find("a").hasClass("noclick")
                        return
                    us = $model.$modelValue
                    $ctrl = $el.controller()
                    $ctrl.changeUsAssignedTo(us)

        $scope.$on "$destroy", ->
            $el.off()

    return {link: link, require:"ngModel"}

module.directive("tgKanbanUserAvatar", ["$log", KanbanUserDirective])
|
[
{
"context": "ype\" : \"code\",\n \"href\" : \"https://github.com/versionone/V1ClarityPPM\",\n \"title\" : \"Source Code\"\n ",
"end": 1814,
"score": 0.9991382360458374,
"start": 1804,
"tag": "USERNAME",
"value": "versionone"
},
{
"context": "ocumentation\",\n \"hre... | src/server/app/testData.coffee | versionone/VersionOne.AppCatalog.Web | 2 | data =
  # Builds one complete, schema-valid catalog entry; the helpers below derive
  # partial/empty variants from it for validation tests.
  fullyValidEntry: ->
    entry = {
      id : 'someIdHere',
      titleSection: @titleSectionComplete(),
      descriptionSection: @descriptionSectionComplete(),
      linksSection: @linksSectionComplete(),
      updatesSection: @updatesSectionComplete(),
      mediaSection: @mediaSectionComplete()
    }
    return entry

  # Valid entry with the title section removed entirely.
  titleSectionMissing: ->
    entry = @fullyValidEntry()
    delete entry.titleSection
    return entry

  # Valid entry whose title section is present but empty.
  titleSectionEmpty: ->
    entry = @fullyValidEntry()
    entry.titleSection = {}
    return entry

  titleSectionComplete: ->
    return {
      name: 'String Name',
      shortDescription: 'Free short description',
      pricing: 'Free',
      support: {
        text: 'Text label'
        href: 'http://somesite.com'
      }
    }

  descriptionSectionMissing: ->
    entry = @fullyValidEntry()
    delete entry.descriptionSection
    return entry

  descriptionSectionEmpty: ->
    entry = @fullyValidEntry()
    entry.descriptionSection = {}
    return entry

  descriptionSectionComplete: ->
    return {
      description: 'This is a valid description for an entry'
    }

  linksSectionMissing: ->
    entry = @fullyValidEntry()
    delete entry.linksSection
    return entry

  linksSectionEmpty: ->
    entry = @fullyValidEntry()
    entry.linksSection = {}
    return entry

  # Full links list; includes one unknown type ("foo") on purpose,
  # presumably to exercise unknown-link-type handling — confirm in tests.
  linksSectionComplete: ->
    return [{
      "type" : "download",
      "href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
      "title" : "Download Latest Stable Release"
    }, {
      "type" : "download",
      "href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
      "title" : "Download Latest Nightly Build"
    }, {
      "type" : "code",
      "href" : "https://github.com/versionone/V1ClarityPPM",
      "title" : "Source Code"
    }, {
      "type" : "documentation",
      "href" : "https://github.com/versionone/V1ClarityPPM/blob/master/README.md",
      "title" : "Documentation"
    }, {
      "type" : "license",
      "href" : "https://github.com/versionone/V1ClarityPPM/blob/master/LICENSE.md",
      "title" : "Modified BSD (3-clause) License"
    }, {
      "type" : "foo",
      "href" : "https://github.com/versionone/V1ClarityPPM/blob/master",
      "title" : "Sample configuration"
    }]

  updatesSectionMissing: ->
    entry = @fullyValidEntry()
    delete entry.updatesSection
    return entry

  updatesSectionEmpty: ->
    entry = @fullyValidEntry()
    entry.updatesSection = {}
    return entry

  # Updates section present but with a single empty update object.
  updatesSectionWithEmptyUpdates: ->
    entry = @fullyValidEntry()
    entry.updatesSection.updates = [{}]
    entry

  updatesSectionComplete: ->
    obj =
      updates: [
        date: "2013-02-14T17:45:00Z"
        description: "stabilizing timesheet workflow"
        version: "0.3.2.13"
        releaseNotes: "Florum gypsum dimsum"
        moreInfoUrl: "http://example.com"
        qualityBand: "mature"
        downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip"
      ,
        date: "2013-02-14T17:45:00Z"
        description: "better timesheet support"
        version: "0.3.3.5"
        releaseNotes: "Florum gypsum dimsum"
        moreInfoUrl: "http://more.exampleForYou.com"
        qualityBand: "sapling"
        downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.2.1.10.zip"
      ]
      # Keys here match the qualityBand values used in `updates` above.
      qualityBands: {
        sapling: {
          shortDescription: "The product is undergoing rapid growth. The code works but expect major changes.",
          href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#sapling"
        },
        mature: {
          shortDescription: "The product is stable. The code will continue to evolve with minimum breaking changes.",
          href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#mature"
        }
      }
    return obj

  mediaSectionComplete: ->
    return [{
      "title": "Home",
      "caption": "The home image",
      "mimetype": "image/png",
      "href": "http://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg",
      "thumbhref": "https://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg"
    }]
module.exports = data | 80723 | data =
fullyValidEntry: ->
entry = {
id : 'someIdHere',
titleSection: @titleSectionComplete(),
descriptionSection: @descriptionSectionComplete(),
linksSection: @linksSectionComplete(),
updatesSection: @updatesSectionComplete(),
mediaSection: @mediaSectionComplete()
}
return entry
titleSectionMissing: ->
entry = @fullyValidEntry()
delete entry.titleSection
return entry
titleSectionEmpty: ->
entry = @fullyValidEntry()
entry.titleSection = {}
return entry
titleSectionComplete: ->
return {
name: 'String Name',
shortDescription: 'Free short description',
pricing: 'Free',
support: {
text: 'Text label'
href: 'http://somesite.com'
}
}
descriptionSectionMissing: ->
entry = @fullyValidEntry()
delete entry.descriptionSection
return entry
descriptionSectionEmpty: ->
entry = @fullyValidEntry()
entry.descriptionSection = {}
return entry
descriptionSectionComplete: ->
return {
description: 'This is a valid description for an entry'
}
linksSectionMissing: ->
entry = @fullyValidEntry()
delete entry.linksSection
return entry
linksSectionEmpty: ->
entry = @fullyValidEntry()
entry.linksSection = {}
return entry
linksSectionComplete: ->
return [{
"type" : "download",
"href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
"title" : "Download Latest Stable Release"
}, {
"type" : "download",
"href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
"title" : "Download Latest Nightly Build"
}, {
"type" : "code",
"href" : "https://github.com/versionone/V1ClarityPPM",
"title" : "Source Code"
}, {
"type" : "documentation",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master/README.md",
"title" : "Documentation"
}, {
"type" : "license",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master/LICENSE.md",
"title" : "Modified BSD (3-clause) License"
}, {
"type" : "foo",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master",
"title" : "Sample configuration"
}]
updatesSectionMissing: ->
entry = @fullyValidEntry()
delete entry.updatesSection
return entry
updatesSectionEmpty: ->
entry = @fullyValidEntry()
entry.updatesSection = {}
return entry
updatesSectionWithEmptyUpdates: ->
entry = @fullyValidEntry()
entry.updatesSection.updates = [{}]
entry
updatesSectionComplete: ->
obj =
updates: [
date: "2013-02-14T17:45:00Z"
description: "stabilizing timesheet workflow"
version: "0.3.2.13"
releaseNotes: "Florum gypsum dimsum"
moreInfoUrl: "http://example.com"
qualityBand: "mature"
downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip"
,
date: "2013-02-14T17:45:00Z"
description: "better timesheet support"
version: "0.3.3.5"
releaseNotes: "Florum gypsum dimsum"
moreInfoUrl: "http://more.exampleForYou.com"
qualityBand: "sapling"
downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.2.1.10.zip"
]
qualityBands: {
sapling: {
shortDescription: "The product is undergoing rapid growth. The code works but expect major changes.",
href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#sapling"
},
mature: {
shortDescription: "The product is stable. The code will continue to evolve with minimum breaking changes.",
href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#mature"
}
}
return obj
mediaSectionComplete: ->
return [{
"title": "Home",
"caption": "The home image",
"mimetype": "image/png",
"href": "http://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg",
"thumbhref": "https://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg"
}]
module.exports = data | true | data =
fullyValidEntry: ->
entry = {
id : 'someIdHere',
titleSection: @titleSectionComplete(),
descriptionSection: @descriptionSectionComplete(),
linksSection: @linksSectionComplete(),
updatesSection: @updatesSectionComplete(),
mediaSection: @mediaSectionComplete()
}
return entry
titleSectionMissing: ->
entry = @fullyValidEntry()
delete entry.titleSection
return entry
titleSectionEmpty: ->
entry = @fullyValidEntry()
entry.titleSection = {}
return entry
titleSectionComplete: ->
return {
name: 'String Name',
shortDescription: 'Free short description',
pricing: 'Free',
support: {
text: 'Text label'
href: 'http://somesite.com'
}
}
descriptionSectionMissing: ->
entry = @fullyValidEntry()
delete entry.descriptionSection
return entry
descriptionSectionEmpty: ->
entry = @fullyValidEntry()
entry.descriptionSection = {}
return entry
descriptionSectionComplete: ->
return {
description: 'This is a valid description for an entry'
}
linksSectionMissing: ->
entry = @fullyValidEntry()
delete entry.linksSection
return entry
linksSectionEmpty: ->
entry = @fullyValidEntry()
entry.linksSection = {}
return entry
linksSectionComplete: ->
return [{
"type" : "download",
"href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
"title" : "Download Latest Stable Release"
}, {
"type" : "download",
"href" : "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip",
"title" : "Download Latest Nightly Build"
}, {
"type" : "code",
"href" : "https://github.com/versionone/V1ClarityPPM",
"title" : "Source Code"
}, {
"type" : "documentation",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master/README.md",
"title" : "Documentation"
}, {
"type" : "license",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master/LICENSE.md",
"title" : "Modified BSD (3-clause) License"
}, {
"type" : "foo",
"href" : "https://github.com/versionone/V1ClarityPPM/blob/master",
"title" : "Sample configuration"
}]
updatesSectionMissing: ->
entry = @fullyValidEntry()
delete entry.updatesSection
return entry
updatesSectionEmpty: ->
entry = @fullyValidEntry()
entry.updatesSection = {}
return entry
updatesSectionWithEmptyUpdates: ->
entry = @fullyValidEntry()
entry.updatesSection.updates = [{}]
entry
updatesSectionComplete: ->
obj =
updates: [
date: "2013-02-14T17:45:00Z"
description: "stabilizing timesheet workflow"
version: "0.3.2.13"
releaseNotes: "Florum gypsum dimsum"
moreInfoUrl: "http://example.com"
qualityBand: "mature"
downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.3.2.13.zip"
,
date: "2013-02-14T17:45:00Z"
description: "better timesheet support"
version: "0.3.3.5"
releaseNotes: "Florum gypsum dimsum"
moreInfoUrl: "http://more.exampleForYou.com"
qualityBand: "sapling"
downloadUrl: "http://platform.versionone.com.s3.amazonaws.com/downloads/v1clarityppm_0.2.1.10.zip"
]
qualityBands: {
sapling: {
shortDescription: "The product is undergoing rapid growth. The code works but expect major changes.",
href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#sapling"
},
mature: {
shortDescription: "The product is stable. The code will continue to evolve with minimum breaking changes.",
href: "https://github.com/versionone/V1ClarityPPM/blob/master/CONTRIBUTING.md#mature"
}
}
return obj
mediaSectionComplete: ->
return [{
"title": "Home",
"caption": "The home image",
"mimetype": "image/png",
"href": "http://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg",
"thumbhref": "https://absolute.content.com/gallery/Projekt_es_Projekt_portfolio_menedzsment_ca_clarity_ppm_masolata.jpg"
}]
module.exports = data |
[
{
"context": "on' it must contain:\n#\n# {\n# \"user\": \"USERNAME\",\n# \"password\":\"PASSWORD\",\n# \"hos",
"end": 182,
"score": 0.9995090961456299,
"start": 174,
"tag": "USERNAME",
"value": "USERNAME"
},
{
"context": " \"user\": \"USERNAME\",\n#... | src/jira.coffee | tebriel/jira-cli | 48 | # #Jira Command Line Client#
#
# This client depends on you having a json file in your home directory
# named '.jiraclirc.json' it must contain:
#
# {
# "user": "USERNAME",
# "password":"PASSWORD",
# "host":"www.jira.com",
# "port":80,
# "project": 10100
# }
#
# If not present, it will enter an interactive mode to create it with you
#
# JiraCli is on [github](https://github.com/tebriel/jira-cli)
fs = require 'fs'
path = require 'path'
# ## [JiraHelper docs/source](jira-cli.html)
JiraHelper = require('./jira-cli').JiraHelper
# ## [dutils docs/source](data-utils.html)
dutils = require('./data-utils')
# ## Create Config File ##
#
# Creates a config file when one doesn't exist
# Interactively collect credentials and defaults, write them as JSON to
# `aConfigFile`, then exit the process (the caller re-runs with the config).
createConfigFile = (aConfigFile) ->
    console.log "No config file found, answer these questions to create one!"
    dutils.ask "Username", /.+/, (username) ->
        dutils.ask "Password", /.+/, (password) ->
            dutils.ask "Jira Host", /.+/, (host) ->
                dutils.ask "Jira Port", /.+/, (port) ->
                    # Project may be blank (/.*/): it is optional.
                    dutils.ask "Default Project", /.*/, (project) ->
                        config =
                            user:username
                            password:password
                            host:host
                            port:port
                            project:project
                        # NOTE: the password is stored in plain text.
                        fs.writeFileSync aConfigFile,
                            JSON.stringify(config), 'utf8'
                        console.log "File created and saved as #{aConfigFile}"
                        process.exit()
# ## Check for Text Parameter ##
#
# Optimist returns a `bool` if the param is given but with nothing following it
# Optimist yields a boolean when a flag is given with no following value;
# treat that as "no text supplied", print usage, and report failure.
paramIsText = (param) ->
    return true unless typeof(param) is "boolean"
    argv.showHelp()
    false
# ## Load the Config File ##
#
# Read and parse the JSON config file; throws if the file is missing or
# malformed (callers check existence first).
loadConfigFile = (configFilePath) ->
    configFile = fs.readFileSync configFilePath
    JSON.parse configFile
# ## Transition Item ##
#
# This takes the issueId, lists the transitions available for the item and then
# lets the user apply that transition to the item. Optionally the user can
# specify a comment which will then prompt for time spent. This adds a work log
# item to the item before the transition.
# List the transitions available for `issueId`, let the user pick one by
# number, and apply it. A non-blank comment first records a worklog
# (comment + time spent) against the issue before transitioning.
transitionItem = (issueId) ->
    jiraCli.listTransitions issueId, (transitions) ->
        transitions.sort dutils.itemSorter
        for transition, index in transitions
            jiraCli.pp.prettyPrintTransition transition, index + 1
        allowedTypes = [1..transitions.length]
        #allowedTypes = new RegExp "[#{allowedTypes.join '|'}]"
        # Fixed user-facing typo in the prompt: "Transtion" -> "Transition".
        dutils.ask "Transition Type ", allowedTypes, (type)->
            dutils.ask "Comment for worklog (blank to skip)", /.*/, (comment)->
                if comment.length is 0
                    jiraCli.transitionIssue issueId, transitions[type - 1].id
                    return
                dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
                    jiraCli.addWorklog issueId, comment, timeSpent, false
                    jiraCli.transitionIssue issueId, transitions[type - 1].id
        , allowedTypes
# ## Add Work Log ##
#
# This will add a comment and time spent as a worklog item attached to the
# issue
# Prompt for a comment and time spent, then attach them as a worklog item
# on the given issue (final `true` flag is passed through to jiraCli).
addWorklog = (issueId) ->
    dutils.ask "Comment for worklog", /.+/, (comment)->
        dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
            jiraCli.addWorklog issueId, comment, timeSpent, true
# ## List Projects ##
#
# This will list all the projects available to you
# Print every project visible to the configured user.
# NOTE(review): the `projects =` assignment captures the async call's return
# value, not the project list; it is unused.
listProjects = ->
    projects = jiraCli.getMyProjects (projects)=>
        for project in projects
            jiraCli.pp.prettyPrintProject project
# ## Get Project ##
#
# Here we ask the user for their project, giving them an option for the
# default, ? for a list, or they can type in a number directly
#
# It calls itself if we list the projects, so that it can still be used to for
# what it was called to do
# Ask the user which project to use: Enter accepts `defaultProj`, "?" lists
# all visible projects and re-asks, anything else is used directly.
getProject = (callback, defaultProj)->
    dutils.ask "Project (Enter for Default/? for list) [#{defaultProj}] ",/.*/,
        (project) ->
            unless project is '?'
                # Honor an explicitly typed project id; previously the typed
                # value was discarded and configFile.project was always used,
                # contradicting the prompt. Blank input keeps the default.
                if project.length > 0
                    callback project
                else
                    callback defaultProj
                return
            projects = jiraCli.getMyProjects (projects)=>
                for project in projects
                    jiraCli.pp.prettyPrintProject project
                getProject callback, defaultProj
# ## Add Item ##
#
# Adds an item to Jira. The project passed in comes from getProject currently.
# Takes a summary and a description then lists the issue types for the user to
# choose from
# Create a new issue in `project`: prompt for summary and description, list
# the available issue types, and submit the chosen one.
addItem = (project)->
    # Gather the summary, description, an type
    dutils.ask "Summary", /.+/, (summary)->
        dutils.ask "Description", /.+/, (description)->
            jiraCli.getIssueTypes (issueTypes)->
                issueTypes.sort dutils.itemSorter
                for type, index in issueTypes
                    jiraCli.pp.prettyPrintIssueTypes type, index + 1
                # Valid answers are the 1-based indexes printed above.
                allowedTypes = [1..issueTypes.length]
                addIssueCallback = (type)->
                    jiraCli.addIssue summary, description,
                        issueTypes[type - 1].id, project
                dutils.ask "Type ", allowedTypes, addIssueCallback, allowedTypes
# ## Main entry point ##
#
# Parses the arguments and then calls a function above
# CLI entry point: parse flags with optimist, ensure a config file exists
# (creating it interactively if not), then dispatch to exactly one action.
if require.main is module
    argv = (require 'optimist')
        .options('f',
            alias:'find'
            describe:'Finds the specified Jira ID'
        ).options('a',
            alias:'add'
            describe:'Allows you to add a new Jira Task'
        ).options('t',
            alias:'transition'
            describe:'Allows you to resolve a specific Jira ID'
        ).options('l',
            alias:'list'
            describe:'Lists all your open issues'
        ).options('c',
            alias:'list-all'
            describe:'Lists all your issues'
        ).options('d',
            alias:'details'
            describe:'Shows extra details (currently only for list)'
        ).options('p',
            alias:'projects'
            describe:'Lists all your viewable projects'
        ).options('o',
            describe:'Limits list to only this project'
        ).options('w',
            alias:'worklog'
            describe:'Adds work to your task'
        ).options('s',
            alias:'search'
            describe:'Pass a jql string to jira'
        ).options('h',
            alias:'help'
            describe:'Shows this help message'
        # NOTE(review): usage mentions "-r" but no -r option is defined
        # (transition is -t) — verify intended flag.
        ).usage('Usage:\n\tjira -f EG-143\n\tjira -r EG-143')
        .boolean('d')
        .string('s')
        .string('f')
        .string('t')
        .string('w')

    if argv.argv.help
        argv.showHelp()
        return

    args = argv.argv
    configFilePath = path.join process.env.HOME, '.jiraclirc.json'
    # First run: build the config interactively, then stop (createConfigFile
    # calls process.exit when done).
    unless fs.existsSync configFilePath
        createConfigFile configFilePath
        return

    configFile = loadConfigFile(configFilePath)
    jiraCli = new JiraHelper configFile

    # -o narrows list/search output to specific project(s); multiple -o
    # flags arrive as an array and are folded into one JQL clause.
    if args.o?
        if args.o instanceof Array
            args.o = args.o.join ','
        args.o = " AND project in (#{args.o})"

    # Dispatch: the first matching flag wins; anything else prints usage.
    if args.l
        jiraCli.getMyIssues true, args.d, args.o
    else if args.c
        jiraCli.getMyIssues false, args.d, args.o
    else if args.s
        return unless paramIsText args.s
        if args.o?
            args.s += args.o
        jiraCli.searchJira args.s, args.d
    else if args.p
        listProjects()
    else if args.a
        getProject addItem, configFile.project
    else if args.f?
        return unless paramIsText args.f
        jiraCli.getIssue args.f, args.d
    else if args.w?
        return unless paramIsText args.w
        addWorklog args.w
    else if args.t?
        return unless paramIsText args.t
        transitionItem args.t
    else
        argv.showHelp()
| 37605 | # #Jira Command Line Client#
#
# This client depends on you having a json file in your home directory
# named '.jiraclirc.json' it must contain:
#
# {
# "user": "USERNAME",
# "password":"PASSWORD",
# "host":"www.jira.com",
# "port":80,
# "project": 10100
# }
#
# If not present, it will enter an interactive mode to create it with you
#
# JiraCli is on [github](https://github.com/tebriel/jira-cli)
fs = require 'fs'
path = require 'path'
# ## [JiraHelper docs/source](jira-cli.html)
JiraHelper = require('./jira-cli').JiraHelper
# ## [dutils docs/source](data-utils.html)
dutils = require('./data-utils')
# ## Create Config File ##
#
# Creates a config file when one doesn't exist
createConfigFile = (aConfigFile) ->
console.log "No config file found, answer these questions to create one!"
dutils.ask "Username", /.+/, (username) ->
dutils.ask "Password", /.+/, (password) ->
dutils.ask "Jira Host", /.+/, (host) ->
dutils.ask "Jira Port", /.+/, (port) ->
dutils.ask "Default Project", /.*/, (project) ->
config =
user:username
password:password
host:host
port:port
project:project
fs.writeFileSync aConfigFile,
JSON.stringify(config), 'utf8'
console.log "File created and saved as #{aConfigFile}"
process.exit()
# ## Check for Text Parameter ##
#
# Optimist returns a `bool` if the param is given but with nothing following it
paramIsText = (param)->
if typeof(param) is "boolean"
argv.showHelp()
return false
true
# ## Load the Config File ##
#
loadConfigFile = (configFilePath) ->
configFile = fs.readFileSync configFilePath
JSON.parse configFile
# ## Transition Item ##
#
# This takes the issueId, lists the transitions available for the item and then
# lets the user apply that transition to the item. Optionally the user can
# specify a comment which will then prompt for time spent. This adds a work log
# item to the item before the transition.
transitionItem = (issueId) ->
jiraCli.listTransitions issueId, (transitions) ->
transitions.sort dutils.itemSorter
for transition, index in transitions
jiraCli.pp.prettyPrintTransition transition, index + 1
allowedTypes = [1..transitions.length]
#allowedTypes = new RegExp "[#{allowedTypes.join '|'}]"
dutils.ask "Transtion Type ", allowedTypes, (type)->
dutils.ask "Comment for worklog (blank to skip)", /.*/, (comment)->
if comment.length is 0
jiraCli.transitionIssue issueId, transitions[type - 1].id
return
dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
jiraCli.addWorklog issueId, comment, timeSpent, false
jiraCli.transitionIssue issueId, transitions[type - 1].id
, allowedTypes
# ## Add Work Log ##
#
# This will add a comment and time spent as a worklog item attached to the
# issue
addWorklog = (issueId) ->
dutils.ask "Comment for worklog", /.+/, (comment)->
dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
jiraCli.addWorklog issueId, comment, timeSpent, true
# ## List Projects ##
#
# This will list all the projects available to you
listProjects = ->
projects = jiraCli.getMyProjects (projects)=>
for project in projects
jiraCli.pp.prettyPrintProject project
# ## Get Project ##
#
# Here we ask the user for their project, giving them an option for the
# default, ? for a list, or they can type in a number directly
#
# It calls itself if we list the projects, so that it can still be used to for
# what it was called to do
getProject = (callback, defaultProj)->
dutils.ask "Project (Enter for Default/? for list) [#{defaultProj}] ",/.*/,
(project) ->
unless project is '?'
callback configFile.project
return
projects = jiraCli.getMyProjects (projects)=>
for project in projects
jiraCli.pp.prettyPrintProject project
getProject callback, defaultProj
# ## Add Item ##
#
# Adds an item to Jira. The project passed in comes from getProject currently.
# Takes a summary and a description then lists the issue types for the user to
# choose from
addItem = (project)->
# Gather the summary, description, an type
dutils.ask "Summary", /.+/, (summary)->
dutils.ask "Description", /.+/, (description)->
jiraCli.getIssueTypes (issueTypes)->
issueTypes.sort dutils.itemSorter
for type, index in issueTypes
jiraCli.pp.prettyPrintIssueTypes type, index + 1
allowedTypes = [1..issueTypes.length]
addIssueCallback = (type)->
jiraCli.addIssue summary, description,
issueTypes[type - 1].id, project
dutils.ask "Type ", allowedTypes, addIssueCallback, allowedTypes
# ## Main entry point ##
#
# Parses the arguments and then calls a function above
if require.main is module
argv = (require 'optimist')
.options('f',
alias:'find'
describe:'Finds the specified Jira ID'
).options('a',
alias:'add'
describe:'Allows you to add a new Jira Task'
).options('t',
alias:'transition'
describe:'Allows you to resolve a specific Jira ID'
).options('l',
alias:'list'
describe:'Lists all your open issues'
).options('c',
alias:'list-all'
describe:'Lists all your issues'
).options('d',
alias:'details'
describe:'Shows extra details (currently only for list)'
).options('p',
alias:'projects'
describe:'Lists all your viewable projects'
).options('o',
describe:'Limits list to only this project'
).options('w',
alias:'worklog'
describe:'Adds work to your task'
).options('s',
alias:'search'
describe:'Pass a jql string to jira'
).options('h',
alias:'help'
describe:'Shows this help message'
).usage('Usage:\n\tjira -f EG-143\n\tjira -r EG-143')
.boolean('d')
.string('s')
.string('f')
.string('t')
.string('w')
if argv.argv.help
argv.showHelp()
return
args = argv.argv
configFilePath = path.join process.env.HOME, '.jiraclirc.json'
unless fs.existsSync configFilePath
createConfigFile configFilePath
return
configFile = loadConfigFile(configFilePath)
jiraCli = new JiraHelper configFile
if args.o?
if args.o instanceof Array
args.o = args.o.join ','
args.o = " AND project in (#{args.o})"
if args.l
jiraCli.getMyIssues true, args.d, args.o
else if args.c
jiraCli.getMyIssues false, args.d, args.o
else if args.s
return unless paramIsText args.s
if args.o?
args.s += args.o
jiraCli.searchJira args.s, args.d
else if args.p
listProjects()
else if args.a
getProject addItem, configFile.project
else if args.f?
return unless paramIsText args.f
jiraCli.getIssue args.f, args.d
else if args.w?
return unless paramIsText args.w
addWorklog args.w
else if args.t?
return unless paramIsText args.t
transitionItem args.t
else
argv.showHelp()
| true | # #Jira Command Line Client#
#
# This client depends on you having a json file in your home directory
# named '.jiraclirc.json' it must contain:
#
# {
# "user": "USERNAME",
# "password":"PI:PASSWORD:<PASSWORD>END_PI",
# "host":"www.jira.com",
# "port":80,
# "project": 10100
# }
#
# If not present, it will enter an interactive mode to create it with you
#
# JiraCli is on [github](https://github.com/tebriel/jira-cli)
fs = require 'fs'
path = require 'path'
# ## [JiraHelper docs/source](jira-cli.html)
JiraHelper = require('./jira-cli').JiraHelper
# ## [dutils docs/source](data-utils.html)
dutils = require('./data-utils')
# ## Create Config File ##
#
# Creates a config file when one doesn't exist
createConfigFile = (aConfigFile) ->
console.log "No config file found, answer these questions to create one!"
dutils.ask "Username", /.+/, (username) ->
dutils.ask "Password", /.+/, (password) ->
dutils.ask "Jira Host", /.+/, (host) ->
dutils.ask "Jira Port", /.+/, (port) ->
dutils.ask "Default Project", /.*/, (project) ->
config =
user:username
password:PI:PASSWORD:<PASSWORD>END_PI
host:host
port:port
project:project
fs.writeFileSync aConfigFile,
JSON.stringify(config), 'utf8'
console.log "File created and saved as #{aConfigFile}"
process.exit()
# ## Check for Text Parameter ##
#
# Optimist returns a `bool` if the param is given but with nothing following it
paramIsText = (param)->
if typeof(param) is "boolean"
argv.showHelp()
return false
true
# ## Load the Config File ##
#
loadConfigFile = (configFilePath) ->
configFile = fs.readFileSync configFilePath
JSON.parse configFile
# ## Transition Item ##
#
# This takes the issueId, lists the transitions available for the item and then
# lets the user apply that transition to the item. Optionally the user can
# specify a comment which will then prompt for time spent. This adds a work log
# item to the item before the transition.
transitionItem = (issueId) ->
jiraCli.listTransitions issueId, (transitions) ->
transitions.sort dutils.itemSorter
for transition, index in transitions
jiraCli.pp.prettyPrintTransition transition, index + 1
allowedTypes = [1..transitions.length]
#allowedTypes = new RegExp "[#{allowedTypes.join '|'}]"
dutils.ask "Transtion Type ", allowedTypes, (type)->
dutils.ask "Comment for worklog (blank to skip)", /.*/, (comment)->
if comment.length is 0
jiraCli.transitionIssue issueId, transitions[type - 1].id
return
dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
jiraCli.addWorklog issueId, comment, timeSpent, false
jiraCli.transitionIssue issueId, transitions[type - 1].id
, allowedTypes
# ## Add Work Log ##
#
# This will add a comment and time spent as a worklog item attached to the
# issue
addWorklog = (issueId) ->
dutils.ask "Comment for worklog", /.+/, (comment)->
dutils.ask "Time Spent (for worklog)", /.+/, (timeSpent)->
jiraCli.addWorklog issueId, comment, timeSpent, true
# ## List Projects ##
#
# This will list all the projects available to you
listProjects = ->
projects = jiraCli.getMyProjects (projects)=>
for project in projects
jiraCli.pp.prettyPrintProject project
# ## Get Project ##
#
# Here we ask the user for their project, giving them an option for the
# default, ? for a list, or they can type in a number directly
#
# It calls itself if we list the projects, so that it can still be used to for
# what it was called to do
getProject = (callback, defaultProj)->
dutils.ask "Project (Enter for Default/? for list) [#{defaultProj}] ",/.*/,
(project) ->
unless project is '?'
callback configFile.project
return
projects = jiraCli.getMyProjects (projects)=>
for project in projects
jiraCli.pp.prettyPrintProject project
getProject callback, defaultProj
# ## Add Item ##
#
# Adds an item to Jira. The project passed in comes from getProject currently.
# Takes a summary and a description then lists the issue types for the user to
# choose from
addItem = (project)->
# Gather the summary, description, an type
dutils.ask "Summary", /.+/, (summary)->
dutils.ask "Description", /.+/, (description)->
jiraCli.getIssueTypes (issueTypes)->
issueTypes.sort dutils.itemSorter
for type, index in issueTypes
jiraCli.pp.prettyPrintIssueTypes type, index + 1
allowedTypes = [1..issueTypes.length]
addIssueCallback = (type)->
jiraCli.addIssue summary, description,
issueTypes[type - 1].id, project
dutils.ask "Type ", allowedTypes, addIssueCallback, allowedTypes
# ## Main entry point ##
#
# Parses the arguments and then calls a function above
if require.main is module
argv = (require 'optimist')
.options('f',
alias:'find'
describe:'Finds the specified Jira ID'
).options('a',
alias:'add'
describe:'Allows you to add a new Jira Task'
).options('t',
alias:'transition'
describe:'Allows you to resolve a specific Jira ID'
).options('l',
alias:'list'
describe:'Lists all your open issues'
).options('c',
alias:'list-all'
describe:'Lists all your issues'
).options('d',
alias:'details'
describe:'Shows extra details (currently only for list)'
).options('p',
alias:'projects'
describe:'Lists all your viewable projects'
).options('o',
describe:'Limits list to only this project'
).options('w',
alias:'worklog'
describe:'Adds work to your task'
).options('s',
alias:'search'
describe:'Pass a jql string to jira'
).options('h',
alias:'help'
describe:'Shows this help message'
).usage('Usage:\n\tjira -f EG-143\n\tjira -r EG-143')
.boolean('d')
.string('s')
.string('f')
.string('t')
.string('w')
if argv.argv.help
argv.showHelp()
return
args = argv.argv
configFilePath = path.join process.env.HOME, '.jiraclirc.json'
unless fs.existsSync configFilePath
createConfigFile configFilePath
return
configFile = loadConfigFile(configFilePath)
jiraCli = new JiraHelper configFile
if args.o?
if args.o instanceof Array
args.o = args.o.join ','
args.o = " AND project in (#{args.o})"
if args.l
jiraCli.getMyIssues true, args.d, args.o
else if args.c
jiraCli.getMyIssues false, args.d, args.o
else if args.s
return unless paramIsText args.s
if args.o?
args.s += args.o
jiraCli.searchJira args.s, args.d
else if args.p
listProjects()
else if args.a
getProject addItem, configFile.project
else if args.f?
return unless paramIsText args.f
jiraCli.getIssue args.f, args.d
else if args.w?
return unless paramIsText args.w
addWorklog args.w
else if args.t?
return unless paramIsText args.t
transitionItem args.t
else
argv.showHelp()
|
[
{
"context": "els.User extends Brainstem.Model\n brainstemKey: \"users\"\n paramRoot: 'user'\n urlRoot: \"/api/users\"\n\ncla",
"end": 68,
"score": 0.7059851884841919,
"start": 63,
"tag": "KEY",
"value": "users"
}
] | spec/helpers/models/user.coffee | CodingZeal/brainstem-js | 0 | class App.Models.User extends Brainstem.Model
brainstemKey: "users"
paramRoot: 'user'
urlRoot: "/api/users"
class App.Collections.Users extends Brainstem.Collection
model: App.Models.User
url: "/api/users"
| 74982 | class App.Models.User extends Brainstem.Model
brainstemKey: "<KEY>"
paramRoot: 'user'
urlRoot: "/api/users"
class App.Collections.Users extends Brainstem.Collection
model: App.Models.User
url: "/api/users"
| true | class App.Models.User extends Brainstem.Model
brainstemKey: "PI:KEY:<KEY>END_PI"
paramRoot: 'user'
urlRoot: "/api/users"
class App.Collections.Users extends Brainstem.Collection
model: App.Models.User
url: "/api/users"
|
[
{
"context": "# jQuery message box plugin\n# @author Travis Haynes <travis.j.haynes@gmail.com>\n# @homepage https://g",
"end": 51,
"score": 0.9998888969421387,
"start": 38,
"tag": "NAME",
"value": "Travis Haynes"
},
{
"context": "Query message box plugin\n# @author Travis Haynes <tra... | spec/public/coffeescripts/jquery.msgbox.coffee | travishaynes/jquery-msgbox | 0 | # jQuery message box plugin
# @author Travis Haynes <travis.j.haynes@gmail.com>
# @homepage https://github.com/travishaynes/jquery.msgbox
$.msgbox = (title, contents, options) ->
# if contents and options are left undefined, title becomes the jQuery.dialog
# options
if contents == undefined && options == undefined
return $("#msgbox").dialog(title)
# force options to be an object
options = options || {}
# default options
options.icon = "alert" if options.icon == undefined
# default dialog options
dialog_options =
title : title
autoOpen : options.autoOpen || true
width : options.width || undefined
height : options.height || 200
modal : options.modal || true
resizable : options.resizable || false
buttons : options.buttons || { "OK": () -> $.msgbox "close" }
# if msgbox already exists, use it, and rebuild its html
if $("#msgbox").length == 1
$msgbox = $("#msgbox")
$contents = $msgbox.find("#msgbox-contents")
else
# no msgbox found, create a new one
$msgbox = $("<div/>")
$contents = $("<div/>")
$msgbox.attr "id", "msgbox"
$contents.attr "id", "msgbox-contents"
# update the contents
$msgbox.html $contents.html contents
# create the icon
unless options.icon == null
$icon = $("<span/>")
$icon.css "float", "left"
$icon.css "margin", "0 0.5em 0 0"
$icon.attr "id", "msgbox-icon"
$icon.attr "class", "ui-icon ui-icon-" + options.icon
dialog_options.title = $icon.wrap("<div>").parent().html() + dialog_options.title
## display msgbox
$msgbox.dialog dialog_options
| 203831 | # jQuery message box plugin
# @author <NAME> <<EMAIL>>
# @homepage https://github.com/travishaynes/jquery.msgbox
$.msgbox = (title, contents, options) ->
# if contents and options are left undefined, title becomes the jQuery.dialog
# options
if contents == undefined && options == undefined
return $("#msgbox").dialog(title)
# force options to be an object
options = options || {}
# default options
options.icon = "alert" if options.icon == undefined
# default dialog options
dialog_options =
title : title
autoOpen : options.autoOpen || true
width : options.width || undefined
height : options.height || 200
modal : options.modal || true
resizable : options.resizable || false
buttons : options.buttons || { "OK": () -> $.msgbox "close" }
# if msgbox already exists, use it, and rebuild its html
if $("#msgbox").length == 1
$msgbox = $("#msgbox")
$contents = $msgbox.find("#msgbox-contents")
else
# no msgbox found, create a new one
$msgbox = $("<div/>")
$contents = $("<div/>")
$msgbox.attr "id", "msgbox"
$contents.attr "id", "msgbox-contents"
# update the contents
$msgbox.html $contents.html contents
# create the icon
unless options.icon == null
$icon = $("<span/>")
$icon.css "float", "left"
$icon.css "margin", "0 0.5em 0 0"
$icon.attr "id", "msgbox-icon"
$icon.attr "class", "ui-icon ui-icon-" + options.icon
dialog_options.title = $icon.wrap("<div>").parent().html() + dialog_options.title
## display msgbox
$msgbox.dialog dialog_options
| true | # jQuery message box plugin
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @homepage https://github.com/travishaynes/jquery.msgbox
$.msgbox = (title, contents, options) ->
# if contents and options are left undefined, title becomes the jQuery.dialog
# options
if contents == undefined && options == undefined
return $("#msgbox").dialog(title)
# force options to be an object
options = options || {}
# default options
options.icon = "alert" if options.icon == undefined
# default dialog options
dialog_options =
title : title
autoOpen : options.autoOpen || true
width : options.width || undefined
height : options.height || 200
modal : options.modal || true
resizable : options.resizable || false
buttons : options.buttons || { "OK": () -> $.msgbox "close" }
# if msgbox already exists, use it, and rebuild its html
if $("#msgbox").length == 1
$msgbox = $("#msgbox")
$contents = $msgbox.find("#msgbox-contents")
else
# no msgbox found, create a new one
$msgbox = $("<div/>")
$contents = $("<div/>")
$msgbox.attr "id", "msgbox"
$contents.attr "id", "msgbox-contents"
# update the contents
$msgbox.html $contents.html contents
# create the icon
unless options.icon == null
$icon = $("<span/>")
$icon.css "float", "left"
$icon.css "margin", "0 0.5em 0 0"
$icon.attr "id", "msgbox-icon"
$icon.attr "class", "ui-icon ui-icon-" + options.icon
dialog_options.title = $icon.wrap("<div>").parent().html() + dialog_options.title
## display msgbox
$msgbox.dialog dialog_options
|
[
{
"context": "==========\n\nkm2 = keymanager = null\npassphrase = \"now is the time for all good men\"\nkeyring = null\np3skb = null\n\n#==================",
"end": 276,
"score": 0.9993175268173218,
"start": 244,
"tag": "PASSWORD",
"value": "now is the time for all good men"
},
{
"co... | test/files/4_gen.iced | AngelKey/Angelkey.nodeclient | 151 |
{TmpKeyRing} = require '../../lib/keyring'
{KeyManager} = require '../../lib/keymanager'
path = require 'path'
{config} = require '../lib/config'
#=======================================================
km2 = keymanager = null
passphrase = "now is the time for all good men"
keyring = null
p3skb = null
#=======================================================
exports.init = (T,cb) ->
dir = path.join __dirname, "scratch"
await TmpKeyRing.make defer err, tmp
keyring = tmp
T.no_error err
cb()
#-----------------------
exports.gen = (T,cb) ->
args =
username : "kahn",
config :
master : bits : 1024
subkey : bits : 1024
expire : "10y"
passphrase : passphrase
ring : keyring
await KeyManager.generate args, defer err, tmp
T.no_error err
keymanager = tmp
cb()
#-----------------------
exports.export_to_p3skb = (T,cb) ->
await keymanager.export_to_p3skb {}, defer err, tmp
T.no_error err
p3skb = tmp
cb()
#-----------------------
exports.import_from_p3skb = (T,cb) ->
args =
raw : p3skb
passphrase : passphrase
tsenc : keymanager.get_tsenc()
await KeyManager.import_from_p3skb args, defer err, tmp
T.no_error err
km2 = tmp
cb()
#-----------------------
exports.save = (T,cb) ->
await TmpKeyRing.make defer err, ring
T.no_error err
await km2.save_to_ring { ring }, defer err
T.no_error err
await ring.list_fingerprints defer err, ret
T.no_error err
T.equal ret.length, 1, "Only got one key back"
T.equal ret[0], keymanager.key.fingerprint(), "The same PGP fingerprint"
await ring.nuke defer err
T.no_error err
cb()
#-----------------------
exports.finish = (T,cb) ->
await keyring.nuke defer err
cb()
#-----------------------
| 7324 |
{TmpKeyRing} = require '../../lib/keyring'
{KeyManager} = require '../../lib/keymanager'
path = require 'path'
{config} = require '../lib/config'
#=======================================================
km2 = keymanager = null
passphrase = "<PASSWORD>"
keyring = null
p3skb = null
#=======================================================
exports.init = (T,cb) ->
dir = path.join __dirname, "scratch"
await TmpKeyRing.make defer err, tmp
keyring = tmp
T.no_error err
cb()
#-----------------------
exports.gen = (T,cb) ->
args =
username : "kahn",
config :
master : bits : 1024
subkey : bits : 1024
expire : "10y"
passphrase : <PASSWORD>
ring : keyring
await KeyManager.generate args, defer err, tmp
T.no_error err
keymanager = tmp
cb()
#-----------------------
exports.export_to_p3skb = (T,cb) ->
await keymanager.export_to_p3skb {}, defer err, tmp
T.no_error err
p3skb = tmp
cb()
#-----------------------
exports.import_from_p3skb = (T,cb) ->
args =
raw : p3skb
passphrase : <PASSWORD>
tsenc : keymanager.get_tsenc()
await KeyManager.import_from_p3skb args, defer err, tmp
T.no_error err
km2 = tmp
cb()
#-----------------------
exports.save = (T,cb) ->
await TmpKeyRing.make defer err, ring
T.no_error err
await km2.save_to_ring { ring }, defer err
T.no_error err
await ring.list_fingerprints defer err, ret
T.no_error err
T.equal ret.length, 1, "Only got one key back"
T.equal ret[0], keymanager.key.fingerprint(), "The same PGP fingerprint"
await ring.nuke defer err
T.no_error err
cb()
#-----------------------
exports.finish = (T,cb) ->
await keyring.nuke defer err
cb()
#-----------------------
| true |
{TmpKeyRing} = require '../../lib/keyring'
{KeyManager} = require '../../lib/keymanager'
path = require 'path'
{config} = require '../lib/config'
#=======================================================
km2 = keymanager = null
passphrase = "PI:PASSWORD:<PASSWORD>END_PI"
keyring = null
p3skb = null
#=======================================================
exports.init = (T,cb) ->
dir = path.join __dirname, "scratch"
await TmpKeyRing.make defer err, tmp
keyring = tmp
T.no_error err
cb()
#-----------------------
exports.gen = (T,cb) ->
args =
username : "kahn",
config :
master : bits : 1024
subkey : bits : 1024
expire : "10y"
passphrase : PI:PASSWORD:<PASSWORD>END_PI
ring : keyring
await KeyManager.generate args, defer err, tmp
T.no_error err
keymanager = tmp
cb()
#-----------------------
exports.export_to_p3skb = (T,cb) ->
await keymanager.export_to_p3skb {}, defer err, tmp
T.no_error err
p3skb = tmp
cb()
#-----------------------
exports.import_from_p3skb = (T,cb) ->
args =
raw : p3skb
passphrase : PI:PASSWORD:<PASSWORD>END_PI
tsenc : keymanager.get_tsenc()
await KeyManager.import_from_p3skb args, defer err, tmp
T.no_error err
km2 = tmp
cb()
#-----------------------
exports.save = (T,cb) ->
await TmpKeyRing.make defer err, ring
T.no_error err
await km2.save_to_ring { ring }, defer err
T.no_error err
await ring.list_fingerprints defer err, ret
T.no_error err
T.equal ret.length, 1, "Only got one key back"
T.equal ret[0], keymanager.key.fingerprint(), "The same PGP fingerprint"
await ring.nuke defer err
T.no_error err
cb()
#-----------------------
exports.finish = (T,cb) ->
await keyring.nuke defer err
cb()
#-----------------------
|
[
{
"context": "should extract the name', ->\n expectedValue = 'John Smith'\n sut = new ContactParser\n result = sut.par",
"end": 1239,
"score": 0.999732255935669,
"start": 1229,
"tag": "NAME",
"value": "John Smith"
},
{
"context": " sut = new ContactParser\n result = sut.... | spec/contact-parser-spec.coffee | thrustlabs/contact-parser | 10 | describe 'Contact Parser', ->
ContactParser = require(__dirname + '/../src/contact-parser.js')
it 'should handle a null, undefined or empty input by returning empty strings and a score of 0', ->
sut = new ContactParser
result = sut.parse(null)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse(undefined)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse('')
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
it 'should extract the name', ->
expectedValue = 'John Smith'
sut = new ContactParser
result = sut.parse('John Smith, 121 John St., Toronto, Ontario')
expect(result.name).toEqual(expectedValue)
it 'should extract the email', ->
expectedValue = 'bob@smith.com'
sut = new ContactParser
result = sut.parse('John Smith, 121 John St., Toronto, Ontario, bob@smith.com')
expect(result.email).toEqual(expectedValue)
it 'should reformat North American phone numbers', ->
expectedValue = '(416) 967-1111'
sut = new ContactParser
result = sut.parse('John Smith, 416.967 1111')
expect(result.phone).toEqual(expectedValue)
it 'should prefix URLs with http://', ->
expectedValue = 'http://www.thrustlabs.com'
sut = new ContactParser
result = sut.parse('John Smith, www.thrustlabs.com')
expect(result.website).toEqual(expectedValue)
it 'should extract the full address', ->
sut = new ContactParser
result = sut.parse('Johnny Smith, 121 John St., Toronto, Ontario, johnny@smith.com')
expect(result.name).toEqual('Johnny Smith')
expect(result.email).toEqual('johnny@smith.com')
expect(result.address).toEqual('121 John St.')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.country).toEqual('Canada')
it 'should parse the Amsterdam brewery', ->
address = """
Amsterdam Brewery
info@amsterdambeer.com
416.504.6882
http://amsterdambeer.com
45 Esandar Dr Toronto, ON M4G 4C5
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Amsterdam Brewery')
expect(result.email).toEqual('info@amsterdambeer.com')
expect(result.address).toEqual('45 Esandar Dr')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M4G 4C5')
expect(result.country).toEqual('Canada')
expect(result.website).toEqual('http://amsterdambeer.com')
expect(result.phone).toEqual('(416) 504-6882')
it 'should parse Secco Italian Bubbles', ->
address = """
Secco Italian Bubbles
19 East Birch Street, Ste 106
Walla Walla, Washington 99362
Phone: 509.526.5230
seccoitalianbubbles@gmail.com
http://www.seccobubbles.com/
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Secco Italian Bubbles')
expect(result.email).toEqual('seccoitalianbubbles@gmail.com')
expect(result.address).toEqual('19 East Birch Street, Ste 106')
expect(result.city).toEqual('Walla Walla')
expect(result.province).toEqual('WA')
expect(result.postal).toEqual('99362')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('http://www.seccobubbles.com/')
expect(result.phone).toEqual('(509) 526-5230')
it 'should parse Saltwater Farm', ->
address = """
Saltwater Farm Vineyard
349 Elm St Stonington, Connecticut 06378
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Saltwater Farm Vineyard')
expect(result.email).toEqual('')
expect(result.address).toEqual('349 Elm St')
expect(result.city).toEqual('Stonington')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06378')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('')
it 'should parse Chamard Vineyards', ->
address = "Chamard Vineyards | 115 Cow Hill Road | Clinton, CT 06413 Phone: 860-664-0299 |"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Chamard Vineyards')
expect(result.email).toEqual('')
expect(result.address).toEqual('115 Cow Hill Road')
expect(result.city).toEqual('Clinton')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06413')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('(860) 664-0299')
it 'should parse shorter blocks', ->
address = "Hello, 123 jones st"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Hello')
expect(result.address).toEqual('123 jones st')
it 'should recognize 5-4 ZIP codes', ->
address = "Mark, 1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.postal).toEqual('12345-1234')
it 'should work when a name is missing', ->
address = "1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('1 17th Street #5')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse PO Boxes', ->
address = "PO Box 12345, Denver CO 45678"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('PO Box 12345')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('45678')
it 'should work with 5 digit street addresses', ->
address = "12345 Andrews Dr, Denver, CO 80239-4458"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('12345 Andrews Dr')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80239-4458')
it 'should take the city out of the address if it finds it by proximity to state', ->
address = " 710 First Street Napa, CA 94559 PHONE 707.256.3111 FAX 707.256.0864 info@heroldwines.com"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
it 'should use labels as hints if available', ->
address = "710 First Street Napa, CA 94559 FAX 707.256.0864 PHONE 707.256.3111"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
it 'should extract identifiable elements from space separated strings', ->
address = "710 First Street Napa, CA 94559 PHONE 707.256.3111 info@testing.com"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('info@testing.com')
it 'should treat bullet characters as separators', ->
address = "710 First Street • Napa, CA 94559 • PHONE 707.256.3111 info@testing.com"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('info@testing.com')
it 'should handle states with spaces in the name', ->
address = "Adair Vineyards • 52 Allhusen Road, New Paltz, New York 12561 • 845-255-1377"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Adair Vineyards')
expect(result.address).toEqual('52 Allhusen Road')
expect(result.city).toEqual('New Paltz')
expect(result.province).toEqual('NY')
expect(result.postal).toEqual('12561')
expect(result.phone).toEqual('(845) 255-1377')
it 'should handle our own darned address', ->
address = "Thrust Labs Inc., 1 Yonge St Suite 1801, Toronto, Ontario, M5E 1W7 Canada"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Thrust Labs Inc.')
expect(result.address).toEqual('1 Yonge St Suite 1801')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M5E 1W7')
expect(result.country).toEqual('Canada')
it 'should give up finding the city name rather than returning a possible wrong result', ->
address = "1 17th Street #5, CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('')
expect(result.address).toEqual('1 17th Street #5')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse when comma between address and city is missing and street name is avenue', ->
address = "12345 East Center Avenue Aurora, CO 80012"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('Aurora')
expect(result.address).toEqual('12345 East Center Avenue')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80012')
it 'should handle semi-numeric street addresses', ->
address = "N95W18000 Appleton Ave, Menomonee Falls, WI"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.city).toEqual('Menomonee Falls')
expect(result.address).toEqual('N95W18000 Appleton Ave')
expect(result.province).toEqual('WI')
expect(result.postal).toEqual('')
expect(result.country).toEqual('USA')
| 51158 | describe 'Contact Parser', ->
ContactParser = require(__dirname + '/../src/contact-parser.js')
it 'should handle a null, undefined or empty input by returning empty strings and a score of 0', ->
sut = new ContactParser
result = sut.parse(null)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse(undefined)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse('')
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
it 'should extract the name', ->
expectedValue = '<NAME>'
sut = new ContactParser
result = sut.parse('<NAME>, 121 <NAME> St., Toronto, Ontario')
expect(result.name).toEqual(expectedValue)
it 'should extract the email', ->
expectedValue = '<EMAIL>'
sut = new ContactParser
result = sut.parse('<NAME>, 121 <NAME> St., Toronto, Ontario, <EMAIL>')
expect(result.email).toEqual(expectedValue)
it 'should reformat North American phone numbers', ->
expectedValue = '(416) 967-1111'
sut = new ContactParser
result = sut.parse('<NAME>, 416.967 1111')
expect(result.phone).toEqual(expectedValue)
it 'should prefix URLs with http://', ->
expectedValue = 'http://www.thrustlabs.com'
sut = new ContactParser
result = sut.parse('<NAME>, www.thrustlabs.com')
expect(result.website).toEqual(expectedValue)
it 'should extract the full address', ->
sut = new ContactParser
result = sut.parse('<NAME>, 121 <NAME>., Toronto, Ontario, <EMAIL>')
expect(result.name).toEqual('<NAME>')
expect(result.email).toEqual('<EMAIL>')
expect(result.address).toEqual('121 <NAME> St.')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.country).toEqual('Canada')
it 'should parse the Amsterdam brewery', ->
address = """
Amsterdam Brewery
<EMAIL>
416.504.6882
http://amsterdambeer.com
45 Esandar Dr Toronto, ON M4G 4C5
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('<NAME>')
expect(result.email).toEqual('<EMAIL>')
expect(result.address).toEqual('45 Esandar Dr')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M4G 4C5')
expect(result.country).toEqual('Canada')
expect(result.website).toEqual('http://amsterdambeer.com')
expect(result.phone).toEqual('(416) 504-6882')
it 'should parse Secco Italian Bubbles', ->
address = """
Secco Italian Bubbles
19 East Birch Street, Ste 106
Walla Walla, Washington 99362
Phone: 509.526.5230
<EMAIL>
http://www.seccobubbles.com/
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('<NAME>')
expect(result.email).toEqual('<EMAIL>')
expect(result.address).toEqual('19 East Birch Street, Ste 106')
expect(result.city).toEqual('Walla Walla')
expect(result.province).toEqual('WA')
expect(result.postal).toEqual('99362')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('http://www.seccobubbles.com/')
expect(result.phone).toEqual('(509) 526-5230')
it 'should parse Saltwater Farm', ->
address = """
Saltwater Farm Vineyard
349 Elm St Stonington, Connecticut 06378
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Saltwater Farm Vineyard')
expect(result.email).toEqual('')
expect(result.address).toEqual('349 Elm St')
expect(result.city).toEqual('Stonington')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06378')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('')
it 'should parse Chamard Vineyards', ->
address = "Chamard Vineyards | 115 Cow Hill Road | Clinton, CT 06413 Phone: 860-664-0299 |"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('<NAME>')
expect(result.email).toEqual('')
expect(result.address).toEqual('115 Cow Hill Road')
expect(result.city).toEqual('Clinton')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06413')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('(860) 664-0299')
it 'should parse shorter blocks', ->
address = "Hello, 123 jones st"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('<NAME>')
expect(result.address).toEqual('123 jones st')
it 'should recognize 5-4 ZIP codes', ->
address = "<NAME>, 1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.postal).toEqual('12345-1234')
it 'should work when a name is missing', ->
address = "1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('1 17th Street #5')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse PO Boxes', ->
address = "PO Box 12345, Denver CO 45678"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('PO Box 12345')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('45678')
it 'should work with 5 digit street addresses', ->
address = "12345 Andrews Dr, Denver, CO 80239-4458"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('12345 Andrews Dr')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80239-4458')
it 'should take the city out of the address if it finds it by proximity to state', ->
address = " 710 First Street Napa, CA 94559 PHONE 707.256.3111 FAX 707.256.0864 <EMAIL>"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
it 'should use labels as hints if available', ->
address = "710 First Street Napa, CA 94559 FAX 707.256.0864 PHONE 707.256.3111"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
it 'should extract identifiable elements from space separated strings', ->
address = "710 First Street Napa, CA 94559 PHONE 707.256.3111 <EMAIL>"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('<EMAIL>')
it 'should treat bullet characters as separators', ->
address = "710 First Street • Napa, CA 94559 • PHONE 707.256.3111 <EMAIL>"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('<EMAIL>')
it 'should handle states with spaces in the name', ->
address = "<NAME>ards • 52 Allhusen Road, New Paltz, New York 12561 • 845-255-1377"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('<NAME>')
expect(result.address).toEqual('52 Allhusen Road')
expect(result.city).toEqual('New Paltz')
expect(result.province).toEqual('NY')
expect(result.postal).toEqual('12561')
expect(result.phone).toEqual('(845) 255-1377')
it 'should handle our own darned address', ->
address = "Thrust Labs Inc., 1 Yonge St Suite 1801, Toronto, Ontario, M5E 1W7 Canada"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Thrust Labs Inc.')
expect(result.address).toEqual('1 Yonge St Suite 1801')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M5E 1W7')
expect(result.country).toEqual('Canada')
it 'should give up finding the city name rather than returning a possible wrong result', ->
address = "1 17th Street #5, CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('')
expect(result.address).toEqual('1 17th Street #5')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse when comma between address and city is missing and street name is avenue', ->
address = "12345 East Center Avenue Aurora, CO 80012"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('Aurora')
expect(result.address).toEqual('12345 East Center Avenue')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80012')
it 'should handle semi-numeric street addresses', ->
address = "N95W18000 Appleton Ave, Menomonee Falls, WI"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.city).toEqual('Menomonee Falls')
expect(result.address).toEqual('N95W18000 Appleton Ave')
expect(result.province).toEqual('WI')
expect(result.postal).toEqual('')
expect(result.country).toEqual('USA')
| true | describe 'Contact Parser', ->
ContactParser = require(__dirname + '/../src/contact-parser.js')
it 'should handle a null, undefined or empty input by returning empty strings and a score of 0', ->
sut = new ContactParser
result = sut.parse(null)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse(undefined)
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
sut = new ContactParser
result = sut.parse('')
expect(result.score()).toEqual(0)
expect(result.name).toEqual('')
expect(result.email).toEqual('')
expect(result.address).toEqual('')
expect(result.city).toEqual('')
expect(result.province).toEqual('')
expect(result.country).toEqual('')
it 'should extract the name', ->
expectedValue = 'PI:NAME:<NAME>END_PI'
sut = new ContactParser
result = sut.parse('PI:NAME:<NAME>END_PI, 121 PI:NAME:<NAME>END_PI St., Toronto, Ontario')
expect(result.name).toEqual(expectedValue)
it 'should extract the email', ->
expectedValue = 'PI:EMAIL:<EMAIL>END_PI'
sut = new ContactParser
result = sut.parse('PI:NAME:<NAME>END_PI, 121 PI:NAME:<NAME>END_PI St., Toronto, Ontario, PI:EMAIL:<EMAIL>END_PI')
expect(result.email).toEqual(expectedValue)
it 'should reformat North American phone numbers', ->
expectedValue = '(416) 967-1111'
sut = new ContactParser
result = sut.parse('PI:NAME:<NAME>END_PI, 416.967 1111')
expect(result.phone).toEqual(expectedValue)
it 'should prefix URLs with http://', ->
expectedValue = 'http://www.thrustlabs.com'
sut = new ContactParser
result = sut.parse('PI:NAME:<NAME>END_PI, www.thrustlabs.com')
expect(result.website).toEqual(expectedValue)
it 'should extract the full address', ->
sut = new ContactParser
result = sut.parse('PI:NAME:<NAME>END_PI, 121 PI:NAME:<NAME>END_PI., Toronto, Ontario, PI:EMAIL:<EMAIL>END_PI')
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.email).toEqual('PI:EMAIL:<EMAIL>END_PI')
expect(result.address).toEqual('121 PI:NAME:<NAME>END_PI St.')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.country).toEqual('Canada')
it 'should parse the Amsterdam brewery', ->
address = """
Amsterdam Brewery
PI:EMAIL:<EMAIL>END_PI
416.504.6882
http://amsterdambeer.com
45 Esandar Dr Toronto, ON M4G 4C5
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.email).toEqual('PI:EMAIL:<EMAIL>END_PI')
expect(result.address).toEqual('45 Esandar Dr')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M4G 4C5')
expect(result.country).toEqual('Canada')
expect(result.website).toEqual('http://amsterdambeer.com')
expect(result.phone).toEqual('(416) 504-6882')
it 'should parse Secco Italian Bubbles', ->
address = """
Secco Italian Bubbles
19 East Birch Street, Ste 106
Walla Walla, Washington 99362
Phone: 509.526.5230
PI:EMAIL:<EMAIL>END_PI
http://www.seccobubbles.com/
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.email).toEqual('PI:EMAIL:<EMAIL>END_PI')
expect(result.address).toEqual('19 East Birch Street, Ste 106')
expect(result.city).toEqual('Walla Walla')
expect(result.province).toEqual('WA')
expect(result.postal).toEqual('99362')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('http://www.seccobubbles.com/')
expect(result.phone).toEqual('(509) 526-5230')
it 'should parse Saltwater Farm', ->
address = """
Saltwater Farm Vineyard
349 Elm St Stonington, Connecticut 06378
"""
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Saltwater Farm Vineyard')
expect(result.email).toEqual('')
expect(result.address).toEqual('349 Elm St')
expect(result.city).toEqual('Stonington')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06378')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('')
it 'should parse Chamard Vineyards', ->
address = "Chamard Vineyards | 115 Cow Hill Road | Clinton, CT 06413 Phone: 860-664-0299 |"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.email).toEqual('')
expect(result.address).toEqual('115 Cow Hill Road')
expect(result.city).toEqual('Clinton')
expect(result.province).toEqual('CT')
expect(result.postal).toEqual('06413')
expect(result.country).toEqual('USA')
expect(result.website).toEqual('')
expect(result.phone).toEqual('(860) 664-0299')
it 'should parse shorter blocks', ->
address = "Hello, 123 jones st"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.address).toEqual('123 jones st')
it 'should recognize 5-4 ZIP codes', ->
address = "PI:NAME:<NAME>END_PI, 1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.postal).toEqual('12345-1234')
it 'should work when a name is missing', ->
address = "1 17th Street #5, Denver CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('1 17th Street #5')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse PO Boxes', ->
address = "PO Box 12345, Denver CO 45678"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('PO Box 12345')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('45678')
it 'should work with 5 digit street addresses', ->
address = "12345 Andrews Dr, Denver, CO 80239-4458"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('12345 Andrews Dr')
expect(result.city).toEqual('Denver')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80239-4458')
it 'should take the city out of the address if it finds it by proximity to state', ->
address = " 710 First Street Napa, CA 94559 PHONE 707.256.3111 FAX 707.256.0864 PI:EMAIL:<EMAIL>END_PI"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
it 'should use labels as hints if available', ->
address = "710 First Street Napa, CA 94559 FAX 707.256.0864 PHONE 707.256.3111"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
it 'should extract identifiable elements from space separated strings', ->
address = "710 First Street Napa, CA 94559 PHONE 707.256.3111 PI:EMAIL:<EMAIL>END_PI"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('PI:EMAIL:<EMAIL>END_PI')
it 'should treat bullet characters as separators', ->
address = "710 First Street • Napa, CA 94559 • PHONE 707.256.3111 PI:EMAIL:<EMAIL>END_PI"
sut = new ContactParser
result = sut.parse(address)
expect(result.address).toEqual('710 First Street')
expect(result.city).toEqual('Napa')
expect(result.province).toEqual('CA')
expect(result.postal).toEqual('94559')
expect(result.phone).toEqual('(707) 256-3111')
expect(result.email).toEqual('PI:EMAIL:<EMAIL>END_PI')
it 'should handle states with spaces in the name', ->
address = "PI:NAME:<NAME>END_PIards • 52 Allhusen Road, New Paltz, New York 12561 • 845-255-1377"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('PI:NAME:<NAME>END_PI')
expect(result.address).toEqual('52 Allhusen Road')
expect(result.city).toEqual('New Paltz')
expect(result.province).toEqual('NY')
expect(result.postal).toEqual('12561')
expect(result.phone).toEqual('(845) 255-1377')
it 'should handle our own darned address', ->
address = "Thrust Labs Inc., 1 Yonge St Suite 1801, Toronto, Ontario, M5E 1W7 Canada"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('Thrust Labs Inc.')
expect(result.address).toEqual('1 Yonge St Suite 1801')
expect(result.city).toEqual('Toronto')
expect(result.province).toEqual('ON')
expect(result.postal).toEqual('M5E 1W7')
expect(result.country).toEqual('Canada')
it 'should give up finding the city name rather than returning a possible wrong result', ->
address = "1 17th Street #5, CO 12345-1234"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('')
expect(result.address).toEqual('1 17th Street #5')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('12345-1234')
it 'should parse when comma between address and city is missing and street name is avenue', ->
address = "12345 East Center Avenue Aurora, CO 80012"
sut = new ContactParser
result = sut.parse(address)
expect(result.city).toEqual('Aurora')
expect(result.address).toEqual('12345 East Center Avenue')
expect(result.province).toEqual('CO')
expect(result.postal).toEqual('80012')
it 'should handle semi-numeric street addresses', ->
address = "N95W18000 Appleton Ave, Menomonee Falls, WI"
sut = new ContactParser
result = sut.parse(address)
expect(result.name).toEqual('')
expect(result.city).toEqual('Menomonee Falls')
expect(result.address).toEqual('N95W18000 Appleton Ave')
expect(result.province).toEqual('WI')
expect(result.postal).toEqual('')
expect(result.country).toEqual('USA')
|
[
{
"context": " line breaks inside function parentheses\n# @author Teddy Katz\n###\n'use strict'\n\n#------------------------------",
"end": 100,
"score": 0.9998084306716919,
"start": 90,
"tag": "NAME",
"value": "Teddy Katz"
}
] | src/rules/function-paren-newline.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview enforce consistent line breaks inside function parentheses
# @author Teddy Katz
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent line breaks inside function parentheses'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/function-paren-newline'
# fixable: 'whitespace'
schema: [
oneOf: [
enum: ['always', 'never', 'consistent', 'multiline']
,
type: 'object'
properties:
minItems:
type: 'integer'
minimum: 0
additionalProperties: no
]
]
messages:
expectedBefore: "Expected newline before ')'."
expectedAfter: "Expected newline after '('."
unexpectedBefore: "Unexpected newline before '('."
unexpectedAfter: "Unexpected newline after ')'."
create: (context) ->
sourceCode = context.getSourceCode()
rawOption = context.options[0] or 'multiline'
multilineOption = rawOption is 'multiline'
consistentOption = rawOption is 'consistent'
if typeof rawOption is 'object'
{minItems} = rawOption
else if rawOption is 'always'
minItems = 0
else if rawOption is 'never'
minItems = Infinity
else
minItems = null
#----------------------------------------------------------------------
# Helpers
#----------------------------------------------------------------------
###*
# Determines whether there should be newlines inside function parens
# @param {ASTNode[]} elements The arguments or parameters in the list
# @param {boolean} hasLeftNewline `true` if the left paren has a newline in the current code.
# @returns {boolean} `true` if there should be newlines inside the function parens
###
shouldHaveNewlines = (elements, hasLeftNewline) ->
if multilineOption
return elements.some (element, index) ->
index isnt elements.length - 1 and
element.loc.end.line isnt elements[index + 1].loc.start.line
return hasLeftNewline if consistentOption
elements.length >= minItems
###*
# Validates a list of arguments or parameters
# @param {Object} parens An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token
# @param {ASTNode[]} elements The arguments or parameters in the list
# @returns {void}
###
validateParens = (parens, elements) ->
{leftParen, rightParen} = parens
return unless leftParen and rightParen
tokenAfterLeftParen = sourceCode.getTokenAfter leftParen
tokenBeforeRightParen = sourceCode.getTokenBefore rightParen
hasLeftNewline = not astUtils.isTokenOnSameLine(
leftParen
tokenAfterLeftParen
)
hasRightNewline = not astUtils.isTokenOnSameLine(
tokenBeforeRightParen
rightParen
)
needsNewlines = shouldHaveNewlines elements, hasLeftNewline
if hasLeftNewline and not needsNewlines
context.report
node: leftParen
messageId: 'unexpectedAfter'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice leftParen.range[1], tokenAfterLeftParen.range[0]
# .trim()
# # If there is a comment between the ( and the first element, don't do a fix.
# null
# else
# fixer.removeRange [
# leftParen.range[1]
# tokenAfterLeftParen.range[0]
# ]
else if not hasLeftNewline and needsNewlines
context.report
node: leftParen
messageId: 'expectedAfter'
# fix: (fixer) -> fixer.insertTextAfter leftParen, '\n'
if hasRightNewline and not needsNewlines
context.report
node: rightParen
messageId: 'unexpectedBefore'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice tokenBeforeRightParen.range[1], rightParen.range[0]
# .trim()
# # If there is a comment between the last element and the ), don't do a fix.
# null
# else
# fixer.removeRange [
# tokenBeforeRightParen.range[1]
# rightParen.range[0]
# ]
else if not hasRightNewline and needsNewlines
context.report
node: rightParen
messageId: 'expectedBefore'
# fix: (fixer) -> fixer.insertTextBefore rightParen, '\n'
###*
# Gets the left paren and right paren tokens of a node.
# @param {ASTNode} node The node with parens
# @returns {Object} An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token.
# Can also return `null` if an expression has no parens (e.g. a NewExpression with no arguments, or an ArrowFunctionExpression
# with a single parameter)
###
getParenTokens = (node) ->
return null if node.implicit
switch node.type
when 'NewExpression', 'CallExpression'
# If the NewExpression does not have parens (e.g. `new Foo`), return null.
return null if (
node.type is 'NewExpression' and
not node.arguments.length and
not (
astUtils.isOpeningParenToken(
sourceCode.getLastToken node, skip: 1
) and astUtils.isClosingParenToken sourceCode.getLastToken node
)
)
return
leftParen: sourceCode.getTokenAfter(
node.callee
astUtils.isOpeningParenToken
)
rightParen: sourceCode.getLastToken node
when 'FunctionDeclaration', 'FunctionExpression'
leftParen =
try
sourceCode.getFirstToken node, astUtils.isOpeningParenToken
return null unless leftParen
rightParen =
if node.params.length
sourceCode.getTokenAfter(
node.params[node.params.length - 1]
astUtils.isClosingParenToken
)
else
sourceCode.getTokenAfter leftParen
return {leftParen, rightParen}
when 'ArrowFunctionExpression'
firstToken = sourceCode.getFirstToken node
# If the ArrowFunctionExpression has a single param without parens, return null.
return null unless astUtils.isOpeningParenToken firstToken
return
leftParen: firstToken
rightParen: sourceCode.getTokenBefore(
node.body
astUtils.isClosingParenToken
)
else
throw new TypeError "unexpected node with type #{node.type}"
###*
# Validates the parentheses for a node
# @param {ASTNode} node The node with parens
# @returns {void}
###
validateNode = (node) ->
parens = getParenTokens node
if parens
validateParens parens,
if astUtils.isFunction node then node.params else node.arguments
#----------------------------------------------------------------------
# Public
#----------------------------------------------------------------------
ArrowFunctionExpression: validateNode
CallExpression: validateNode
FunctionDeclaration: validateNode
FunctionExpression: validateNode
NewExpression: validateNode
| 143951 | ###*
# @fileoverview enforce consistent line breaks inside function parentheses
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent line breaks inside function parentheses'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/function-paren-newline'
# fixable: 'whitespace'
schema: [
oneOf: [
enum: ['always', 'never', 'consistent', 'multiline']
,
type: 'object'
properties:
minItems:
type: 'integer'
minimum: 0
additionalProperties: no
]
]
messages:
expectedBefore: "Expected newline before ')'."
expectedAfter: "Expected newline after '('."
unexpectedBefore: "Unexpected newline before '('."
unexpectedAfter: "Unexpected newline after ')'."
create: (context) ->
sourceCode = context.getSourceCode()
rawOption = context.options[0] or 'multiline'
multilineOption = rawOption is 'multiline'
consistentOption = rawOption is 'consistent'
if typeof rawOption is 'object'
{minItems} = rawOption
else if rawOption is 'always'
minItems = 0
else if rawOption is 'never'
minItems = Infinity
else
minItems = null
#----------------------------------------------------------------------
# Helpers
#----------------------------------------------------------------------
###*
# Determines whether there should be newlines inside function parens
# @param {ASTNode[]} elements The arguments or parameters in the list
# @param {boolean} hasLeftNewline `true` if the left paren has a newline in the current code.
# @returns {boolean} `true` if there should be newlines inside the function parens
###
shouldHaveNewlines = (elements, hasLeftNewline) ->
if multilineOption
return elements.some (element, index) ->
index isnt elements.length - 1 and
element.loc.end.line isnt elements[index + 1].loc.start.line
return hasLeftNewline if consistentOption
elements.length >= minItems
###*
# Validates a list of arguments or parameters
# @param {Object} parens An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token
# @param {ASTNode[]} elements The arguments or parameters in the list
# @returns {void}
###
validateParens = (parens, elements) ->
{leftParen, rightParen} = parens
return unless leftParen and rightParen
tokenAfterLeftParen = sourceCode.getTokenAfter leftParen
tokenBeforeRightParen = sourceCode.getTokenBefore rightParen
hasLeftNewline = not astUtils.isTokenOnSameLine(
leftParen
tokenAfterLeftParen
)
hasRightNewline = not astUtils.isTokenOnSameLine(
tokenBeforeRightParen
rightParen
)
needsNewlines = shouldHaveNewlines elements, hasLeftNewline
if hasLeftNewline and not needsNewlines
context.report
node: leftParen
messageId: 'unexpectedAfter'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice leftParen.range[1], tokenAfterLeftParen.range[0]
# .trim()
# # If there is a comment between the ( and the first element, don't do a fix.
# null
# else
# fixer.removeRange [
# leftParen.range[1]
# tokenAfterLeftParen.range[0]
# ]
else if not hasLeftNewline and needsNewlines
context.report
node: leftParen
messageId: 'expectedAfter'
# fix: (fixer) -> fixer.insertTextAfter leftParen, '\n'
if hasRightNewline and not needsNewlines
context.report
node: rightParen
messageId: 'unexpectedBefore'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice tokenBeforeRightParen.range[1], rightParen.range[0]
# .trim()
# # If there is a comment between the last element and the ), don't do a fix.
# null
# else
# fixer.removeRange [
# tokenBeforeRightParen.range[1]
# rightParen.range[0]
# ]
else if not hasRightNewline and needsNewlines
context.report
node: rightParen
messageId: 'expectedBefore'
# fix: (fixer) -> fixer.insertTextBefore rightParen, '\n'
###*
# Gets the left paren and right paren tokens of a node.
# @param {ASTNode} node The node with parens
# @returns {Object} An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token.
# Can also return `null` if an expression has no parens (e.g. a NewExpression with no arguments, or an ArrowFunctionExpression
# with a single parameter)
###
getParenTokens = (node) ->
return null if node.implicit
switch node.type
when 'NewExpression', 'CallExpression'
# If the NewExpression does not have parens (e.g. `new Foo`), return null.
return null if (
node.type is 'NewExpression' and
not node.arguments.length and
not (
astUtils.isOpeningParenToken(
sourceCode.getLastToken node, skip: 1
) and astUtils.isClosingParenToken sourceCode.getLastToken node
)
)
return
leftParen: sourceCode.getTokenAfter(
node.callee
astUtils.isOpeningParenToken
)
rightParen: sourceCode.getLastToken node
when 'FunctionDeclaration', 'FunctionExpression'
leftParen =
try
sourceCode.getFirstToken node, astUtils.isOpeningParenToken
return null unless leftParen
rightParen =
if node.params.length
sourceCode.getTokenAfter(
node.params[node.params.length - 1]
astUtils.isClosingParenToken
)
else
sourceCode.getTokenAfter leftParen
return {leftParen, rightParen}
when 'ArrowFunctionExpression'
firstToken = sourceCode.getFirstToken node
# If the ArrowFunctionExpression has a single param without parens, return null.
return null unless astUtils.isOpeningParenToken firstToken
return
leftParen: firstToken
rightParen: sourceCode.getTokenBefore(
node.body
astUtils.isClosingParenToken
)
else
throw new TypeError "unexpected node with type #{node.type}"
###*
# Validates the parentheses for a node
# @param {ASTNode} node The node with parens
# @returns {void}
###
validateNode = (node) ->
parens = getParenTokens node
if parens
validateParens parens,
if astUtils.isFunction node then node.params else node.arguments
#----------------------------------------------------------------------
# Public
#----------------------------------------------------------------------
ArrowFunctionExpression: validateNode
CallExpression: validateNode
FunctionDeclaration: validateNode
FunctionExpression: validateNode
NewExpression: validateNode
| true | ###*
# @fileoverview enforce consistent line breaks inside function parentheses
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'enforce consistent line breaks inside function parentheses'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/function-paren-newline'
# fixable: 'whitespace'
schema: [
oneOf: [
enum: ['always', 'never', 'consistent', 'multiline']
,
type: 'object'
properties:
minItems:
type: 'integer'
minimum: 0
additionalProperties: no
]
]
messages:
expectedBefore: "Expected newline before ')'."
expectedAfter: "Expected newline after '('."
unexpectedBefore: "Unexpected newline before '('."
unexpectedAfter: "Unexpected newline after ')'."
create: (context) ->
sourceCode = context.getSourceCode()
rawOption = context.options[0] or 'multiline'
multilineOption = rawOption is 'multiline'
consistentOption = rawOption is 'consistent'
if typeof rawOption is 'object'
{minItems} = rawOption
else if rawOption is 'always'
minItems = 0
else if rawOption is 'never'
minItems = Infinity
else
minItems = null
#----------------------------------------------------------------------
# Helpers
#----------------------------------------------------------------------
###*
# Determines whether there should be newlines inside function parens
# @param {ASTNode[]} elements The arguments or parameters in the list
# @param {boolean} hasLeftNewline `true` if the left paren has a newline in the current code.
# @returns {boolean} `true` if there should be newlines inside the function parens
###
shouldHaveNewlines = (elements, hasLeftNewline) ->
if multilineOption
return elements.some (element, index) ->
index isnt elements.length - 1 and
element.loc.end.line isnt elements[index + 1].loc.start.line
return hasLeftNewline if consistentOption
elements.length >= minItems
###*
# Validates a list of arguments or parameters
# @param {Object} parens An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token
# @param {ASTNode[]} elements The arguments or parameters in the list
# @returns {void}
###
validateParens = (parens, elements) ->
{leftParen, rightParen} = parens
return unless leftParen and rightParen
tokenAfterLeftParen = sourceCode.getTokenAfter leftParen
tokenBeforeRightParen = sourceCode.getTokenBefore rightParen
hasLeftNewline = not astUtils.isTokenOnSameLine(
leftParen
tokenAfterLeftParen
)
hasRightNewline = not astUtils.isTokenOnSameLine(
tokenBeforeRightParen
rightParen
)
needsNewlines = shouldHaveNewlines elements, hasLeftNewline
if hasLeftNewline and not needsNewlines
context.report
node: leftParen
messageId: 'unexpectedAfter'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice leftParen.range[1], tokenAfterLeftParen.range[0]
# .trim()
# # If there is a comment between the ( and the first element, don't do a fix.
# null
# else
# fixer.removeRange [
# leftParen.range[1]
# tokenAfterLeftParen.range[0]
# ]
else if not hasLeftNewline and needsNewlines
context.report
node: leftParen
messageId: 'expectedAfter'
# fix: (fixer) -> fixer.insertTextAfter leftParen, '\n'
if hasRightNewline and not needsNewlines
context.report
node: rightParen
messageId: 'unexpectedBefore'
# fix: (fixer) ->
# if sourceCode
# .getText()
# .slice tokenBeforeRightParen.range[1], rightParen.range[0]
# .trim()
# # If there is a comment between the last element and the ), don't do a fix.
# null
# else
# fixer.removeRange [
# tokenBeforeRightParen.range[1]
# rightParen.range[0]
# ]
else if not hasRightNewline and needsNewlines
context.report
node: rightParen
messageId: 'expectedBefore'
# fix: (fixer) -> fixer.insertTextBefore rightParen, '\n'
###*
# Gets the left paren and right paren tokens of a node.
# @param {ASTNode} node The node with parens
# @returns {Object} An object with keys `leftParen` for the left paren token, and `rightParen` for the right paren token.
# Can also return `null` if an expression has no parens (e.g. a NewExpression with no arguments, or an ArrowFunctionExpression
# with a single parameter)
###
getParenTokens = (node) ->
return null if node.implicit
switch node.type
when 'NewExpression', 'CallExpression'
# If the NewExpression does not have parens (e.g. `new Foo`), return null.
return null if (
node.type is 'NewExpression' and
not node.arguments.length and
not (
astUtils.isOpeningParenToken(
sourceCode.getLastToken node, skip: 1
) and astUtils.isClosingParenToken sourceCode.getLastToken node
)
)
return
leftParen: sourceCode.getTokenAfter(
node.callee
astUtils.isOpeningParenToken
)
rightParen: sourceCode.getLastToken node
when 'FunctionDeclaration', 'FunctionExpression'
leftParen =
try
sourceCode.getFirstToken node, astUtils.isOpeningParenToken
return null unless leftParen
rightParen =
if node.params.length
sourceCode.getTokenAfter(
node.params[node.params.length - 1]
astUtils.isClosingParenToken
)
else
sourceCode.getTokenAfter leftParen
return {leftParen, rightParen}
when 'ArrowFunctionExpression'
firstToken = sourceCode.getFirstToken node
# If the ArrowFunctionExpression has a single param without parens, return null.
return null unless astUtils.isOpeningParenToken firstToken
return
leftParen: firstToken
rightParen: sourceCode.getTokenBefore(
node.body
astUtils.isClosingParenToken
)
else
throw new TypeError "unexpected node with type #{node.type}"
###*
# Validates the parentheses for a node
# @param {ASTNode} node The node with parens
# @returns {void}
###
validateNode = (node) ->
parens = getParenTokens node
if parens
validateParens parens,
if astUtils.isFunction node then node.params else node.arguments
#----------------------------------------------------------------------
# Public
#----------------------------------------------------------------------
ArrowFunctionExpression: validateNode
CallExpression: validateNode
FunctionDeclaration: validateNode
FunctionExpression: validateNode
NewExpression: validateNode
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999147653579712,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/beatmapset-page-hash.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetPageHash
@parse: (hash) ->
[mode, id] = hash[1..].split '/'
playmode: if mode != '' then mode
beatmapId: if id? then parseInt(id, 10)
@generate: ({beatmap, mode}) ->
if beatmap?
"##{beatmap.mode}/#{beatmap.id}"
else
"##{mode}"
| 153530 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetPageHash
@parse: (hash) ->
[mode, id] = hash[1..].split '/'
playmode: if mode != '' then mode
beatmapId: if id? then parseInt(id, 10)
@generate: ({beatmap, mode}) ->
if beatmap?
"##{beatmap.mode}/#{beatmap.id}"
else
"##{mode}"
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @BeatmapsetPageHash
@parse: (hash) ->
[mode, id] = hash[1..].split '/'
playmode: if mode != '' then mode
beatmapId: if id? then parseInt(id, 10)
@generate: ({beatmap, mode}) ->
if beatmap?
"##{beatmap.mode}/#{beatmap.id}"
else
"##{mode}"
|
[
{
"context": "IM'\n # Cookie 加密密钥,请在部署时重新生成\n AUTH_COOKIE_KEY: 'qqqsssssssssssssssss'\n # Cookie 加密密钥,请在部署时重新生成\n AUTH_COOKIE_MAX_AGE:",
"end": 137,
"score": 0.9984978437423706,
"start": 117,
"tag": "KEY",
"value": "qqqsssssssssssssssss"
},
{
"context": "->'\n # N3D 密钥,用来加密所有的 I... | src/conf.coffee | lly5401/dhcseltalkserver | 1 | module.exports =
# 认证 Cookie 名称,根据业务自行修改
AUTH_COOKIE_NAME: 'webIM'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_KEY: 'qqqsssssssssssssssss'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_MAX_AGE: '70000000'
# 融云颁发的 App Key,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_KEY: '6tnym1br65nv7'
# 融云颁发的 App Secret,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_SECRET: 'CvBQdTzWNqZg6G'
# 融云短信服务提供的注册用户短信模板 Id
RONGCLOUD_SMS_REGISTER_TEMPLATE_ID: 'qq'
# 融云短信服务提供的重置密码短信模板 Id
RONGCLOUD_SMS_RESET_PASSWORD_TEMPLATE_ID: 'qq'
# 七牛颁发的 Access Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_ACCESS_KEY: '<-- 此处填写七牛颁发的 Access Key -->'
# 七牛颁发的 Secret Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_SECRET_KEY: '<-- 此处填写七牛颁发的 Secret Key -->'
# 七牛创建的空间名称,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_NAME: '<-- 此处填写七牛创建的空间名称 -->'
# 七牛创建的空间域名,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_DOMAIN: '<-- 此处填写七牛创建的空间域名 -->'
# N3D 密钥,用来加密所有的 Id 数字,不要小于 5 位
N3D_KEY: 'qqqaaa'
# 跨域支持所需配置的主机信息(请求者),包括请求服务器的域名和端口号
CORS_HOSTS: 'http://test.com:8080'
# 本服务部署的 HTTP 端口号
SERVER_PORT: '8585'
# MySQL 数据库名称
DB_NAME: 'IM'
# MySQL 数据库用户名
DB_USER: 'root'
# MySQL 数据库密码
DB_PASSWORD: ''
# MySQL 数据库地址
DB_HOST: 'localhost'
# MySQL 数据库端口号
DB_PORT: '3306'
| 8097 | module.exports =
# 认证 Cookie 名称,根据业务自行修改
AUTH_COOKIE_NAME: 'webIM'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_KEY: '<KEY>'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_MAX_AGE: '70000000'
# 融云颁发的 App Key,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_KEY: '6tnym1br65nv7'
# 融云颁发的 App Secret,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_SECRET: 'CvBQdTzWNqZg6G'
# 融云短信服务提供的注册用户短信模板 Id
RONGCLOUD_SMS_REGISTER_TEMPLATE_ID: 'qq'
# 融云短信服务提供的重置密码短信模板 Id
RONGCLOUD_SMS_RESET_PASSWORD_TEMPLATE_ID: 'qq'
# 七牛颁发的 Access Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_ACCESS_KEY: '<-- 此处填写七牛颁发的 Access Key -->'
# 七牛颁发的 Secret Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_SECRET_KEY: '<-- 此处填写七牛颁发的 Secret Key -->'
# 七牛创建的空间名称,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_NAME: '<-- 此处填写七牛创建的空间名称 -->'
# 七牛创建的空间域名,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_DOMAIN: '<-- 此处填写七牛创建的空间域名 -->'
# N3D 密钥,用来加密所有的 Id 数字,不要小于 5 位
N3D_KEY: '<KEY>aaa'
# 跨域支持所需配置的主机信息(请求者),包括请求服务器的域名和端口号
CORS_HOSTS: 'http://test.com:8080'
# 本服务部署的 HTTP 端口号
SERVER_PORT: '8585'
# MySQL 数据库名称
DB_NAME: 'IM'
# MySQL 数据库用户名
DB_USER: 'root'
# MySQL 数据库密码
DB_PASSWORD: ''
# MySQL 数据库地址
DB_HOST: 'localhost'
# MySQL 数据库端口号
DB_PORT: '3306'
| true | module.exports =
# 认证 Cookie 名称,根据业务自行修改
AUTH_COOKIE_NAME: 'webIM'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_KEY: 'PI:KEY:<KEY>END_PI'
# Cookie 加密密钥,请在部署时重新生成
AUTH_COOKIE_MAX_AGE: '70000000'
# 融云颁发的 App Key,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_KEY: '6tnym1br65nv7'
# 融云颁发的 App Secret,请访问融云开发者后台:https://developer.rongcloud.cn
RONGCLOUD_APP_SECRET: 'CvBQdTzWNqZg6G'
# 融云短信服务提供的注册用户短信模板 Id
RONGCLOUD_SMS_REGISTER_TEMPLATE_ID: 'qq'
# 融云短信服务提供的重置密码短信模板 Id
RONGCLOUD_SMS_RESET_PASSWORD_TEMPLATE_ID: 'qq'
# 七牛颁发的 Access Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_ACCESS_KEY: '<-- 此处填写七牛颁发的 Access Key -->'
# 七牛颁发的 Secret Key,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_SECRET_KEY: '<-- 此处填写七牛颁发的 Secret Key -->'
# 七牛创建的空间名称,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_NAME: '<-- 此处填写七牛创建的空间名称 -->'
# 七牛创建的空间域名,请访问七牛开发者后台:https://portal.qiniu.com
QINIU_BUCKET_DOMAIN: '<-- 此处填写七牛创建的空间域名 -->'
# N3D 密钥,用来加密所有的 Id 数字,不要小于 5 位
N3D_KEY: 'PI:KEY:<KEY>END_PIaaa'
# 跨域支持所需配置的主机信息(请求者),包括请求服务器的域名和端口号
CORS_HOSTS: 'http://test.com:8080'
# 本服务部署的 HTTP 端口号
SERVER_PORT: '8585'
# MySQL 数据库名称
DB_NAME: 'IM'
# MySQL 数据库用户名
DB_USER: 'root'
# MySQL 数据库密码
DB_PASSWORD: ''
# MySQL 数据库地址
DB_HOST: 'localhost'
# MySQL 数据库端口号
DB_PORT: '3306'
|
[
{
"context": "wner, @githubRepo,\n @authorName = 'GithubDev', @authorEmail = 'example@example.net',\n ",
"end": 288,
"score": 0.9995995163917542,
"start": 279,
"tag": "USERNAME",
"value": "GithubDev"
},
{
"context": " @authorName = 'GithubDev', @authorEma... | src/main.coffee | olivierlemasle/github-dev | 1 | git = require 'nodegit'
pomParser = require 'pom-parser'
githubApi = require 'github'
class GithubDev
constructor: (@gitUrl, @baseBranch, @fetchOptions, @pushOptions, @localPath,
@githubAuth, @githubRepoOwner, @githubRepo,
@authorName = 'GithubDev', @authorEmail = 'example@example.net',
@remote = 'origin') ->
fetch: ->
cloneOptions =
checkoutBranch: @baseBranch
fetchOpts: @fetchOptions
console.log "Cloning #{@gitUrl} to #{@localPath}..."
git.Clone(@gitUrl, @localPath, cloneOptions)
.catch (e) =>
console.log "Cannot clone #{@gitUrl} to #{@localPath}: #{e}"
console.log 'Trying to open existing git repository...'
git.Repository.open(@localPath)
.then (repo) =>
repository = repo
console.log 'Fetching from remotes...'
repository.fetchAll(@fetchOptions)
.then =>
@checkoutNewBranchFromRemote repo
.catch =>
console.log 'Local branch already existing'
@checkoutAndPullBranchFromRemote repo
.then ->
repository
.then (repo) =>
@logRepo(repo)
repo
getMvnProjectVersion: ->
pomPath = @localPath + '/pom.xml'
console.log "Path: #{pomPath}"
opts =
filePath: pomPath
new Promise (res, rej) ->
pomParser.parse opts, (err, pomResponse) ->
if err
return rej(err)
version = pomResponse.pomObject.project.version
res(version)
requestChange: (repo, changeBranch, message, prTitle, prBody, pushChange,
change) ->
index = oid = null
@createAndCheckoutNewBranch(repo, changeBranch)
.then ->
change()
.then ->
repo.refreshIndex()
.then (idx) ->
index = idx
index.addAll()
.then ->
index.write()
.then ->
index.writeTree()
.then (oidResult) ->
oid = oidResult
git.Reference.nameToId(repo, 'HEAD')
.then (head) ->
repo.getCommit(head)
.then (parent) =>
author = git.Signature.now(@authorName, @authorEmail)
commiter = git.Signature.now(@authorName, @authorEmail)
repo.createCommit('HEAD', author, commiter, message, oid, [parent])
.then =>
if (!pushChange)
return Promise.resolve(oid.tostrS())
repo.getRemote(@remote)
.then (remote) =>
refSpec = "refs/heads/#{changeBranch}:refs/heads/#{changeBranch}"
remote.push [refSpec], @pushOptions
.then =>
github = new githubApi {Promise: require('bluebird')}
github.authenticate(@githubAuth)
github.pullRequests.create {
owner: @githubRepoOwner,
repo: @githubRepo,
title: prTitle,
head: changeBranch,
base: @baseBranch,
body: prBody
}
checkoutNewBranchFromRemote: (repo) ->
console.log "Creating local branch from #{@remote}/#{@baseBranch}"
repo.getBranchCommit("refs/remotes/#{@remote}/#{@baseBranch}")
.then (commit) =>
repo.createBranch @baseBranch, commit, false
.then (ref) =>
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(ref)
checkoutAndPullBranchFromRemote: (repo) ->
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(@baseBranch)
.then =>
console.log "Pulling from #{@remote}/#{@baseBranch}"
repo.mergeBranches(@baseBranch, "#{@remote}/#{@baseBranch}")
logRepo: (repo) ->
headPromise = repo.head().then (ref) -> ref.name()
headCommitPromise = repo.getHeadCommit().then (commit) -> commit.sha()
Promise.all [headPromise, headCommitPromise]
.then (res) ->
[head, commit] = res
console.log "HEAD=#{head} - #{commit}"
createAndCheckoutNewBranch: (repo, newBranch) ->
repo.getBranchCommit(@baseBranch)
.then (commit) ->
repo.createBranch newBranch, commit, false
.then (ref) ->
console.log "Checkout branch #{newBranch}"
repo.checkoutBranch(ref)
.then ->
repo
module.exports =
GithubDev: GithubDev
| 104277 | git = require 'nodegit'
pomParser = require 'pom-parser'
githubApi = require 'github'
class GithubDev
constructor: (@gitUrl, @baseBranch, @fetchOptions, @pushOptions, @localPath,
@githubAuth, @githubRepoOwner, @githubRepo,
@authorName = 'GithubDev', @authorEmail = '<EMAIL>',
@remote = 'origin') ->
fetch: ->
cloneOptions =
checkoutBranch: @baseBranch
fetchOpts: @fetchOptions
console.log "Cloning #{@gitUrl} to #{@localPath}..."
git.Clone(@gitUrl, @localPath, cloneOptions)
.catch (e) =>
console.log "Cannot clone #{@gitUrl} to #{@localPath}: #{e}"
console.log 'Trying to open existing git repository...'
git.Repository.open(@localPath)
.then (repo) =>
repository = repo
console.log 'Fetching from remotes...'
repository.fetchAll(@fetchOptions)
.then =>
@checkoutNewBranchFromRemote repo
.catch =>
console.log 'Local branch already existing'
@checkoutAndPullBranchFromRemote repo
.then ->
repository
.then (repo) =>
@logRepo(repo)
repo
getMvnProjectVersion: ->
pomPath = @localPath + '/pom.xml'
console.log "Path: #{pomPath}"
opts =
filePath: pomPath
new Promise (res, rej) ->
pomParser.parse opts, (err, pomResponse) ->
if err
return rej(err)
version = pomResponse.pomObject.project.version
res(version)
requestChange: (repo, changeBranch, message, prTitle, prBody, pushChange,
change) ->
index = oid = null
@createAndCheckoutNewBranch(repo, changeBranch)
.then ->
change()
.then ->
repo.refreshIndex()
.then (idx) ->
index = idx
index.addAll()
.then ->
index.write()
.then ->
index.writeTree()
.then (oidResult) ->
oid = oidResult
git.Reference.nameToId(repo, 'HEAD')
.then (head) ->
repo.getCommit(head)
.then (parent) =>
author = git.Signature.now(@authorName, @authorEmail)
commiter = git.Signature.now(@authorName, @authorEmail)
repo.createCommit('HEAD', author, commiter, message, oid, [parent])
.then =>
if (!pushChange)
return Promise.resolve(oid.tostrS())
repo.getRemote(@remote)
.then (remote) =>
refSpec = "refs/heads/#{changeBranch}:refs/heads/#{changeBranch}"
remote.push [refSpec], @pushOptions
.then =>
github = new githubApi {Promise: require('bluebird')}
github.authenticate(@githubAuth)
github.pullRequests.create {
owner: @githubRepoOwner,
repo: @githubRepo,
title: prTitle,
head: changeBranch,
base: @baseBranch,
body: prBody
}
checkoutNewBranchFromRemote: (repo) ->
console.log "Creating local branch from #{@remote}/#{@baseBranch}"
repo.getBranchCommit("refs/remotes/#{@remote}/#{@baseBranch}")
.then (commit) =>
repo.createBranch @baseBranch, commit, false
.then (ref) =>
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(ref)
checkoutAndPullBranchFromRemote: (repo) ->
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(@baseBranch)
.then =>
console.log "Pulling from #{@remote}/#{@baseBranch}"
repo.mergeBranches(@baseBranch, "#{@remote}/#{@baseBranch}")
logRepo: (repo) ->
headPromise = repo.head().then (ref) -> ref.name()
headCommitPromise = repo.getHeadCommit().then (commit) -> commit.sha()
Promise.all [headPromise, headCommitPromise]
.then (res) ->
[head, commit] = res
console.log "HEAD=#{head} - #{commit}"
createAndCheckoutNewBranch: (repo, newBranch) ->
repo.getBranchCommit(@baseBranch)
.then (commit) ->
repo.createBranch newBranch, commit, false
.then (ref) ->
console.log "Checkout branch #{newBranch}"
repo.checkoutBranch(ref)
.then ->
repo
module.exports =
GithubDev: GithubDev
| true | git = require 'nodegit'
pomParser = require 'pom-parser'
githubApi = require 'github'
class GithubDev
constructor: (@gitUrl, @baseBranch, @fetchOptions, @pushOptions, @localPath,
@githubAuth, @githubRepoOwner, @githubRepo,
@authorName = 'GithubDev', @authorEmail = 'PI:EMAIL:<EMAIL>END_PI',
@remote = 'origin') ->
fetch: ->
cloneOptions =
checkoutBranch: @baseBranch
fetchOpts: @fetchOptions
console.log "Cloning #{@gitUrl} to #{@localPath}..."
git.Clone(@gitUrl, @localPath, cloneOptions)
.catch (e) =>
console.log "Cannot clone #{@gitUrl} to #{@localPath}: #{e}"
console.log 'Trying to open existing git repository...'
git.Repository.open(@localPath)
.then (repo) =>
repository = repo
console.log 'Fetching from remotes...'
repository.fetchAll(@fetchOptions)
.then =>
@checkoutNewBranchFromRemote repo
.catch =>
console.log 'Local branch already existing'
@checkoutAndPullBranchFromRemote repo
.then ->
repository
.then (repo) =>
@logRepo(repo)
repo
getMvnProjectVersion: ->
pomPath = @localPath + '/pom.xml'
console.log "Path: #{pomPath}"
opts =
filePath: pomPath
new Promise (res, rej) ->
pomParser.parse opts, (err, pomResponse) ->
if err
return rej(err)
version = pomResponse.pomObject.project.version
res(version)
requestChange: (repo, changeBranch, message, prTitle, prBody, pushChange,
change) ->
index = oid = null
@createAndCheckoutNewBranch(repo, changeBranch)
.then ->
change()
.then ->
repo.refreshIndex()
.then (idx) ->
index = idx
index.addAll()
.then ->
index.write()
.then ->
index.writeTree()
.then (oidResult) ->
oid = oidResult
git.Reference.nameToId(repo, 'HEAD')
.then (head) ->
repo.getCommit(head)
.then (parent) =>
author = git.Signature.now(@authorName, @authorEmail)
commiter = git.Signature.now(@authorName, @authorEmail)
repo.createCommit('HEAD', author, commiter, message, oid, [parent])
.then =>
if (!pushChange)
return Promise.resolve(oid.tostrS())
repo.getRemote(@remote)
.then (remote) =>
refSpec = "refs/heads/#{changeBranch}:refs/heads/#{changeBranch}"
remote.push [refSpec], @pushOptions
.then =>
github = new githubApi {Promise: require('bluebird')}
github.authenticate(@githubAuth)
github.pullRequests.create {
owner: @githubRepoOwner,
repo: @githubRepo,
title: prTitle,
head: changeBranch,
base: @baseBranch,
body: prBody
}
checkoutNewBranchFromRemote: (repo) ->
console.log "Creating local branch from #{@remote}/#{@baseBranch}"
repo.getBranchCommit("refs/remotes/#{@remote}/#{@baseBranch}")
.then (commit) =>
repo.createBranch @baseBranch, commit, false
.then (ref) =>
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(ref)
checkoutAndPullBranchFromRemote: (repo) ->
console.log "Checkout branch #{@baseBranch}"
repo.checkoutBranch(@baseBranch)
.then =>
console.log "Pulling from #{@remote}/#{@baseBranch}"
repo.mergeBranches(@baseBranch, "#{@remote}/#{@baseBranch}")
logRepo: (repo) ->
headPromise = repo.head().then (ref) -> ref.name()
headCommitPromise = repo.getHeadCommit().then (commit) -> commit.sha()
Promise.all [headPromise, headCommitPromise]
.then (res) ->
[head, commit] = res
console.log "HEAD=#{head} - #{commit}"
createAndCheckoutNewBranch: (repo, newBranch) ->
repo.getBranchCommit(@baseBranch)
.then (commit) ->
repo.createBranch newBranch, commit, false
.then (ref) ->
console.log "Checkout branch #{newBranch}"
repo.checkoutBranch(ref)
.then ->
repo
module.exports =
GithubDev: GithubDev
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998874068260193,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Je... | node_modules/konsserto/lib/src/Konsserto/Component/DependencyInjection/ServiceArgument.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
CONFIG = use('/app/config/config')
# ServiceArgument
#
# @author Jessym Reziga <jessym@konsserto.com>
class ServiceArgument
# Class Constructor
# @param {String} value Value of the ServiceArgument
constructor: (@value) ->
return this
# @return {String} The value of the ServiceArgument
getValue: () ->
return @value
# @param {String} Value of the ServiceArgument
setValue: (@value) ->
return this
# Is the value a configuration ?
isConfiguration: () ->
if @getValue().charAt(0) == '%'
return true
return false
# @return {String} The configuration value
getConfigurationValue: () ->
value = @getValue()
value.substr(1, value.length - 2)
if CONFIG[value]?
return CONFIG[value].replace(/'/g, "\\'")
return 'null'
# Get the configuration value or throw an error
getConfiguration: () ->
if (@isConfiguration())
@getConfigurationValue()
else
throw new Error('You can\'t get the configuration dependency \'' + @getValue() + '\', because the argument is not wrapped by %')
# @param {String} parameterContainer The parameter container
# @return {String} The parameter container with the configuration value
getCompilerArgument: (parameterContainer) ->
if @isConfiguration()
return @getConfiguration()
else
return parameterContainer + ".get('" + @getValue().toLowerCase() + "')"
module.exports = ServiceArgument | 136871 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
CONFIG = use('/app/config/config')
# ServiceArgument
#
# @author <NAME> <<EMAIL>>
class ServiceArgument
# Class Constructor
# @param {String} value Value of the ServiceArgument
constructor: (@value) ->
return this
# @return {String} The value of the ServiceArgument
getValue: () ->
return @value
# @param {String} Value of the ServiceArgument
setValue: (@value) ->
return this
# Is the value a configuration ?
isConfiguration: () ->
if @getValue().charAt(0) == '%'
return true
return false
# @return {String} The configuration value
getConfigurationValue: () ->
value = @getValue()
value.substr(1, value.length - 2)
if CONFIG[value]?
return CONFIG[value].replace(/'/g, "\\'")
return 'null'
# Get the configuration value or throw an error
getConfiguration: () ->
if (@isConfiguration())
@getConfigurationValue()
else
throw new Error('You can\'t get the configuration dependency \'' + @getValue() + '\', because the argument is not wrapped by %')
# @param {String} parameterContainer The parameter container
# @return {String} The parameter container with the configuration value
getCompilerArgument: (parameterContainer) ->
if @isConfiguration()
return @getConfiguration()
else
return parameterContainer + ".get('" + @getValue().toLowerCase() + "')"
module.exports = ServiceArgument | true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
CONFIG = use('/app/config/config')
# ServiceArgument
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
class ServiceArgument
# Class Constructor
# @param {String} value Value of the ServiceArgument
constructor: (@value) ->
return this
# @return {String} The value of the ServiceArgument
getValue: () ->
return @value
# @param {String} Value of the ServiceArgument
setValue: (@value) ->
return this
# Is the value a configuration ?
isConfiguration: () ->
if @getValue().charAt(0) == '%'
return true
return false
# @return {String} The configuration value
getConfigurationValue: () ->
value = @getValue()
value.substr(1, value.length - 2)
if CONFIG[value]?
return CONFIG[value].replace(/'/g, "\\'")
return 'null'
# Get the configuration value or throw an error
getConfiguration: () ->
if (@isConfiguration())
@getConfigurationValue()
else
throw new Error('You can\'t get the configuration dependency \'' + @getValue() + '\', because the argument is not wrapped by %')
# @param {String} parameterContainer The parameter container
# @return {String} The parameter container with the configuration value
getCompilerArgument: (parameterContainer) ->
if @isConfiguration()
return @getConfiguration()
else
return parameterContainer + ".get('" + @getValue().toLowerCase() + "')"
module.exports = ServiceArgument |
[
{
"context": "nit\n principal: 'admin'\n password: 'adm1n_p4ssw0rd'\n realm: 'KRB.LOCAL'\n , (err, ccname)",
"end": 244,
"score": 0.9994093179702759,
"start": 230,
"tag": "PASSWORD",
"value": "adm1n_p4ssw0rd"
},
{
"context": "nit\n principal: 'admin'\... | test/kinit.coffee | pdkovacs/forked-node-krb5 | 28 | krb5 = require '../lib/'
describe 'kinit', ->
describe 'function with callback', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns default credential cache path (password provided using default realm)', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (password provided using realm in principal)', (done) ->
krb5.kinit
principal: 'admin@KRB.LOCAL'
password: 'adm1n_p4ssw0rd'
realm: 'to_override'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns given credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.be.eql('/tmp/customcc')
done()
describe 'function with promise', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: 'adm1n_p4ssw0rd'
realm: 'KRB.LOCAL'
.then (ccname) ->
ccname.should.startWith('/tmp')
done()
.catch done
return
| 119201 | krb5 = require '../lib/'
describe 'kinit', ->
describe 'function with callback', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns default credential cache path (password provided using default realm)', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (password provided using realm in principal)', (done) ->
krb5.kinit
principal: 'admin<EMAIL>'
password: '<PASSWORD>'
realm: 'to_override'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns given credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.be.eql('/tmp/customcc')
done()
describe 'function with promise', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: '<PASSWORD>'
realm: 'KRB.LOCAL'
.then (ccname) ->
ccname.should.startWith('/tmp')
done()
.catch done
return
| true | krb5 = require '../lib/'
describe 'kinit', ->
describe 'function with callback', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns default credential cache path (password provided using default realm)', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (password provided using realm in principal)', (done) ->
krb5.kinit
principal: 'adminPI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'to_override'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done err
it 'returns default credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.startWith('/tmp')
done()
it 'returns given credential cache path (keytab provided)', (done) ->
krb5.kinit
principal: 'rest/rest.krb.local'
keytab: '/tmp/krb5_test/rest.service.keytab'
realm: 'KRB.LOCAL'
ccname: '/tmp/customcc'
, (err, ccname) ->
(err is undefined).should.be.true()
ccname.should.be.eql('/tmp/customcc')
done()
describe 'function with promise', ->
it 'returns default credential cache path (password provided)', (done) ->
krb5.kinit
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
realm: 'KRB.LOCAL'
.then (ccname) ->
ccname.should.startWith('/tmp')
done()
.catch done
return
|
[
{
"context": "=================================\n# Copyright 2014 Hatio, Lab.\n# Licensed under The MIT License\n# http",
"end": 63,
"score": 0.5141833424568176,
"start": 62,
"tag": "NAME",
"value": "H"
}
] | src/dou.coffee | heartyoh/dou | 1 | # ==========================================
# Copyright 2014 Hatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
'./lifecycle'
'./property'
'./serialize'
'./event'
'./utils'
'./collection'
'./disposer'
], (
compose,
advice,
lifecycle,
property,
serialize,
event,
utils,
collection,
disposer
) ->
"use strict"
define = (options, constructor, prototype) ->
constructor || (constructor = ->)
if options.extends
class Component extends options.extends
constructor: constructor
else
class Component
constructor: constructor
# for class members ( same as prototype )
if options.members
(Component.prototype[name] = value) for own name, value of options.members
if prototype
(Component.prototype[name] = value) for own name, value of prototype
# for mixins
if options.mixins
compose.mixin Component.prototype, options.mixins
# I want to change class name (function name), but not possible
if options.name
Component.name = options.name
Component
mixin = (target, withs) ->
compose.mixin (if typeof target is 'function' then target.prototype else target), withs
target
{
define: define
mixin: mixin
with:
advice: advice.withAdvice
property: property
disposer: disposer
lifecycle: lifecycle
event: event.withEvent
serialize: serialize
collection: collection
util: utils
}
| 110050 | # ==========================================
# Copyright 2014 <NAME>atio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
'./lifecycle'
'./property'
'./serialize'
'./event'
'./utils'
'./collection'
'./disposer'
], (
compose,
advice,
lifecycle,
property,
serialize,
event,
utils,
collection,
disposer
) ->
"use strict"
define = (options, constructor, prototype) ->
constructor || (constructor = ->)
if options.extends
class Component extends options.extends
constructor: constructor
else
class Component
constructor: constructor
# for class members ( same as prototype )
if options.members
(Component.prototype[name] = value) for own name, value of options.members
if prototype
(Component.prototype[name] = value) for own name, value of prototype
# for mixins
if options.mixins
compose.mixin Component.prototype, options.mixins
# I want to change class name (function name), but not possible
if options.name
Component.name = options.name
Component
mixin = (target, withs) ->
compose.mixin (if typeof target is 'function' then target.prototype else target), withs
target
{
define: define
mixin: mixin
with:
advice: advice.withAdvice
property: property
disposer: disposer
lifecycle: lifecycle
event: event.withEvent
serialize: serialize
collection: collection
util: utils
}
| true | # ==========================================
# Copyright 2014 PI:NAME:<NAME>END_PIatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./compose'
'./advice'
'./lifecycle'
'./property'
'./serialize'
'./event'
'./utils'
'./collection'
'./disposer'
], (
compose,
advice,
lifecycle,
property,
serialize,
event,
utils,
collection,
disposer
) ->
"use strict"
define = (options, constructor, prototype) ->
constructor || (constructor = ->)
if options.extends
class Component extends options.extends
constructor: constructor
else
class Component
constructor: constructor
# for class members ( same as prototype )
if options.members
(Component.prototype[name] = value) for own name, value of options.members
if prototype
(Component.prototype[name] = value) for own name, value of prototype
# for mixins
if options.mixins
compose.mixin Component.prototype, options.mixins
# I want to change class name (function name), but not possible
if options.name
Component.name = options.name
Component
mixin = (target, withs) ->
compose.mixin (if typeof target is 'function' then target.prototype else target), withs
target
{
define: define
mixin: mixin
with:
advice: advice.withAdvice
property: property
disposer: disposer
lifecycle: lifecycle
event: event.withEvent
serialize: serialize
collection: collection
util: utils
}
|
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.9999202489852905,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/controllers/run_experiment_ctrl.js.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'RunExperimentCtrl', [
'$scope'
'$stateParams'
'$state'
'Experiment'
'ChoosenChartService'
'$rootScope'
($scope, $stateParams, $state, Experiment, ChoosenChartService, $rootScope) ->
$scope.chart = $stateParams.chart
changeChart = (chart) ->
$state.go 'run-experiment', {id: $stateParams.id, chart: chart}, notify: false
$scope.chart = chart
hasChart = (chart) ->
switch chart
when 'amplification'
return Experiment.hasAmplificationCurve($scope.experiment)
when 'standard-curve'
return Experiment.hasStandardCurve($scope.experiment)
when 'melt-curve'
return Experiment.hasMeltCurve($scope.experiment)
when 'temperature-logs'
return true;
else
return false;
ChoosenChartService.setCallback(changeChart)
Experiment.get(id: $stateParams.id).then (data) ->
$scope.experiment = data.experiment
$rootScope.pageTitle = data.experiment.name + " | Open qPCR"
if !hasChart($scope.chart)
chart = null
chart = 'amplification' if Experiment.hasAmplificationCurve($scope.experiment)
chart = 'standard-curve' if Experiment.hasStandardCurve($scope.experiment)
chart = 'melt-curve' if Experiment.hasMeltCurve($scope.experiment) and !chart
chart = 'temperature-logs' if !chart
changeChart(chart)
]
| 173072 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'RunExperimentCtrl', [
'$scope'
'$stateParams'
'$state'
'Experiment'
'ChoosenChartService'
'$rootScope'
($scope, $stateParams, $state, Experiment, ChoosenChartService, $rootScope) ->
$scope.chart = $stateParams.chart
changeChart = (chart) ->
$state.go 'run-experiment', {id: $stateParams.id, chart: chart}, notify: false
$scope.chart = chart
hasChart = (chart) ->
switch chart
when 'amplification'
return Experiment.hasAmplificationCurve($scope.experiment)
when 'standard-curve'
return Experiment.hasStandardCurve($scope.experiment)
when 'melt-curve'
return Experiment.hasMeltCurve($scope.experiment)
when 'temperature-logs'
return true;
else
return false;
ChoosenChartService.setCallback(changeChart)
Experiment.get(id: $stateParams.id).then (data) ->
$scope.experiment = data.experiment
$rootScope.pageTitle = data.experiment.name + " | Open qPCR"
if !hasChart($scope.chart)
chart = null
chart = 'amplification' if Experiment.hasAmplificationCurve($scope.experiment)
chart = 'standard-curve' if Experiment.hasStandardCurve($scope.experiment)
chart = 'melt-curve' if Experiment.hasMeltCurve($scope.experiment) and !chart
chart = 'temperature-logs' if !chart
changeChart(chart)
]
| true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'RunExperimentCtrl', [
'$scope'
'$stateParams'
'$state'
'Experiment'
'ChoosenChartService'
'$rootScope'
($scope, $stateParams, $state, Experiment, ChoosenChartService, $rootScope) ->
$scope.chart = $stateParams.chart
changeChart = (chart) ->
$state.go 'run-experiment', {id: $stateParams.id, chart: chart}, notify: false
$scope.chart = chart
hasChart = (chart) ->
switch chart
when 'amplification'
return Experiment.hasAmplificationCurve($scope.experiment)
when 'standard-curve'
return Experiment.hasStandardCurve($scope.experiment)
when 'melt-curve'
return Experiment.hasMeltCurve($scope.experiment)
when 'temperature-logs'
return true;
else
return false;
ChoosenChartService.setCallback(changeChart)
Experiment.get(id: $stateParams.id).then (data) ->
$scope.experiment = data.experiment
$rootScope.pageTitle = data.experiment.name + " | Open qPCR"
if !hasChart($scope.chart)
chart = null
chart = 'amplification' if Experiment.hasAmplificationCurve($scope.experiment)
chart = 'standard-curve' if Experiment.hasStandardCurve($scope.experiment)
chart = 'melt-curve' if Experiment.hasMeltCurve($scope.experiment) and !chart
chart = 'temperature-logs' if !chart
changeChart(chart)
]
|
[
{
"context": "t(2)\n\n config = getConfig()\n config.password = 'bad-password'\n\n connection = new Connection(config)\n\n connec",
"end": 1328,
"score": 0.9993850588798523,
"start": 1316,
"tag": "PASSWORD",
"value": "bad-password"
}
] | test/integration/connection-test.coffee | chrisinajar/tedious | 1 | Connection = require('../../lib/connection')
Request = require('../../lib/request')
fs = require('fs')
getConfig = ->
config = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).config
instanceName = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
config.options.debug =
packet: true
data: true
payload: true
token: false
log: true
config
getInstanceName = ->
JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
exports.badServer = (test) ->
config = getConfig()
config.server = 'bad-server'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badPort = (test) ->
config = getConfig()
config.options.port = -1
config.options.connectTimeout = 200
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badCredentials = (test) ->
test.expect(2)
config = getConfig()
config.password = 'bad-password'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(~error.message.indexOf('failed'))
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByPort = (test) ->
test.expect(2)
config = getConfig()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByInstanceName = (test) ->
if !getInstanceName()
# Config says don't do this test (probably because SQL Server Browser is not available).
console.log('Skipping connectByInstanceName test')
test.done()
return
test.expect(2)
config = getConfig()
delete config.options.port
config.options.instanceName = getInstanceName()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSql = (test) ->
test.expect(8)
config = getConfig()
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
test.strictEqual(columns[0].value, 8)
test.strictEqual(columns.C1.value, 8)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlWithOrder = (test) ->
test.expect(10)
config = getConfig()
sql = "select top 2 object_id, name, column_id, system_type_id from sys.columns order by name, system_type_id"
request = new Request(sql, (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 2)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 4)
)
request.on('order', (orderColumns) ->
test.strictEqual(orderColumns.length, 2)
test.strictEqual(orderColumns[0], 2)
test.strictEqual(orderColumns[1], 4)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 4)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlMultipleTimes = (test) ->
test.expect(20)
requestsToMake = 5;
config = getConfig()
makeRequest = ->
if requestsToMake == 0
connection.close()
return
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
requestsToMake--
makeRequest()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
#makeRequest()
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
)
connection.execSql(request)
connection = new Connection(config)
connection.on('connect', (err) ->
makeRequest()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execBadSql = (test) ->
test.expect(2)
config = getConfig()
request = new Request('bad syntax here', (err) ->
test.ok(err)
connection.close()
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(error)
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.sqlWithMultipleResultSets = (test) ->
test.expect(8)
config = getConfig()
row = 0
request = new Request('select 1; select 2;', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns[0].value, ++row)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execProcAsSql = (test) ->
test.expect(7)
config = getConfig()
request = new Request('exec sp_help int', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneProc', (rowCount, more, returnStatus) ->
test.ok(!more)
test.strictEqual(returnStatus, 0)
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
)
request.on('row', (columns) ->
test.ok(true)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
| 120765 | Connection = require('../../lib/connection')
Request = require('../../lib/request')
fs = require('fs')
getConfig = ->
config = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).config
instanceName = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
config.options.debug =
packet: true
data: true
payload: true
token: false
log: true
config
getInstanceName = ->
JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
exports.badServer = (test) ->
config = getConfig()
config.server = 'bad-server'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badPort = (test) ->
config = getConfig()
config.options.port = -1
config.options.connectTimeout = 200
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badCredentials = (test) ->
test.expect(2)
config = getConfig()
config.password = '<PASSWORD>'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(~error.message.indexOf('failed'))
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByPort = (test) ->
test.expect(2)
config = getConfig()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByInstanceName = (test) ->
if !getInstanceName()
# Config says don't do this test (probably because SQL Server Browser is not available).
console.log('Skipping connectByInstanceName test')
test.done()
return
test.expect(2)
config = getConfig()
delete config.options.port
config.options.instanceName = getInstanceName()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSql = (test) ->
test.expect(8)
config = getConfig()
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
test.strictEqual(columns[0].value, 8)
test.strictEqual(columns.C1.value, 8)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlWithOrder = (test) ->
test.expect(10)
config = getConfig()
sql = "select top 2 object_id, name, column_id, system_type_id from sys.columns order by name, system_type_id"
request = new Request(sql, (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 2)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 4)
)
request.on('order', (orderColumns) ->
test.strictEqual(orderColumns.length, 2)
test.strictEqual(orderColumns[0], 2)
test.strictEqual(orderColumns[1], 4)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 4)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlMultipleTimes = (test) ->
test.expect(20)
requestsToMake = 5;
config = getConfig()
makeRequest = ->
if requestsToMake == 0
connection.close()
return
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
requestsToMake--
makeRequest()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
#makeRequest()
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
)
connection.execSql(request)
connection = new Connection(config)
connection.on('connect', (err) ->
makeRequest()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execBadSql = (test) ->
test.expect(2)
config = getConfig()
request = new Request('bad syntax here', (err) ->
test.ok(err)
connection.close()
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(error)
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.sqlWithMultipleResultSets = (test) ->
test.expect(8)
config = getConfig()
row = 0
request = new Request('select 1; select 2;', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns[0].value, ++row)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execProcAsSql = (test) ->
test.expect(7)
config = getConfig()
request = new Request('exec sp_help int', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneProc', (rowCount, more, returnStatus) ->
test.ok(!more)
test.strictEqual(returnStatus, 0)
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
)
request.on('row', (columns) ->
test.ok(true)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
| true | Connection = require('../../lib/connection')
Request = require('../../lib/request')
fs = require('fs')
getConfig = ->
config = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).config
instanceName = JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
config.options.debug =
packet: true
data: true
payload: true
token: false
log: true
config
getInstanceName = ->
JSON.parse(fs.readFileSync(process.env.HOME + '/.tedious/test-connection.json', 'utf8')).instanceName
exports.badServer = (test) ->
config = getConfig()
config.server = 'bad-server'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badPort = (test) ->
config = getConfig()
config.options.port = -1
config.options.connectTimeout = 200
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
)
connection.on('end', (info) ->
test.done()
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.badCredentials = (test) ->
test.expect(2)
config = getConfig()
config.password = 'PI:PASSWORD:<PASSWORD>END_PI'
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(~error.message.indexOf('failed'))
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByPort = (test) ->
test.expect(2)
config = getConfig()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.connectByInstanceName = (test) ->
if !getInstanceName()
# Config says don't do this test (probably because SQL Server Browser is not available).
console.log('Skipping connectByInstanceName test')
test.done()
return
test.expect(2)
config = getConfig()
delete config.options.port
config.options.instanceName = getInstanceName()
connection = new Connection(config)
connection.on('connect', (err) ->
test.ok(!err)
connection.close()
)
connection.on('end', (info) ->
test.done()
)
connection.on('databaseChange', (database) ->
test.strictEqual(database, config.options.database)
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSql = (test) ->
test.expect(8)
config = getConfig()
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
test.strictEqual(columns[0].value, 8)
test.strictEqual(columns.C1.value, 8)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlWithOrder = (test) ->
test.expect(10)
config = getConfig()
sql = "select top 2 object_id, name, column_id, system_type_id from sys.columns order by name, system_type_id"
request = new Request(sql, (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
test.strictEqual(rowCount, 2)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 4)
)
request.on('order', (orderColumns) ->
test.strictEqual(orderColumns.length, 2)
test.strictEqual(orderColumns[0], 2)
test.strictEqual(orderColumns[1], 4)
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 4)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execSqlMultipleTimes = (test) ->
test.expect(20)
requestsToMake = 5;
config = getConfig()
makeRequest = ->
if requestsToMake == 0
connection.close()
return
request = new Request('select 8 as C1', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
requestsToMake--
makeRequest()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
#makeRequest()
)
request.on('row', (columns) ->
test.strictEqual(columns.length, 1)
)
connection.execSql(request)
connection = new Connection(config)
connection.on('connect', (err) ->
makeRequest()
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execBadSql = (test) ->
test.expect(2)
config = getConfig()
request = new Request('bad syntax here', (err) ->
test.ok(err)
connection.close()
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('errorMessage', (error) ->
#console.log("#{error.number} : #{error.message}")
test.ok(error)
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.sqlWithMultipleResultSets = (test) ->
test.expect(8)
config = getConfig()
row = 0
request = new Request('select 1; select 2;', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 2)
connection.close()
)
request.on('doneInProc', (rowCount, more) ->
test.strictEqual(rowCount, 1)
)
request.on('columnMetadata', (columnsMetadata) ->
test.strictEqual(columnsMetadata.length, 1)
)
request.on('row', (columns) ->
test.strictEqual(columns[0].value, ++row)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
exports.execProcAsSql = (test) ->
test.expect(7)
config = getConfig()
request = new Request('exec sp_help int', (err, rowCount) ->
test.ok(!err)
test.strictEqual(rowCount, 1)
connection.close()
)
request.on('doneProc', (rowCount, more, returnStatus) ->
test.ok(!more)
test.strictEqual(returnStatus, 0)
)
request.on('doneInProc', (rowCount, more) ->
test.ok(more)
)
request.on('row', (columns) ->
test.ok(true)
)
connection = new Connection(config)
connection.on('connect', (err) ->
connection.execSql(request)
)
connection.on('end', (info) ->
test.done()
)
connection.on('infoMessage', (info) ->
#console.log("#{info.number} : #{info.message}")
)
connection.on('debug', (text) ->
#console.log(text)
)
|
[
{
"context": " doc =\n email: user1\n password: '12345678'\n profile:\n first_name: faker.nam",
"end": 1923,
"score": 0.9993353486061096,
"start": 1915,
"tag": "PASSWORD",
"value": "12345678"
},
{
"context": "doc =\n email: user2\n p... | app_tests/client/providers.app-tests.coffee | Phaze1D/SA-Units | 0 | faker = require 'faker'
{ chai, assert, expect } = require 'meteor/practicalmeteor:chai'
{ Meteor } = require 'meteor/meteor'
{ Accounts } = require 'meteor/accounts-base'
{ resetDatabase } = require 'meteor/xolvio:cleaner'
{ _ } = require 'meteor/underscore'
ProviderModule = require '../../imports/api/collections/providers/providers.coffee'
OrganizationModule = require '../../imports/api/collections/organizations/organizations.coffee'
{ insert, update } = require '../../imports/api/collections/providers/methods.coffee'
{
inviteUser
} = require '../../imports/api/collections/users/methods.coffee'
OMethods = require '../../imports/api/collections/organizations/methods.coffee'
xdescribe "Provider Full App Tests Client", ->
before( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
after( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
organizationID = ''
providerID = ''
user1 = faker.internet.email()
user2 = faker.internet.email()
describe 'Provider Inserts Test', () ->
it 'Insert not valid', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','validation-error')
done()
it 'Insert not logged in', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notLoggedIn')
done()
it 'loggedIn', (done) ->
doc =
email: user1
password: '12345678'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (error) ->
expect(error).to.not.exist
done()
it 'Insert organ id not auth', (done) ->
expect(Meteor.user()).to.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notAuthorized')
done()
it 'Create organization', (done) ->
expect(Meteor.user()).to.exist
organ_doc =
name: faker.company.companyName()
email: faker.internet.email()
OMethods.insert.call organ_doc, (err, res) ->
expect(err).to.not.exist
organizationID = res
done()
it 'Subscribe to ', (done) ->
callbacks =
onStop: (err) ->
console.log err
onReady: () ->
done()
Meteor.subscribe("providers", organizationID, callbacks)
it 'Insert provider ', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(0)
organization_id = organizationID
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
providerID = res
expect(ProviderModule.Providers.find().count()).to.equal(1)
expect(err).to.not.exist
done()
describe 'Providers Update Tests', ->
it 'Log Out and create new user log out again', (done) ->
Meteor.logout((err) ->
expect(err).to.not.exist
doc =
email: user2
password: '12345678'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (err) ->
expect(err).to.not.exist
Meteor.logout( (err) ->
expect(err).to.not.exist
done()
)
)
it 'Login and invite user', (done) ->
Meteor.loginWithPassword user1, '12345678', (err) ->
expect(err).to.not.exist
invited_user_doc =
emails:
[
address: user2
]
profile:
first_name: faker.name.firstName()
organization_id = organizationID
permission =
owner: false
viewer: false
expenses_manager: false
sells_manager: false
units_manager: false
inventories_manager: true
users_manager: false
inviteUser.call {invited_user_doc, organization_id, permission}, (err, res) ->
expect(err).to.not.exist
done()
it 'Log out and login with non auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user2, '12345678', (err) ->
expect(err).to.not.exist
done()
)
it 'Is not owner or expenses_manager but belongs to organ', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'permissionDenied')
done()
it 'Log out and login with auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user1, '12345678', (err) ->
expect(err).to.not.exist
done()
)
it 'Not valid update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: () ->
console.log "hacking"
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'validation-error')
done()
it 'Not auth update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = "NONONOOONOO"
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Non existent update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Update with address success', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(ProviderModule.Providers.findOne().addresses.length).to.equal(1)
done()
| 179131 | faker = require 'faker'
{ chai, assert, expect } = require 'meteor/practicalmeteor:chai'
{ Meteor } = require 'meteor/meteor'
{ Accounts } = require 'meteor/accounts-base'
{ resetDatabase } = require 'meteor/xolvio:cleaner'
{ _ } = require 'meteor/underscore'
ProviderModule = require '../../imports/api/collections/providers/providers.coffee'
OrganizationModule = require '../../imports/api/collections/organizations/organizations.coffee'
{ insert, update } = require '../../imports/api/collections/providers/methods.coffee'
{
inviteUser
} = require '../../imports/api/collections/users/methods.coffee'
OMethods = require '../../imports/api/collections/organizations/methods.coffee'
xdescribe "Provider Full App Tests Client", ->
before( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
after( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
organizationID = ''
providerID = ''
user1 = faker.internet.email()
user2 = faker.internet.email()
describe 'Provider Inserts Test', () ->
it 'Insert not valid', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','validation-error')
done()
it 'Insert not logged in', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notLoggedIn')
done()
it 'loggedIn', (done) ->
doc =
email: user1
password: '<PASSWORD>'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (error) ->
expect(error).to.not.exist
done()
it 'Insert organ id not auth', (done) ->
expect(Meteor.user()).to.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notAuthorized')
done()
it 'Create organization', (done) ->
expect(Meteor.user()).to.exist
organ_doc =
name: faker.company.companyName()
email: faker.internet.email()
OMethods.insert.call organ_doc, (err, res) ->
expect(err).to.not.exist
organizationID = res
done()
it 'Subscribe to ', (done) ->
callbacks =
onStop: (err) ->
console.log err
onReady: () ->
done()
Meteor.subscribe("providers", organizationID, callbacks)
it 'Insert provider ', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(0)
organization_id = organizationID
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
providerID = res
expect(ProviderModule.Providers.find().count()).to.equal(1)
expect(err).to.not.exist
done()
describe 'Providers Update Tests', ->
it 'Log Out and create new user log out again', (done) ->
Meteor.logout((err) ->
expect(err).to.not.exist
doc =
email: user2
password: '<PASSWORD>'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (err) ->
expect(err).to.not.exist
Meteor.logout( (err) ->
expect(err).to.not.exist
done()
)
)
it 'Login and invite user', (done) ->
Meteor.loginWithPassword user1, '<PASSWORD>', (err) ->
expect(err).to.not.exist
invited_user_doc =
emails:
[
address: user2
]
profile:
first_name: faker.name.firstName()
organization_id = organizationID
permission =
owner: false
viewer: false
expenses_manager: false
sells_manager: false
units_manager: false
inventories_manager: true
users_manager: false
inviteUser.call {invited_user_doc, organization_id, permission}, (err, res) ->
expect(err).to.not.exist
done()
it 'Log out and login with non auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user2, '<PASSWORD>', (err) ->
expect(err).to.not.exist
done()
)
it 'Is not owner or expenses_manager but belongs to organ', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'permissionDenied')
done()
it 'Log out and login with auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user1, '<PASSWORD>', (err) ->
expect(err).to.not.exist
done()
)
it 'Not valid update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: () ->
console.log "hack<PASSWORD>"
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'validation-error')
done()
it 'Not auth update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = "NONONOOONOO"
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Non existent update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Update with address success', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(ProviderModule.Providers.findOne().addresses.length).to.equal(1)
done()
| true | faker = require 'faker'
{ chai, assert, expect } = require 'meteor/practicalmeteor:chai'
{ Meteor } = require 'meteor/meteor'
{ Accounts } = require 'meteor/accounts-base'
{ resetDatabase } = require 'meteor/xolvio:cleaner'
{ _ } = require 'meteor/underscore'
ProviderModule = require '../../imports/api/collections/providers/providers.coffee'
OrganizationModule = require '../../imports/api/collections/organizations/organizations.coffee'
{ insert, update } = require '../../imports/api/collections/providers/methods.coffee'
{
inviteUser
} = require '../../imports/api/collections/users/methods.coffee'
OMethods = require '../../imports/api/collections/organizations/methods.coffee'
xdescribe "Provider Full App Tests Client", ->
before( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
after( (done) ->
Meteor.logout( (err) ->
done()
)
return
)
organizationID = ''
providerID = ''
user1 = faker.internet.email()
user2 = faker.internet.email()
describe 'Provider Inserts Test', () ->
it 'Insert not valid', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','validation-error')
done()
it 'Insert not logged in', (done) ->
expect(Meteor.user()).to.not.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notLoggedIn')
done()
it 'loggedIn', (done) ->
doc =
email: user1
password: 'PI:PASSWORD:<PASSWORD>END_PI'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (error) ->
expect(error).to.not.exist
done()
it 'Insert organ id not auth', (done) ->
expect(Meteor.user()).to.exist
organization_id = "NONONOOONOO"
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error','notAuthorized')
done()
it 'Create organization', (done) ->
expect(Meteor.user()).to.exist
organ_doc =
name: faker.company.companyName()
email: faker.internet.email()
OMethods.insert.call organ_doc, (err, res) ->
expect(err).to.not.exist
organizationID = res
done()
it 'Subscribe to ', (done) ->
callbacks =
onStop: (err) ->
console.log err
onReady: () ->
done()
Meteor.subscribe("providers", organizationID, callbacks)
it 'Insert provider ', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(0)
organization_id = organizationID
provider_doc =
first_name: faker.name.firstName()
last_name: faker.name.lastName()
organization_id: organization_id
insert.call {organization_id, provider_doc}, (err, res) ->
providerID = res
expect(ProviderModule.Providers.find().count()).to.equal(1)
expect(err).to.not.exist
done()
describe 'Providers Update Tests', ->
it 'Log Out and create new user log out again', (done) ->
Meteor.logout((err) ->
expect(err).to.not.exist
doc =
email: user2
password: 'PI:PASSWORD:<PASSWORD>END_PI'
profile:
first_name: faker.name.firstName()
last_name: faker.name.lastName()
Accounts.createUser doc, (err) ->
expect(err).to.not.exist
Meteor.logout( (err) ->
expect(err).to.not.exist
done()
)
)
it 'Login and invite user', (done) ->
Meteor.loginWithPassword user1, 'PI:PASSWORD:<PASSWORD>END_PI', (err) ->
expect(err).to.not.exist
invited_user_doc =
emails:
[
address: user2
]
profile:
first_name: faker.name.firstName()
organization_id = organizationID
permission =
owner: false
viewer: false
expenses_manager: false
sells_manager: false
units_manager: false
inventories_manager: true
users_manager: false
inviteUser.call {invited_user_doc, organization_id, permission}, (err, res) ->
expect(err).to.not.exist
done()
it 'Log out and login with non auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user2, 'PI:PASSWORD:<PASSWORD>END_PI', (err) ->
expect(err).to.not.exist
done()
)
it 'Is not owner or expenses_manager but belongs to organ', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'permissionDenied')
done()
it 'Log out and login with auth user', (done) ->
Meteor.logout( (err) ->
expect(err).to.not.exist
Meteor.loginWithPassword user1, 'PI:PASSWORD:<PASSWORD>END_PI', (err) ->
expect(err).to.not.exist
done()
)
it 'Not valid update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: () ->
console.log "hackPI:PASSWORD:<PASSWORD>END_PI"
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'validation-error')
done()
it 'Not auth update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = "NONONOOONOO"
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Non existent update', (done) ->
expect(Meteor.user()).to.exist
expect(ProviderModule.Providers.find().count()).to.equal(1)
organization_id = organizationID
provider_id = "NONONO"
provider_doc =
last_name: faker.name.lastName()
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(err).to.have.property('error', 'notAuthorized')
done()
it 'Update with address success', (done) ->
organization_id = organizationID
provider_id = providerID
cus = ProviderModule.Providers.findOne()
expect(cus.addresses.length).to.equal(0)
provider_doc =
addresses: [
street: faker.address.streetName()
city: faker.address.city()
state: faker.address.state()
country: faker.address.country()
zip_code: faker.address.zipCode()
]
update.call {organization_id, provider_id, provider_doc}, (err, res) ->
expect(ProviderModule.Providers.findOne().addresses.length).to.equal(1)
done()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.